VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 74251

Last change on this file since 74251 was 72515, checked in by vboxsync, 7 years ago

IEM: Extended testcase to make sure the IEM_MC_XXX macros are only used within IEM_MC_BEGIN.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 392.5 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 72515 2018-06-11 14:45:09Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2017 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_sizes
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: annotate the mnemonic, then defer to the common ModR/M
       r/m8,r8 binary-operator decoder with the ADD workers (g_iemAImpl_add). */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: common ModR/M r/m,reg (operand-size sensitive) binary-op
       decode with the ADD workers. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register-destination byte form; common r8,r/m8 decode with
       the ADD workers. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register-destination word/dword/qword form; common reg,r/m
       decode with the ADD workers. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: fixed-register immediate byte form; common AL,Ib decode with
       the ADD workers. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: fixed-register immediate form (operand-size sensitive);
       common rAX,Iz decode with the ADD workers. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode; deferred to the C implementation with
       the target segment and the current effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the OR workers. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR (see @opflundef above). */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
189/**
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv: common ModR/M r/m,reg binary-op decode with the OR workers. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb: register-destination byte form with the OR workers. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev: register-destination word/dword/qword form with the OR workers. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib: fixed-register immediate byte form with the OR workers. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz: fixed-register immediate form (operand-size sensitive) with
       the OR workers. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after OR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the ADC workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv: common ModR/M r/m,reg binary-op decode with the ADC workers. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb: register-destination byte form with the ADC workers. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev: register-destination word/dword/qword form with the ADC workers. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib: fixed-register immediate byte form with the ADC workers. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz: fixed-register immediate form (operand-size sensitive). */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
426/**
427 * @opcode 0x16
428 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
437/**
438 * @opcode 0x17
439 * @opgroup og_stack_sreg
442 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: invalid in 64-bit mode; marked IRQ-inhibiting so the following
       instruction executes before interrupts are delivered. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    /* NOTE(review): decode-done comes before the 64-bit check here, the reverse
       of iemOp_pop_ES — confirm the ordering difference is intentional. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the SBB workers. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv: common ModR/M r/m,reg binary-op decode with the SBB workers. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb: register-destination byte form with the SBB workers. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev: register-destination word/dword/qword form with the SBB workers. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib: fixed-register immediate byte form with the SBB workers. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz: fixed-register immediate form (operand-size sensitive). */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the AND workers. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv: common ModR/M r/m,reg binary-op decode with the AND workers. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb: register-destination byte form with the AND workers. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev: register-destination word/dword/qword form with the AND workers. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: fixed-register immediate form (operand-size sensitive). */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after AND. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix bit and make ES the
       effective segment, then fetch and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal adjust AL after addition; invalid in 64-bit mode and
       deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF undefined after DAA (see @opflundef above). */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the SUB workers. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv: common ModR/M r/m,reg binary-op decode with the SUB workers. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb: register-destination byte form with the SUB workers. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev: register-destination word/dword/qword form with the SUB workers. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib: fixed-register immediate byte form with the SUB workers. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz: fixed-register immediate form (operand-size sensitive). */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix bit and make CS the
       effective segment, then fetch and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal adjust AL after subtraction; invalid in 64-bit mode and
       deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF undefined after DAS (see @opflundef above). */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: common ModR/M r/m8,r8 binary-op decode with the XOR workers. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv: common ModR/M r/m,reg binary-op decode with the XOR workers. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb: register-destination byte form with the XOR workers. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev: register-destination word/dword/qword form with the XOR workers. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib: fixed-register immediate byte form with the XOR workers. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF undefined after XOR. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix bit and make SS the
       effective segment, then fetch and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
910 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
912 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
913 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
914 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
915 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
916 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
917 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
918 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
919 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
920 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
921 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
922 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
925 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
926 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
927 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
928 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
929 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
930 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
931 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
933 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
936 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII adjust AL after addition; invalid in 64-bit mode and
       deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* NOTE(review): the doxygen above lists pf,zf,sf,of as undefined, but only
       OF is declared to the verifier here — confirm whether that is intended. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
946
947
948/**
949 * @opcode 0x38
950 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: common r/m8,r8 binary-op decode with the CMP workers.
       NOTE(review): still uses the old IEMOP_MNEMONIC macro, unlike the
       IEMOP_MNEMONIC2 style of the other binary ops above. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
956
957
958/**
959 * @opcode 0x39
960 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv: common r/m,reg binary-op decode with the CMP workers. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
966
967
968/**
969 * @opcode 0x3a
970 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb: register-first byte form with the CMP workers. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
976
977
978/**
979 * @opcode 0x3b
980 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev: register-first word/dword/qword form with the CMP workers. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
986
987
988/**
989 * @opcode 0x3c
990 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib: fixed-register immediate byte form with the CMP workers. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
996
997
998/**
999 * @opcode 0x3d
1000 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz: fixed-register immediate form (operand-size sensitive). */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1006
1007
1008/**
1009 * @opcode 0x3e
1010 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix bit and make DS the
       effective segment, then fetch and dispatch the following opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1020
1021
1022/**
1023 * @opcode 0x3f
1024 * @opfltest af,cf
1025 * @opflmodify cf,pf,af,zf,sf,of
1026 * @opflundef pf,zf,sf,of
1027 * @opgroup og_gen_arith_dec
1028 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1029 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1030 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1031 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1032 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1033 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1034 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1035 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1036 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1037 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1038 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1039 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1040 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1041 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1042 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1043 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1045 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1046 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1047 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1048 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1049 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1050 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1051 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1052 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1053 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1058 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1059 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1060 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1061 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1062 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1063 */
1064FNIEMOP_DEF(iemOp_aas)
1065{
1066 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1067 IEMOP_HLP_NO_64BIT();
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1070
1071 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1072}
1073
1074
1075/**
1076 * Common 'inc/dec/not/neg register' helper.
1077 */
/* Emits the IEM microcode for a register-only unary op (inc/dec/not/neg):
   take a reference to the register and EFLAGS, call the size-specific
   assembly worker from pImpl, then advance RIP.  The 32-bit case clears
   the high half of the 64-bit register as per x86-64 rules (visible in
   the IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF call). */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst, 0);
            IEM_MC_ARG(uint32_t *,      pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reachable with a valid enmEffOpSize; kept for defensive completeness. */
    return VINF_SUCCESS;
}
1119
1120
1121/**
1122 * @opcode 0x40
1123 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (0x40): no extension bits set, just record the prefix
           and decode the following opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B (0x41): uRexB is kept pre-shifted (bit 3) so it can be
           OR'ed straight into a 3-bit register index later. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X (0x42): index-register extension, stored pre-shifted. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX (0x43): both base and index extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R (0x44): ModR/M reg-field extension, stored pre-shifted. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB (0x45): reg and base extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX (0x46): reg and index extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX (0x47): reg, base and index extension bits set. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.W (0x48): 64-bit operand size; effective op-size must be recalculated. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BW (0x49): base extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.XW (0x4a): index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BXW (0x4b): base + index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RW (0x4c): reg extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBW (0x4d): reg + base extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RXW (0x4e): reg + index extension + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBXW (0x4f): all extension bits + 64-bit operand size. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1504
1505
1506/**
1507 * Common 'push register' helper.
1508 */
/* Emits the IEM microcode for 'push reg' (0x50..0x57): fetch the register
   at the effective operand size and push it.  In 64-bit mode the register
   index is extended with REX.B and the default operand size is forced to
   64-bit (only 0x66 can select 16-bit; there is no 32-bit push in long mode). */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* 8086 quirk: it pushes the value of SP *after* the decrement, i.e.
       SP-2, unlike later CPUs which push the pre-push value.  The
       IEM_MC_END of this path contains the return, so the fall-through
       below is only taken for non-8086 targets. */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Size/REX handling is shared with the other 0x50..0x57 pushes. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1641
1642
1643/**
1644 * Common 'pop register' helper.
1645 */
/* Emits the IEM microcode for 'pop reg' (0x58..0x5f): pop into a direct
   register reference at the effective operand size.  In 64-bit mode the
   register index is extended with REX.B and the default operand size is
   forced to 64-bit (0x66 selects 16-bit; no 32-bit pop in long mode).
   Note: the 32-bit case still clears the high dword explicitly. */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    /* 'pop rSP' needs special care: the common helper takes a reference to
       the destination register, but here the destination *is* the stack
       pointer being popped, so we pop into a local first and store it
       afterwards.  With REX.B the destination is r8, not rSP, and the
       common helper can be used as-is. */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1780
1781
1782/**
1783 * @opcode 0x5d
1784 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1790
1791
1792/**
1793 * @opcode 0x5e
1794 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1800
1801
1802/**
1803 * @opcode 0x5f
1804 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Size/REX handling is shared with the other 0x58..0x5f pops. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1810
1811
1812/**
1813 * @opcode 0x60
1814 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    /* PUSHA/PUSHAD: deferred to a C implementation selected by operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1825
1826
1827/**
1828 * @opcode 0x61
1829 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* 0x61 is POPA/POPAD outside 64-bit mode; in 64-bit mode it would be
       the (unsupported) MVEX prefix and raises #UD here. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1846
1847
1848/**
1849 * @opcode 0x62
1850 * @opmnemonic bound
1851 * @op1 Gv_RO
1852 * @op2 Ma
1853 * @opmincpu 80186
1854 * @ophints harmless invalid_64
1855 * @optest op1=0 op2=0 ->
1856 * @optest op1=1 op2=0 -> value.xcpt=5
1857 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1858 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1859 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1860 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1861 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1862 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1863 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1864 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1865 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1866 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1867 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1868 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1869 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1870 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1871 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1872 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1873 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1874 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1875 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1876 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1877 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1878 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1879 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1880 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1881 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1882 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1883 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1884 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1885 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1886 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1887 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1888 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1889 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1890 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1891 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1892 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1893 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1894 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1895 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1896 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1897 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1898 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1899 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        /* MOD != 3: this really is BOUND with a memory operand holding the bounds pair. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t,    u16Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t,    u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t,    u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+2]. */
                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t,    u32Index,       0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t,    u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t,    u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                /* Lower bound at [mem], upper bound at [mem+4]. */
                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        /* MOD == 3 in 32-bit mode: would-be EVEX; #UD when the guest CPU lacks AVX-512. */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX path: consume the remaining two payload bytes, then bail out
       since the prefix is not implemented yet. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
/* ARPL Ew,Gw (0x63, non-64-bit modes only): adjust RPL of the destination
   selector; requires protected mode (min 286, no real/V86). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write via a mapped destination; EFLAGS are
           fetched/committed around the worker call. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    /* MOVSXD (0x63 in 64-bit mode): sign-extend a 32-bit source into a
       64-bit register.  The caller only dispatches here with a 64-bit
       effective operand size (asserted below). */
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    /* FS segment-override prefix (386+): record it and continue decoding. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2102
2103
2104/**
2105 * @opcode 0x65
2106 * @opmnemonic seggs
2107 * @opmincpu 80386
2108 * @opgroup og_prefixes
2109 */
2110FNIEMOP_DEF(iemOp_seg_GS)
2111{
2112 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2113 IEMOP_HLP_MIN_386();
2114
2115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2116 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2117
2118 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2119 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2120}
2121
2122
2123/**
2124 * @opcode 0x66
2125 * @opmnemonic opsize
2126 * @openc prefix
2127 * @opmincpu 80386
2128 * @ophints harmless
2129 * @opgroup og_prefixes
2130 */
2131FNIEMOP_DEF(iemOp_op_size)
2132{
2133 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2134 IEMOP_HLP_MIN_386();
2135
2136 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2137 iemRecalEffOpSize(pVCpu);
2138
2139 /* For the 4 entry opcode tables, the operand prefix doesn't not count
2140 when REPZ or REPNZ are present. */
2141 if (pVCpu->iem.s.idxPrefix == 0)
2142 pVCpu->iem.s.idxPrefix = 1;
2143
2144 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2145 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2146}
2147
2148
2149/**
2150 * @opcode 0x67
2151 * @opmnemonic addrsize
2152 * @openc prefix
2153 * @opmincpu 80386
2154 * @ophints harmless
2155 * @opgroup og_prefixes
2156 */
2157FNIEMOP_DEF(iemOp_addr_size)
2158{
2159 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2160 IEMOP_HLP_MIN_386();
2161
2162 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2163 switch (pVCpu->iem.s.enmDefAddrMode)
2164 {
2165 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2166 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2167 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2168 default: AssertFailed();
2169 }
2170
2171 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2172 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2173}
2174
2175
2176/**
2177 * @opcode 0x68
2178 */
2179FNIEMOP_DEF(iemOp_push_Iz)
2180{
2181 IEMOP_MNEMONIC(push_Iz, "push Iz");
2182 IEMOP_HLP_MIN_186();
2183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2184 switch (pVCpu->iem.s.enmEffOpSize)
2185 {
2186 case IEMMODE_16BIT:
2187 {
2188 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2190 IEM_MC_BEGIN(0,0);
2191 IEM_MC_PUSH_U16(u16Imm);
2192 IEM_MC_ADVANCE_RIP();
2193 IEM_MC_END();
2194 return VINF_SUCCESS;
2195 }
2196
2197 case IEMMODE_32BIT:
2198 {
2199 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2201 IEM_MC_BEGIN(0,0);
2202 IEM_MC_PUSH_U32(u32Imm);
2203 IEM_MC_ADVANCE_RIP();
2204 IEM_MC_END();
2205 return VINF_SUCCESS;
2206 }
2207
2208 case IEMMODE_64BIT:
2209 {
2210 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2212 IEM_MC_BEGIN(0,0);
2213 IEM_MC_PUSH_U64(u64Imm);
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 return VINF_SUCCESS;
2217 }
2218
2219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2220 }
2221}
2222
2223
2224/**
2225 * @opcode 0x69
2226 */
2227FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2228{
2229 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2230 IEMOP_HLP_MIN_186();
2231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2232 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2233
2234 switch (pVCpu->iem.s.enmEffOpSize)
2235 {
2236 case IEMMODE_16BIT:
2237 {
2238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2239 {
2240 /* register operand */
2241 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2243
2244 IEM_MC_BEGIN(3, 1);
2245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2246 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2248 IEM_MC_LOCAL(uint16_t, u16Tmp);
2249
2250 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2251 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2252 IEM_MC_REF_EFLAGS(pEFlags);
2253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2254 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2255
2256 IEM_MC_ADVANCE_RIP();
2257 IEM_MC_END();
2258 }
2259 else
2260 {
2261 /* memory operand */
2262 IEM_MC_BEGIN(3, 2);
2263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2264 IEM_MC_ARG(uint16_t, u16Src, 1);
2265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2266 IEM_MC_LOCAL(uint16_t, u16Tmp);
2267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2270 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2271 IEM_MC_ASSIGN(u16Src, u16Imm);
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2273 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2274 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2275 IEM_MC_REF_EFLAGS(pEFlags);
2276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2277 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2278
2279 IEM_MC_ADVANCE_RIP();
2280 IEM_MC_END();
2281 }
2282 return VINF_SUCCESS;
2283 }
2284
2285 case IEMMODE_32BIT:
2286 {
2287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2288 {
2289 /* register operand */
2290 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2292
2293 IEM_MC_BEGIN(3, 1);
2294 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2295 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2296 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2297 IEM_MC_LOCAL(uint32_t, u32Tmp);
2298
2299 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2300 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2301 IEM_MC_REF_EFLAGS(pEFlags);
2302 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2303 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2304
2305 IEM_MC_ADVANCE_RIP();
2306 IEM_MC_END();
2307 }
2308 else
2309 {
2310 /* memory operand */
2311 IEM_MC_BEGIN(3, 2);
2312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2313 IEM_MC_ARG(uint32_t, u32Src, 1);
2314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2315 IEM_MC_LOCAL(uint32_t, u32Tmp);
2316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2317
2318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2319 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2320 IEM_MC_ASSIGN(u32Src, u32Imm);
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2322 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2323 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2324 IEM_MC_REF_EFLAGS(pEFlags);
2325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2326 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2327
2328 IEM_MC_ADVANCE_RIP();
2329 IEM_MC_END();
2330 }
2331 return VINF_SUCCESS;
2332 }
2333
2334 case IEMMODE_64BIT:
2335 {
2336 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2337 {
2338 /* register operand */
2339 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341
2342 IEM_MC_BEGIN(3, 1);
2343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2344 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2345 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2346 IEM_MC_LOCAL(uint64_t, u64Tmp);
2347
2348 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2349 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2350 IEM_MC_REF_EFLAGS(pEFlags);
2351 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2352 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2353
2354 IEM_MC_ADVANCE_RIP();
2355 IEM_MC_END();
2356 }
2357 else
2358 {
2359 /* memory operand */
2360 IEM_MC_BEGIN(3, 2);
2361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2362 IEM_MC_ARG(uint64_t, u64Src, 1);
2363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2364 IEM_MC_LOCAL(uint64_t, u64Tmp);
2365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2366
2367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2368 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2369 IEM_MC_ASSIGN(u64Src, u64Imm);
2370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2371 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2372 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2373 IEM_MC_REF_EFLAGS(pEFlags);
2374 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2375 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2376
2377 IEM_MC_ADVANCE_RIP();
2378 IEM_MC_END();
2379 }
2380 return VINF_SUCCESS;
2381 }
2382 }
2383 AssertFailedReturn(VERR_IEM_IPE_9);
2384}
2385
2386
2387/**
2388 * @opcode 0x6a
2389 */
2390FNIEMOP_DEF(iemOp_push_Ib)
2391{
2392 IEMOP_MNEMONIC(push_Ib, "push Ib");
2393 IEMOP_HLP_MIN_186();
2394 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2397
2398 IEM_MC_BEGIN(0,0);
2399 switch (pVCpu->iem.s.enmEffOpSize)
2400 {
2401 case IEMMODE_16BIT:
2402 IEM_MC_PUSH_U16(i8Imm);
2403 break;
2404 case IEMMODE_32BIT:
2405 IEM_MC_PUSH_U32(i8Imm);
2406 break;
2407 case IEMMODE_64BIT:
2408 IEM_MC_PUSH_U64(i8Imm);
2409 break;
2410 }
2411 IEM_MC_ADVANCE_RIP();
2412 IEM_MC_END();
2413 return VINF_SUCCESS;
2414}
2415
2416
2417/**
2418 * @opcode 0x6b
2419 */
2420FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2421{
2422 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2423 IEMOP_HLP_MIN_186();
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2426
2427 switch (pVCpu->iem.s.enmEffOpSize)
2428 {
2429 case IEMMODE_16BIT:
2430 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2431 {
2432 /* register operand */
2433 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2435
2436 IEM_MC_BEGIN(3, 1);
2437 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2438 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2439 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2440 IEM_MC_LOCAL(uint16_t, u16Tmp);
2441
2442 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2443 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2444 IEM_MC_REF_EFLAGS(pEFlags);
2445 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2446 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2447
2448 IEM_MC_ADVANCE_RIP();
2449 IEM_MC_END();
2450 }
2451 else
2452 {
2453 /* memory operand */
2454 IEM_MC_BEGIN(3, 2);
2455 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2456 IEM_MC_ARG(uint16_t, u16Src, 1);
2457 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2458 IEM_MC_LOCAL(uint16_t, u16Tmp);
2459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2460
2461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2462 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2463 IEM_MC_ASSIGN(u16Src, u16Imm);
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2465 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2466 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2467 IEM_MC_REF_EFLAGS(pEFlags);
2468 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2469 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2470
2471 IEM_MC_ADVANCE_RIP();
2472 IEM_MC_END();
2473 }
2474 return VINF_SUCCESS;
2475
2476 case IEMMODE_32BIT:
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* register operand */
2480 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2482
2483 IEM_MC_BEGIN(3, 1);
2484 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2485 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2486 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2487 IEM_MC_LOCAL(uint32_t, u32Tmp);
2488
2489 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2490 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2491 IEM_MC_REF_EFLAGS(pEFlags);
2492 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2493 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2494
2495 IEM_MC_ADVANCE_RIP();
2496 IEM_MC_END();
2497 }
2498 else
2499 {
2500 /* memory operand */
2501 IEM_MC_BEGIN(3, 2);
2502 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2503 IEM_MC_ARG(uint32_t, u32Src, 1);
2504 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2505 IEM_MC_LOCAL(uint32_t, u32Tmp);
2506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2507
2508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2509 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2510 IEM_MC_ASSIGN(u32Src, u32Imm);
2511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2512 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2513 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2514 IEM_MC_REF_EFLAGS(pEFlags);
2515 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2516 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2517
2518 IEM_MC_ADVANCE_RIP();
2519 IEM_MC_END();
2520 }
2521 return VINF_SUCCESS;
2522
2523 case IEMMODE_64BIT:
2524 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2525 {
2526 /* register operand */
2527 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2529
2530 IEM_MC_BEGIN(3, 1);
2531 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2532 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2533 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2534 IEM_MC_LOCAL(uint64_t, u64Tmp);
2535
2536 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2537 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2538 IEM_MC_REF_EFLAGS(pEFlags);
2539 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2540 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 else
2546 {
2547 /* memory operand */
2548 IEM_MC_BEGIN(3, 2);
2549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2550 IEM_MC_ARG(uint64_t, u64Src, 1);
2551 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2552 IEM_MC_LOCAL(uint64_t, u64Tmp);
2553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2554
2555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2556 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2557 IEM_MC_ASSIGN(u64Src, u64Imm);
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2560 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2561 IEM_MC_REF_EFLAGS(pEFlags);
2562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2563 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564
2565 IEM_MC_ADVANCE_RIP();
2566 IEM_MC_END();
2567 }
2568 return VINF_SUCCESS;
2569 }
2570 AssertFailedReturn(VERR_IEM_IPE_8);
2571}
2572
2573
2574/**
2575 * @opcode 0x6c
2576 */
2577FNIEMOP_DEF(iemOp_insb_Yb_DX)
2578{
2579 IEMOP_HLP_MIN_186();
2580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2581 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2582 {
2583 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2584 switch (pVCpu->iem.s.enmEffAddrMode)
2585 {
2586 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2587 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2588 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2590 }
2591 }
2592 else
2593 {
2594 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2595 switch (pVCpu->iem.s.enmEffAddrMode)
2596 {
2597 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2601 }
2602 }
2603}
2604
2605
2606/**
2607 * @opcode 0x6d
2608 */
2609FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2610{
2611 IEMOP_HLP_MIN_186();
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2614 {
2615 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2616 switch (pVCpu->iem.s.enmEffOpSize)
2617 {
2618 case IEMMODE_16BIT:
2619 switch (pVCpu->iem.s.enmEffAddrMode)
2620 {
2621 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2622 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2623 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2625 }
2626 break;
2627 case IEMMODE_64BIT:
2628 case IEMMODE_32BIT:
2629 switch (pVCpu->iem.s.enmEffAddrMode)
2630 {
2631 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2632 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2633 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2635 }
2636 break;
2637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2638 }
2639 }
2640 else
2641 {
2642 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2643 switch (pVCpu->iem.s.enmEffOpSize)
2644 {
2645 case IEMMODE_16BIT:
2646 switch (pVCpu->iem.s.enmEffAddrMode)
2647 {
2648 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2649 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2650 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2652 }
2653 break;
2654 case IEMMODE_64BIT:
2655 case IEMMODE_32BIT:
2656 switch (pVCpu->iem.s.enmEffAddrMode)
2657 {
2658 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2659 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2660 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2662 }
2663 break;
2664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2665 }
2666 }
2667}
2668
2669
2670/**
2671 * @opcode 0x6e
2672 */
2673FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2674{
2675 IEMOP_HLP_MIN_186();
2676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2677 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2678 {
2679 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2680 switch (pVCpu->iem.s.enmEffAddrMode)
2681 {
2682 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2683 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2684 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2686 }
2687 }
2688 else
2689 {
2690 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2691 switch (pVCpu->iem.s.enmEffAddrMode)
2692 {
2693 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2697 }
2698 }
2699}
2700
2701
2702/**
2703 * @opcode 0x6f
2704 */
2705FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2706{
2707 IEMOP_HLP_MIN_186();
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2710 {
2711 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2712 switch (pVCpu->iem.s.enmEffOpSize)
2713 {
2714 case IEMMODE_16BIT:
2715 switch (pVCpu->iem.s.enmEffAddrMode)
2716 {
2717 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2718 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2719 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2721 }
2722 break;
2723 case IEMMODE_64BIT:
2724 case IEMMODE_32BIT:
2725 switch (pVCpu->iem.s.enmEffAddrMode)
2726 {
2727 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2728 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2729 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2731 }
2732 break;
2733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2734 }
2735 }
2736 else
2737 {
2738 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2739 switch (pVCpu->iem.s.enmEffOpSize)
2740 {
2741 case IEMMODE_16BIT:
2742 switch (pVCpu->iem.s.enmEffAddrMode)
2743 {
2744 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2748 }
2749 break;
2750 case IEMMODE_64BIT:
2751 case IEMMODE_32BIT:
2752 switch (pVCpu->iem.s.enmEffAddrMode)
2753 {
2754 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2755 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2756 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2758 }
2759 break;
2760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2761 }
2762 }
2763}
2764
2765
2766/**
2767 * @opcode 0x70
2768 */
2769FNIEMOP_DEF(iemOp_jo_Jb)
2770{
2771 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2772 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2775
2776 IEM_MC_BEGIN(0, 0);
2777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2778 IEM_MC_REL_JMP_S8(i8Imm);
2779 } IEM_MC_ELSE() {
2780 IEM_MC_ADVANCE_RIP();
2781 } IEM_MC_ENDIF();
2782 IEM_MC_END();
2783 return VINF_SUCCESS;
2784}
2785
2786
2787/**
2788 * @opcode 0x71
2789 */
2790FNIEMOP_DEF(iemOp_jno_Jb)
2791{
2792 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2793 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2796
2797 IEM_MC_BEGIN(0, 0);
2798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2799 IEM_MC_ADVANCE_RIP();
2800 } IEM_MC_ELSE() {
2801 IEM_MC_REL_JMP_S8(i8Imm);
2802 } IEM_MC_ENDIF();
2803 IEM_MC_END();
2804 return VINF_SUCCESS;
2805}
2806
2807/**
2808 * @opcode 0x72
2809 */
2810FNIEMOP_DEF(iemOp_jc_Jb)
2811{
2812 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2813 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2816
2817 IEM_MC_BEGIN(0, 0);
2818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2819 IEM_MC_REL_JMP_S8(i8Imm);
2820 } IEM_MC_ELSE() {
2821 IEM_MC_ADVANCE_RIP();
2822 } IEM_MC_ENDIF();
2823 IEM_MC_END();
2824 return VINF_SUCCESS;
2825}
2826
2827
2828/**
2829 * @opcode 0x73
2830 */
2831FNIEMOP_DEF(iemOp_jnc_Jb)
2832{
2833 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2834 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2837
2838 IEM_MC_BEGIN(0, 0);
2839 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2840 IEM_MC_ADVANCE_RIP();
2841 } IEM_MC_ELSE() {
2842 IEM_MC_REL_JMP_S8(i8Imm);
2843 } IEM_MC_ENDIF();
2844 IEM_MC_END();
2845 return VINF_SUCCESS;
2846}
2847
2848
2849/**
2850 * @opcode 0x74
2851 */
2852FNIEMOP_DEF(iemOp_je_Jb)
2853{
2854 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2855 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2857 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2858
2859 IEM_MC_BEGIN(0, 0);
2860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2861 IEM_MC_REL_JMP_S8(i8Imm);
2862 } IEM_MC_ELSE() {
2863 IEM_MC_ADVANCE_RIP();
2864 } IEM_MC_ENDIF();
2865 IEM_MC_END();
2866 return VINF_SUCCESS;
2867}
2868
2869
2870/**
2871 * @opcode 0x75
2872 */
2873FNIEMOP_DEF(iemOp_jne_Jb)
2874{
2875 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2876 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2879
2880 IEM_MC_BEGIN(0, 0);
2881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2882 IEM_MC_ADVANCE_RIP();
2883 } IEM_MC_ELSE() {
2884 IEM_MC_REL_JMP_S8(i8Imm);
2885 } IEM_MC_ENDIF();
2886 IEM_MC_END();
2887 return VINF_SUCCESS;
2888}
2889
2890
2891/**
2892 * @opcode 0x76
2893 */
2894FNIEMOP_DEF(iemOp_jbe_Jb)
2895{
2896 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2897 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2899 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2900
2901 IEM_MC_BEGIN(0, 0);
2902 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2903 IEM_MC_REL_JMP_S8(i8Imm);
2904 } IEM_MC_ELSE() {
2905 IEM_MC_ADVANCE_RIP();
2906 } IEM_MC_ENDIF();
2907 IEM_MC_END();
2908 return VINF_SUCCESS;
2909}
2910
2911
2912/**
2913 * @opcode 0x77
2914 */
2915FNIEMOP_DEF(iemOp_jnbe_Jb)
2916{
2917 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2918 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2921
2922 IEM_MC_BEGIN(0, 0);
2923 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2924 IEM_MC_ADVANCE_RIP();
2925 } IEM_MC_ELSE() {
2926 IEM_MC_REL_JMP_S8(i8Imm);
2927 } IEM_MC_ENDIF();
2928 IEM_MC_END();
2929 return VINF_SUCCESS;
2930}
2931
2932
2933/**
2934 * @opcode 0x78
2935 */
2936FNIEMOP_DEF(iemOp_js_Jb)
2937{
2938 IEMOP_MNEMONIC(js_Jb, "js Jb");
2939 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2942
2943 IEM_MC_BEGIN(0, 0);
2944 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2945 IEM_MC_REL_JMP_S8(i8Imm);
2946 } IEM_MC_ELSE() {
2947 IEM_MC_ADVANCE_RIP();
2948 } IEM_MC_ENDIF();
2949 IEM_MC_END();
2950 return VINF_SUCCESS;
2951}
2952
2953
2954/**
2955 * @opcode 0x79
2956 */
2957FNIEMOP_DEF(iemOp_jns_Jb)
2958{
2959 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2960 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2962 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2963
2964 IEM_MC_BEGIN(0, 0);
2965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2966 IEM_MC_ADVANCE_RIP();
2967 } IEM_MC_ELSE() {
2968 IEM_MC_REL_JMP_S8(i8Imm);
2969 } IEM_MC_ENDIF();
2970 IEM_MC_END();
2971 return VINF_SUCCESS;
2972}
2973
2974
2975/**
2976 * @opcode 0x7a
2977 */
2978FNIEMOP_DEF(iemOp_jp_Jb)
2979{
2980 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2981 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2983 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2984
2985 IEM_MC_BEGIN(0, 0);
2986 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2987 IEM_MC_REL_JMP_S8(i8Imm);
2988 } IEM_MC_ELSE() {
2989 IEM_MC_ADVANCE_RIP();
2990 } IEM_MC_ENDIF();
2991 IEM_MC_END();
2992 return VINF_SUCCESS;
2993}
2994
2995
2996/**
2997 * @opcode 0x7b
2998 */
2999FNIEMOP_DEF(iemOp_jnp_Jb)
3000{
3001 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3002 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3005
3006 IEM_MC_BEGIN(0, 0);
3007 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3008 IEM_MC_ADVANCE_RIP();
3009 } IEM_MC_ELSE() {
3010 IEM_MC_REL_JMP_S8(i8Imm);
3011 } IEM_MC_ENDIF();
3012 IEM_MC_END();
3013 return VINF_SUCCESS;
3014}
3015
3016
3017/**
3018 * @opcode 0x7c
3019 */
3020FNIEMOP_DEF(iemOp_jl_Jb)
3021{
3022 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3023 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3025 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3026
3027 IEM_MC_BEGIN(0, 0);
3028 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3029 IEM_MC_REL_JMP_S8(i8Imm);
3030 } IEM_MC_ELSE() {
3031 IEM_MC_ADVANCE_RIP();
3032 } IEM_MC_ENDIF();
3033 IEM_MC_END();
3034 return VINF_SUCCESS;
3035}
3036
3037
3038/**
3039 * @opcode 0x7d
3040 */
3041FNIEMOP_DEF(iemOp_jnl_Jb)
3042{
3043 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3044 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3047
3048 IEM_MC_BEGIN(0, 0);
3049 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3050 IEM_MC_ADVANCE_RIP();
3051 } IEM_MC_ELSE() {
3052 IEM_MC_REL_JMP_S8(i8Imm);
3053 } IEM_MC_ENDIF();
3054 IEM_MC_END();
3055 return VINF_SUCCESS;
3056}
3057
3058
3059/**
3060 * @opcode 0x7e
3061 */
3062FNIEMOP_DEF(iemOp_jle_Jb)
3063{
3064 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3065 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3067 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3068
3069 IEM_MC_BEGIN(0, 0);
3070 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3071 IEM_MC_REL_JMP_S8(i8Imm);
3072 } IEM_MC_ELSE() {
3073 IEM_MC_ADVANCE_RIP();
3074 } IEM_MC_ENDIF();
3075 IEM_MC_END();
3076 return VINF_SUCCESS;
3077}
3078
3079
3080/**
3081 * @opcode 0x7f
3082 */
3083FNIEMOP_DEF(iemOp_jnle_Jb)
3084{
3085 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3086 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3089
3090 IEM_MC_BEGIN(0, 0);
3091 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3092 IEM_MC_ADVANCE_RIP();
3093 } IEM_MC_ELSE() {
3094 IEM_MC_REL_JMP_S8(i8Imm);
3095 } IEM_MC_ENDIF();
3096 IEM_MC_END();
3097 return VINF_SUCCESS;
3098}
3099
3100
3101/**
3102 * @opcode 0x80
3103 */
3104FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3105{
3106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3107 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3108 {
3109 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3110 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
3111 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3112 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3113 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3114 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3115 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3116 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3117 }
3118 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3119
3120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3121 {
3122 /* register target */
3123 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 IEM_MC_BEGIN(3, 0);
3126 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3127 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3129
3130 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3131 IEM_MC_REF_EFLAGS(pEFlags);
3132 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3133
3134 IEM_MC_ADVANCE_RIP();
3135 IEM_MC_END();
3136 }
3137 else
3138 {
3139 /* memory target */
3140 uint32_t fAccess;
3141 if (pImpl->pfnLockedU8)
3142 fAccess = IEM_ACCESS_DATA_RW;
3143 else /* CMP */
3144 fAccess = IEM_ACCESS_DATA_R;
3145 IEM_MC_BEGIN(3, 2);
3146 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3149
3150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3151 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3152 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3153 if (pImpl->pfnLockedU8)
3154 IEMOP_HLP_DONE_DECODING();
3155 else
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157
3158 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3159 IEM_MC_FETCH_EFLAGS(EFlags);
3160 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3161 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3162 else
3163 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3164
3165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3166 IEM_MC_COMMIT_EFLAGS(EFlags);
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 }
3170 return VINF_SUCCESS;
3171}
3172
3173
3174/**
3175 * @opcode 0x81
 * Group 1 ops, Ev,Iz: word/dword/qword destination with a z-sized
 * immediate (16 or 32 bits; in 64-bit mode the 32-bit immediate is
 * sign-extended to 64 bits).  The ModR/M reg field selects the op.
3176 */
3177FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3178{
3179    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3181    {
3182        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3183        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3184        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3185        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3186        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3187        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3188        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3189        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3190    }
3191    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3192
3193    switch (pVCpu->iem.s.enmEffOpSize)
3194    {
3195        case IEMMODE_16BIT:
3196        {
3197            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3198            {
3199                /* register target */
3200                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3201                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3202                IEM_MC_BEGIN(3, 0);
3203                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3204                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3205                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3206
3207                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3208                IEM_MC_REF_EFLAGS(pEFlags);
3209                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3210
3211                IEM_MC_ADVANCE_RIP();
3212                IEM_MC_END();
3213            }
3214            else
3215            {
3216                /* memory target */
3217                uint32_t fAccess;
3218                if (pImpl->pfnLockedU16)
3219                    fAccess = IEM_ACCESS_DATA_RW;
3220                else /* CMP (group 1 has no TEST) */
3221                    fAccess = IEM_ACCESS_DATA_R;
3222                IEM_MC_BEGIN(3, 2);
3223                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3224                IEM_MC_ARG(uint16_t, u16Src, 1);
3225                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3226                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3227
                /* '2' = number of immediate bytes still to be fetched. */
3228                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3229                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3230                IEM_MC_ASSIGN(u16Src, u16Imm);
3231                if (pImpl->pfnLockedU16)
3232                    IEMOP_HLP_DONE_DECODING();
3233                else
3234                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3236                IEM_MC_FETCH_EFLAGS(EFlags);
3237                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3238                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3239                else
3240                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3241
3242                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3243                IEM_MC_COMMIT_EFLAGS(EFlags);
3244                IEM_MC_ADVANCE_RIP();
3245                IEM_MC_END();
3246            }
3247            break;
3248        }
3249
3250        case IEMMODE_32BIT:
3251        {
3252            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3253            {
3254                /* register target */
3255                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3256                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257                IEM_MC_BEGIN(3, 0);
3258                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3259                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3260                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3261
3262                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3263                IEM_MC_REF_EFLAGS(pEFlags);
3264                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
3265                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3266
3267                IEM_MC_ADVANCE_RIP();
3268                IEM_MC_END();
3269            }
3270            else
3271            {
3272                /* memory target */
3273                uint32_t fAccess;
3274                if (pImpl->pfnLockedU32)
3275                    fAccess = IEM_ACCESS_DATA_RW;
3276                else /* CMP (group 1 has no TEST) */
3277                    fAccess = IEM_ACCESS_DATA_R;
3278                IEM_MC_BEGIN(3, 2);
3279                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3280                IEM_MC_ARG(uint32_t, u32Src, 1);
3281                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3282                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3283
3284                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3285                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3286                IEM_MC_ASSIGN(u32Src, u32Imm);
3287                if (pImpl->pfnLockedU32)
3288                    IEMOP_HLP_DONE_DECODING();
3289                else
3290                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3292                IEM_MC_FETCH_EFLAGS(EFlags);
3293                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3294                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3295                else
3296                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3297
3298                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3299                IEM_MC_COMMIT_EFLAGS(EFlags);
3300                IEM_MC_ADVANCE_RIP();
3301                IEM_MC_END();
3302            }
3303            break;
3304        }
3305
3306        case IEMMODE_64BIT:
3307        {
3308            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3309            {
3310                /* register target */
                /* Iz is 32 bits here, sign-extended to 64 bits. */
3311                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3312                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313                IEM_MC_BEGIN(3, 0);
3314                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3315                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3316                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3317
3318                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3319                IEM_MC_REF_EFLAGS(pEFlags);
3320                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3321
3322                IEM_MC_ADVANCE_RIP();
3323                IEM_MC_END();
3324            }
3325            else
3326            {
3327                /* memory target */
3328                uint32_t fAccess;
3329                if (pImpl->pfnLockedU64)
3330                    fAccess = IEM_ACCESS_DATA_RW;
3331                else /* CMP */
3332                    fAccess = IEM_ACCESS_DATA_R;
3333                IEM_MC_BEGIN(3, 2);
3334                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3335                IEM_MC_ARG(uint64_t, u64Src, 1);
3336                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3337                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3338
3339                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3340                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3341                if (pImpl->pfnLockedU64)
3342                    IEMOP_HLP_DONE_DECODING();
3343                else
3344                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3345                IEM_MC_ASSIGN(u64Src, u64Imm);
3346                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3347                IEM_MC_FETCH_EFLAGS(EFlags);
3348                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3349                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3350                else
3351                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3352
3353                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3354                IEM_MC_COMMIT_EFLAGS(EFlags);
3355                IEM_MC_ADVANCE_RIP();
3356                IEM_MC_END();
3357            }
3358            break;
3359        }
3360    }
3361    return VINF_SUCCESS;
3362}
3363
3364
3365/**
3366 * @opcode 0x82
3367 * @opmnemonic grp1_82
3368 * @opgroup og_groups
 *
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is invalid in 64-bit mode;
 * IEMOP_HLP_NO_64BIT rejects it there before forwarding to the 0x80
 * handler.
3369 */
3370FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3371{
3372    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3373    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3374}
3375
3376
3377/**
3378 * @opcode 0x83
 * Group 1 ops, Ev,Ib: word/dword/qword destination with a byte
 * immediate that is sign-extended to the operand size.
3379 */
3380FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3381{
3382    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3383    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3384    {
3385        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3386        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3387        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3388        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3389        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3390        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3391        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3392        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3393    }
3394    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3395       to the 386 even if absent in the intel reference manuals and some
3396       3rd party opcode listings. */
3397    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3398
3399    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3400    {
3401        /*
3402         * Register target
3403         */
3404        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3406        switch (pVCpu->iem.s.enmEffOpSize)
3407        {
3408            case IEMMODE_16BIT:
3409            {
3410                IEM_MC_BEGIN(3, 0);
3411                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the immediate to the operand size. */
3412                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3413                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3414
3415                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3416                IEM_MC_REF_EFLAGS(pEFlags);
3417                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3418
3419                IEM_MC_ADVANCE_RIP();
3420                IEM_MC_END();
3421                break;
3422            }
3423
3424            case IEMMODE_32BIT:
3425            {
3426                IEM_MC_BEGIN(3, 0);
3427                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3428                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3429                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3430
3431                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3432                IEM_MC_REF_EFLAGS(pEFlags);
3433                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half in 64-bit mode. */
3434                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3435
3436                IEM_MC_ADVANCE_RIP();
3437                IEM_MC_END();
3438                break;
3439            }
3440
3441            case IEMMODE_64BIT:
3442            {
3443                IEM_MC_BEGIN(3, 0);
3444                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3445                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3446                IEM_MC_ARG(uint32_t *, pEFlags, 2);
3447
3448                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3449                IEM_MC_REF_EFLAGS(pEFlags);
3450                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3451
3452                IEM_MC_ADVANCE_RIP();
3453                IEM_MC_END();
3454                break;
3455            }
3456        }
3457    }
3458    else
3459    {
3460        /*
3461         * Memory target.
3462         */
3463        uint32_t fAccess;
        /* Checking the U16 slot is fine: lockability is the same for all sizes. */
3464        if (pImpl->pfnLockedU16)
3465            fAccess = IEM_ACCESS_DATA_RW;
3466        else /* CMP */
3467            fAccess = IEM_ACCESS_DATA_R;
3468
3469        switch (pVCpu->iem.s.enmEffOpSize)
3470        {
3471            case IEMMODE_16BIT:
3472            {
3473                IEM_MC_BEGIN(3, 2);
3474                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3475                IEM_MC_ARG(uint16_t, u16Src, 1);
3476                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3477                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3478
                /* '1' = one immediate byte still to be fetched. */
3479                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3480                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3481                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3482                if (pImpl->pfnLockedU16)
3483                    IEMOP_HLP_DONE_DECODING();
3484                else
3485                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3487                IEM_MC_FETCH_EFLAGS(EFlags);
3488                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3489                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3490                else
3491                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3492
3493                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3494                IEM_MC_COMMIT_EFLAGS(EFlags);
3495                IEM_MC_ADVANCE_RIP();
3496                IEM_MC_END();
3497                break;
3498            }
3499
3500            case IEMMODE_32BIT:
3501            {
3502                IEM_MC_BEGIN(3, 2);
3503                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3504                IEM_MC_ARG(uint32_t, u32Src, 1);
3505                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3506                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3507
3508                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3509                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3510                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3511                if (pImpl->pfnLockedU32)
3512                    IEMOP_HLP_DONE_DECODING();
3513                else
3514                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3516                IEM_MC_FETCH_EFLAGS(EFlags);
3517                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3518                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3519                else
3520                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3521
3522                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3523                IEM_MC_COMMIT_EFLAGS(EFlags);
3524                IEM_MC_ADVANCE_RIP();
3525                IEM_MC_END();
3526                break;
3527            }
3528
3529            case IEMMODE_64BIT:
3530            {
3531                IEM_MC_BEGIN(3, 2);
3532                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3533                IEM_MC_ARG(uint64_t, u64Src, 1);
3534                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3535                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3536
3537                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3538                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3539                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3540                if (pImpl->pfnLockedU64)
3541                    IEMOP_HLP_DONE_DECODING();
3542                else
3543                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3544                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3545                IEM_MC_FETCH_EFLAGS(EFlags);
3546                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3547                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3548                else
3549                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3550
3551                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3552                IEM_MC_COMMIT_EFLAGS(EFlags);
3553                IEM_MC_ADVANCE_RIP();
3554                IEM_MC_END();
3555                break;
3556            }
3557        }
3558    }
3559    return VINF_SUCCESS;
3560}
3561
3562
3563/**
3564 * @opcode 0x84
 * TEST Eb,Gb.  AF is architecturally undefined for TEST, hence the
 * verification exemption below; the shared binary-operator helper
 * does the decode and dispatch.
3565 */
3566FNIEMOP_DEF(iemOp_test_Eb_Gb)
3567{
3568    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3569    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3570    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3571}
3572
3573
3574/**
3575 * @opcode 0x85
 * TEST Ev,Gv.  AF is architecturally undefined for TEST, hence the
 * verification exemption below; the shared binary-operator helper
 * does the decode and dispatch.
3576 */
3577FNIEMOP_DEF(iemOp_test_Ev_Gv)
3578{
3579    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3580    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3581    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3582}
3583
3584
3585/**
3586 * @opcode 0x86
 * XCHG Eb,Gb - exchange a byte register with a byte register or
 * memory operand.  The register form swaps via two temporaries; the
 * memory form maps the operand R/W and uses the assembly helper.
3587 */
3588FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3589{
3590    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3591    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3592
3593    /*
3594     * If rm is denoting a register, no more instruction bytes.
3595     */
3596    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3597    {
3598        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3599
3600        IEM_MC_BEGIN(0, 2);
3601        IEM_MC_LOCAL(uint8_t, uTmp1);
3602        IEM_MC_LOCAL(uint8_t, uTmp2);
3603
        /* Fetch both values, then store them crosswise to do the swap. */
3604        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3605        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3606        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3607        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3608
3609        IEM_MC_ADVANCE_RIP();
3610        IEM_MC_END();
3611    }
3612    else
3613    {
3614        /*
3615         * We're accessing memory.
3616         */
3617/** @todo the register must be committed separately! */
3618        IEM_MC_BEGIN(2, 2);
3619        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3620        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3621        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3622
3623        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3624        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3625        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3626        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
3627        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3628
3629        IEM_MC_ADVANCE_RIP();
3630        IEM_MC_END();
3631    }
3632    return VINF_SUCCESS;
3633}
3634
3635
3636/**
3637 * @opcode 0x87
 * XCHG Ev,Gv - exchange a word/dword/qword register with a register
 * or memory operand.
3638 */
3639FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3640{
3641    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3642    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643
3644    /*
3645     * If rm is denoting a register, no more instruction bytes.
3646     */
3647    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648    {
3649        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650
3651        switch (pVCpu->iem.s.enmEffOpSize)
3652        {
3653            case IEMMODE_16BIT:
3654                IEM_MC_BEGIN(0, 2);
3655                IEM_MC_LOCAL(uint16_t, uTmp1);
3656                IEM_MC_LOCAL(uint16_t, uTmp2);
3657
                /* Fetch both values, then store them crosswise to do the swap. */
3658                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3659                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3660                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3661                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3662
3663                IEM_MC_ADVANCE_RIP();
3664                IEM_MC_END();
3665                return VINF_SUCCESS;
3666
3667            case IEMMODE_32BIT:
3668                IEM_MC_BEGIN(0, 2);
3669                IEM_MC_LOCAL(uint32_t, uTmp1);
3670                IEM_MC_LOCAL(uint32_t, uTmp2);
3671
3672                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3673                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3674                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3675                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3676
3677                IEM_MC_ADVANCE_RIP();
3678                IEM_MC_END();
3679                return VINF_SUCCESS;
3680
3681            case IEMMODE_64BIT:
3682                IEM_MC_BEGIN(0, 2);
3683                IEM_MC_LOCAL(uint64_t, uTmp1);
3684                IEM_MC_LOCAL(uint64_t, uTmp2);
3685
3686                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3687                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3688                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3689                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3690
3691                IEM_MC_ADVANCE_RIP();
3692                IEM_MC_END();
3693                return VINF_SUCCESS;
3694
3695                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3696        }
3697    }
3698    else
3699    {
3700        /*
3701         * We're accessing memory.
3702         */
3703        switch (pVCpu->iem.s.enmEffOpSize)
3704        {
3705/** @todo the register must be committed separately! */
3706            case IEMMODE_16BIT:
3707                IEM_MC_BEGIN(2, 2);
3708                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3709                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3710                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3711
3712                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3713                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3714                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
3716                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3717
3718                IEM_MC_ADVANCE_RIP();
3719                IEM_MC_END();
3720                return VINF_SUCCESS;
3721
3722            case IEMMODE_32BIT:
3723                IEM_MC_BEGIN(2, 2);
3724                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3725                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3726                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3727
3728                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3729                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3730                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3731                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
3732                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3733
                /* 32-bit register writes zero the upper half in 64-bit mode. */
3734                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3735                IEM_MC_ADVANCE_RIP();
3736                IEM_MC_END();
3737                return VINF_SUCCESS;
3738
3739            case IEMMODE_64BIT:
3740                IEM_MC_BEGIN(2, 2);
3741                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3742                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3743                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3744
3745                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3746                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3747                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3748                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3749                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3750
3751                IEM_MC_ADVANCE_RIP();
3752                IEM_MC_END();
3753                return VINF_SUCCESS;
3754
3755                IEM_NOT_REACHED_DEFAULT_CASE_RET();
3756        }
3757    }
3758}
3759
3760
3761/**
3762 * @opcode 0x88
 * MOV Eb,Gb - store a byte register to a byte register or memory
 * operand.
3763 */
3764FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3765{
3766    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3767
3768    uint8_t bRm;
3769    IEM_OPCODE_GET_NEXT_U8(&bRm);
3770
3771    /*
3772     * If rm is denoting a register, no more instruction bytes.
3773     */
3774    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3775    {
3776        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3777        IEM_MC_BEGIN(0, 1);
3778        IEM_MC_LOCAL(uint8_t, u8Value);
3779        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3780        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3781        IEM_MC_ADVANCE_RIP();
3782        IEM_MC_END();
3783    }
3784    else
3785    {
3786        /*
3787         * We're writing a register to memory.
3788         */
3789        IEM_MC_BEGIN(0, 2);
3790        IEM_MC_LOCAL(uint8_t, u8Value);
3791        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3792        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3793        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3795        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3796        IEM_MC_ADVANCE_RIP();
3797        IEM_MC_END();
3798    }
3799    return VINF_SUCCESS;
3800
3801}
3802
3803
3804/**
3805 * @opcode 0x89
 * MOV Ev,Gv - store a word/dword/qword general register to a
 * register or memory operand.
3806 */
3807FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3808{
3809    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3810
3811    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3812
3813    /*
3814     * If rm is denoting a register, no more instruction bytes.
3815     */
3816    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3817    {
3818        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819        switch (pVCpu->iem.s.enmEffOpSize)
3820        {
3821            case IEMMODE_16BIT:
3822                IEM_MC_BEGIN(0, 1);
3823                IEM_MC_LOCAL(uint16_t, u16Value);
3824                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3825                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3826                IEM_MC_ADVANCE_RIP();
3827                IEM_MC_END();
3828                break;
3829
3830            case IEMMODE_32BIT:
3831                IEM_MC_BEGIN(0, 1);
3832                IEM_MC_LOCAL(uint32_t, u32Value);
3833                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3834                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3835                IEM_MC_ADVANCE_RIP();
3836                IEM_MC_END();
3837                break;
3838
3839            case IEMMODE_64BIT:
3840                IEM_MC_BEGIN(0, 1);
3841                IEM_MC_LOCAL(uint64_t, u64Value);
3842                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3843                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3844                IEM_MC_ADVANCE_RIP();
3845                IEM_MC_END();
3846                break;
3847        }
3848    }
3849    else
3850    {
3851        /*
3852         * We're writing a register to memory.
3853         */
3854        switch (pVCpu->iem.s.enmEffOpSize)
3855        {
3856            case IEMMODE_16BIT:
3857                IEM_MC_BEGIN(0, 2);
3858                IEM_MC_LOCAL(uint16_t, u16Value);
3859                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3860                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3861                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3863                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3864                IEM_MC_ADVANCE_RIP();
3865                IEM_MC_END();
3866                break;
3867
3868            case IEMMODE_32BIT:
3869                IEM_MC_BEGIN(0, 2);
3870                IEM_MC_LOCAL(uint32_t, u32Value);
3871                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3872                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3873                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3875                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3876                IEM_MC_ADVANCE_RIP();
3877                IEM_MC_END();
3878                break;
3879
3880            case IEMMODE_64BIT:
3881                IEM_MC_BEGIN(0, 2);
3882                IEM_MC_LOCAL(uint64_t, u64Value);
3883                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3884                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3885                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3887                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3888                IEM_MC_ADVANCE_RIP();
3889                IEM_MC_END();
3890                break;
3891        }
3892    }
3893    return VINF_SUCCESS;
3894}
3895
3896
3897/**
3898 * @opcode 0x8a
 * MOV Gb,Eb - load a byte register from a byte register or memory
 * operand.
3899 */
3900FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3901{
3902    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3903
3904    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3905
3906    /*
3907     * If rm is denoting a register, no more instruction bytes.
3908     */
3909    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3910    {
3911        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3912        IEM_MC_BEGIN(0, 1);
3913        IEM_MC_LOCAL(uint8_t, u8Value);
3914        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3915        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3916        IEM_MC_ADVANCE_RIP();
3917        IEM_MC_END();
3918    }
3919    else
3920    {
3921        /*
3922         * We're loading a register from memory.
3923         */
3924        IEM_MC_BEGIN(0, 2);
3925        IEM_MC_LOCAL(uint8_t, u8Value);
3926        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3927        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3928        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3929        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3930        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3931        IEM_MC_ADVANCE_RIP();
3932        IEM_MC_END();
3933    }
3934    return VINF_SUCCESS;
3935}
3936
3937
3938/**
3939 * @opcode 0x8b
 * MOV Gv,Ev - load a word/dword/qword general register from a
 * register or memory operand.
3940 */
3941FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3942{
3943    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3944
3945    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3946
3947    /*
3948     * If rm is denoting a register, no more instruction bytes.
3949     */
3950    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3951    {
3952        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3953        switch (pVCpu->iem.s.enmEffOpSize)
3954        {
3955            case IEMMODE_16BIT:
3956                IEM_MC_BEGIN(0, 1);
3957                IEM_MC_LOCAL(uint16_t, u16Value);
3958                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3959                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3960                IEM_MC_ADVANCE_RIP();
3961                IEM_MC_END();
3962                break;
3963
3964            case IEMMODE_32BIT:
3965                IEM_MC_BEGIN(0, 1);
3966                IEM_MC_LOCAL(uint32_t, u32Value);
3967                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3968                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3969                IEM_MC_ADVANCE_RIP();
3970                IEM_MC_END();
3971                break;
3972
3973            case IEMMODE_64BIT:
3974                IEM_MC_BEGIN(0, 1);
3975                IEM_MC_LOCAL(uint64_t, u64Value);
3976                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3977                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3978                IEM_MC_ADVANCE_RIP();
3979                IEM_MC_END();
3980                break;
3981        }
3982    }
3983    else
3984    {
3985        /*
3986         * We're loading a register from memory.
3987         */
3988        switch (pVCpu->iem.s.enmEffOpSize)
3989        {
3990            case IEMMODE_16BIT:
3991                IEM_MC_BEGIN(0, 2);
3992                IEM_MC_LOCAL(uint16_t, u16Value);
3993                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3994                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3995                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3997                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3998                IEM_MC_ADVANCE_RIP();
3999                IEM_MC_END();
4000                break;
4001
4002            case IEMMODE_32BIT:
4003                IEM_MC_BEGIN(0, 2);
4004                IEM_MC_LOCAL(uint32_t, u32Value);
4005                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4006                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4007                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4008                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4009                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
4010                IEM_MC_ADVANCE_RIP();
4011                IEM_MC_END();
4012                break;
4013
4014            case IEMMODE_64BIT:
4015                IEM_MC_BEGIN(0, 2);
4016                IEM_MC_LOCAL(uint64_t, u64Value);
4017                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4018                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4019                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4021                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
4022                IEM_MC_ADVANCE_RIP();
4023                IEM_MC_END();
4024                break;
4025        }
4026    }
4027    return VINF_SUCCESS;
4028}
4029
4030
4031/**
4032 * opcode 0x63
4033 * @todo Table fixme
4034 */
4035FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4036{
4037 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4038 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4039 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4040 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4041 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4042}
4043
4044
4045/**
4046 * @opcode 0x8c
 * MOV Ev,Sw - store a segment register to a general register or
 * memory.  Unlike mov Sw,Ev (0x8e), CS is a valid source here; only
 * reg values above GS are rejected.
4047 */
4048FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4049{
4050    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4051
4052    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053
4054    /*
4055     * Check that the destination register exists. The REX.R prefix is ignored.
4056     */
4057    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4058    if ( iSegReg > X86_SREG_GS)
4059        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4060
4061    /*
4062     * If rm is denoting a register, no more instruction bytes.
4063     * In that case, the operand size is respected and the upper bits are
4064     * cleared (starting with some pentium).
4065     */
4066    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4067    {
4068        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4069        switch (pVCpu->iem.s.enmEffOpSize)
4070        {
4071            case IEMMODE_16BIT:
4072                IEM_MC_BEGIN(0, 1);
4073                IEM_MC_LOCAL(uint16_t, u16Value);
4074                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4075                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
4076                IEM_MC_ADVANCE_RIP();
4077                IEM_MC_END();
4078                break;
4079
4080            case IEMMODE_32BIT:
4081                IEM_MC_BEGIN(0, 1);
4082                IEM_MC_LOCAL(uint32_t, u32Value);
                /* Zero-extend the 16-bit selector to the operand size. */
4083                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4084                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
4085                IEM_MC_ADVANCE_RIP();
4086                IEM_MC_END();
4087                break;
4088
4089            case IEMMODE_64BIT:
4090                IEM_MC_BEGIN(0, 1);
4091                IEM_MC_LOCAL(uint64_t, u64Value);
4092                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4093                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
4094                IEM_MC_ADVANCE_RIP();
4095                IEM_MC_END();
4096                break;
4097        }
4098    }
4099    else
4100    {
4101        /*
4102         * We're saving the register to memory. The access is word sized
4103         * regardless of operand size prefixes.
4104         */
4105#if 0 /* not necessary */
4106        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4107#endif
4108        IEM_MC_BEGIN(0, 2);
4109        IEM_MC_LOCAL(uint16_t, u16Value);
4110        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4111        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4112        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4113        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4114        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4115        IEM_MC_ADVANCE_RIP();
4116        IEM_MC_END();
4117    }
4118    return VINF_SUCCESS;
4119}
4120
4121
4122
4123
4124/**
4125 * @opcode 0x8d
 * LEA Gv,M - store the effective address of the memory operand in a
 * general register without accessing memory.  The register form
 * (mod=3) raises an invalid-opcode exception.
4126 */
4127FNIEMOP_DEF(iemOp_lea_Gv_M)
4128{
4129    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4130    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4131    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4132        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4133
4134    switch (pVCpu->iem.s.enmEffOpSize)
4135    {
4136        case IEMMODE_16BIT:
4137            IEM_MC_BEGIN(0, 2);
4138            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4139            IEM_MC_LOCAL(uint16_t, u16Cast);
4140            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4141            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* Truncate the effective address to the operand size. */
4142            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4143            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
4144            IEM_MC_ADVANCE_RIP();
4145            IEM_MC_END();
4146            return VINF_SUCCESS;
4147
4148        case IEMMODE_32BIT:
4149            IEM_MC_BEGIN(0, 2);
4150            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4151            IEM_MC_LOCAL(uint32_t, u32Cast);
4152            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4153            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4155            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
4156            IEM_MC_ADVANCE_RIP();
4157            IEM_MC_END();
4158            return VINF_SUCCESS;
4159
4160        case IEMMODE_64BIT:
4161            IEM_MC_BEGIN(0, 1);
4162            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4163            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4164            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
4166            IEM_MC_ADVANCE_RIP();
4167            IEM_MC_END();
4168            return VINF_SUCCESS;
4169    }
4170    AssertFailedReturn(VERR_IEM_IPE_7);
4171}
4172
4173
/**
 * @opcode 0x8e
 *
 * MOV Sw,Ev - loads a segment register (not CS) from a 16-bit register or
 * memory operand.  The descriptor loading and permission checking is done
 * by iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS /* loading CS this way is invalid */
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
4230
4231
/** Opcode 0x8f /0.
 *
 * POP Ev - pops a word/dword/qword off the stack into a register or memory
 * operand.  The memory form is tricky: Intel specifies that RSP is
 * incremented *before* it is used in the effective address calculation, so
 * the EA is recomputed here with an RSP bias and nothing is committed until
 * both the pop and the store have succeeded.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR     GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The last argument biases rSP by the operand size (2/4/8) during the
       EA calculation, implementing the "RSP incremented first" rule. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop via a temporary RSP copy so nothing is committed unless both the
       pop and the memory store succeed. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit the new RSP and advance RIP only on full success. */
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4325
4326
/**
 * @opcode 0x8f
 *
 * Dispatches between pop Ev (modrm.reg == 0) and the AMD XOP prefix.  For
 * XOP the second prefix byte (bRm here) holds ~R.~X.~B.mmmmm and the third
 * (bXop2) holds W.~vvvv.L.pp, mirroring the three-byte VEX layout.
 */
FNIEMOP_DEF(iemOp_Grp1A__xop)
{
    /*
     * AMD has defined /1 thru /7 as XOP prefix.  The prefix is similar to the
     * three byte VEX prefix, except that the mmmmm field cannot have the values
     * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
        return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);

    IEMOP_MNEMONIC(xop, "xop");
    if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
    {
        /** @todo Test when exctly the XOP conformance checks kick in during
         * instruction decoding and fetching (using \#PF). */
        uint8_t bXop2;   IEM_OPCODE_GET_NEXT_U8(&bXop2);
        uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
        /* No legacy size/rep/lock/REX prefixes may precede an XOP prefix. */
        if (   (  pVCpu->iem.s.fPrefixes
                & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
            == 0)
        {
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
            if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* R, X and B are stored inverted in the prefix byte, hence the ~. */
            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix  = bXop2 & 0x3;

            /** @todo XOP: Just use new tables and decoders. */
            switch (bRm & 0x1f)
            {
                case 8: /* xop opcode map 8. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 9: /* xop opcode map 9. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                case 10: /* xop opcode map 10. */
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;

                default:
                    Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        else
            Log(("XOP: Invalid prefix mix!\n"));
    }
    else
        Log(("XOP: XOP support disabled!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
4389
4390
/**
 * Common 'xchg reg,rAX' helper.
 *
 * Exchanges the given general register (REX.B is OR'ed in here) with
 * rAX/eAX/AX according to the effective operand size.  Register-only form,
 * so no bus locking is involved and a LOCK prefix raises \#UD.
 *
 * @param iReg  The low 3 bits of the register index (0..7).
 */
FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    iReg |= pVCpu->iem.s.uRexB;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Tmp1);
            IEM_MC_LOCAL(uint16_t, u16Tmp2);
            IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
            IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
            IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Tmp1);
            IEM_MC_LOCAL(uint32_t, u32Tmp2);
            IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
            IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
            IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Tmp1);
            IEM_MC_LOCAL(uint64_t, u64Tmp2);
            IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
            IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
            IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4440
4441
4442/**
4443 * @opcode 0x90
4444 */
4445FNIEMOP_DEF(iemOp_nop)
4446{
4447 /* R8/R8D and RAX/EAX can be exchanged. */
4448 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4449 {
4450 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4451 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4452 }
4453
4454 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4455 {
4456 IEMOP_MNEMONIC(pause, "pause");
4457#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4458 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4459 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4460#endif
4461 }
4462 else
4463 IEMOP_MNEMONIC(nop, "nop");
4464 IEM_MC_BEGIN(0, 0);
4465 IEM_MC_ADVANCE_RIP();
4466 IEM_MC_END();
4467 return VINF_SUCCESS;
4468}
4469
4470
/**
 * @opcode 0x91
 *
 * XCHG rCX,rAX - register-only exchange; operand size and REX.B handling is
 * done by the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
4479
4480
/**
 * @opcode 0x92
 *
 * XCHG rDX,rAX - register-only exchange via the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
4489
4490
/**
 * @opcode 0x93
 *
 * XCHG rBX,rAX - register-only exchange via the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
4499
4500
4501/**
4502 * @opcode 0x94
4503 */
4504FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4505{
4506 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4507 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4508}
4509
4510
/**
 * @opcode 0x95
 *
 * XCHG rBP,rAX - register-only exchange via the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
4519
4520
/**
 * @opcode 0x96
 *
 * XCHG rSI,rAX - register-only exchange via the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
4529
4530
/**
 * @opcode 0x97
 *
 * XCHG rDI,rAX - register-only exchange via the common helper.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
4539
4540
/**
 * @opcode 0x98
 *
 * CBW/CWDE/CDQE - sign extends the lower half of rAX into the full
 * operand-size register (AL->AX, AX->EAX, EAX->RAX).  Implemented by
 * testing the source sign bit and OR'ing/AND'ing the upper-half mask.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) { /* AL sign bit */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4588
4589
/**
 * @opcode 0x99
 *
 * CWD/CDQ/CQO - sign extends rAX into rDX:rAX by filling rDX with all ones
 * or all zeros depending on the sign bit of the operand-size rAX.
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) { /* AX sign bit */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) { /* EAX sign bit */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) { /* RAX sign bit */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4637
4638
/**
 * @opcode 0x9a
 *
 * CALL Ap - direct far call with an immediate sel:offset pointer.  Invalid
 * (\#UD) in 64-bit mode; the heavy lifting is done by iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
4657
4658
/** Opcode 0x9b. (aka fwait)
 *
 * WAIT/FWAIT - checks for a pending x87 exception (\#MF) and for \#NM per
 * the CR0.MP/TS wait rules, otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4672
4673
/**
 * @opcode 0x9c
 *
 * PUSHF/PUSHFD/PUSHFQ - deferred to the C implementation; note the 64-bit
 * default operand size (pushing a 32-bit value is not encodable there).
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
4683
4684
/**
 * @opcode 0x9d
 *
 * POPF/POPFD/POPFQ - deferred to the C implementation, which handles the
 * IOPL/VM86 flag-writability rules; 64-bit default operand size applies.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
4694
4695
/**
 * @opcode 0x9e
 *
 * SAHF - stores AH into the low byte of EFLAGS (SF/ZF/AF/PF/CF), forcing
 * the reserved bit 1 set.  In 64-bit mode it is only valid when the CPU
 * reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* reserved bit 1 is always set */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4720
4721
/**
 * @opcode 0x9f
 *
 * LAHF - loads the low byte of EFLAGS into AH.  In 64-bit mode it is only
 * valid when the CPU reports the LAHF/SAHF capability.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4740
4741
/**
 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
 * prefixes.  Will return on failures.
 *
 * The moffs operand is an address-size sized immediate offset (no ModR/M
 * byte); it is zero extended to 64 bits here.
 *
 * @param a_GCPtrMemOff The variable to store the offset in.
 */
#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
    do \
    { \
        switch (pVCpu->iem.s.enmEffAddrMode) \
        { \
            case IEMMODE_16BIT: \
                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_32BIT: \
                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
                break; \
            case IEMMODE_64BIT: \
                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
                break; \
            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
        } \
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
    } while (0)
4766
4767/**
4768 * @opcode 0xa0
4769 */
4770FNIEMOP_DEF(iemOp_mov_AL_Ob)
4771{
4772 /*
4773 * Get the offset and fend of lock prefixes.
4774 */
4775 RTGCPTR GCPtrMemOff;
4776 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4777
4778 /*
4779 * Fetch AL.
4780 */
4781 IEM_MC_BEGIN(0,1);
4782 IEM_MC_LOCAL(uint8_t, u8Tmp);
4783 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4784 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4785 IEM_MC_ADVANCE_RIP();
4786 IEM_MC_END();
4787 return VINF_SUCCESS;
4788}
4789
4790
/**
 * @opcode 0xa1
 *
 * MOV rAX,Ov - loads AX/EAX/RAX from the operand-size value at seg:moffs
 * (segment overridable).
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend off lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4838
4839
4840/**
4841 * @opcode 0xa2
4842 */
4843FNIEMOP_DEF(iemOp_mov_Ob_AL)
4844{
4845 /*
4846 * Get the offset and fend of lock prefixes.
4847 */
4848 RTGCPTR GCPtrMemOff;
4849 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4850
4851 /*
4852 * Store AL.
4853 */
4854 IEM_MC_BEGIN(0,1);
4855 IEM_MC_LOCAL(uint8_t, u8Tmp);
4856 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4857 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4858 IEM_MC_ADVANCE_RIP();
4859 IEM_MC_END();
4860 return VINF_SUCCESS;
4861}
4862
4863
4864/**
4865 * @opcode 0xa3
4866 */
4867FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4868{
4869 /*
4870 * Get the offset and fend of lock prefixes.
4871 */
4872 RTGCPTR GCPtrMemOff;
4873 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4874
4875 /*
4876 * Store rAX.
4877 */
4878 switch (pVCpu->iem.s.enmEffOpSize)
4879 {
4880 case IEMMODE_16BIT:
4881 IEM_MC_BEGIN(0,1);
4882 IEM_MC_LOCAL(uint16_t, u16Tmp);
4883 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4884 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4885 IEM_MC_ADVANCE_RIP();
4886 IEM_MC_END();
4887 return VINF_SUCCESS;
4888
4889 case IEMMODE_32BIT:
4890 IEM_MC_BEGIN(0,1);
4891 IEM_MC_LOCAL(uint32_t, u32Tmp);
4892 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4893 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4894 IEM_MC_ADVANCE_RIP();
4895 IEM_MC_END();
4896 return VINF_SUCCESS;
4897
4898 case IEMMODE_64BIT:
4899 IEM_MC_BEGIN(0,1);
4900 IEM_MC_LOCAL(uint64_t, u64Tmp);
4901 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4902 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4903 IEM_MC_ADVANCE_RIP();
4904 IEM_MC_END();
4905 return VINF_SUCCESS;
4906
4907 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4908 }
4909}
4910
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits the non-REP movs body: loads ValBits bits from [iEffSeg:xSI],
 * stores them at ES:xDI (ES cannot be overridden), then advances or
 * retreats both index registers by ValBits/8 according to EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4929
/**
 * @opcode 0xa4
 *
 * MOVSB - copies a byte from DS:rSI (segment overridable) to ES:rDI and
 * advances/retreats both index registers per EFLAGS.DF.  With a REP prefix
 * the whole operation is deferred to the C implementation.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4965
4966
4967/**
4968 * @opcode 0xa5
4969 */
4970FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4971{
4972 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4973
4974 /*
4975 * Use the C implementation if a repeat prefix is encountered.
4976 */
4977 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4978 {
4979 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4980 switch (pVCpu->iem.s.enmEffOpSize)
4981 {
4982 case IEMMODE_16BIT:
4983 switch (pVCpu->iem.s.enmEffAddrMode)
4984 {
4985 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4986 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4987 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4988 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4989 }
4990 break;
4991 case IEMMODE_32BIT:
4992 switch (pVCpu->iem.s.enmEffAddrMode)
4993 {
4994 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4995 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4996 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4998 }
4999 case IEMMODE_64BIT:
5000 switch (pVCpu->iem.s.enmEffAddrMode)
5001 {
5002 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5003 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5004 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5005 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5006 }
5007 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5008 }
5009 }
5010 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5011
5012 /*
5013 * Annoying double switch here.
5014 * Using ugly macro for implementing the cases, sharing it with movsb.
5015 */
5016 switch (pVCpu->iem.s.enmEffOpSize)
5017 {
5018 case IEMMODE_16BIT:
5019 switch (pVCpu->iem.s.enmEffAddrMode)
5020 {
5021 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5022 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5023 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5024 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5025 }
5026 break;
5027
5028 case IEMMODE_32BIT:
5029 switch (pVCpu->iem.s.enmEffAddrMode)
5030 {
5031 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5032 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5033 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5034 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5035 }
5036 break;
5037
5038 case IEMMODE_64BIT:
5039 switch (pVCpu->iem.s.enmEffAddrMode)
5040 {
5041 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5042 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5043 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5044 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5045 }
5046 break;
5047 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5048 }
5049 return VINF_SUCCESS;
5050}
5051
5052#undef IEM_MOVS_CASE
5053
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-REP cmps body: loads ValBits bits from [iEffSeg:xSI] and
 * from ES:xDI, compares them via iemAImpl_cmp (updating EFLAGS only), then
 * advances or retreats both index registers per EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/**
 * @opcode 0xa6
 *
 * CMPSB - compares the byte at DS:rSI (segment overridable) with the byte
 * at ES:rDI, setting EFLAGS only, and advances/retreats both index
 * registers per EFLAGS.DF.  REPE (F3) and REPNE (F2) forms are deferred to
 * separate C implementations.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
5128
5129
5130/**
5131 * @opcode 0xa7
5132 */
5133FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5134{
5135 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5136
5137 /*
5138 * Use the C implementation if a repeat prefix is encountered.
5139 */
5140 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5141 {
5142 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5143 switch (pVCpu->iem.s.enmEffOpSize)
5144 {
5145 case IEMMODE_16BIT:
5146 switch (pVCpu->iem.s.enmEffAddrMode)
5147 {
5148 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5149 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5150 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5151 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5152 }
5153 break;
5154 case IEMMODE_32BIT:
5155 switch (pVCpu->iem.s.enmEffAddrMode)
5156 {
5157 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5158 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5159 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5160 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5161 }
5162 case IEMMODE_64BIT:
5163 switch (pVCpu->iem.s.enmEffAddrMode)
5164 {
5165 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5166 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5167 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5168 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5169 }
5170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5171 }
5172 }
5173
5174 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5175 {
5176 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5177 switch (pVCpu->iem.s.enmEffOpSize)
5178 {
5179 case IEMMODE_16BIT:
5180 switch (pVCpu->iem.s.enmEffAddrMode)
5181 {
5182 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5183 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5184 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5186 }
5187 break;
5188 case IEMMODE_32BIT:
5189 switch (pVCpu->iem.s.enmEffAddrMode)
5190 {
5191 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5192 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5193 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5195 }
5196 case IEMMODE_64BIT:
5197 switch (pVCpu->iem.s.enmEffAddrMode)
5198 {
5199 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5200 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5201 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5202 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5203 }
5204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5205 }
5206 }
5207
5208 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5209
5210 /*
5211 * Annoying double switch here.
5212 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5213 */
5214 switch (pVCpu->iem.s.enmEffOpSize)
5215 {
5216 case IEMMODE_16BIT:
5217 switch (pVCpu->iem.s.enmEffAddrMode)
5218 {
5219 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5220 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5221 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5222 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5223 }
5224 break;
5225
5226 case IEMMODE_32BIT:
5227 switch (pVCpu->iem.s.enmEffAddrMode)
5228 {
5229 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5230 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5231 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5232 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5233 }
5234 break;
5235
5236 case IEMMODE_64BIT:
5237 switch (pVCpu->iem.s.enmEffAddrMode)
5238 {
5239 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5240 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5241 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5242 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5243 }
5244 break;
5245 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5246 }
5247 return VINF_SUCCESS;
5248
5249}
5250
5251#undef IEM_CMPS_CASE
5252
5253/**
5254 * @opcode 0xa8
5255 */
5256FNIEMOP_DEF(iemOp_test_AL_Ib)
5257{
5258 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5259 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5260 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5261}
5262
5263
5264/**
5265 * @opcode 0xa9
5266 */
5267FNIEMOP_DEF(iemOp_test_eAX_Iz)
5268{
5269 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5270 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5271 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5272}
5273
5274
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 * Emits the non-repeating STOS body: store ValBits of rAX to ES:[xDI], then
 * advance or retreat xDI by the operand size depending on EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
5290
5291/**
5292 * @opcode 0xaa
5293 */
5294FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5295{
5296 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5297
5298 /*
5299 * Use the C implementation if a repeat prefix is encountered.
5300 */
5301 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5302 {
5303 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5304 switch (pVCpu->iem.s.enmEffAddrMode)
5305 {
5306 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5307 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5308 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5310 }
5311 }
5312 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5313
5314 /*
5315 * Sharing case implementation with stos[wdq] below.
5316 */
5317 switch (pVCpu->iem.s.enmEffAddrMode)
5318 {
5319 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5320 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5321 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5322 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5323 }
5324 return VINF_SUCCESS;
5325}
5326
5327
5328/**
5329 * @opcode 0xab
5330 */
5331FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5332{
5333 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5334
5335 /*
5336 * Use the C implementation if a repeat prefix is encountered.
5337 */
5338 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5339 {
5340 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5341 switch (pVCpu->iem.s.enmEffOpSize)
5342 {
5343 case IEMMODE_16BIT:
5344 switch (pVCpu->iem.s.enmEffAddrMode)
5345 {
5346 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5347 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5348 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5350 }
5351 break;
5352 case IEMMODE_32BIT:
5353 switch (pVCpu->iem.s.enmEffAddrMode)
5354 {
5355 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5356 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5357 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5358 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5359 }
5360 case IEMMODE_64BIT:
5361 switch (pVCpu->iem.s.enmEffAddrMode)
5362 {
5363 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5364 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5365 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5366 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5367 }
5368 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5369 }
5370 }
5371 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5372
5373 /*
5374 * Annoying double switch here.
5375 * Using ugly macro for implementing the cases, sharing it with stosb.
5376 */
5377 switch (pVCpu->iem.s.enmEffOpSize)
5378 {
5379 case IEMMODE_16BIT:
5380 switch (pVCpu->iem.s.enmEffAddrMode)
5381 {
5382 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5383 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5384 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5386 }
5387 break;
5388
5389 case IEMMODE_32BIT:
5390 switch (pVCpu->iem.s.enmEffAddrMode)
5391 {
5392 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5393 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5394 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5396 }
5397 break;
5398
5399 case IEMMODE_64BIT:
5400 switch (pVCpu->iem.s.enmEffAddrMode)
5401 {
5402 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5403 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5404 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5406 }
5407 break;
5408 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5409 }
5410 return VINF_SUCCESS;
5411}
5412
5413#undef IEM_STOS_CASE
5414
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 * Emits the non-repeating LODS body: load ValBits from iEffSeg:[xSI] into
 * rAX, then advance or retreat xSI by the operand size per EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5430
5431/**
5432 * @opcode 0xac
5433 */
5434FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5435{
5436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5437
5438 /*
5439 * Use the C implementation if a repeat prefix is encountered.
5440 */
5441 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5442 {
5443 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5444 switch (pVCpu->iem.s.enmEffAddrMode)
5445 {
5446 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5447 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5448 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5449 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5450 }
5451 }
5452 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5453
5454 /*
5455 * Sharing case implementation with stos[wdq] below.
5456 */
5457 switch (pVCpu->iem.s.enmEffAddrMode)
5458 {
5459 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5460 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5461 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5462 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5463 }
5464 return VINF_SUCCESS;
5465}
5466
5467
5468/**
5469 * @opcode 0xad
5470 */
5471FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5472{
5473 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5474
5475 /*
5476 * Use the C implementation if a repeat prefix is encountered.
5477 */
5478 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5479 {
5480 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5481 switch (pVCpu->iem.s.enmEffOpSize)
5482 {
5483 case IEMMODE_16BIT:
5484 switch (pVCpu->iem.s.enmEffAddrMode)
5485 {
5486 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5487 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5488 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5490 }
5491 break;
5492 case IEMMODE_32BIT:
5493 switch (pVCpu->iem.s.enmEffAddrMode)
5494 {
5495 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5496 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5497 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5498 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5499 }
5500 case IEMMODE_64BIT:
5501 switch (pVCpu->iem.s.enmEffAddrMode)
5502 {
5503 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5504 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5505 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5506 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5507 }
5508 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5509 }
5510 }
5511 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5512
5513 /*
5514 * Annoying double switch here.
5515 * Using ugly macro for implementing the cases, sharing it with lodsb.
5516 */
5517 switch (pVCpu->iem.s.enmEffOpSize)
5518 {
5519 case IEMMODE_16BIT:
5520 switch (pVCpu->iem.s.enmEffAddrMode)
5521 {
5522 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5523 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5524 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5526 }
5527 break;
5528
5529 case IEMMODE_32BIT:
5530 switch (pVCpu->iem.s.enmEffAddrMode)
5531 {
5532 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5533 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5534 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5535 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5536 }
5537 break;
5538
5539 case IEMMODE_64BIT:
5540 switch (pVCpu->iem.s.enmEffAddrMode)
5541 {
5542 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5543 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5544 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5545 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5546 }
5547 break;
5548 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5549 }
5550 return VINF_SUCCESS;
5551}
5552
5553#undef IEM_LODS_CASE
5554
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 * Emits the non-repeating SCAS body: compare rAX against ValBits loaded from
 * ES:[xDI] (via the cmp assembly helper, updating EFLAGS only), then advance
 * or retreat xDI by the operand size per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
5576
5577/**
5578 * @opcode 0xae
5579 */
5580FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5581{
5582 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5583
5584 /*
5585 * Use the C implementation if a repeat prefix is encountered.
5586 */
5587 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5588 {
5589 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5590 switch (pVCpu->iem.s.enmEffAddrMode)
5591 {
5592 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5593 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5594 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5595 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5596 }
5597 }
5598 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5599 {
5600 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5601 switch (pVCpu->iem.s.enmEffAddrMode)
5602 {
5603 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5604 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5605 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5606 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5607 }
5608 }
5609 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5610
5611 /*
5612 * Sharing case implementation with stos[wdq] below.
5613 */
5614 switch (pVCpu->iem.s.enmEffAddrMode)
5615 {
5616 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5617 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5618 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5619 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5620 }
5621 return VINF_SUCCESS;
5622}
5623
5624
5625/**
5626 * @opcode 0xaf
5627 */
5628FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5629{
5630 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5631
5632 /*
5633 * Use the C implementation if a repeat prefix is encountered.
5634 */
5635 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5636 {
5637 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5638 switch (pVCpu->iem.s.enmEffOpSize)
5639 {
5640 case IEMMODE_16BIT:
5641 switch (pVCpu->iem.s.enmEffAddrMode)
5642 {
5643 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5644 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5645 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5646 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5647 }
5648 break;
5649 case IEMMODE_32BIT:
5650 switch (pVCpu->iem.s.enmEffAddrMode)
5651 {
5652 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5653 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5654 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5655 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5656 }
5657 case IEMMODE_64BIT:
5658 switch (pVCpu->iem.s.enmEffAddrMode)
5659 {
5660 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5661 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5662 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5663 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5664 }
5665 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5666 }
5667 }
5668 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5669 {
5670 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5671 switch (pVCpu->iem.s.enmEffOpSize)
5672 {
5673 case IEMMODE_16BIT:
5674 switch (pVCpu->iem.s.enmEffAddrMode)
5675 {
5676 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5680 }
5681 break;
5682 case IEMMODE_32BIT:
5683 switch (pVCpu->iem.s.enmEffAddrMode)
5684 {
5685 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5686 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5687 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5689 }
5690 case IEMMODE_64BIT:
5691 switch (pVCpu->iem.s.enmEffAddrMode)
5692 {
5693 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5697 }
5698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5699 }
5700 }
5701 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5702
5703 /*
5704 * Annoying double switch here.
5705 * Using ugly macro for implementing the cases, sharing it with scasb.
5706 */
5707 switch (pVCpu->iem.s.enmEffOpSize)
5708 {
5709 case IEMMODE_16BIT:
5710 switch (pVCpu->iem.s.enmEffAddrMode)
5711 {
5712 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5713 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5714 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5715 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5716 }
5717 break;
5718
5719 case IEMMODE_32BIT:
5720 switch (pVCpu->iem.s.enmEffAddrMode)
5721 {
5722 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5723 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5724 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5725 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5726 }
5727 break;
5728
5729 case IEMMODE_64BIT:
5730 switch (pVCpu->iem.s.enmEffAddrMode)
5731 {
5732 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5733 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5734 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5735 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5736 }
5737 break;
5738 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5739 }
5740 return VINF_SUCCESS;
5741}
5742
5743#undef IEM_SCAS_CASE
5744
5745/**
5746 * Common 'mov r8, imm8' helper.
5747 */
5748FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5749{
5750 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5751 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5752
5753 IEM_MC_BEGIN(0, 1);
5754 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5755 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5756 IEM_MC_ADVANCE_RIP();
5757 IEM_MC_END();
5758
5759 return VINF_SUCCESS;
5760}
5761
5762
5763/**
5764 * @opcode 0xb0
5765 */
5766FNIEMOP_DEF(iemOp_mov_AL_Ib)
5767{
5768 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5769 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5770}
5771
5772
5773/**
5774 * @opcode 0xb1
5775 */
5776FNIEMOP_DEF(iemOp_CL_Ib)
5777{
5778 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5779 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5780}
5781
5782
5783/**
5784 * @opcode 0xb2
5785 */
5786FNIEMOP_DEF(iemOp_DL_Ib)
5787{
5788 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5789 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5790}
5791
5792
5793/**
5794 * @opcode 0xb3
5795 */
5796FNIEMOP_DEF(iemOp_BL_Ib)
5797{
5798 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5799 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5800}
5801
5802
5803/**
5804 * @opcode 0xb4
5805 */
5806FNIEMOP_DEF(iemOp_mov_AH_Ib)
5807{
5808 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5809 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5810}
5811
5812
5813/**
5814 * @opcode 0xb5
5815 */
5816FNIEMOP_DEF(iemOp_CH_Ib)
5817{
5818 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5819 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5820}
5821
5822
5823/**
5824 * @opcode 0xb6
5825 */
5826FNIEMOP_DEF(iemOp_DH_Ib)
5827{
5828 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5829 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5830}
5831
5832
5833/**
5834 * @opcode 0xb7
5835 */
5836FNIEMOP_DEF(iemOp_BH_Ib)
5837{
5838 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5839 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5840}
5841
5842
5843/**
5844 * Common 'mov regX,immX' helper.
5845 */
5846FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5847{
5848 switch (pVCpu->iem.s.enmEffOpSize)
5849 {
5850 case IEMMODE_16BIT:
5851 {
5852 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5853 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5854
5855 IEM_MC_BEGIN(0, 1);
5856 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5857 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5858 IEM_MC_ADVANCE_RIP();
5859 IEM_MC_END();
5860 break;
5861 }
5862
5863 case IEMMODE_32BIT:
5864 {
5865 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5867
5868 IEM_MC_BEGIN(0, 1);
5869 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5870 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5871 IEM_MC_ADVANCE_RIP();
5872 IEM_MC_END();
5873 break;
5874 }
5875 case IEMMODE_64BIT:
5876 {
5877 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5878 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5879
5880 IEM_MC_BEGIN(0, 1);
5881 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5882 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5883 IEM_MC_ADVANCE_RIP();
5884 IEM_MC_END();
5885 break;
5886 }
5887 }
5888
5889 return VINF_SUCCESS;
5890}
5891
5892
5893/**
5894 * @opcode 0xb8
5895 */
5896FNIEMOP_DEF(iemOp_eAX_Iv)
5897{
5898 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5899 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5900}
5901
5902
5903/**
5904 * @opcode 0xb9
5905 */
5906FNIEMOP_DEF(iemOp_eCX_Iv)
5907{
5908 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5909 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5910}
5911
5912
5913/**
5914 * @opcode 0xba
5915 */
5916FNIEMOP_DEF(iemOp_eDX_Iv)
5917{
5918 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5919 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5920}
5921
5922
5923/**
5924 * @opcode 0xbb
5925 */
5926FNIEMOP_DEF(iemOp_eBX_Iv)
5927{
5928 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5929 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5930}
5931
5932
5933/**
5934 * @opcode 0xbc
5935 */
5936FNIEMOP_DEF(iemOp_eSP_Iv)
5937{
5938 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5939 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5940}
5941
5942
5943/**
5944 * @opcode 0xbd
5945 */
5946FNIEMOP_DEF(iemOp_eBP_Iv)
5947{
5948 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5949 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5950}
5951
5952
5953/**
5954 * @opcode 0xbe
5955 */
5956FNIEMOP_DEF(iemOp_eSI_Iv)
5957{
5958 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5959 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5960}
5961
5962
5963/**
5964 * @opcode 0xbf
5965 */
5966FNIEMOP_DEF(iemOp_eDI_Iv)
5967{
5968 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5969 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5970}
5971
5972
5973/**
5974 * @opcode 0xc0
5975 */
5976FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5977{
5978 IEMOP_HLP_MIN_186();
5979 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5980 PCIEMOPSHIFTSIZES pImpl;
5981 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5982 {
5983 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5984 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5985 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5986 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5987 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5988 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5989 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5990 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5991 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5992 }
5993 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5994
5995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5996 {
5997 /* register */
5998 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6000 IEM_MC_BEGIN(3, 0);
6001 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6002 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6003 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6004 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6005 IEM_MC_REF_EFLAGS(pEFlags);
6006 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6007 IEM_MC_ADVANCE_RIP();
6008 IEM_MC_END();
6009 }
6010 else
6011 {
6012 /* memory */
6013 IEM_MC_BEGIN(3, 2);
6014 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6015 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6016 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6018
6019 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6020 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6021 IEM_MC_ASSIGN(cShiftArg, cShift);
6022 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6023 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6024 IEM_MC_FETCH_EFLAGS(EFlags);
6025 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6026
6027 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6028 IEM_MC_COMMIT_EFLAGS(EFlags);
6029 IEM_MC_ADVANCE_RIP();
6030 IEM_MC_END();
6031 }
6032 return VINF_SUCCESS;
6033}
6034
6035
6036/**
6037 * @opcode 0xc1
6038 */
6039FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6040{
6041 IEMOP_HLP_MIN_186();
6042 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6043 PCIEMOPSHIFTSIZES pImpl;
6044 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6045 {
6046 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6047 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6048 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6049 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6050 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6051 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6052 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6053 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6054 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6055 }
6056 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6057
6058 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6059 {
6060 /* register */
6061 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6063 switch (pVCpu->iem.s.enmEffOpSize)
6064 {
6065 case IEMMODE_16BIT:
6066 IEM_MC_BEGIN(3, 0);
6067 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6068 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6069 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6070 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6071 IEM_MC_REF_EFLAGS(pEFlags);
6072 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6073 IEM_MC_ADVANCE_RIP();
6074 IEM_MC_END();
6075 return VINF_SUCCESS;
6076
6077 case IEMMODE_32BIT:
6078 IEM_MC_BEGIN(3, 0);
6079 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6080 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6081 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6082 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6083 IEM_MC_REF_EFLAGS(pEFlags);
6084 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6085 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6086 IEM_MC_ADVANCE_RIP();
6087 IEM_MC_END();
6088 return VINF_SUCCESS;
6089
6090 case IEMMODE_64BIT:
6091 IEM_MC_BEGIN(3, 0);
6092 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6093 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6094 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6095 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6096 IEM_MC_REF_EFLAGS(pEFlags);
6097 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6098 IEM_MC_ADVANCE_RIP();
6099 IEM_MC_END();
6100 return VINF_SUCCESS;
6101
6102 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6103 }
6104 }
6105 else
6106 {
6107 /* memory */
6108 switch (pVCpu->iem.s.enmEffOpSize)
6109 {
6110 case IEMMODE_16BIT:
6111 IEM_MC_BEGIN(3, 2);
6112 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6113 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6114 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6115 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6116
6117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6118 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6119 IEM_MC_ASSIGN(cShiftArg, cShift);
6120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6121 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6122 IEM_MC_FETCH_EFLAGS(EFlags);
6123 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6124
6125 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6126 IEM_MC_COMMIT_EFLAGS(EFlags);
6127 IEM_MC_ADVANCE_RIP();
6128 IEM_MC_END();
6129 return VINF_SUCCESS;
6130
6131 case IEMMODE_32BIT:
6132 IEM_MC_BEGIN(3, 2);
6133 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6134 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6135 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6136 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6137
6138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6139 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6140 IEM_MC_ASSIGN(cShiftArg, cShift);
6141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6142 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6143 IEM_MC_FETCH_EFLAGS(EFlags);
6144 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6145
6146 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6147 IEM_MC_COMMIT_EFLAGS(EFlags);
6148 IEM_MC_ADVANCE_RIP();
6149 IEM_MC_END();
6150 return VINF_SUCCESS;
6151
6152 case IEMMODE_64BIT:
6153 IEM_MC_BEGIN(3, 2);
6154 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6155 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6156 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6157 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6158
6159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6160 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6161 IEM_MC_ASSIGN(cShiftArg, cShift);
6162 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6163 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6164 IEM_MC_FETCH_EFLAGS(EFlags);
6165 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6166
6167 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6168 IEM_MC_COMMIT_EFLAGS(EFlags);
6169 IEM_MC_ADVANCE_RIP();
6170 IEM_MC_END();
6171 return VINF_SUCCESS;
6172
6173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6174 }
6175 }
6176}
6177
6178
6179/**
6180 * @opcode 0xc2
 *
 * Near return, additionally popping Iw bytes of stack arguments.
 * Decodes the 16-bit immediate, then defers to iemCImpl_retn with the
 * effective operand size (defaults to 64-bit in long mode).
6181 */
6182FNIEMOP_DEF(iemOp_retn_Iw)
6183{
6184    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6185    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6186    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6187    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6188    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6189}
6190
6191
6192/**
6193 * @opcode 0xc3
 *
 * Plain near return (no argument bytes popped); same CIMPL worker as
 * 0xc2 but with a zero pop count.
6194 */
6195FNIEMOP_DEF(iemOp_retn)
6196{
6197    IEMOP_MNEMONIC(retn, "retn");
6198    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6199    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6200    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6201}
6202
6203
6204/**
6205 * @opcode 0xc4
 *
 * Dual-purpose opcode: LES Gv,Mp in legacy/compatibility mode with a
 * memory operand, otherwise the 3-byte VEX prefix.
6206 */
6207FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
6208{
6209    /* The LES instruction is invalid 64-bit mode. In legacy and
6210       compatibility mode it is invalid with MOD=3.
6211       The use as a VEX prefix is made possible by assigning the inverted
6212       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6213       outside of 64-bit mode. VEX is not available in real or v86 mode. */
6214    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6215    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6216        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
6217    {
6218        IEMOP_MNEMONIC(vex3_prefix, "vex3");
6219        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6220        {
6221            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6222               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6223            uint8_t bVex2;   IEM_OPCODE_GET_NEXT_U8(&bVex2);
6224            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6225            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6226            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
6227                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
                /* VEX stores R/X/B and vvvv inverted; un-invert while extracting. */
6228            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
6229            pVCpu->iem.s.uRexIndex  = (~bRm >> (6 - 3)) & 0x8;
6230            pVCpu->iem.s.uRexB      = (~bRm >> (5 - 3)) & 0x8;
6231            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6232            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6233            pVCpu->iem.s.idxPrefix  = bVex2 & 0x3;
6234
            /* Low five bits of the first VEX byte select the opcode map. */
6235            switch (bRm & 0x1f)
6236            {
6237                case 1: /* 0x0f lead opcode byte. */
6238#ifdef IEM_WITH_VEX
6239                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6240#else
6241                    IEMOP_BITCH_ABOUT_STUB();
6242                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6243#endif
6244
6245                case 2: /* 0x0f 0x38 lead opcode bytes. */
6246#ifdef IEM_WITH_VEX
6247                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6248#else
6249                    IEMOP_BITCH_ABOUT_STUB();
6250                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6251#endif
6252
6253                case 3: /* 0x0f 0x3a lead opcode bytes. */
6254#ifdef IEM_WITH_VEX
6255                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6256#else
6257                    IEMOP_BITCH_ABOUT_STUB();
6258                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6259#endif
6260
6261                default:
6262                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6263                    return IEMOP_RAISE_INVALID_OPCODE();
6264            }
6265        }
6266        Log(("VEX3: AVX support disabled!\n"));
6267        return IEMOP_RAISE_INVALID_OPCODE();
6268    }
6269
6270    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6271    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6272}
6273
6274
6275/**
6276 * @opcode 0xc5
 *
 * Dual-purpose opcode: LDS Gv,Mp in legacy/compatibility mode with a
 * memory operand, otherwise the 2-byte VEX prefix (map 1 / 0x0f only).
6277 */
6278FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
6279{
6280    /* The LDS instruction is invalid 64-bit mode. In legacy and
6281       compatibility mode it is invalid with MOD=3.
6282       The use as a VEX prefix is made possible by assigning the inverted
6283       REX.R to the top MOD bit, and the top bit in the inverted register
6284       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6285       to accessing registers 0..7 in this VEX form. */
6286    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6287    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6288        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6289    {
6290        IEMOP_MNEMONIC(vex2_prefix, "vex2");
6291        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6292        {
6293            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6294               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6295            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6296            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
                /* R and vvvv are stored inverted in the VEX byte. */
6297            pVCpu->iem.s.uRexReg    = (~bRm >> (7 - 3)) & 0x8;
6298            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6299            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6300            pVCpu->iem.s.idxPrefix  = bRm & 0x3;
6301
6302#ifdef IEM_WITH_VEX
6303            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6304#else
6305            IEMOP_BITCH_ABOUT_STUB();
6306            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6307#endif
6308        }
6309
6310        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6311        Log(("VEX2: AVX support disabled!\n"));
6312        return IEMOP_RAISE_INVALID_OPCODE();
6313    }
6314
6315    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6316    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6317}
6318
6319
6320/**
6321 * @opcode 0xc6
 *
 * Group 11 with byte operands: only /0 (mov Eb,Ib) is defined, all other
 * /reg encodings raise \#UD.
6322 */
6323FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6324{
6325    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6326    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6327        return IEMOP_RAISE_INVALID_OPCODE();
6328    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6329
6330    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6331    {
6332        /* register access */
6333        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6334        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6335        IEM_MC_BEGIN(0, 0);
6336        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6337        IEM_MC_ADVANCE_RIP();
6338        IEM_MC_END();
6339    }
6340    else
6341    {
6342        /* memory access. */
6343        IEM_MC_BEGIN(0, 1);
6344        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
            /* Effective address first; the immediate byte follows the modrm bytes. */
6345        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6346        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6347        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6348        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6349        IEM_MC_ADVANCE_RIP();
6350        IEM_MC_END();
6351    }
6352    return VINF_SUCCESS;
6353}
6354
6355
6356/**
6357 * @opcode 0xc7
 *
 * Group 11 with word/dword/qword operands: only /0 (mov Ev,Iz) is
 * defined.  In 64-bit mode the immediate is a sign-extended 32-bit value.
6358 */
6359FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6360{
6361    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6362    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
6363        return IEMOP_RAISE_INVALID_OPCODE();
6364    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6365
6366    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6367    {
6368        /* register access */
6369        switch (pVCpu->iem.s.enmEffOpSize)
6370        {
6371            case IEMMODE_16BIT:
6372                IEM_MC_BEGIN(0, 0);
6373                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6374                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6375                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6376                IEM_MC_ADVANCE_RIP();
6377                IEM_MC_END();
6378                return VINF_SUCCESS;
6379
6380            case IEMMODE_32BIT:
6381                IEM_MC_BEGIN(0, 0);
6382                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6383                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6384                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6385                IEM_MC_ADVANCE_RIP();
6386                IEM_MC_END();
6387                return VINF_SUCCESS;
6388
6389            case IEMMODE_64BIT:
6390                IEM_MC_BEGIN(0, 0);
                    /* Note: 32-bit immediate sign-extended to 64 bits. */
6391                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6392                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6393                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6394                IEM_MC_ADVANCE_RIP();
6395                IEM_MC_END();
6396                return VINF_SUCCESS;
6397
6398            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6399        }
6400    }
6401    else
6402    {
6403        /* memory access. */
6404        switch (pVCpu->iem.s.enmEffOpSize)
6405        {
6406            case IEMMODE_16BIT:
6407                IEM_MC_BEGIN(0, 1);
6408                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6409                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6410                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6411                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6412                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6413                IEM_MC_ADVANCE_RIP();
6414                IEM_MC_END();
6415                return VINF_SUCCESS;
6416
6417            case IEMMODE_32BIT:
6418                IEM_MC_BEGIN(0, 1);
6419                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6420                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6421                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6422                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6423                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6424                IEM_MC_ADVANCE_RIP();
6425                IEM_MC_END();
6426                return VINF_SUCCESS;
6427
6428            case IEMMODE_64BIT:
6429                IEM_MC_BEGIN(0, 1);
6430                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6431                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6432                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6433                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6434                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6435                IEM_MC_ADVANCE_RIP();
6436                IEM_MC_END();
6437                return VINF_SUCCESS;
6438
6439            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6440        }
6441    }
6442}
6443
6444
6445
6446
6447/**
6448 * @opcode 0xc8
 *
 * ENTER Iw,Ib: creates a stack frame of cbFrame bytes with the given
 * nesting level.  186+ instruction; deferred to iemCImpl_enter.
6449 */
6450FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6451{
6452    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6453    IEMOP_HLP_MIN_186();
6454    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6455    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6456    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6457    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6458    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6459}
6460
6461
6462/**
6463 * @opcode 0xc9
 *
 * LEAVE: tears down the current stack frame (ENTER counterpart).
 * 186+ instruction; deferred to iemCImpl_leave.
6464 */
6465FNIEMOP_DEF(iemOp_leave)
6466{
6467    IEMOP_MNEMONIC(leave, "leave");
6468    IEMOP_HLP_MIN_186();
6469    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6470    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6471    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6472}
6473
6474
6475/**
6476 * @opcode 0xca
 *
 * Far return, additionally popping Iw bytes of stack arguments;
 * deferred to iemCImpl_retf.
6477 */
6478FNIEMOP_DEF(iemOp_retf_Iw)
6479{
6480    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6481    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6482    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6483    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6484    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6485}
6486
6487
6488/**
6489 * @opcode 0xcb
 *
 * Plain far return (no argument bytes popped); same worker as 0xca
 * with a zero pop count.
6490 */
6491FNIEMOP_DEF(iemOp_retf)
6492{
6493    IEMOP_MNEMONIC(retf, "retf");
6494    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6495    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6496    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6497}
6498
6499
6500/**
6501 * @opcode 0xcc
 *
 * INT3 breakpoint: raises \#BP (vector 3) via the common software
 * interrupt worker with the IEMINT_INT3 flavor.
6502 */
6503FNIEMOP_DEF(iemOp_int3)
6504{
6505    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6506    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6507}
6508
6509
6510/**
6511 * @opcode 0xcd
 *
 * INT Ib: software interrupt to the vector given by the immediate byte,
 * using the IEMINT_INTN flavor of the common worker.
6512 */
6513FNIEMOP_DEF(iemOp_int_Ib)
6514{
6515    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6516    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6517    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
6518}
6519
6520
6521/**
6522 * @opcode 0xce
 *
 * INTO: raises \#OF through the common interrupt worker (the OF check
 * happens in iemCImpl_int via the IEMINT_INTO flavor).  Invalid in
 * 64-bit mode.
6523 */
6524FNIEMOP_DEF(iemOp_into)
6525{
6526    IEMOP_MNEMONIC(into, "into");
6527    IEMOP_HLP_NO_64BIT();
6528
6529    IEM_MC_BEGIN(2, 0);
6530    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
6531    IEM_MC_ARG_CONST(IEMINT,    enmInt,     /*=*/ IEMINT_INTO, 1);
6532    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
6533    IEM_MC_END();
6534    return VINF_SUCCESS;
6535}
6536
6537
6538/**
6539 * @opcode 0xcf
 *
 * IRET: interrupt return with the current effective operand size;
 * all the heavy lifting is in iemCImpl_iret.
6540 */
6541FNIEMOP_DEF(iemOp_iret)
6542{
6543    IEMOP_MNEMONIC(iret, "iret");
6544    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6545    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6546}
6547
6548
6549/**
6550 * @opcode 0xd0
 *
 * Group 2 shift/rotate of a byte operand by a constant count of 1.
 * The /reg field selects the operation; /6 is undefined (\#UD).
6551 */
6552FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6553{
6554    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6555    PCIEMOPSHIFTSIZES pImpl;
6556    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6557    {
6558        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6559        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6560        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6561        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6562        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6563        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6564        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6565        case 6: return IEMOP_RAISE_INVALID_OPCODE();
6566        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6567    }
6568    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6569
6570    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6571    {
6572        /* register */
6573        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6574        IEM_MC_BEGIN(3, 0);
6575        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
6576        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
6577        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
6578        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6579        IEM_MC_REF_EFLAGS(pEFlags);
6580        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6581        IEM_MC_ADVANCE_RIP();
6582        IEM_MC_END();
6583    }
6584    else
6585    {
6586        /* memory */
6587        IEM_MC_BEGIN(3, 2);
6588        IEM_MC_ARG(uint8_t *,   pu8Dst,                 0);
6589        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1,     1);
6590        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
6591        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6592
6593        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6594        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6595        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6596        IEM_MC_FETCH_EFLAGS(EFlags);
6597        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6598
6599        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6600        IEM_MC_COMMIT_EFLAGS(EFlags);
6601        IEM_MC_ADVANCE_RIP();
6602        IEM_MC_END();
6603    }
6604    return VINF_SUCCESS;
6605}
6606
6607
6608
6609/**
6610 * @opcode 0xd1
 *
 * Group 2 shift/rotate of a word/dword/qword operand by a constant
 * count of 1.  The /reg field selects the operation; /6 is \#UD.
6611 */
6612FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6613{
6614    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6615    PCIEMOPSHIFTSIZES pImpl;
6616    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6617    {
6618        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6619        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6620        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6621        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6622        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6623        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6624        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6625        case 6: return IEMOP_RAISE_INVALID_OPCODE();
6626        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6627    }
6628    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6629
6630    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6631    {
6632        /* register */
6633        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6634        switch (pVCpu->iem.s.enmEffOpSize)
6635        {
6636            case IEMMODE_16BIT:
6637                IEM_MC_BEGIN(3, 0);
6638                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
6639                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
6640                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
6641                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6642                IEM_MC_REF_EFLAGS(pEFlags);
6643                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6644                IEM_MC_ADVANCE_RIP();
6645                IEM_MC_END();
6646                return VINF_SUCCESS;
6647
6648            case IEMMODE_32BIT:
6649                IEM_MC_BEGIN(3, 0);
6650                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
6651                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
6652                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
6653                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6654                IEM_MC_REF_EFLAGS(pEFlags);
6655                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                    /* 32-bit register writes zero the upper half of the 64-bit register. */
6656                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6657                IEM_MC_ADVANCE_RIP();
6658                IEM_MC_END();
6659                return VINF_SUCCESS;
6660
6661            case IEMMODE_64BIT:
6662                IEM_MC_BEGIN(3, 0);
6663                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
6664                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
6665                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
6666                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6667                IEM_MC_REF_EFLAGS(pEFlags);
6668                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6669                IEM_MC_ADVANCE_RIP();
6670                IEM_MC_END();
6671                return VINF_SUCCESS;
6672
6673            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6674        }
6675    }
6676    else
6677    {
6678        /* memory */
6679        switch (pVCpu->iem.s.enmEffOpSize)
6680        {
6681            case IEMMODE_16BIT:
6682                IEM_MC_BEGIN(3, 2);
6683                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
6684                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
6685                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
6686                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6687
6688                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6689                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6690                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6691                IEM_MC_FETCH_EFLAGS(EFlags);
6692                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6693
6694                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6695                IEM_MC_COMMIT_EFLAGS(EFlags);
6696                IEM_MC_ADVANCE_RIP();
6697                IEM_MC_END();
6698                return VINF_SUCCESS;
6699
6700            case IEMMODE_32BIT:
6701                IEM_MC_BEGIN(3, 2);
6702                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
6703                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
6704                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
6705                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6706
6707                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6708                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6709                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6710                IEM_MC_FETCH_EFLAGS(EFlags);
6711                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6712
6713                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6714                IEM_MC_COMMIT_EFLAGS(EFlags);
6715                IEM_MC_ADVANCE_RIP();
6716                IEM_MC_END();
6717                return VINF_SUCCESS;
6718
6719            case IEMMODE_64BIT:
6720                IEM_MC_BEGIN(3, 2);
6721                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
6722                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
6723                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,        2);
6724                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6725
6726                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6727                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6728                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6729                IEM_MC_FETCH_EFLAGS(EFlags);
6730                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6731
6732                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6733                IEM_MC_COMMIT_EFLAGS(EFlags);
6734                IEM_MC_ADVANCE_RIP();
6735                IEM_MC_END();
6736                return VINF_SUCCESS;
6737
6738            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6739        }
6740    }
6741}
6742
6743
6744/**
6745 * @opcode 0xd2
 *
 * Group 2 shift/rotate of a byte operand by the count in CL.
 * The /reg field selects the operation; /6 is \#UD.
6746 */
6747FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6748{
6749    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6750    PCIEMOPSHIFTSIZES pImpl;
6751    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6752    {
6753        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6754        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6755        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6756        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6757        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6758        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6759        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6760        case 6: return IEMOP_RAISE_INVALID_OPCODE();
6761        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6762    }
6763    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6764
6765    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6766    {
6767        /* register */
6768        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6769        IEM_MC_BEGIN(3, 0);
6770        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
6771        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
6772        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
6773        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6774        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6775        IEM_MC_REF_EFLAGS(pEFlags);
6776        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6777        IEM_MC_ADVANCE_RIP();
6778        IEM_MC_END();
6779    }
6780    else
6781    {
6782        /* memory */
6783        IEM_MC_BEGIN(3, 2);
6784        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
6785        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
6786        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6787        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6788
6789        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6790        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6791        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6792        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6793        IEM_MC_FETCH_EFLAGS(EFlags);
6794        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6795
6796        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6797        IEM_MC_COMMIT_EFLAGS(EFlags);
6798        IEM_MC_ADVANCE_RIP();
6799        IEM_MC_END();
6800    }
6801    return VINF_SUCCESS;
6802}
6803
6804
6805/**
6806 * @opcode 0xd3
 *
 * Group 2 shift/rotate of a word/dword/qword operand by the count in
 * CL.  The /reg field selects the operation; /6 is \#UD.
6807 */
6808FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6809{
6810    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6811    PCIEMOPSHIFTSIZES pImpl;
6812    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6813    {
6814        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6815        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6816        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6817        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6818        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6819        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6820        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6821        case 6: return IEMOP_RAISE_INVALID_OPCODE();
6822        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6823    }
6824    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6825
6826    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6827    {
6828        /* register */
6829        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6830        switch (pVCpu->iem.s.enmEffOpSize)
6831        {
6832            case IEMMODE_16BIT:
6833                IEM_MC_BEGIN(3, 0);
6834                IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
6835                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
6836                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
6837                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6838                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6839                IEM_MC_REF_EFLAGS(pEFlags);
6840                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6841                IEM_MC_ADVANCE_RIP();
6842                IEM_MC_END();
6843                return VINF_SUCCESS;
6844
6845            case IEMMODE_32BIT:
6846                IEM_MC_BEGIN(3, 0);
6847                IEM_MC_ARG(uint32_t *,      pu32Dst,    0);
6848                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
6849                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
6850                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6851                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6852                IEM_MC_REF_EFLAGS(pEFlags);
6853                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                    /* 32-bit register writes zero the upper half of the 64-bit register. */
6854                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6855                IEM_MC_ADVANCE_RIP();
6856                IEM_MC_END();
6857                return VINF_SUCCESS;
6858
6859            case IEMMODE_64BIT:
6860                IEM_MC_BEGIN(3, 0);
6861                IEM_MC_ARG(uint64_t *,      pu64Dst,    0);
6862                IEM_MC_ARG(uint8_t,         cShiftArg,  1);
6863                IEM_MC_ARG(uint32_t *,      pEFlags,    2);
6864                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6865                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6866                IEM_MC_REF_EFLAGS(pEFlags);
6867                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6868                IEM_MC_ADVANCE_RIP();
6869                IEM_MC_END();
6870                return VINF_SUCCESS;
6871
6872            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6873        }
6874    }
6875    else
6876    {
6877        /* memory */
6878        switch (pVCpu->iem.s.enmEffOpSize)
6879        {
6880            case IEMMODE_16BIT:
6881                IEM_MC_BEGIN(3, 2);
6882                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
6883                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
6884                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6885                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6886
6887                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6888                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6889                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6890                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6891                IEM_MC_FETCH_EFLAGS(EFlags);
6892                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6893
6894                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6895                IEM_MC_COMMIT_EFLAGS(EFlags);
6896                IEM_MC_ADVANCE_RIP();
6897                IEM_MC_END();
6898                return VINF_SUCCESS;
6899
6900            case IEMMODE_32BIT:
6901                IEM_MC_BEGIN(3, 2);
6902                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
6903                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
6904                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6905                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6906
6907                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6908                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6909                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6910                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6911                IEM_MC_FETCH_EFLAGS(EFlags);
6912                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6913
6914                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6915                IEM_MC_COMMIT_EFLAGS(EFlags);
6916                IEM_MC_ADVANCE_RIP();
6917                IEM_MC_END();
6918                return VINF_SUCCESS;
6919
6920            case IEMMODE_64BIT:
6921                IEM_MC_BEGIN(3, 2);
6922                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
6923                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
6924                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6925                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6926
6927                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6928                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6929                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6930                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6931                IEM_MC_FETCH_EFLAGS(EFlags);
6932                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6933
6934                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6935                IEM_MC_COMMIT_EFLAGS(EFlags);
6936                IEM_MC_ADVANCE_RIP();
6937                IEM_MC_END();
6938                return VINF_SUCCESS;
6939
6940            IEM_NOT_REACHED_DEFAULT_CASE_RET();
6941        }
6942    }
6943}
6944
6945/**
6946 * @opcode 0xd4
 *
 * AAM Ib: ASCII adjust AX after multiply, base given by the immediate.
 * A zero immediate raises \#DE before the CIMPL worker runs.  Invalid
 * in 64-bit mode.
6947 */
6948FNIEMOP_DEF(iemOp_aam_Ib)
6949{
6950    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6951    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6952    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6953    IEMOP_HLP_NO_64BIT();
6954    if (!bImm)
6955        return IEMOP_RAISE_DIVIDE_ERROR();
6956    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6957}
6958
6959
6960/**
6961 * @opcode 0xd5
 *
 * AAD Ib: ASCII adjust AX before division, base given by the immediate.
 * Invalid in 64-bit mode.  (No divide-error check here, unlike AAM.)
6962 */
6963FNIEMOP_DEF(iemOp_aad_Ib)
6964{
6965    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6966    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6967    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6968    IEMOP_HLP_NO_64BIT();
6969    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6970}
6971
6972
6973/**
6974 * @opcode 0xd6
 *
 * SALC (undocumented): sets AL to 0xff if CF is set, else to 0x00.
 * Invalid in 64-bit mode.
6975 */
6976FNIEMOP_DEF(iemOp_salc)
6977{
6978    IEMOP_MNEMONIC(salc, "salc");
6979    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6980    IEMOP_HLP_NO_64BIT();
6981
6982    IEM_MC_BEGIN(0, 0);
6983    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6984        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6985    } IEM_MC_ELSE() {
6986        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6987    } IEM_MC_ENDIF();
6988    IEM_MC_ADVANCE_RIP();
6989    IEM_MC_END();
6990    return VINF_SUCCESS;
6991}
6992
6993
6994/**
6995 * @opcode 0xd7
 *
 * XLAT: AL = [xBX + AL] in the effective segment, with the address
 * computed at the current effective address size (16/32/64-bit).
6996 */
6997FNIEMOP_DEF(iemOp_xlat)
6998{
6999    IEMOP_MNEMONIC(xlat, "xlat");
7000    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7001    switch (pVCpu->iem.s.enmEffAddrMode)
7002    {
7003        case IEMMODE_16BIT:
7004            IEM_MC_BEGIN(2, 0);
7005            IEM_MC_LOCAL(uint8_t,  u8Tmp);
7006            IEM_MC_LOCAL(uint16_t, u16Addr);
7007            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7008            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7009            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7010            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7011            IEM_MC_ADVANCE_RIP();
7012            IEM_MC_END();
7013            return VINF_SUCCESS;
7014
7015        case IEMMODE_32BIT:
7016            IEM_MC_BEGIN(2, 0);
7017            IEM_MC_LOCAL(uint8_t,  u8Tmp);
7018            IEM_MC_LOCAL(uint32_t, u32Addr);
7019            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7020            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7021            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7022            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7023            IEM_MC_ADVANCE_RIP();
7024            IEM_MC_END();
7025            return VINF_SUCCESS;
7026
7027        case IEMMODE_64BIT:
7028            IEM_MC_BEGIN(2, 0);
7029            IEM_MC_LOCAL(uint8_t,  u8Tmp);
7030            IEM_MC_LOCAL(uint64_t, u64Addr);
7031            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7032            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7033            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7034            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7035            IEM_MC_ADVANCE_RIP();
7036            IEM_MC_END();
7037            return VINF_SUCCESS;
7038
7039        IEM_NOT_REACHED_DEFAULT_CASE_RET();
7040    }
7041}
7042
7043
7044/**
7045 * Common worker for FPU instructions working on ST0 and STn, and storing the
7046 * result in ST0.
7047 *
 * On an empty ST0 or STn the underflow path is taken instead of calling
 * the assembly implementation.
 *
 * @param   bRm         The ModR/M byte; its R/M field selects STn.
7048 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
7049 */
7050FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7051{
7052    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7053
7054    IEM_MC_BEGIN(3, 1);
7055    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
7056    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,        FpuRes,     0);
7057    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
7058    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);
7059
7060    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7061    IEM_MC_MAYBE_RAISE_FPU_XCPT();
7062    IEM_MC_PREPARE_FPU_USAGE();
7063    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7064        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7065        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7066    IEM_MC_ELSE()
7067        IEM_MC_FPU_STACK_UNDERFLOW(0);
7068    IEM_MC_ENDIF();
7069    IEM_MC_ADVANCE_RIP();
7070
7071    IEM_MC_END();
7072    return VINF_SUCCESS;
7073}
7074
7075
7076/**
7077 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7078 * flags.
7079 *
 * Only the FSW is updated (no value stored); underflow updates FSW with
 * an unspecified (UINT8_MAX) stack register.
 *
 * @param   bRm         The ModR/M byte; its R/M field selects STn.
7080 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
7081 */
7082FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7083{
7084    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7085
7086    IEM_MC_BEGIN(3, 1);
7087    IEM_MC_LOCAL(uint16_t,              u16Fsw);
7088    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
7089    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
7090    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value2,                 2);
7091
7092    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7093    IEM_MC_MAYBE_RAISE_FPU_XCPT();
7094    IEM_MC_PREPARE_FPU_USAGE();
7095    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7096        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7097        IEM_MC_UPDATE_FSW(u16Fsw);
7098    IEM_MC_ELSE()
7099        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7100    IEM_MC_ENDIF();
7101    IEM_MC_ADVANCE_RIP();
7102
7103    IEM_MC_END();
7104    return VINF_SUCCESS;
7105}
7106
7107
7108/**
7109 * Common worker for FPU instructions working on ST0 and STn, only affecting
7110 * flags, and popping when done.
7111 *
7112 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7113 */
7114FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7115{
7116 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7117
7118 IEM_MC_BEGIN(3, 1);
7119 IEM_MC_LOCAL(uint16_t, u16Fsw);
7120 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7121 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7122 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7123
7124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7126 IEM_MC_PREPARE_FPU_USAGE();
7127 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7128 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7129 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7130 IEM_MC_ELSE()
7131 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7132 IEM_MC_ENDIF();
7133 IEM_MC_ADVANCE_RIP();
7134
7135 IEM_MC_END();
7136 return VINF_SUCCESS;
7137}
7138
7139
/** Opcode 0xd8 11/0.  FADD ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
7146
7147
/** Opcode 0xd8 11/1.  FMUL ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
7154
7155
/** Opcode 0xd8 11/2.  FCOM ST0,STn - compare, only FSW flags affected. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
7162
7163
/** Opcode 0xd8 11/3.  FCOMP ST0,STn - compare, set flags and pop the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
7170
7171
/** Opcode 0xd8 11/4.  FSUB ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
7178
7179
/** Opcode 0xd8 11/5.  FSUBR ST0,STn - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
7186
7187
/** Opcode 0xd8 11/6.  FDIV ST0,STn - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
7194
7195
/** Opcode 0xd8 11/7.  FDIVR ST0,STn - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7202
7203
7204/**
7205 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7206 * the result in ST0.
7207 *
7208 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7209 */
7210FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7211{
7212 IEM_MC_BEGIN(3, 3);
7213 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7214 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7215 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7216 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7217 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7218 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7219
7220 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7222
7223 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7224 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7225 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7226
7227 IEM_MC_PREPARE_FPU_USAGE();
7228 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7229 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7230 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7231 IEM_MC_ELSE()
7232 IEM_MC_FPU_STACK_UNDERFLOW(0);
7233 IEM_MC_ENDIF();
7234 IEM_MC_ADVANCE_RIP();
7235
7236 IEM_MC_END();
7237 return VINF_SUCCESS;
7238}
7239
7240
/** Opcode 0xd8 !11/0.  FADD ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
7247
7248
/** Opcode 0xd8 !11/1.  FMUL ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7255
7256
/** Opcode 0xd8 !11/2.  FCOM ST0,m32real - compare, only FSW flags affected. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand variant so FDP/FDS get recorded along with the FSW. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7289
7290
/** Opcode 0xd8 !11/3.  FCOMP ST0,m32real - compare, set flags and pop. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* Memory-operand + pop: records FDP/FDS and pops the stack. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7323
7324
/** Opcode 0xd8 !11/4.  FSUB ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7331
7332
/** Opcode 0xd8 !11/5.  FSUBR ST0,m32real - reversed subtract, result in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7339
7340
/** Opcode 0xd8 !11/6.  FDIV ST0,m32real - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7347
7348
/** Opcode 0xd8 !11/7.  FDIVR ST0,m32real - reversed divide, result in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7355
7356
7357/**
7358 * @opcode 0xd8
7359 */
7360FNIEMOP_DEF(iemOp_EscF0)
7361{
7362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7363 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7364
7365 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7366 {
7367 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7368 {
7369 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7370 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7371 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7372 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7373 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7374 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7375 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7376 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7377 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7378 }
7379 }
7380 else
7381 {
7382 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7383 {
7384 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7385 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7386 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7387 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7388 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7389 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7390 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7391 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7392 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7393 }
7394 }
7395}
7396
7397
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert the 32-bit real to 80-bit and push it onto the stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 is the register the push lands in. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7430
7431
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as a 32-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked: store the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7466
7467
/** Opcode 0xd9 !11/3
 * FSTP m32real - store ST0 to memory as a 32-bit real and pop the stack. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked: store the indefinite QNaN instead. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7502
7503
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; deferred to a C
 * implementation since the layout depends on the effective operand size. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7521
7522
7523/** Opcode 0xd9 !11/5 */
7524FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7525{
7526 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7527 IEM_MC_BEGIN(1, 1);
7528 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7529 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7531 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7532 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7533 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7534 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7535 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7536 IEM_MC_END();
7537 return VINF_SUCCESS;
7538}
7539
7540
7541/** Opcode 0xd9 !11/6 */
7542FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7543{
7544 IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
7545 IEM_MC_BEGIN(3, 0);
7546 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7547 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7548 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7551 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7552 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7553 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7554 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7555 IEM_MC_END();
7556 return VINF_SUCCESS;
7557}
7558
7559
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7577
7578
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except updating the FPU opcode/IP tracking state. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7596
7597
/** Opcode 0xd9 11/0 stN
 * FLD STn - push a copy of STn onto the register stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7625
7626
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange the contents of ST0 and STn; sets C1 in FSW. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* Swap: STn's old value goes to ST0 (with C1 set), ST0's to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        /* Underflow handling is complex enough to warrant a C implementation. */
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7657
7658
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (no copy needed). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7705
7706
7707/**
7708 * Common worker for FPU instructions working on ST0 and replaces it with the
7709 * result, i.e. unary operators.
7710 *
7711 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7712 */
7713FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7714{
7715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7716
7717 IEM_MC_BEGIN(2, 1);
7718 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7719 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7720 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7721
7722 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7723 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7724 IEM_MC_PREPARE_FPU_USAGE();
7725 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7726 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7727 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7728 IEM_MC_ELSE()
7729 IEM_MC_FPU_STACK_UNDERFLOW(0);
7730 IEM_MC_ENDIF();
7731 IEM_MC_ADVANCE_RIP();
7732
7733 IEM_MC_END();
7734 return VINF_SUCCESS;
7735}
7736
7737
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7744
7745
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7752
7753
7754/**
7755 * Common worker for FPU instructions working on ST0 and only returns FSW.
7756 *
7757 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7758 */
7759FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7760{
7761 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7762
7763 IEM_MC_BEGIN(2, 1);
7764 IEM_MC_LOCAL(uint16_t, u16Fsw);
7765 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7766 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7767
7768 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7769 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7770 IEM_MC_PREPARE_FPU_USAGE();
7771 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7772 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7773 IEM_MC_UPDATE_FSW(u16Fsw);
7774 IEM_MC_ELSE()
7775 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7776 IEM_MC_ENDIF();
7777 IEM_MC_ADVANCE_RIP();
7778
7779 IEM_MC_END();
7780 return VINF_SUCCESS;
7781}
7782
7783
/** Opcode 0xd9 0xe4.  FTST - compare ST0 against 0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7790
7791
/** Opcode 0xd9 0xe5.  FXAM - classify the value in ST0, flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7798
7799
7800/**
7801 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7802 *
7803 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7804 */
7805FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7806{
7807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7808
7809 IEM_MC_BEGIN(1, 1);
7810 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7811 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7812
7813 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7814 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7815 IEM_MC_PREPARE_FPU_USAGE();
7816 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7817 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7818 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7819 IEM_MC_ELSE()
7820 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7821 IEM_MC_ENDIF();
7822 IEM_MC_ADVANCE_RIP();
7823
7824 IEM_MC_END();
7825 return VINF_SUCCESS;
7826}
7827
7828
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7835
7836
/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7843
7844
/** Opcode 0xd9 0xea.  FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7851
/** Opcode 0xd9 0xeb.  FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7858
7859
/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7866
/** Opcode 0xd9 0xed.  FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7873
7874
/** Opcode 0xd9 0xee.  FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7881
7882
/** Opcode 0xd9 0xf0.  F2XM1 - replace ST0 with 2^ST0 - 1. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7889
7890
7891/**
7892 * Common worker for FPU instructions working on STn and ST0, storing the result
7893 * in STn, and popping the stack unless IE, DE or ZE was raised.
7894 *
7895 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7896 */
7897FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7898{
7899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7900
7901 IEM_MC_BEGIN(3, 1);
7902 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7903 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7904 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7905 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7906
7907 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7908 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7909
7910 IEM_MC_PREPARE_FPU_USAGE();
7911 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7912 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7913 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7914 IEM_MC_ELSE()
7915 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7916 IEM_MC_ENDIF();
7917 IEM_MC_ADVANCE_RIP();
7918
7919 IEM_MC_END();
7920 return VINF_SUCCESS;
7921}
7922
7923
7924/** Opcode 0xd9 0xf1. */
7925FNIEMOP_DEF(iemOp_fyl2x)
7926{
7927 IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
7928 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
7929}
7930
7931
7932/**
7933 * Common worker for FPU instructions working on ST0 and having two outputs, one
7934 * replacing ST0 and one pushed onto the stack.
7935 *
7936 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7937 */
7938FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7939{
7940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7941
7942 IEM_MC_BEGIN(2, 1);
7943 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7944 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7945 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7946
7947 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7948 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7949 IEM_MC_PREPARE_FPU_USAGE();
7950 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7951 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7952 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7953 IEM_MC_ELSE()
7954 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7955 IEM_MC_ENDIF();
7956 IEM_MC_ADVANCE_RIP();
7957
7958 IEM_MC_END();
7959 return VINF_SUCCESS;
7960}
7961
7962
/** Opcode 0xd9 0xf2.  FPTAN - tangent of ST0, then push. */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7969
7970
/** Opcode 0xd9 0xf3.  FPATAN - ST1 := arctan(ST1/ST0), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7977
7978
/** Opcode 0xd9 0xf4.  FXTRACT - split ST0 into exponent and significand. */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7985
7986
/** Opcode 0xd9 0xf5.  FPREM1 - IEEE partial remainder of ST0/ST1. */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7993
7994
/** Opcode 0xd9 0xf6.  FDECSTP - decrement the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8017
8018
/** Opcode 0xd9 0xf7.  FINCSTP - increment the FPU stack top pointer. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8041
8042
/** Opcode 0xd9 0xf8.  FPREM - partial remainder of ST0/ST1 (truncating). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8049
8050
/** Opcode 0xd9 0xf9.  FYL2XP1 - ST1 := ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8057
8058
/** Opcode 0xd9 0xfa.  FSQRT - square root of ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8065
8066
/** Opcode 0xd9 0xfb.  FSINCOS - sine replaces ST0, cosine is pushed. */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8073
8074
/** Opcode 0xd9 0xfc.  FRNDINT - round ST0 to integer (per FCW rounding). */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8081
8082
/** Opcode 0xd9 0xfd.  FSCALE - scale ST0 by powers of two from ST1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
8089
8090
/** Opcode 0xd9 0xfe.  FSIN - sine of ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
8097
8098
/** Opcode 0xd9 0xff.  FCOS - cosine of ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8105
8106
/** Used by iemOp_EscF1.
 * Handler table for 0xd9 with mod=3 and reg=4..7, i.e. second opcode bytes
 * 0xe0 thru 0xff; indexed by (second byte - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8143
8144
8145/**
8146 * @opcode 0xd9
8147 */
8148FNIEMOP_DEF(iemOp_EscF1)
8149{
8150 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8151 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8152
8153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8154 {
8155 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8156 {
8157 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8158 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8159 case 2:
8160 if (bRm == 0xd0)
8161 return FNIEMOP_CALL(iemOp_fnop);
8162 return IEMOP_RAISE_INVALID_OPCODE();
8163 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8164 case 4:
8165 case 5:
8166 case 6:
8167 case 7:
8168 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8169 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8170 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8171 }
8172 }
8173 else
8174 {
8175 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8176 {
8177 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8178 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8179 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8180 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8181 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8182 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8183 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8184 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8185 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8186 }
8187 }
8188}
8189
8190
/** Opcode 0xda 11/0 - fcmovb st0,stN: copy ST(N) to ST(0) if CF is set. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy itself is conditional on CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8217
8218
/** Opcode 0xda 11/1 - fcmove st0,stN: copy ST(N) to ST(0) if ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy itself is conditional on ZF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8245
8246
/** Opcode 0xda 11/2 - fcmovbe st0,stN: copy ST(N) to ST(0) if CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy is conditional on CF|ZF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8273
8274
/** Opcode 0xda 11/3 - fcmovu st0,stN: copy ST(N) to ST(0) if PF is set. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy itself is conditional on PF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8301
8302
8303/**
8304 * Common worker for FPU instructions working on ST0 and STn, only affecting
8305 * flags, and popping twice when done.
8306 *
8307 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8308 */
8309FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8310{
8311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8312
8313 IEM_MC_BEGIN(3, 1);
8314 IEM_MC_LOCAL(uint16_t, u16Fsw);
8315 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8316 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8317 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8318
8319 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8320 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8321
8322 IEM_MC_PREPARE_FPU_USAGE();
8323 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8324 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8325 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8326 IEM_MC_ELSE()
8327 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8328 IEM_MC_ENDIF();
8329 IEM_MC_ADVANCE_RIP();
8330
8331 IEM_MC_END();
8332 return VINF_SUCCESS;
8333}
8334
8335
/** Opcode 0xda 0xe9 - fucompp: unordered compare ST(0) with ST(1), pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    /* Flags-only compare via the no-store double-pop worker. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8342
8343
8344/**
8345 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8346 * the result in ST0.
8347 *
8348 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8349 */
8350FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8351{
8352 IEM_MC_BEGIN(3, 3);
8353 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8354 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8355 IEM_MC_LOCAL(int32_t, i32Val2);
8356 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8357 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8358 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8359
8360 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8361 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8362
8363 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8364 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8365 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8366
8367 IEM_MC_PREPARE_FPU_USAGE();
8368 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8369 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8370 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8371 IEM_MC_ELSE()
8372 IEM_MC_FPU_STACK_UNDERFLOW(0);
8373 IEM_MC_ENDIF();
8374 IEM_MC_ADVANCE_RIP();
8375
8376 IEM_MC_END();
8377 return VINF_SUCCESS;
8378}
8379
8380
/** Opcode 0xda !11/0 - fiadd m32i: ST(0) += m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8387
8388
/** Opcode 0xda !11/1 - fimul m32i: ST(0) *= m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8395
8396
/** Opcode 0xda !11/2 - ficom st0,m32i: compare ST(0) with m32i (flags only). */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Only FSW is updated; FDP/FDS get the memory operand info. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8429
8430
/** Opcode 0xda !11/3 - ficomp st0,m32i: compare ST(0) with m32i, then pop. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the integer operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same compare implementation as FICOM; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8463
8464
/** Opcode 0xda !11/4 - fisub m32i: ST(0) -= m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8471
8472
/** Opcode 0xda !11/5 - fisubr m32i: ST(0) = m32i - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8479
8480
/** Opcode 0xda !11/6 - fidiv m32i: ST(0) /= m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8487
8488
/** Opcode 0xda !11/7 - fidivr m32i: ST(0) = m32i / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8495
8496
8497/**
8498 * @opcode 0xda
8499 */
8500FNIEMOP_DEF(iemOp_EscF2)
8501{
8502 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8503 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8504 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8505 {
8506 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8507 {
8508 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8509 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8510 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8511 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8512 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8513 case 5:
8514 if (bRm == 0xe9)
8515 return FNIEMOP_CALL(iemOp_fucompp);
8516 return IEMOP_RAISE_INVALID_OPCODE();
8517 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8518 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8519 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8520 }
8521 }
8522 else
8523 {
8524 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8525 {
8526 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8527 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8528 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8529 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8530 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8531 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8532 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8533 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8535 }
8536 }
8537}
8538
8539
/** Opcode 0xdb !11/0 - fild m32i: push m32i onto the FPU stack as r80. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push (it becomes the new top of stack). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8571
8572
/** Opcode 0xdb !11/1 - fisttp m32i: store ST(0) to m32i with truncation, then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before reading the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8607
8608
/** Opcode 0xdb !11/2 - fist m32i: store ST(0) to m32i (no pop). */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before reading the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8643
8644
/** Opcode 0xdb !11/3 - fistp m32i: store ST(0) to m32i, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before reading the FPU state. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Same store implementation as FIST; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8679
8680
/** Opcode 0xdb !11/5 - fld m80r: push an 80-bit real from memory. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) must be free for the push (it becomes the new top of stack). */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8712
8713
/** Opcode 0xdb !11/7 - fstp m80r: store ST(0) to an 80-bit real in memory, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before reading the FPU state. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8748
8749
/** Opcode 0xdb 11/0 - fcmovnb st0,stN: copy ST(N) to ST(0) if CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy is conditional on !CF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8776
8777
/** Opcode 0xdb 11/1 - fcmovne st0,stN: copy ST(N) to ST(0) if ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy is conditional on !ZF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8804
8805
/** Opcode 0xdb 11/2 - fcmovnbe st0,stN: copy ST(N) to ST(0) if both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy is conditional on !(CF|ZF). */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8832
8833
/** Opcode 0xdb 11/3 - fcmovnu st0,stN: copy ST(N) to ST(0) if PF is clear.
 * @note NOTE(review): the function and stats names carry a doubled 'n'
 *       ("fcmovnnu") vs. the FCMOVNU mnemonic; renaming would also require
 *       touching the iemOp_EscF3 dispatcher, so it is left as-is here. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(N) and ST(0) must be non-empty; the copy is conditional on !PF. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        /* An empty register yields a stack underflow on ST(0). */
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8860
8861
/** Opcode 0xdb 0xe0 - fneni: 8087 interrupt-enable; emulated as a NOP (only the #NM check). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8873
8874
/** Opcode 0xdb 0xe1 - fndisi: 8087 interrupt-disable; emulated as a NOP (only the #NM check). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8886
8887
/** Opcode 0xdb 0xe2 - fnclex: clear FPU exception flags without checking for pending #MF. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    /* The FPU state is modified (FSW exception bits cleared), so actualize for change. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8902
8903
/** Opcode 0xdb 0xe3 - fninit: initialize the FPU without checking for pending exceptions. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; fCheckXcpts=false distinguishes FNINIT from FINIT. */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8911
8912
/** Opcode 0xdb 0xe4 - fnsetpm: 80287 set-protected-mode; emulated as a NOP (only the #NM check). */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8924
8925
/** Opcode 0xdb 0xe5 - frstpm: 80287XL reset-protected-mode; raises \#UD like newer CPUs. */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8941
8942
/** Opcode 0xdb 11/5 - fucomi st0,stN: unordered compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    /* Shared C implementation with FCOMI; fPop=false keeps ST(0) on the stack. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8949
8950
/** Opcode 0xdb 11/6 - fcomi st0,stN: compare setting EFLAGS, no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    /* Shared C implementation with FUCOMI; fPop=false keeps ST(0) on the stack. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8957
8958
8959/**
8960 * @opcode 0xdb
8961 */
8962FNIEMOP_DEF(iemOp_EscF3)
8963{
8964 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8965 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8966 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8967 {
8968 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8969 {
8970 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8971 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8972 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8973 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8974 case 4:
8975 switch (bRm)
8976 {
8977 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8978 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8979 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8980 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8981 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8982 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8983 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8984 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8985 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8986 }
8987 break;
8988 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8989 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8990 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8991 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8992 }
8993 }
8994 else
8995 {
8996 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8997 {
8998 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8999 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9000 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9001 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9002 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9003 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9004 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9005 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9006 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9007 }
9008 }
9009}
9010
9011
9012/**
9013 * Common worker for FPU instructions working on STn and ST0, and storing the
9014 * result in STn unless IE, DE or ZE was raised.
9015 *
9016 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9017 */
9018FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9019{
9020 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9021
9022 IEM_MC_BEGIN(3, 1);
9023 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9024 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9025 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9026 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9027
9028 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9029 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9030
9031 IEM_MC_PREPARE_FPU_USAGE();
9032 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
9033 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9034 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9035 IEM_MC_ELSE()
9036 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9037 IEM_MC_ENDIF();
9038 IEM_MC_ADVANCE_RIP();
9039
9040 IEM_MC_END();
9041 return VINF_SUCCESS;
9042}
9043
9044
/** Opcode 0xdc 11/0 - fadd stN,st0: ST(N) += ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9051
9052
/** Opcode 0xdc 11/1 - fmul stN,st0: ST(N) *= ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9059
9060
/** Opcode 0xdc 11/4 - fsubr stN,st0. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9067
9068
/** Opcode 0xdc 11/5 - fsub stN,st0. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9075
9076
/** Opcode 0xdc 11/6 - fdivr stN,st0. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9083
9084
/** Opcode 0xdc 11/7 - fdiv stN,st0. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9091
9092
9093/**
9094 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9095 * memory operand, and storing the result in ST0.
9096 *
9097 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9098 */
9099FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
9100{
9101 IEM_MC_BEGIN(3, 3);
9102 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9103 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9104 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9105 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9106 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9107 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9108
9109 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9110 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9111 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9112 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9113
9114 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9115 IEM_MC_PREPARE_FPU_USAGE();
9116 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
9117 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
9118 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9119 IEM_MC_ELSE()
9120 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9121 IEM_MC_ENDIF();
9122 IEM_MC_ADVANCE_RIP();
9123
9124 IEM_MC_END();
9125 return VINF_SUCCESS;
9126}
9127
9128
/** Opcode 0xdc !11/0. FADD ST(0),m64r: ST(0) := ST(0) + [mem64]. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}


/** Opcode 0xdc !11/1. FMUL ST(0),m64r: ST(0) := ST(0) * [mem64]. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9143
9144
/** Opcode 0xdc !11/2. FCOM ST(0),m64r: compare ST(0) with [mem64], set C0..C3. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* The compare only updates FSW; no register result is stored. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9177
9178
/** Opcode 0xdc !11/3. FCOMP ST(0),m64r: like FCOM m64r but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Same compare worker as FCOM; the THEN_POP variants do the stack pop. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9211
9212
/** Opcode 0xdc !11/4. FSUB ST(0),m64r: ST(0) := ST(0) - [mem64]. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}


/** Opcode 0xdc !11/5. FSUBR ST(0),m64r: ST(0) := [mem64] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}


/** Opcode 0xdc !11/6. FDIV ST(0),m64r: ST(0) := ST(0) / [mem64]. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}


/** Opcode 0xdc !11/7. FDIVR ST(0),m64r: ST(0) := [mem64] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9243
9244
/**
 * @opcode 0xdc
 *
 * Escape byte 0xdc decoder: register forms (mod==3) operate on ST(i),ST(0);
 * memory forms operate on ST(0) and an m64 real operand.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9283
9284
/** Opcode 0xdd !11/0. FLD m64r: push a 64-bit real from memory onto the FPU stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push lands; if it is occupied
       we take the stack-overflow path instead of converting the value. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9316
9317
/** Opcode 0xdd !11/1. FISTTP m64i: store ST(0) truncated to int64 and pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so both branches can commit through it. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9352
9353
/** Opcode 0xdd !11/2. FST m64r: store ST(0) to memory as a 64-bit real (no pop). */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9388
9389
9390
9391
/** Opcode 0xdd !11/3. FSTP m64r: store ST(0) to memory as 64-bit real, then pop. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9426
9427
/** Opcode 0xdd !11/4. FRSTOR m94/108byte: restore the full FPU state from memory. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0); /* 16-bit vs 32-bit layout */
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The heavy lifting is deferred to the C implementation. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9445
9446
/** Opcode 0xdd !11/6. FNSAVE m94/108byte: save the full FPU state to memory (no wait). */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0); /* 16-bit vs 32-bit layout */
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    /* The heavy lifting (incl. the FNSAVE re-init side effect) is done in C. */
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9465
/** Opcode 0xdd !11/7. FNSTSW m16: store the FPU status word to memory (no wait,
 * hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here). */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9490
9491
/** Opcode 0xdd 11/0. FFREE ST(i): mark register ST(i) as empty in the tag word. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK); /* ST(i) index from ModR/M r/m bits */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9513
9514
/** Opcode 0xdd 11/2. FST ST(i): copy ST(0) into ST(i) without popping. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store it in ST(i). */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9539
9540
/** Opcode 0xdd 11/4. FUCOM ST(0),ST(i): unordered compare, no store, no pop. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}


/** Opcode 0xdd 11/5. FUCOMP ST(0),ST(i): unordered compare, then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9555
9556
/**
 * @opcode 0xdd
 *
 * Escape byte 0xdd decoder: register forms cover FFREE/FST/FSTP/FUCOM(P);
 * memory forms cover m64 real load/store, FISTTP, FRSTOR, FNSAVE and FNSTSW.
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9595
9596
/** Opcode 0xde 11/0. FADDP ST(i),ST(0): ST(i) := ST(i) + ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xde 11/1. FMULP ST(i),ST(0): ST(i) := ST(i) * ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xde 0xd9. FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4. FSUBRP ST(i),ST(0): ST(i) := ST(0) - ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5. FSUBP ST(i),ST(0): ST(i) := ST(i) - ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6. FDIVRP ST(i),ST(0): ST(i) := ST(0) / ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7. FDIVP ST(i),ST(0): ST(i) := ST(i) / ST(0), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9651
9652
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * Raises \#NM / \#MF as appropriate; on an empty ST0 the underflow path is
 * taken instead of calling the assembly worker.
 *
 * @param   bRm         The ModR/M byte (memory form) used to calculate the
 *                      effective address of the m16i operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9688
9689
/** Opcode 0xde !11/0. FIADD m16i: ST(0) := ST(0) + (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1. FIMUL m16i: ST(0) := ST(0) * (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9704
9705
/** Opcode 0xde !11/2. FICOM ST(0),m16i: compare ST(0) with (int16)[mem], set C0..C3. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* The compare only updates FSW; no register result is stored. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9738
9739
/** Opcode 0xde !11/3. FICOMP ST(0),m16i: like FICOM m16i but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    /* Same compare worker as FICOM; the THEN_POP variants do the stack pop. */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9772
9773
/** Opcode 0xde !11/4. FISUB m16i: ST(0) := ST(0) - (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5. FISUBR m16i: ST(0) := (int16)[mem] - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6. FIDIV m16i: ST(0) := ST(0) / (int16)[mem]. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7. FIDIVR m16i: ST(0) := (int16)[mem] / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9804
9805
/**
 * @opcode 0xde
 *
 * Escape byte 0xde decoder: register forms are the pop variants of the
 * arithmetic ops (plus FCOMPP at 0xd9); memory forms operate on ST(0) and
 * a 16-bit signed integer operand.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (FOP) value: low 3 bits of the escape byte + ModR/M. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9) /* only 11/3 with rm==1 is defined (FCOMPP) */
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9846
9847
/** Opcode 0xdf 11/0. FFREEP ST(i).
 * Undocumented instruction, assumed to work like FFREE + FINCSTP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK); /* FFREE part: tag ST(i) empty */
    IEM_MC_FPU_STACK_INC_TOP();                     /* FINCSTP part: pop without store */
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9869
9870
/** Opcode 0xdf 0xe0. FNSTSW AX: store the FPU status word in AX (no wait,
 * hence no IEM_MC_MAYBE_RAISE_FPU_XCPT here). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9887
9888
/** Opcode 0xdf 11/5. FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.
 * NOTE(review): this dispatches to iemAImpl_fcomi_r80_by_r80, the same worker as
 * the ordered FCOMIP below. Per the Intel SDM, FUCOMI(P) must not raise \#IA for
 * QNaN operands while FCOMI(P) does — verify the shared assembly helper covers
 * the unordered semantics or that a dedicated fucomi worker is needed. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}


/** Opcode 0xdf 11/6. FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9903
9904
/** Opcode 0xdf !11/0. FILD m16i: push (int16)[mem] converted to 80-bit real. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to TOP) is where the push lands; if occupied we
       take the stack-overflow path instead of converting the value. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9936
9937
/** Opcode 0xdf !11/1. FISTTP m16i: store ST(0) truncated to int16 and pop (SSE3). */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so both branches can commit through it. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9972
9973
/** Opcode 0xdf !11/2. FIST m16i: store ST(0) rounded (per FCW.RC) to int16, no pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10008
10009
/** Opcode 0xdf !11/3. FISTP m16i: store ST(0) rounded (per FCW.RC) to int16, then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10044
10045
/** Opcode 0xdf !11/4: fbld m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10048
10049
/**
 * Opcode 0xdf !11/5: fild m64i.
 *
 * Loads a signed 64-bit integer from memory, converts it to R80 and pushes
 * the result onto the FPU stack.  A non-empty ST(7) triggers the stack push
 * overflow path instead.
 */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires the register that becomes the new ST(0), i.e. current ST(7), to be free. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10081
10082
/** Opcode 0xdf !11/6: fbstp m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10085
10086
/**
 * Opcode 0xdf !11/7: fistp m64i.
 *
 * Stores ST(0) to memory as a signed 64-bit integer and pops the FPU stack.
 * If ST(0) is empty and FCW.IM is set, the 64-bit integer indefinite value
 * (INT64_MIN) is stored instead; either way the underflow path updates FSW
 * and pops.
 */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed by the store/underflow paths below. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* ST(0) empty: store integer indefinite if #IA is masked, then raise underflow + pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10121
10122
10123/**
10124 * @opcode 0xdf
10125 */
10126FNIEMOP_DEF(iemOp_EscF7)
10127{
10128 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10129 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10130 {
10131 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10132 {
10133 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10134 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10135 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10136 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10137 case 4: if (bRm == 0xe0)
10138 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10139 return IEMOP_RAISE_INVALID_OPCODE();
10140 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10141 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10142 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10144 }
10145 }
10146 else
10147 {
10148 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10149 {
10150 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10151 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10152 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10153 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10154 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10155 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10156 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10157 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10159 }
10160 }
10161}
10162
10163
/**
 * @opcode 0xe0
 *
 * LOOPNE/LOOPNZ Jb: decrements the counter register and takes the relative
 * branch when the counter is non-zero and ZF is clear.
 */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The effective address size selects the counter register: CX, ECX or RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10212
10213
/**
 * @opcode 0xe1
 *
 * LOOPE/LOOPZ Jb: decrements the counter register and takes the relative
 * branch when the counter is non-zero and ZF is set.
 */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The effective address size selects the counter register: CX, ECX or RCX. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10262
10263
/**
 * @opcode 0xe2
 *
 * LOOP Jb: decrements the counter register and takes the relative branch
 * while the counter is non-zero.  A branch to the instruction itself
 * (i8Imm == -instruction length) is short-circuited by zeroing the counter
 * and advancing RIP, avoiding pointless iteration.
 */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - the loop only decrements CX, so skip straight to CX=0. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - skip straight to ECX=0. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* 'loop $' - skip straight to RCX=0. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10339
10340
/**
 * @opcode 0xe3
 *
 * JCXZ/JECXZ/JRCXZ Jb: takes the relative branch when the counter register
 * (selected by the effective address size) is zero.  Does not modify the
 * counter.
 */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10386
10387
/** Opcode 0xe4: in AL,Ib - byte input from the immediate port, deferred to
 *  the iemCImpl_in C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
10396
10397
/** Opcode 0xe5: in eAX,Ib - word/dword input from the immediate port; the
 *  access size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10406
10407
/** Opcode 0xe6: out Ib,AL - byte output to the immediate port, deferred to
 *  the iemCImpl_out C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
10416
10417
/** Opcode 0xe7: out Ib,eAX - word/dword output to the immediate port; the
 *  access size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10426
10427
/**
 * @opcode 0xe8
 *
 * CALL Jv: near relative call.  Fetches the displacement according to the
 * effective operand size (sign-extending the 32-bit form to 64 bits in long
 * mode) and defers to the matching iemCImpl_call_rel_* worker.
 */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            /* The 64-bit form still takes a 32-bit displacement, sign extended. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10458
10459
/**
 * @opcode 0xe9
 *
 * JMP Jv: near relative jump.  In 64-bit mode the displacement is 32 bits,
 * so the 32-bit and 64-bit cases share the same decode path.
 */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10491
10492
/**
 * @opcode 0xea
 *
 * JMP Ap: direct far jump with an inline selector:offset pointer.  Invalid
 * in 64-bit mode.  The offset is 16 or 32 bits depending on the effective
 * operand size; the selector always follows as 16 bits.
 */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
10511
10512
/**
 * @opcode 0xeb
 *
 * JMP Jb: short relative jump with an 8-bit signed displacement.
 */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
10528
10529
/** Opcode 0xec: in AL,DX - byte input from the port in DX, deferred to the
 *  iemCImpl_in_eAX_DX C implementation with access size 1. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10537
10538
/** Opcode 0xed: in eAX,DX - word/dword input from the port in DX; the access
 *  size (2 or 4) follows the effective operand size.
 *  NOTE(review): function name lacks the 'in_' part used by its siblings
 *  (iemOp_in_AL_DX); renaming would require touching the opcode table. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10546
10547
/** Opcode 0xee: out DX,AL - byte output to the port in DX, deferred to the
 *  iemCImpl_out_DX_eAX C implementation with access size 1. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10555
10556
/** Opcode 0xef: out DX,eAX - word/dword output to the port in DX; the access
 *  size (2 or 4) follows the effective operand size. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10564
10565
/**
 * @opcode 0xf0
 *
 * LOCK prefix: records IEM_OP_PRF_LOCK and recursively decodes the next
 * opcode byte via the one-byte map.
 */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
10577
10578
/**
 * @opcode 0xf1
 *
 * INT1/ICEBP: raises a \#DB via the common software-interrupt C
 * implementation, tagged IEMINT_INT1 so it is distinguishable from INT 1.
 */
FNIEMOP_DEF(iemOp_int1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
}
10589
10590
10591/**
10592 * @opcode 0xf2
10593 */
10594FNIEMOP_DEF(iemOp_repne)
10595{
10596 /* This overrides any previous REPE prefix. */
10597 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10598 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10599 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10600
10601 /* For the 4 entry opcode tables, REPNZ overrides any previous
10602 REPZ and operand size prefixes. */
10603 pVCpu->iem.s.idxPrefix = 3;
10604
10605 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10606 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10607}
10608
10609
10610/**
10611 * @opcode 0xf3
10612 */
10613FNIEMOP_DEF(iemOp_repe)
10614{
10615 /* This overrides any previous REPNE prefix. */
10616 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10617 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10618 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10619
10620 /* For the 4 entry opcode tables, REPNZ overrides any previous
10621 REPNZ and operand size prefixes. */
10622 pVCpu->iem.s.idxPrefix = 2;
10623
10624 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10625 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10626}
10627
10628
/**
 * @opcode 0xf4
 *
 * HLT: deferred entirely to the iemCImpl_hlt C implementation (privilege
 * checking and the actual halting happen there).
 */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
10637
10638
10639/**
10640 * @opcode 0xf5
10641 */
10642FNIEMOP_DEF(iemOp_cmc)
10643{
10644 IEMOP_MNEMONIC(cmc, "cmc");
10645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10646 IEM_MC_BEGIN(0, 0);
10647 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10648 IEM_MC_ADVANCE_RIP();
10649 IEM_MC_END();
10650 return VINF_SUCCESS;
10651}
10652
10653
/**
 * Common implementation of 'inc/dec/not/neg Eb'.
 *
 * Calls pImpl->pfnNormalU8 on a register operand, or maps the memory operand
 * read-write and calls the normal/locked worker depending on the LOCK prefix.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (normal + locked workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint32_t *, pEFlags, 1);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* The locked worker is used when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10697
10698
/**
 * Common implementation of 'inc/dec/not/neg Ev'.
 *
 * Register operands are delegated to iemOpCommonUnaryGReg; memory operands
 * are mapped read-write here and passed to the normal or locked worker for
 * the effective operand size.
 *
 * @param   bRm     The RM byte.
 * @param   pImpl   The instruction implementation (per-size workers).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
{
    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 2);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
            IEM_MC_FETCH_EFLAGS(EFlags);
            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            else
                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);

            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
            IEM_MC_COMMIT_EFLAGS(EFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10777
10778
/** Opcode 0xf6 /0: test Eb,Ib.
 *
 *  TEST with a byte immediate: computes flags only, never writes the
 *  destination (the memory operand is mapped IEM_ACCESS_DATA_R). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: one immediate byte follows the ModRM operand. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10825
10826
/** Opcode 0xf7 /0: test Ev,Iv.
 *
 *  TEST with a word/dword/(sign-extended dword in 64-bit mode) immediate:
 *  computes flags only, never writes the destination (memory operands are
 *  mapped IEM_ACCESS_DATA_R). */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        /* NOTE(review): the lock-prefix check runs before the immediate is
           fetched here, unlike the memory path below - confirm intentional. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate follows the ModRM operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate follows the ModRM operand. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the 64-bit form takes a sign-extended dword immediate. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10966
10967
/** Opcode 0xf6 /4, /5, /6 and /7: mul/imul/div/idiv Eb.
 *
 *  Common byte-sized multiply/divide worker: fetches the 8-bit operand
 *  (register or memory), runs the supplied assembly worker on AX, and
 *  raises \#DE when the worker reports failure (non-zero rc). */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the worker means divide error (or overflow for idiv). */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11019
11020
11021/** Opcode 0xf7 /4, /5, /6 and /7. */
11022FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11023{
11024 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11025
11026 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11027 {
11028 /* register access */
11029 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11030 switch (pVCpu->iem.s.enmEffOpSize)
11031 {
11032 case IEMMODE_16BIT:
11033 {
11034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11035 IEM_MC_BEGIN(4, 1);
11036 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11037 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11038 IEM_MC_ARG(uint16_t, u16Value, 2);
11039 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11040 IEM_MC_LOCAL(int32_t, rc);
11041
11042 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11043 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11044 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11045 IEM_MC_REF_EFLAGS(pEFlags);
11046 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11047 IEM_MC_IF_LOCAL_IS_Z(rc) {
11048 IEM_MC_ADVANCE_RIP();
11049 } IEM_MC_ELSE() {
11050 IEM_MC_RAISE_DIVIDE_ERROR();
11051 } IEM_MC_ENDIF();
11052
11053 IEM_MC_END();
11054 return VINF_SUCCESS;
11055 }
11056
11057 case IEMMODE_32BIT:
11058 {
11059 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11060 IEM_MC_BEGIN(4, 1);
11061 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11062 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11063 IEM_MC_ARG(uint32_t, u32Value, 2);
11064 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11065 IEM_MC_LOCAL(int32_t, rc);
11066
11067 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11068 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11069 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11070 IEM_MC_REF_EFLAGS(pEFlags);
11071 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11072 IEM_MC_IF_LOCAL_IS_Z(rc) {
11073 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11074 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11075 IEM_MC_ADVANCE_RIP();
11076 } IEM_MC_ELSE() {
11077 IEM_MC_RAISE_DIVIDE_ERROR();
11078 } IEM_MC_ENDIF();
11079
11080 IEM_MC_END();
11081 return VINF_SUCCESS;
11082 }
11083
11084 case IEMMODE_64BIT:
11085 {
11086 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11087 IEM_MC_BEGIN(4, 1);
11088 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11089 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11090 IEM_MC_ARG(uint64_t, u64Value, 2);
11091 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11092 IEM_MC_LOCAL(int32_t, rc);
11093
11094 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11095 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11096 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11097 IEM_MC_REF_EFLAGS(pEFlags);
11098 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11099 IEM_MC_IF_LOCAL_IS_Z(rc) {
11100 IEM_MC_ADVANCE_RIP();
11101 } IEM_MC_ELSE() {
11102 IEM_MC_RAISE_DIVIDE_ERROR();
11103 } IEM_MC_ENDIF();
11104
11105 IEM_MC_END();
11106 return VINF_SUCCESS;
11107 }
11108
11109 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11110 }
11111 }
11112 else
11113 {
11114 /* memory access. */
11115 switch (pVCpu->iem.s.enmEffOpSize)
11116 {
11117 case IEMMODE_16BIT:
11118 {
11119 IEM_MC_BEGIN(4, 2);
11120 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11121 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11122 IEM_MC_ARG(uint16_t, u16Value, 2);
11123 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11124 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11125 IEM_MC_LOCAL(int32_t, rc);
11126
11127 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11128 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11129 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11130 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11131 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11132 IEM_MC_REF_EFLAGS(pEFlags);
11133 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11134 IEM_MC_IF_LOCAL_IS_Z(rc) {
11135 IEM_MC_ADVANCE_RIP();
11136 } IEM_MC_ELSE() {
11137 IEM_MC_RAISE_DIVIDE_ERROR();
11138 } IEM_MC_ENDIF();
11139
11140 IEM_MC_END();
11141 return VINF_SUCCESS;
11142 }
11143
11144 case IEMMODE_32BIT:
11145 {
11146 IEM_MC_BEGIN(4, 2);
11147 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11148 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11149 IEM_MC_ARG(uint32_t, u32Value, 2);
11150 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11151 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11152 IEM_MC_LOCAL(int32_t, rc);
11153
11154 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11155 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11156 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11157 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11158 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11159 IEM_MC_REF_EFLAGS(pEFlags);
11160 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11161 IEM_MC_IF_LOCAL_IS_Z(rc) {
11162 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11163 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11164 IEM_MC_ADVANCE_RIP();
11165 } IEM_MC_ELSE() {
11166 IEM_MC_RAISE_DIVIDE_ERROR();
11167 } IEM_MC_ENDIF();
11168
11169 IEM_MC_END();
11170 return VINF_SUCCESS;
11171 }
11172
11173 case IEMMODE_64BIT:
11174 {
11175 IEM_MC_BEGIN(4, 2);
11176 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11177 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11178 IEM_MC_ARG(uint64_t, u64Value, 2);
11179 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11180 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11181 IEM_MC_LOCAL(int32_t, rc);
11182
11183 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11185 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11186 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11187 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11188 IEM_MC_REF_EFLAGS(pEFlags);
11189 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11190 IEM_MC_IF_LOCAL_IS_Z(rc) {
11191 IEM_MC_ADVANCE_RIP();
11192 } IEM_MC_ELSE() {
11193 IEM_MC_RAISE_DIVIDE_ERROR();
11194 } IEM_MC_ENDIF();
11195
11196 IEM_MC_END();
11197 return VINF_SUCCESS;
11198 }
11199
11200 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11201 }
11202 }
11203}
11204
11205/**
11206 * @opcode 0xf6
11207 */
11208FNIEMOP_DEF(iemOp_Grp3_Eb)
11209{
11210 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11211 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11212 {
11213 case 0:
11214 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11215 case 1:
11216/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11217 return IEMOP_RAISE_INVALID_OPCODE();
11218 case 2:
11219 IEMOP_MNEMONIC(not_Eb, "not Eb");
11220 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11221 case 3:
11222 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11223 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11224 case 4:
11225 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11226 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11227 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11228 case 5:
11229 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11230 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11231 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11232 case 6:
11233 IEMOP_MNEMONIC(div_Eb, "div Eb");
11234 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11235 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11236 case 7:
11237 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11238 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11239 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11240 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11241 }
11242}
11243
11244
11245/**
11246 * @opcode 0xf7
11247 */
11248FNIEMOP_DEF(iemOp_Grp3_Ev)
11249{
11250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11251 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11252 {
11253 case 0:
11254 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11255 case 1:
11256/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11257 return IEMOP_RAISE_INVALID_OPCODE();
11258 case 2:
11259 IEMOP_MNEMONIC(not_Ev, "not Ev");
11260 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11261 case 3:
11262 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11263 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11264 case 4:
11265 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11266 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11267 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11268 case 5:
11269 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11270 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11271 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11272 case 6:
11273 IEMOP_MNEMONIC(div_Ev, "div Ev");
11274 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11275 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11276 case 7:
11277 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11278 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11279 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11280 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11281 }
11282}
11283
11284
11285/**
11286 * @opcode 0xf8
11287 */
11288FNIEMOP_DEF(iemOp_clc)
11289{
11290 IEMOP_MNEMONIC(clc, "clc");
11291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11292 IEM_MC_BEGIN(0, 0);
11293 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11294 IEM_MC_ADVANCE_RIP();
11295 IEM_MC_END();
11296 return VINF_SUCCESS;
11297}
11298
11299
11300/**
11301 * @opcode 0xf9
11302 */
11303FNIEMOP_DEF(iemOp_stc)
11304{
11305 IEMOP_MNEMONIC(stc, "stc");
11306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11307 IEM_MC_BEGIN(0, 0);
11308 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11309 IEM_MC_ADVANCE_RIP();
11310 IEM_MC_END();
11311 return VINF_SUCCESS;
11312}
11313
11314
11315/**
11316 * @opcode 0xfa
11317 */
11318FNIEMOP_DEF(iemOp_cli)
11319{
11320 IEMOP_MNEMONIC(cli, "cli");
11321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11322 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11323}
11324
11325
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to the C implementation
 * (interrupt shadow and privilege handling).
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11332
11333
11334/**
11335 * @opcode 0xfc
11336 */
11337FNIEMOP_DEF(iemOp_cld)
11338{
11339 IEMOP_MNEMONIC(cld, "cld");
11340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11341 IEM_MC_BEGIN(0, 0);
11342 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11343 IEM_MC_ADVANCE_RIP();
11344 IEM_MC_END();
11345 return VINF_SUCCESS;
11346}
11347
11348
11349/**
11350 * @opcode 0xfd
11351 */
11352FNIEMOP_DEF(iemOp_std)
11353{
11354 IEMOP_MNEMONIC(std, "std");
11355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11356 IEM_MC_BEGIN(0, 0);
11357 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11358 IEM_MC_ADVANCE_RIP();
11359 IEM_MC_END();
11360 return VINF_SUCCESS;
11361}
11362
11363
11364/**
11365 * @opcode 0xfe
11366 */
11367FNIEMOP_DEF(iemOp_Grp4)
11368{
11369 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11370 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11371 {
11372 case 0:
11373 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11374 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11375 case 1:
11376 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11377 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11378 default:
11379 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11380 return IEMOP_RAISE_INVALID_OPCODE();
11381 }
11382}
11383
11384
11385/**
11386 * Opcode 0xff /2.
11387 * @param bRm The RM byte.
11388 */
11389FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11390{
11391 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11392 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11393
11394 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11395 {
11396 /* The new RIP is taken from a register. */
11397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11398 switch (pVCpu->iem.s.enmEffOpSize)
11399 {
11400 case IEMMODE_16BIT:
11401 IEM_MC_BEGIN(1, 0);
11402 IEM_MC_ARG(uint16_t, u16Target, 0);
11403 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11404 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11405 IEM_MC_END()
11406 return VINF_SUCCESS;
11407
11408 case IEMMODE_32BIT:
11409 IEM_MC_BEGIN(1, 0);
11410 IEM_MC_ARG(uint32_t, u32Target, 0);
11411 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11412 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11413 IEM_MC_END()
11414 return VINF_SUCCESS;
11415
11416 case IEMMODE_64BIT:
11417 IEM_MC_BEGIN(1, 0);
11418 IEM_MC_ARG(uint64_t, u64Target, 0);
11419 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11420 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11421 IEM_MC_END()
11422 return VINF_SUCCESS;
11423
11424 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11425 }
11426 }
11427 else
11428 {
11429 /* The new RIP is taken from a register. */
11430 switch (pVCpu->iem.s.enmEffOpSize)
11431 {
11432 case IEMMODE_16BIT:
11433 IEM_MC_BEGIN(1, 1);
11434 IEM_MC_ARG(uint16_t, u16Target, 0);
11435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11436 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11438 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11439 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11440 IEM_MC_END()
11441 return VINF_SUCCESS;
11442
11443 case IEMMODE_32BIT:
11444 IEM_MC_BEGIN(1, 1);
11445 IEM_MC_ARG(uint32_t, u32Target, 0);
11446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11447 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11448 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11449 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11450 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11451 IEM_MC_END()
11452 return VINF_SUCCESS;
11453
11454 case IEMMODE_64BIT:
11455 IEM_MC_BEGIN(1, 1);
11456 IEM_MC_ARG(uint64_t, u64Target, 0);
11457 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11458 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11459 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11460 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11461 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11462 IEM_MC_END()
11463 return VINF_SUCCESS;
11464
11465 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11466 }
11467 }
11468}
11469
11470typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11471
11472FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11473{
11474 /* Registers? How?? */
11475 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11476 { /* likely */ }
11477 else
11478 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11479
11480 /* Far pointer loaded from memory. */
11481 switch (pVCpu->iem.s.enmEffOpSize)
11482 {
11483 case IEMMODE_16BIT:
11484 IEM_MC_BEGIN(3, 1);
11485 IEM_MC_ARG(uint16_t, u16Sel, 0);
11486 IEM_MC_ARG(uint16_t, offSeg, 1);
11487 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11488 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11489 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11490 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11491 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11492 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11493 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11494 IEM_MC_END();
11495 return VINF_SUCCESS;
11496
11497 case IEMMODE_64BIT:
11498 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11499 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11500 * and call far qword [rsp] encodings. */
11501 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11502 {
11503 IEM_MC_BEGIN(3, 1);
11504 IEM_MC_ARG(uint16_t, u16Sel, 0);
11505 IEM_MC_ARG(uint64_t, offSeg, 1);
11506 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11507 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11510 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11511 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11512 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11513 IEM_MC_END();
11514 return VINF_SUCCESS;
11515 }
11516 /* AMD falls thru. */
11517 RT_FALL_THRU();
11518
11519 case IEMMODE_32BIT:
11520 IEM_MC_BEGIN(3, 1);
11521 IEM_MC_ARG(uint16_t, u16Sel, 0);
11522 IEM_MC_ARG(uint32_t, offSeg, 1);
11523 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11524 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11525 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11527 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11528 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11529 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11530 IEM_MC_END();
11531 return VINF_SUCCESS;
11532
11533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11534 }
11535}
11536
11537
11538/**
11539 * Opcode 0xff /3.
11540 * @param bRm The RM byte.
11541 */
11542FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11543{
11544 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11545 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11546}
11547
11548
11549/**
11550 * Opcode 0xff /4.
11551 * @param bRm The RM byte.
11552 */
11553FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11554{
11555 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11556 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11557
11558 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11559 {
11560 /* The new RIP is taken from a register. */
11561 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11562 switch (pVCpu->iem.s.enmEffOpSize)
11563 {
11564 case IEMMODE_16BIT:
11565 IEM_MC_BEGIN(0, 1);
11566 IEM_MC_LOCAL(uint16_t, u16Target);
11567 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11568 IEM_MC_SET_RIP_U16(u16Target);
11569 IEM_MC_END()
11570 return VINF_SUCCESS;
11571
11572 case IEMMODE_32BIT:
11573 IEM_MC_BEGIN(0, 1);
11574 IEM_MC_LOCAL(uint32_t, u32Target);
11575 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11576 IEM_MC_SET_RIP_U32(u32Target);
11577 IEM_MC_END()
11578 return VINF_SUCCESS;
11579
11580 case IEMMODE_64BIT:
11581 IEM_MC_BEGIN(0, 1);
11582 IEM_MC_LOCAL(uint64_t, u64Target);
11583 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11584 IEM_MC_SET_RIP_U64(u64Target);
11585 IEM_MC_END()
11586 return VINF_SUCCESS;
11587
11588 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11589 }
11590 }
11591 else
11592 {
11593 /* The new RIP is taken from a memory location. */
11594 switch (pVCpu->iem.s.enmEffOpSize)
11595 {
11596 case IEMMODE_16BIT:
11597 IEM_MC_BEGIN(0, 2);
11598 IEM_MC_LOCAL(uint16_t, u16Target);
11599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11600 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11602 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11603 IEM_MC_SET_RIP_U16(u16Target);
11604 IEM_MC_END()
11605 return VINF_SUCCESS;
11606
11607 case IEMMODE_32BIT:
11608 IEM_MC_BEGIN(0, 2);
11609 IEM_MC_LOCAL(uint32_t, u32Target);
11610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11613 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11614 IEM_MC_SET_RIP_U32(u32Target);
11615 IEM_MC_END()
11616 return VINF_SUCCESS;
11617
11618 case IEMMODE_64BIT:
11619 IEM_MC_BEGIN(0, 2);
11620 IEM_MC_LOCAL(uint64_t, u64Target);
11621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11622 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11624 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11625 IEM_MC_SET_RIP_U64(u64Target);
11626 IEM_MC_END()
11627 return VINF_SUCCESS;
11628
11629 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11630 }
11631 }
11632}
11633
11634
11635/**
11636 * Opcode 0xff /5.
11637 * @param bRm The RM byte.
11638 */
11639FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11640{
11641 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11642 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11643}
11644
11645
11646/**
11647 * Opcode 0xff /6.
11648 * @param bRm The RM byte.
11649 */
11650FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11651{
11652 IEMOP_MNEMONIC(push_Ev, "push Ev");
11653
11654 /* Registers are handled by a common worker. */
11655 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11656 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11657
11658 /* Memory we do here. */
11659 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11660 switch (pVCpu->iem.s.enmEffOpSize)
11661 {
11662 case IEMMODE_16BIT:
11663 IEM_MC_BEGIN(0, 2);
11664 IEM_MC_LOCAL(uint16_t, u16Src);
11665 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11666 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11667 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11668 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11669 IEM_MC_PUSH_U16(u16Src);
11670 IEM_MC_ADVANCE_RIP();
11671 IEM_MC_END();
11672 return VINF_SUCCESS;
11673
11674 case IEMMODE_32BIT:
11675 IEM_MC_BEGIN(0, 2);
11676 IEM_MC_LOCAL(uint32_t, u32Src);
11677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11680 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11681 IEM_MC_PUSH_U32(u32Src);
11682 IEM_MC_ADVANCE_RIP();
11683 IEM_MC_END();
11684 return VINF_SUCCESS;
11685
11686 case IEMMODE_64BIT:
11687 IEM_MC_BEGIN(0, 2);
11688 IEM_MC_LOCAL(uint64_t, u64Src);
11689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11692 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11693 IEM_MC_PUSH_U64(u64Src);
11694 IEM_MC_ADVANCE_RIP();
11695 IEM_MC_END();
11696 return VINF_SUCCESS;
11697
11698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11699 }
11700}
11701
11702
11703/**
11704 * @opcode 0xff
11705 */
11706FNIEMOP_DEF(iemOp_Grp5)
11707{
11708 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11709 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11710 {
11711 case 0:
11712 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11713 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11714 case 1:
11715 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11716 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11717 case 2:
11718 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11719 case 3:
11720 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11721 case 4:
11722 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11723 case 5:
11724 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11725 case 6:
11726 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11727 case 7:
11728 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11729 return IEMOP_RAISE_INVALID_OPCODE();
11730 }
11731 AssertFailedReturn(VERR_IEM_IPE_3);
11732}
11733
11734
11735
/**
 * The one byte opcode dispatch table, indexed by the opcode byte.
 * Forward declared (extern) at the top of this file so the decoder loop
 * can reference it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11803
11804
11805/** @} */
11806
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette