VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 72064

Last change on this file since 72064 was 71834, checked in by vboxsync, 7 years ago

VMM/IEM: Nested hw.virt: Implement SVM pause-filter and pause-filter threshold.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 393.5 KB
/* $Id: IEMAllInstructionsOneByte.cpp.h 71834 2018-04-12 07:21:00Z vboxsync $ */
/** @file
 * IEM - Instruction Decoding and Emulation.
 */

/*
 * Copyright (C) 2011-2017 Oracle Corporation
 *
 * This file is part of VirtualBox Open Source Edition (OSE), as
 * available from http://www.virtualbox.org. This file is free software;
 * you can redistribute it and/or modify it under the terms of the GNU
 * General Public License (GPL) as published by the Free Software
 * Foundation, in version 2 as it comes in the "COPYING" file of the
 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
 */


/*******************************************************************************
*   Global Variables                                                           *
*******************************************************************************/
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */

/* Instruction group definitions: */

/** @defgroup og_gen General
 * @{ */
/** @defgroup og_gen_arith Arithmetic
 * @{ */
/** @defgroup og_gen_arith_bin Binary numbers */
/** @defgroup og_gen_arith_dec Decimal numbers */
/** @} */
/** @} */

/** @defgroup og_stack Stack
 * @{ */
/** @defgroup og_stack_sreg Segment registers */
/** @} */

/** @defgroup og_prefix Prefixes */
/** @defgroup og_escapes Escape bytes */



/** @name One byte opcodes.
 * @{
 */

/* Instruction specification format - work in progress: */

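/*
 * Rough legend for the @optest notation used below (the format is still
 * settling, so read this as a summary of how the existing tests are written):
 *   - 'op1=1 op2=1 -> op1=2': inputs to the left of the arrow, expected
 *     outputs to the right.
 *   - 'efl|=cf' / 'efl&~=cf': set or clear EFLAGS bits before running a test.
 *   - 'efl&|=nc,pe,na,nz,pl,nv': check the listed flags afterwards, using
 *     debugger-style mnemonics (cf/nc carry, pe/po parity, af/na aux carry,
 *     zf/nz zero, ng/pl sign, ov/nv overflow).
 *   - 'o16 /', 'intel /', 'amd /': restrict a test to an operand size or to
 *     a CPU vendor.
 */
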
/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify cf,pf,af,zf,sf,of
 * @ophints harmless ignores_op_sizes
 * @opstats add_Eb_Gb
 * @opgroup og_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Ev_Gv
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_add_Eb_Gb
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}


/**
 * @opcode 0x06
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
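    /* The 0x06/0x0e/0x16/0x1e segment push/pop encodings were reclaimed in
       x86-64, hence IEMOP_HLP_NO_64BIT() above; only the two-byte
       PUSH/POP FS/GS forms survive in 64-bit mode. */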
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}


/**
 * @opcode 0x08
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/**
 * @opcode 0x09
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/**
 * @opcode 0x0a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Ev_Gv
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @opcopytests iemOp_or_Eb_Gb
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}


/**
 * @opcode 0x0e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}


/**
 * @opcode 0x0f
 * @opmnemonic EscTwo0f
 * @openc two0f
 * @opdisenum OP_2B_ESC
 * @ophints harmless
 * @opgroup og_escapes
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* Sanity check the table the first time around. */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

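    /* Each two-byte opcode has four entries in g_apfnTwoByteMap, one per
       mandatory-prefix candidate (none, 0x66, 0xF3, 0xF2), selected via
       idxPrefix below - which is what the BSF vs TZCNT (F3 0F BC) asserts
       above are checking. */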
    if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
    {
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        IEMOP_HLP_MIN_286();
        return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
    }
    /* @opdone */

    /*
     * On the 8086 this is a POP CS instruction.
     * For the time being we don't specify this.
     */
    IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
}

/**
 * @opcode 0x10
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Eb_Gb
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opcopytests iemOp_adc_Ev_Gv
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}


/**
 * @opcode 0x16
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}


/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}


/**
 * @opcode 0x18
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup og_gen_arith_bin
 * @opfltest cf
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1e
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}


/**
 * @opcode 0x20
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x24
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}


/**
 * @opcode 0x25
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}


/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

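    /* A prefix never completes an instruction on its own: fetch the next
       opcode byte and dispatch it through the regular one-byte table. */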
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}


/**
 * @opcode 0x28
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}


/**
 * @opcode 0x30
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x34
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}


/**
 * @opcode 0x35
 * @opgroup og_gen_arith_bin
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}


/**
 * @opcode 0x36
 * @opmnemonic SEG
 * @op1 SS
 * @opgroup og_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x37
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
 */
FNIEMOP_DEF(iemOp_aaa)
{
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}


/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}


/**
 * @opcode 0x3f
 * @opfltest af,cf
 * @opflmodify cf,pf,af,zf,sf,of
 * @opflundef pf,zf,sf,of
 * @opgroup og_gen_arith_dec
 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
 */
FNIEMOP_DEF(iemOp_aas)
{
    IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
}


/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * @param   pImpl   Table with the instruction's sized worker functions.
 * @param   iReg    The general purpose register number (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}


/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }
1137
1138 IEMOP_MNEMONIC(inc_eAX, "inc eAX");
1139 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
1140}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
1146FNIEMOP_DEF(iemOp_inc_eCX)
1147{
1148 /*
1149 * This is a REX prefix in 64-bit mode.
1150 */
1151 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1152 {
1153 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
1154 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
1155 pVCpu->iem.s.uRexB = 1 << 3;
1156
1157 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1158 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1159 }
1160
1161 IEMOP_MNEMONIC(inc_eCX, "inc eCX");
1162 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
1163}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
1169FNIEMOP_DEF(iemOp_inc_eDX)
1170{
1171 /*
1172 * This is a REX prefix in 64-bit mode.
1173 */
1174 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1175 {
1176 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
1177 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
1178 pVCpu->iem.s.uRexIndex = 1 << 3;
1179
1180 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1181 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1182 }
1183
1184 IEMOP_MNEMONIC(inc_eDX, "inc eDX");
1185 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
1186}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
1193FNIEMOP_DEF(iemOp_inc_eBX)
1194{
1195 /*
1196 * This is a REX prefix in 64-bit mode.
1197 */
1198 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1199 {
1200 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
1201 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1202 pVCpu->iem.s.uRexB = 1 << 3;
1203 pVCpu->iem.s.uRexIndex = 1 << 3;
1204
1205 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1206 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1207 }
1208
1209 IEMOP_MNEMONIC(inc_eBX, "inc eBX");
1210 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
1211}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
1217FNIEMOP_DEF(iemOp_inc_eSP)
1218{
1219 /*
1220 * This is a REX prefix in 64-bit mode.
1221 */
1222 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1223 {
1224 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
1225 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
1226 pVCpu->iem.s.uRexReg = 1 << 3;
1227
1228 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1229 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1230 }
1231
1232 IEMOP_MNEMONIC(inc_eSP, "inc eSP");
1233 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
1234}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
1240FNIEMOP_DEF(iemOp_inc_eBP)
1241{
1242 /*
1243 * This is a REX prefix in 64-bit mode.
1244 */
1245 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1246 {
1247 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
1248 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
1249 pVCpu->iem.s.uRexReg = 1 << 3;
1250 pVCpu->iem.s.uRexB = 1 << 3;
1251
1252 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1253 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1254 }
1255
1256 IEMOP_MNEMONIC(inc_eBP, "inc eBP");
1257 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
1258}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
1264FNIEMOP_DEF(iemOp_inc_eSI)
1265{
1266 /*
1267 * This is a REX prefix in 64-bit mode.
1268 */
1269 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1270 {
1271 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
1272 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
1273 pVCpu->iem.s.uRexReg = 1 << 3;
1274 pVCpu->iem.s.uRexIndex = 1 << 3;
1275
1276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1277 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1278 }
1279
1280 IEMOP_MNEMONIC(inc_eSI, "inc eSI");
1281 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
1282}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
1288FNIEMOP_DEF(iemOp_inc_eDI)
1289{
1290 /*
1291 * This is a REX prefix in 64-bit mode.
1292 */
1293 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1294 {
1295 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
1296 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
1297 pVCpu->iem.s.uRexReg = 1 << 3;
1298 pVCpu->iem.s.uRexB = 1 << 3;
1299 pVCpu->iem.s.uRexIndex = 1 << 3;
1300
1301 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1302 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1303 }
1304
1305 IEMOP_MNEMONIC(inc_eDI, "inc eDI");
1306 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
1307}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
1313FNIEMOP_DEF(iemOp_dec_eAX)
1314{
1315 /*
1316 * This is a REX prefix in 64-bit mode.
1317 */
1318 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1319 {
1320 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1321 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1322 iemRecalEffOpSize(pVCpu);
1323
1324 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1325 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1326 }
1327
1328 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1329 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1330}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
1336FNIEMOP_DEF(iemOp_dec_eCX)
1337{
1338 /*
1339 * This is a REX prefix in 64-bit mode.
1340 */
1341 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1342 {
1343 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1344 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1345 pVCpu->iem.s.uRexB = 1 << 3;
1346 iemRecalEffOpSize(pVCpu);
1347
1348 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1349 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1350 }
1351
1352 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1353 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1354}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
1360FNIEMOP_DEF(iemOp_dec_eDX)
1361{
1362 /*
1363 * This is a REX prefix in 64-bit mode.
1364 */
1365 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1366 {
1367 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1368 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1369 pVCpu->iem.s.uRexIndex = 1 << 3;
1370 iemRecalEffOpSize(pVCpu);
1371
1372 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1373 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1374 }
1375
1376 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1377 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1378}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
1384FNIEMOP_DEF(iemOp_dec_eBX)
1385{
1386 /*
1387 * This is a REX prefix in 64-bit mode.
1388 */
1389 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1390 {
1391 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1392 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1393 pVCpu->iem.s.uRexB = 1 << 3;
1394 pVCpu->iem.s.uRexIndex = 1 << 3;
1395 iemRecalEffOpSize(pVCpu);
1396
1397 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1398 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1399 }
1400
1401 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1402 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1403}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
1409FNIEMOP_DEF(iemOp_dec_eSP)
1410{
1411 /*
1412 * This is a REX prefix in 64-bit mode.
1413 */
1414 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1415 {
1416 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1417 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1418 pVCpu->iem.s.uRexReg = 1 << 3;
1419 iemRecalEffOpSize(pVCpu);
1420
1421 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1422 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1423 }
1424
1425 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1426 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1427}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
1433FNIEMOP_DEF(iemOp_dec_eBP)
1434{
1435 /*
1436 * This is a REX prefix in 64-bit mode.
1437 */
1438 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1439 {
1440 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1441 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1442 pVCpu->iem.s.uRexReg = 1 << 3;
1443 pVCpu->iem.s.uRexB = 1 << 3;
1444 iemRecalEffOpSize(pVCpu);
1445
1446 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1447 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1448 }
1449
1450 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1451 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1452}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
1458FNIEMOP_DEF(iemOp_dec_eSI)
1459{
1460 /*
1461 * This is a REX prefix in 64-bit mode.
1462 */
1463 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1464 {
1465 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1466 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1467 pVCpu->iem.s.uRexReg = 1 << 3;
1468 pVCpu->iem.s.uRexIndex = 1 << 3;
1469 iemRecalEffOpSize(pVCpu);
1470
1471 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1472 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1473 }
1474
1475 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1476 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1477}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
1483FNIEMOP_DEF(iemOp_dec_eDI)
1484{
1485 /*
1486 * This is a REX prefix in 64-bit mode.
1487 */
1488 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1489 {
1490 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1491 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1492 pVCpu->iem.s.uRexReg = 1 << 3;
1493 pVCpu->iem.s.uRexB = 1 << 3;
1494 pVCpu->iem.s.uRexIndex = 1 << 3;
1495 iemRecalEffOpSize(pVCpu);
1496
1497 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1498 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1499 }
1500
1501 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1502 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1503}
1504
1505
1506/**
1507 * Common 'push register' helper.
1508 */
1509FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1510{
1511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1512 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1513 {
1514 iReg |= pVCpu->iem.s.uRexB;
1515 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1516 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1517 }
1518
1519 switch (pVCpu->iem.s.enmEffOpSize)
1520 {
1521 case IEMMODE_16BIT:
1522 IEM_MC_BEGIN(0, 1);
1523 IEM_MC_LOCAL(uint16_t, u16Value);
1524 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1525 IEM_MC_PUSH_U16(u16Value);
1526 IEM_MC_ADVANCE_RIP();
1527 IEM_MC_END();
1528 break;
1529
1530 case IEMMODE_32BIT:
1531 IEM_MC_BEGIN(0, 1);
1532 IEM_MC_LOCAL(uint32_t, u32Value);
1533 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1534 IEM_MC_PUSH_U32(u32Value);
1535 IEM_MC_ADVANCE_RIP();
1536 IEM_MC_END();
1537 break;
1538
1539 case IEMMODE_64BIT:
1540 IEM_MC_BEGIN(0, 1);
1541 IEM_MC_LOCAL(uint64_t, u64Value);
1542 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1543 IEM_MC_PUSH_U64(u64Value);
1544 IEM_MC_ADVANCE_RIP();
1545 IEM_MC_END();
1546 break;
1547 }
1548
1549 return VINF_SUCCESS;
1550}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
1556FNIEMOP_DEF(iemOp_push_eAX)
1557{
1558 IEMOP_MNEMONIC(push_rAX, "push rAX");
1559 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1560}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
1566FNIEMOP_DEF(iemOp_push_eCX)
1567{
1568 IEMOP_MNEMONIC(push_rCX, "push rCX");
1569 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1570}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
1576FNIEMOP_DEF(iemOp_push_eDX)
1577{
1578 IEMOP_MNEMONIC(push_rDX, "push rDX");
1579 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1580}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
1586FNIEMOP_DEF(iemOp_push_eBX)
1587{
1588 IEMOP_MNEMONIC(push_rBX, "push rBX");
1589 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1590}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
1596FNIEMOP_DEF(iemOp_push_eSP)
1597{
1598 IEMOP_MNEMONIC(push_rSP, "push rSP");
1599 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1600 {
1601 IEM_MC_BEGIN(0, 1);
1602 IEM_MC_LOCAL(uint16_t, u16Value);
1603 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1604 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1605 IEM_MC_PUSH_U16(u16Value);
1606 IEM_MC_ADVANCE_RIP();
1607 IEM_MC_END();
1608 }
1609 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1610}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
1616FNIEMOP_DEF(iemOp_push_eBP)
1617{
1618 IEMOP_MNEMONIC(push_rBP, "push rBP");
1619 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1620}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
1626FNIEMOP_DEF(iemOp_push_eSI)
1627{
1628 IEMOP_MNEMONIC(push_rSI, "push rSI");
1629 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1630}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
1636FNIEMOP_DEF(iemOp_push_eDI)
1637{
1638 IEMOP_MNEMONIC(push_rDI, "push rDI");
1639 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1640}
1641
1642
1643/**
1644 * Common 'pop register' helper.
1645 */
1646FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1647{
1648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1649 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1650 {
1651 iReg |= pVCpu->iem.s.uRexB;
1652 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1653 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1654 }
1655
1656 switch (pVCpu->iem.s.enmEffOpSize)
1657 {
1658 case IEMMODE_16BIT:
1659 IEM_MC_BEGIN(0, 1);
1660 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1661 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1662 IEM_MC_POP_U16(pu16Dst);
1663 IEM_MC_ADVANCE_RIP();
1664 IEM_MC_END();
1665 break;
1666
1667 case IEMMODE_32BIT:
1668 IEM_MC_BEGIN(0, 1);
1669 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1670 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1671 IEM_MC_POP_U32(pu32Dst);
1672 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1673 IEM_MC_ADVANCE_RIP();
1674 IEM_MC_END();
1675 break;
1676
1677 case IEMMODE_64BIT:
1678 IEM_MC_BEGIN(0, 1);
1679 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1680 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1681 IEM_MC_POP_U64(pu64Dst);
1682 IEM_MC_ADVANCE_RIP();
1683 IEM_MC_END();
1684 break;
1685 }
1686
1687 return VINF_SUCCESS;
1688}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
1694FNIEMOP_DEF(iemOp_pop_eAX)
1695{
1696 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1697 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1698}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
1704FNIEMOP_DEF(iemOp_pop_eCX)
1705{
1706 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1707 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1708}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
1714FNIEMOP_DEF(iemOp_pop_eDX)
1715{
1716 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1717 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1718}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
1724FNIEMOP_DEF(iemOp_pop_eBX)
1725{
1726 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1727 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1728}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
1734FNIEMOP_DEF(iemOp_pop_eSP)
1735{
1736 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1737 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1738 {
1739 if (pVCpu->iem.s.uRexB)
1740 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1741 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1742 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1743 }
1744
1745 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1746 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1747 /** @todo add testcase for this instruction. */
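    /* POP SP/ESP/RSP cannot reuse iemOpCommonPopGReg: the destination is the
       stack pointer itself, so the popped value is stored into it after the
       pop's own SP increment (the value read is still the old top of stack). */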
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}


/**
 * @opcode 0x5d
 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/**
 * @opcode 0x5e
 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/**
 * @opcode 0x5f
 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}


/**
 * @opcode 0x60
 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}


/**
 * @opcode 0x61
 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}


/**
 * @opcode 0x62
 * @opmnemonic bound
 * @op1 Gv_RO
 * @op2 Ma
 * @opmincpu 80186
 * @ophints harmless invalid_64
 * @optest op1=0 op2=0 ->
 * @optest op1=1 op2=0 -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0000fffe ->
 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
1902    /* The BOUND instruction is invalid in 64-bit mode. In legacy and
1903       compatibility mode it is invalid with MOD=3.
1904
1905       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
1906       both be set. In the Intel EVEX documentation (SDM vol 2) these are simply
1907       given as R and X without an exact description, so we assume it builds on
1908       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
1909       like with the 3-byte VEX, 32-bit code is restricted wrt addressable registers. */
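    /* Illustrative sketch (hedged; the actual checks live in
       iemCImpl_bound_16/_32 and this merely restates what the @optest rows
       above encode): for the 16-bit form,
           bound ax, [mem]   ; #BR (xcpt 5) if (int16_t)AX < (int16_t)[mem]
                             ;            or (int16_t)AX > (int16_t)[mem+2]
       i.e. both bounds are fetched and all three values compare as signed. */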
1910 uint8_t bRm;
1911 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1912 {
1913 IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
1914 IEMOP_HLP_MIN_186();
1915 IEM_OPCODE_GET_NEXT_U8(&bRm);
1916 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
1917 {
1918 /** @todo testcase: check that there are two memory accesses involved. Check
1919 * whether they're both read before the \#BR triggers. */
1920 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1921 {
1922 IEM_MC_BEGIN(3, 1);
1923 IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
1924 IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
1925 IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
1926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1927
1928 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1929 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1930
1931 IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1932 IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1933 IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
1934
1935 IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
1936 IEM_MC_END();
1937 }
1938 else /* 32-bit operands */
1939 {
1940 IEM_MC_BEGIN(3, 1);
1941 IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
1942 IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
1943 IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
1944 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1945
1946 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1947 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1948
1949 IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1950 IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1951 IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
1952
1953 IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
1954 IEM_MC_END();
1955 }
1956 }
1957
1958 /*
1959 * @opdone
1960 */
1961 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
1962 {
1963 /* Note that there is no need for the CPU to fetch further bytes
1964 here because MODRM.MOD == 3. */
1965 Log(("evex not supported by the guest CPU!\n"));
1966 return IEMOP_RAISE_INVALID_OPCODE();
1967 }
1968 }
1969 else
1970 {
1971 /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
1972 * does modr/m read, whereas AMD probably doesn't... */
1973 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
1974 {
1975 Log(("evex not supported by the guest CPU!\n"));
1976 return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
1977 }
1978 IEM_OPCODE_GET_NEXT_U8(&bRm);
1979 }
1980
1981 IEMOP_MNEMONIC(evex, "evex");
1982 uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
1983 uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
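    /* Layout note (hedged reading of the EVEX format): EVEX is a four-byte
       prefix - 0x62 followed by three payload bytes - so bRm above holds the
       first payload byte, bP2/bP3 the remaining two, and the actual opcode
       would be fetched next once this gets implemented. */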
1984 Log(("evex prefix is not implemented!\n"));
1985 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
1986}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
1990FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
1991{
1992 IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
1993 IEMOP_HLP_MIN_286();
1994 IEMOP_HLP_NO_REAL_OR_V86_MODE();
1995 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1996
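    /* Semantics sketch (illustrative only; the real work is done by
       iemAImpl_arpl below): ARPL raises the RPL field (bits 1:0) of the
       destination selector to at least that of the source and reports the
       adjustment via ZF:
           if ((Ew & 3) < (Gw & 3)) { Ew = (Ew & ~3) | (Gw & 3); ZF = 1; }
           else                       ZF = 0;
    */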
1997 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1998 {
1999 /* Register */
2000 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2001 IEM_MC_BEGIN(3, 0);
2002 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2003 IEM_MC_ARG(uint16_t, u16Src, 1);
2004 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2005
2006 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2007 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
2008 IEM_MC_REF_EFLAGS(pEFlags);
2009 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2010
2011 IEM_MC_ADVANCE_RIP();
2012 IEM_MC_END();
2013 }
2014 else
2015 {
2016 /* Memory */
2017 IEM_MC_BEGIN(3, 2);
2018 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2019 IEM_MC_ARG(uint16_t, u16Src, 1);
2020 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
2021 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2022
2023 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2024 IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
2025 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2026 IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
2027 IEM_MC_FETCH_EFLAGS(EFlags);
2028 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);
2029
2030 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
2031 IEM_MC_COMMIT_EFLAGS(EFlags);
2032 IEM_MC_ADVANCE_RIP();
2033 IEM_MC_END();
2034 }
2035 return VINF_SUCCESS;
2036
2037}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
2046FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
2047{
2048    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */
2049
2050 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
2051 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
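    /* Worked example (illustrative): with REX.W set, 0x63 sign-extends the
       32-bit source into the 64-bit destination:
           movsxd rax, ecx   ; ECX=0x80000000 -> RAX=0xFFFFFFFF80000000
           movsxd rax, ecx   ; ECX=0x7FFFFFFF -> RAX=0x000000007FFFFFFF
       Without REX.W it would act like a plain 32-bit mov (per the AMD note
       above), but that path never reaches this function. */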
2052
2053 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2054 {
2055 /*
2056 * Register to register.
2057 */
2058 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2059 IEM_MC_BEGIN(0, 1);
2060 IEM_MC_LOCAL(uint64_t, u64Value);
2061 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2062 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
2063 IEM_MC_ADVANCE_RIP();
2064 IEM_MC_END();
2065 }
2066 else
2067 {
2068 /*
2069 * We're loading a register from memory.
2070 */
2071 IEM_MC_BEGIN(0, 2);
2072 IEM_MC_LOCAL(uint64_t, u64Value);
2073 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2074 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
2075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2076 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2077 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
2078 IEM_MC_ADVANCE_RIP();
2079 IEM_MC_END();
2080 }
2081 return VINF_SUCCESS;
2082}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
2091FNIEMOP_DEF(iemOp_seg_FS)
2092{
2093 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
2094 IEMOP_HLP_MIN_386();
2095
2096 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
2097 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
2098
2099 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2100 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2101}
2102
2103
2104/**
2105 * @opcode 0x65
2106 * @opmnemonic seggs
2107 * @opmincpu 80386
2108 * @opgroup og_prefixes
2109 */
2110FNIEMOP_DEF(iemOp_seg_GS)
2111{
2112 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
2113 IEMOP_HLP_MIN_386();
2114
2115 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
2116 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
2117
2118 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2119 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2120}
2121
2122
2123/**
2124 * @opcode 0x66
2125 * @opmnemonic opsize
2126 * @openc prefix
2127 * @opmincpu 80386
2128 * @ophints harmless
2129 * @opgroup og_prefixes
2130 */
2131FNIEMOP_DEF(iemOp_op_size)
2132{
2133 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
2134 IEMOP_HLP_MIN_386();
2135
2136 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
2137 iemRecalEffOpSize(pVCpu);
2138
2139    /* For the 4 entry opcode tables, the operand-size prefix doesn't count
2140       when REPZ or REPNZ are present. */
2141 if (pVCpu->iem.s.idxPrefix == 0)
2142 pVCpu->iem.s.idxPrefix = 1;
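    /* Assumed index convention for those four-entry tables (mirrors the
       VEX.pp encoding; stated as a reader aid, not verified against every
       user): 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2 - hence a
       REPZ/REPNZ already recorded in idxPrefix takes precedence over 0x66. */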
2143
2144 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2145 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2146}
2147
2148
2149/**
2150 * @opcode 0x67
2151 * @opmnemonic addrsize
2152 * @openc prefix
2153 * @opmincpu 80386
2154 * @ophints harmless
2155 * @opgroup og_prefixes
2156 */
2157FNIEMOP_DEF(iemOp_addr_size)
2158{
2159 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
2160 IEMOP_HLP_MIN_386();
2161
2162 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
2163 switch (pVCpu->iem.s.enmDefAddrMode)
2164 {
2165 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2166 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
2167 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
2168 default: AssertFailed();
2169 }
2170
2171 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
2172 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
2173}
2174
2175
2176/**
2177 * @opcode 0x68
2178 */
2179FNIEMOP_DEF(iemOp_push_Iz)
2180{
2181 IEMOP_MNEMONIC(push_Iz, "push Iz");
2182 IEMOP_HLP_MIN_186();
2183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2184 switch (pVCpu->iem.s.enmEffOpSize)
2185 {
2186 case IEMMODE_16BIT:
2187 {
2188 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2189 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2190 IEM_MC_BEGIN(0,0);
2191 IEM_MC_PUSH_U16(u16Imm);
2192 IEM_MC_ADVANCE_RIP();
2193 IEM_MC_END();
2194 return VINF_SUCCESS;
2195 }
2196
2197 case IEMMODE_32BIT:
2198 {
2199 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2200 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2201 IEM_MC_BEGIN(0,0);
2202 IEM_MC_PUSH_U32(u32Imm);
2203 IEM_MC_ADVANCE_RIP();
2204 IEM_MC_END();
2205 return VINF_SUCCESS;
2206 }
2207
2208 case IEMMODE_64BIT:
2209 {
2210 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2212 IEM_MC_BEGIN(0,0);
2213 IEM_MC_PUSH_U64(u64Imm);
2214 IEM_MC_ADVANCE_RIP();
2215 IEM_MC_END();
2216 return VINF_SUCCESS;
2217 }
2218
2219 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2220 }
2221}
2222
2223
2224/**
2225 * @opcode 0x69
2226 */
2227FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
2228{
2229 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
2230 IEMOP_HLP_MIN_186();
2231 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2232 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2233
2234 switch (pVCpu->iem.s.enmEffOpSize)
2235 {
2236 case IEMMODE_16BIT:
2237 {
2238 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2239 {
2240 /* register operand */
2241 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2242 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2243
2244 IEM_MC_BEGIN(3, 1);
2245 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2246 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
2247 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2248 IEM_MC_LOCAL(uint16_t, u16Tmp);
2249
2250 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2251 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2252 IEM_MC_REF_EFLAGS(pEFlags);
2253 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2254 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2255
2256 IEM_MC_ADVANCE_RIP();
2257 IEM_MC_END();
2258 }
2259 else
2260 {
2261 /* memory operand */
2262 IEM_MC_BEGIN(3, 2);
2263 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2264 IEM_MC_ARG(uint16_t, u16Src, 1);
2265 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2266 IEM_MC_LOCAL(uint16_t, u16Tmp);
2267 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2268
2269 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2270 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2271 IEM_MC_ASSIGN(u16Src, u16Imm);
2272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2273 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2274 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2275 IEM_MC_REF_EFLAGS(pEFlags);
2276 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2277 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2278
2279 IEM_MC_ADVANCE_RIP();
2280 IEM_MC_END();
2281 }
2282 return VINF_SUCCESS;
2283 }
2284
2285 case IEMMODE_32BIT:
2286 {
2287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2288 {
2289 /* register operand */
2290 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2291 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2292
2293 IEM_MC_BEGIN(3, 1);
2294 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2295 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2296 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2297 IEM_MC_LOCAL(uint32_t, u32Tmp);
2298
2299 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2300 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2301 IEM_MC_REF_EFLAGS(pEFlags);
2302 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2303 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2304
2305 IEM_MC_ADVANCE_RIP();
2306 IEM_MC_END();
2307 }
2308 else
2309 {
2310 /* memory operand */
2311 IEM_MC_BEGIN(3, 2);
2312 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2313 IEM_MC_ARG(uint32_t, u32Src, 1);
2314 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2315 IEM_MC_LOCAL(uint32_t, u32Tmp);
2316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2317
2318 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2319 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2320 IEM_MC_ASSIGN(u32Src, u32Imm);
2321 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2322 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2323 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2324 IEM_MC_REF_EFLAGS(pEFlags);
2325 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2326 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2327
2328 IEM_MC_ADVANCE_RIP();
2329 IEM_MC_END();
2330 }
2331 return VINF_SUCCESS;
2332 }
2333
2334 case IEMMODE_64BIT:
2335 {
2336 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2337 {
2338 /* register operand */
2339 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2341
2342 IEM_MC_BEGIN(3, 1);
2343 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2344 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2345 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2346 IEM_MC_LOCAL(uint64_t, u64Tmp);
2347
2348 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2349 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2350 IEM_MC_REF_EFLAGS(pEFlags);
2351 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2352 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2353
2354 IEM_MC_ADVANCE_RIP();
2355 IEM_MC_END();
2356 }
2357 else
2358 {
2359 /* memory operand */
2360 IEM_MC_BEGIN(3, 2);
2361 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2362 IEM_MC_ARG(uint64_t, u64Src, 1);
2363 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2364 IEM_MC_LOCAL(uint64_t, u64Tmp);
2365 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2366
2367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2368 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2369 IEM_MC_ASSIGN(u64Src, u64Imm);
2370 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2371 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2372 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2373 IEM_MC_REF_EFLAGS(pEFlags);
2374 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2375 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2376
2377 IEM_MC_ADVANCE_RIP();
2378 IEM_MC_END();
2379 }
2380 return VINF_SUCCESS;
2381 }
2382 }
2383 AssertFailedReturn(VERR_IEM_IPE_9);
2384}
2385
2386
2387/**
2388 * @opcode 0x6a
2389 */
2390FNIEMOP_DEF(iemOp_push_Ib)
2391{
2392 IEMOP_MNEMONIC(push_Ib, "push Ib");
2393 IEMOP_HLP_MIN_186();
2394 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2395 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2396 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2397
2398 IEM_MC_BEGIN(0,0);
2399 switch (pVCpu->iem.s.enmEffOpSize)
2400 {
2401 case IEMMODE_16BIT:
2402 IEM_MC_PUSH_U16(i8Imm);
2403 break;
2404 case IEMMODE_32BIT:
2405 IEM_MC_PUSH_U32(i8Imm);
2406 break;
2407 case IEMMODE_64BIT:
2408 IEM_MC_PUSH_U64(i8Imm);
2409 break;
2410 }
2411 IEM_MC_ADVANCE_RIP();
2412 IEM_MC_END();
2413 return VINF_SUCCESS;
2414}
2415
2416
2417/**
2418 * @opcode 0x6b
2419 */
2420FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2421{
2422    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
2423 IEMOP_HLP_MIN_186();
2424 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2425 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2426
2427 switch (pVCpu->iem.s.enmEffOpSize)
2428 {
2429 case IEMMODE_16BIT:
2430 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2431 {
2432 /* register operand */
2433 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2434 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2435
2436 IEM_MC_BEGIN(3, 1);
2437 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2438 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2439 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2440 IEM_MC_LOCAL(uint16_t, u16Tmp);
2441
2442 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2443 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2444 IEM_MC_REF_EFLAGS(pEFlags);
2445 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2446 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2447
2448 IEM_MC_ADVANCE_RIP();
2449 IEM_MC_END();
2450 }
2451 else
2452 {
2453 /* memory operand */
2454 IEM_MC_BEGIN(3, 2);
2455 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2456 IEM_MC_ARG(uint16_t, u16Src, 1);
2457 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2458 IEM_MC_LOCAL(uint16_t, u16Tmp);
2459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2460
2461 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2462 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2463 IEM_MC_ASSIGN(u16Src, u16Imm);
2464 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2465 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2466 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2467 IEM_MC_REF_EFLAGS(pEFlags);
2468 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2469 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2470
2471 IEM_MC_ADVANCE_RIP();
2472 IEM_MC_END();
2473 }
2474 return VINF_SUCCESS;
2475
2476 case IEMMODE_32BIT:
2477 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2478 {
2479 /* register operand */
2480 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2481 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2482
2483 IEM_MC_BEGIN(3, 1);
2484 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2485 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2486 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2487 IEM_MC_LOCAL(uint32_t, u32Tmp);
2488
2489 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2490 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2491 IEM_MC_REF_EFLAGS(pEFlags);
2492 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2493 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2494
2495 IEM_MC_ADVANCE_RIP();
2496 IEM_MC_END();
2497 }
2498 else
2499 {
2500 /* memory operand */
2501 IEM_MC_BEGIN(3, 2);
2502 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2503 IEM_MC_ARG(uint32_t, u32Src, 1);
2504 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2505 IEM_MC_LOCAL(uint32_t, u32Tmp);
2506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2507
2508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2509 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2510 IEM_MC_ASSIGN(u32Src, u32Imm);
2511 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2512 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2513 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2514 IEM_MC_REF_EFLAGS(pEFlags);
2515 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2516 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2517
2518 IEM_MC_ADVANCE_RIP();
2519 IEM_MC_END();
2520 }
2521 return VINF_SUCCESS;
2522
2523 case IEMMODE_64BIT:
2524 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2525 {
2526 /* register operand */
2527 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2529
2530 IEM_MC_BEGIN(3, 1);
2531 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2532 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2533 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2534 IEM_MC_LOCAL(uint64_t, u64Tmp);
2535
2536 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2537 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2538 IEM_MC_REF_EFLAGS(pEFlags);
2539 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2540 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2541
2542 IEM_MC_ADVANCE_RIP();
2543 IEM_MC_END();
2544 }
2545 else
2546 {
2547 /* memory operand */
2548 IEM_MC_BEGIN(3, 2);
2549 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2550 IEM_MC_ARG(uint64_t, u64Src, 1);
2551 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2552 IEM_MC_LOCAL(uint64_t, u64Tmp);
2553 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2554
2555 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2556 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2557 IEM_MC_ASSIGN(u64Src, u64Imm);
2558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2559 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2560 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2561 IEM_MC_REF_EFLAGS(pEFlags);
2562 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2563 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2564
2565 IEM_MC_ADVANCE_RIP();
2566 IEM_MC_END();
2567 }
2568 return VINF_SUCCESS;
2569 }
2570 AssertFailedReturn(VERR_IEM_IPE_8);
2571}
2572
2573
2574/**
2575 * @opcode 0x6c
2576 */
2577FNIEMOP_DEF(iemOp_insb_Yb_DX)
2578{
2579 IEMOP_HLP_MIN_186();
2580 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2581 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2582 {
2583 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2584 switch (pVCpu->iem.s.enmEffAddrMode)
2585 {
2586 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2587 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2588 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2589 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2590 }
2591 }
2592 else
2593 {
2594 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2595 switch (pVCpu->iem.s.enmEffAddrMode)
2596 {
2597 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2598 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2599 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2601 }
2602 }
2603}
2604
2605
2606/**
2607 * @opcode 0x6d
2608 */
2609FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2610{
2611 IEMOP_HLP_MIN_186();
2612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2613 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2614 {
2615 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2616 switch (pVCpu->iem.s.enmEffOpSize)
2617 {
2618 case IEMMODE_16BIT:
2619 switch (pVCpu->iem.s.enmEffAddrMode)
2620 {
2621 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2622 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2623 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2624 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2625 }
2626 break;
2627 case IEMMODE_64BIT:
2628 case IEMMODE_32BIT:
2629 switch (pVCpu->iem.s.enmEffAddrMode)
2630 {
2631 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2632 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2633 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2634 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2635 }
2636 break;
2637 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2638 }
2639 }
2640 else
2641 {
2642 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2643 switch (pVCpu->iem.s.enmEffOpSize)
2644 {
2645 case IEMMODE_16BIT:
2646 switch (pVCpu->iem.s.enmEffAddrMode)
2647 {
2648 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2649 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2650 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2651 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2652 }
2653 break;
2654 case IEMMODE_64BIT:
2655 case IEMMODE_32BIT:
2656 switch (pVCpu->iem.s.enmEffAddrMode)
2657 {
2658 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2659 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2660 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2662 }
2663 break;
2664 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2665 }
2666 }
2667}
2668
2669
2670/**
2671 * @opcode 0x6e
2672 */
2673FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2674{
2675 IEMOP_HLP_MIN_186();
2676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2677 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2678 {
2679 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2680 switch (pVCpu->iem.s.enmEffAddrMode)
2681 {
2682 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2683 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2684 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2686 }
2687 }
2688 else
2689 {
2690 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2691 switch (pVCpu->iem.s.enmEffAddrMode)
2692 {
2693 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2697 }
2698 }
2699}
2700
2701
2702/**
2703 * @opcode 0x6f
2704 */
2705FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2706{
2707 IEMOP_HLP_MIN_186();
2708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2709 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2710 {
2711 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2712 switch (pVCpu->iem.s.enmEffOpSize)
2713 {
2714 case IEMMODE_16BIT:
2715 switch (pVCpu->iem.s.enmEffAddrMode)
2716 {
2717 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2718 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2719 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2721 }
2722 break;
2723 case IEMMODE_64BIT:
2724 case IEMMODE_32BIT:
2725 switch (pVCpu->iem.s.enmEffAddrMode)
2726 {
2727 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2728 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2729 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2730 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2731 }
2732 break;
2733 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2734 }
2735 }
2736 else
2737 {
2738 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2739 switch (pVCpu->iem.s.enmEffOpSize)
2740 {
2741 case IEMMODE_16BIT:
2742 switch (pVCpu->iem.s.enmEffAddrMode)
2743 {
2744 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2745 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2746 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2748 }
2749 break;
2750 case IEMMODE_64BIT:
2751 case IEMMODE_32BIT:
2752 switch (pVCpu->iem.s.enmEffAddrMode)
2753 {
2754 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2755 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2756 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2758 }
2759 break;
2760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2761 }
2762 }
2763}
2764
2765
2766/**
2767 * @opcode 0x70
2768 */
2769FNIEMOP_DEF(iemOp_jo_Jb)
2770{
2771 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2772 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2774 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2775
2776 IEM_MC_BEGIN(0, 0);
2777 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2778 IEM_MC_REL_JMP_S8(i8Imm);
2779 } IEM_MC_ELSE() {
2780 IEM_MC_ADVANCE_RIP();
2781 } IEM_MC_ENDIF();
2782 IEM_MC_END();
2783 return VINF_SUCCESS;
2784}
2785
2786
2787/**
2788 * @opcode 0x71
2789 */
2790FNIEMOP_DEF(iemOp_jno_Jb)
2791{
2792 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2793 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2795 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2796
2797 IEM_MC_BEGIN(0, 0);
2798 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2799 IEM_MC_ADVANCE_RIP();
2800 } IEM_MC_ELSE() {
2801 IEM_MC_REL_JMP_S8(i8Imm);
2802 } IEM_MC_ENDIF();
2803 IEM_MC_END();
2804 return VINF_SUCCESS;
2805}
2806
2807/**
2808 * @opcode 0x72
2809 */
2810FNIEMOP_DEF(iemOp_jc_Jb)
2811{
2812 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2813 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2814 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2815 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2816
2817 IEM_MC_BEGIN(0, 0);
2818 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2819 IEM_MC_REL_JMP_S8(i8Imm);
2820 } IEM_MC_ELSE() {
2821 IEM_MC_ADVANCE_RIP();
2822 } IEM_MC_ENDIF();
2823 IEM_MC_END();
2824 return VINF_SUCCESS;
2825}
2826
2827
2828/**
2829 * @opcode 0x73
2830 */
2831FNIEMOP_DEF(iemOp_jnc_Jb)
2832{
2833 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2834 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2836 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2837
2838 IEM_MC_BEGIN(0, 0);
2839 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2840 IEM_MC_ADVANCE_RIP();
2841 } IEM_MC_ELSE() {
2842 IEM_MC_REL_JMP_S8(i8Imm);
2843 } IEM_MC_ENDIF();
2844 IEM_MC_END();
2845 return VINF_SUCCESS;
2846}
2847
2848
2849/**
2850 * @opcode 0x74
2851 */
2852FNIEMOP_DEF(iemOp_je_Jb)
2853{
2854 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2855 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2857 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2858
2859 IEM_MC_BEGIN(0, 0);
2860 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2861 IEM_MC_REL_JMP_S8(i8Imm);
2862 } IEM_MC_ELSE() {
2863 IEM_MC_ADVANCE_RIP();
2864 } IEM_MC_ENDIF();
2865 IEM_MC_END();
2866 return VINF_SUCCESS;
2867}
2868
2869
2870/**
2871 * @opcode 0x75
2872 */
2873FNIEMOP_DEF(iemOp_jne_Jb)
2874{
2875 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2876 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2878 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2879
2880 IEM_MC_BEGIN(0, 0);
2881 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2882 IEM_MC_ADVANCE_RIP();
2883 } IEM_MC_ELSE() {
2884 IEM_MC_REL_JMP_S8(i8Imm);
2885 } IEM_MC_ENDIF();
2886 IEM_MC_END();
2887 return VINF_SUCCESS;
2888}
2889
2890
2891/**
2892 * @opcode 0x76
2893 */
2894FNIEMOP_DEF(iemOp_jbe_Jb)
2895{
2896 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2897 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2898 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2899 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2900
2901 IEM_MC_BEGIN(0, 0);
2902 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2903 IEM_MC_REL_JMP_S8(i8Imm);
2904 } IEM_MC_ELSE() {
2905 IEM_MC_ADVANCE_RIP();
2906 } IEM_MC_ENDIF();
2907 IEM_MC_END();
2908 return VINF_SUCCESS;
2909}
2910
2911
2912/**
2913 * @opcode 0x77
2914 */
2915FNIEMOP_DEF(iemOp_jnbe_Jb)
2916{
2917 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2918 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2920 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2921
2922 IEM_MC_BEGIN(0, 0);
2923 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2924 IEM_MC_ADVANCE_RIP();
2925 } IEM_MC_ELSE() {
2926 IEM_MC_REL_JMP_S8(i8Imm);
2927 } IEM_MC_ENDIF();
2928 IEM_MC_END();
2929 return VINF_SUCCESS;
2930}
2931
2932
2933/**
2934 * @opcode 0x78
2935 */
2936FNIEMOP_DEF(iemOp_js_Jb)
2937{
2938 IEMOP_MNEMONIC(js_Jb, "js Jb");
2939 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2940 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2941 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2942
2943 IEM_MC_BEGIN(0, 0);
2944 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2945 IEM_MC_REL_JMP_S8(i8Imm);
2946 } IEM_MC_ELSE() {
2947 IEM_MC_ADVANCE_RIP();
2948 } IEM_MC_ENDIF();
2949 IEM_MC_END();
2950 return VINF_SUCCESS;
2951}
2952
2953
2954/**
2955 * @opcode 0x79
2956 */
2957FNIEMOP_DEF(iemOp_jns_Jb)
2958{
2959 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2960 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2961 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2962 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2963
2964 IEM_MC_BEGIN(0, 0);
2965 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2966 IEM_MC_ADVANCE_RIP();
2967 } IEM_MC_ELSE() {
2968 IEM_MC_REL_JMP_S8(i8Imm);
2969 } IEM_MC_ENDIF();
2970 IEM_MC_END();
2971 return VINF_SUCCESS;
2972}
2973
2974
2975/**
2976 * @opcode 0x7a
2977 */
2978FNIEMOP_DEF(iemOp_jp_Jb)
2979{
2980 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2981 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2982 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2983 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2984
2985 IEM_MC_BEGIN(0, 0);
2986 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2987 IEM_MC_REL_JMP_S8(i8Imm);
2988 } IEM_MC_ELSE() {
2989 IEM_MC_ADVANCE_RIP();
2990 } IEM_MC_ENDIF();
2991 IEM_MC_END();
2992 return VINF_SUCCESS;
2993}
2994
2995
2996/**
2997 * @opcode 0x7b
2998 */
2999FNIEMOP_DEF(iemOp_jnp_Jb)
3000{
3001 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
3002 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3004 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3005
3006 IEM_MC_BEGIN(0, 0);
3007 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
3008 IEM_MC_ADVANCE_RIP();
3009 } IEM_MC_ELSE() {
3010 IEM_MC_REL_JMP_S8(i8Imm);
3011 } IEM_MC_ENDIF();
3012 IEM_MC_END();
3013 return VINF_SUCCESS;
3014}
3015
3016
3017/**
3018 * @opcode 0x7c
3019 */
3020FNIEMOP_DEF(iemOp_jl_Jb)
3021{
3022 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
3023 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3025 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3026
3027 IEM_MC_BEGIN(0, 0);
3028 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3029 IEM_MC_REL_JMP_S8(i8Imm);
3030 } IEM_MC_ELSE() {
3031 IEM_MC_ADVANCE_RIP();
3032 } IEM_MC_ENDIF();
3033 IEM_MC_END();
3034 return VINF_SUCCESS;
3035}
3036
3037
3038/**
3039 * @opcode 0x7d
3040 */
3041FNIEMOP_DEF(iemOp_jnl_Jb)
3042{
3043 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
3044 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3045 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3046 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3047
3048 IEM_MC_BEGIN(0, 0);
3049 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
3050 IEM_MC_ADVANCE_RIP();
3051 } IEM_MC_ELSE() {
3052 IEM_MC_REL_JMP_S8(i8Imm);
3053 } IEM_MC_ENDIF();
3054 IEM_MC_END();
3055 return VINF_SUCCESS;
3056}
3057
3058
3059/**
3060 * @opcode 0x7e
3061 */
3062FNIEMOP_DEF(iemOp_jle_Jb)
3063{
3064 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
3065 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3066 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3067 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3068
3069 IEM_MC_BEGIN(0, 0);
3070 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3071 IEM_MC_REL_JMP_S8(i8Imm);
3072 } IEM_MC_ELSE() {
3073 IEM_MC_ADVANCE_RIP();
3074 } IEM_MC_ENDIF();
3075 IEM_MC_END();
3076 return VINF_SUCCESS;
3077}
3078
3079
3080/**
3081 * @opcode 0x7f
3082 */
3083FNIEMOP_DEF(iemOp_jnle_Jb)
3084{
3085 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
3086 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
3087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3088 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
3089
3090 IEM_MC_BEGIN(0, 0);
3091 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
3092 IEM_MC_ADVANCE_RIP();
3093 } IEM_MC_ELSE() {
3094 IEM_MC_REL_JMP_S8(i8Imm);
3095 } IEM_MC_ENDIF();
3096 IEM_MC_END();
3097 return VINF_SUCCESS;
3098}
3099
3100
3101/**
3102 * @opcode 0x80
3103 */
3104FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
3105{
3106 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3107 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3108 {
3109 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
3110 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
3111 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
3112 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
3113 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
3114 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
3115 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
3116 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
3117 }
3118 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3119
3120 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3121 {
3122 /* register target */
3123 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 IEM_MC_BEGIN(3, 0);
3126 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3127 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3128 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3129
3130 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3131 IEM_MC_REF_EFLAGS(pEFlags);
3132 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3133
3134 IEM_MC_ADVANCE_RIP();
3135 IEM_MC_END();
3136 }
3137 else
3138 {
3139 /* memory target */
3140 uint32_t fAccess;
3141 if (pImpl->pfnLockedU8)
3142 fAccess = IEM_ACCESS_DATA_RW;
3143 else /* CMP */
3144 fAccess = IEM_ACCESS_DATA_R;
3145 IEM_MC_BEGIN(3, 2);
3146 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
3147 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3149
3150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3151 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3152 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
3153 if (pImpl->pfnLockedU8)
3154 IEMOP_HLP_DONE_DECODING();
3155 else
3156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3157
3158 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3159 IEM_MC_FETCH_EFLAGS(EFlags);
3160 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3161 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
3162 else
3163 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
3164
3165 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
3166 IEM_MC_COMMIT_EFLAGS(EFlags);
3167 IEM_MC_ADVANCE_RIP();
3168 IEM_MC_END();
3169 }
3170 return VINF_SUCCESS;
3171}
3172
3173
3174/**
3175 * @opcode 0x81
3176 */
3177FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3178{
3179 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3180 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3181 {
3182 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3183 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3184 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3185 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3186 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3187 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3188 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3189 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3190 }
3191 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3192
3193 switch (pVCpu->iem.s.enmEffOpSize)
3194 {
3195 case IEMMODE_16BIT:
3196 {
3197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3198 {
3199 /* register target */
3200 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3202 IEM_MC_BEGIN(3, 0);
3203 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3204 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3205 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3206
3207 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3208 IEM_MC_REF_EFLAGS(pEFlags);
3209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3210
3211 IEM_MC_ADVANCE_RIP();
3212 IEM_MC_END();
3213 }
3214 else
3215 {
3216 /* memory target */
3217 uint32_t fAccess;
3218 if (pImpl->pfnLockedU16)
3219 fAccess = IEM_ACCESS_DATA_RW;
3220                else /* CMP */
3221 fAccess = IEM_ACCESS_DATA_R;
3222 IEM_MC_BEGIN(3, 2);
3223 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3224 IEM_MC_ARG(uint16_t, u16Src, 1);
3225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3229 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3230 IEM_MC_ASSIGN(u16Src, u16Imm);
3231 if (pImpl->pfnLockedU16)
3232 IEMOP_HLP_DONE_DECODING();
3233 else
3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3236 IEM_MC_FETCH_EFLAGS(EFlags);
3237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3239 else
3240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3241
3242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3243 IEM_MC_COMMIT_EFLAGS(EFlags);
3244 IEM_MC_ADVANCE_RIP();
3245 IEM_MC_END();
3246 }
3247 break;
3248 }
3249
3250 case IEMMODE_32BIT:
3251 {
3252 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3253 {
3254 /* register target */
3255 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3257 IEM_MC_BEGIN(3, 0);
3258 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3259 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3260 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3261
3262 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3263 IEM_MC_REF_EFLAGS(pEFlags);
3264 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3265 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3266
3267 IEM_MC_ADVANCE_RIP();
3268 IEM_MC_END();
3269 }
3270 else
3271 {
3272 /* memory target */
3273 uint32_t fAccess;
3274 if (pImpl->pfnLockedU32)
3275 fAccess = IEM_ACCESS_DATA_RW;
3276                else /* CMP */
3277 fAccess = IEM_ACCESS_DATA_R;
3278 IEM_MC_BEGIN(3, 2);
3279 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3280 IEM_MC_ARG(uint32_t, u32Src, 1);
3281 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3282 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3283
3284 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3285 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3286 IEM_MC_ASSIGN(u32Src, u32Imm);
3287 if (pImpl->pfnLockedU32)
3288 IEMOP_HLP_DONE_DECODING();
3289 else
3290 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3291 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3292 IEM_MC_FETCH_EFLAGS(EFlags);
3293 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3294 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3295 else
3296 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3297
3298 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3299 IEM_MC_COMMIT_EFLAGS(EFlags);
3300 IEM_MC_ADVANCE_RIP();
3301 IEM_MC_END();
3302 }
3303 break;
3304 }
3305
3306 case IEMMODE_64BIT:
3307 {
3308 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3309 {
3310 /* register target */
3311 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3312 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3313 IEM_MC_BEGIN(3, 0);
3314 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3315 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3316 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3317
3318 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3319 IEM_MC_REF_EFLAGS(pEFlags);
3320 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3321
3322 IEM_MC_ADVANCE_RIP();
3323 IEM_MC_END();
3324 }
3325 else
3326 {
3327 /* memory target */
3328 uint32_t fAccess;
3329 if (pImpl->pfnLockedU64)
3330 fAccess = IEM_ACCESS_DATA_RW;
3331 else /* CMP */
3332 fAccess = IEM_ACCESS_DATA_R;
3333 IEM_MC_BEGIN(3, 2);
3334 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3335 IEM_MC_ARG(uint64_t, u64Src, 1);
3336 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3337 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3338
3339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3340 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3341 if (pImpl->pfnLockedU64)
3342 IEMOP_HLP_DONE_DECODING();
3343 else
3344 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3345 IEM_MC_ASSIGN(u64Src, u64Imm);
3346 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3347 IEM_MC_FETCH_EFLAGS(EFlags);
3348 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3349 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3350 else
3351 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3352
3353 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3354 IEM_MC_COMMIT_EFLAGS(EFlags);
3355 IEM_MC_ADVANCE_RIP();
3356 IEM_MC_END();
3357 }
3358 break;
3359 }
3360 }
3361 return VINF_SUCCESS;
3362}
3363
3364
3365/**
3366 * @opcode 0x82
3367 * @opmnemonic grp1_82
3368 * @opgroup og_groups
3369 */
3370FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3371{
3372 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3373 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3374}
3375
3376
3377/**
3378 * @opcode 0x83
3379 */
3380FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3381{
3382 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3383 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3384 {
3385 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3386 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3387 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3388 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3389 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3390 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3391 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3392 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3393 }
3394    /* Note! The OR, AND and XOR instructions seem to be present on CPUs prior
3395       to the 386 even though they are absent from the Intel reference manuals
3396       and some 3rd party opcode listings. */
3397 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3398
3399 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3400 {
3401 /*
3402 * Register target
3403 */
3404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3405 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3406 switch (pVCpu->iem.s.enmEffOpSize)
3407 {
3408 case IEMMODE_16BIT:
3409 {
3410 IEM_MC_BEGIN(3, 0);
3411 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3412 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3413 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3414
3415 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3416 IEM_MC_REF_EFLAGS(pEFlags);
3417 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3418
3419 IEM_MC_ADVANCE_RIP();
3420 IEM_MC_END();
3421 break;
3422 }
3423
3424 case IEMMODE_32BIT:
3425 {
3426 IEM_MC_BEGIN(3, 0);
3427 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3428 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3429 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3430
3431 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3432 IEM_MC_REF_EFLAGS(pEFlags);
3433 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3434 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3435
3436 IEM_MC_ADVANCE_RIP();
3437 IEM_MC_END();
3438 break;
3439 }
3440
3441 case IEMMODE_64BIT:
3442 {
3443 IEM_MC_BEGIN(3, 0);
3444 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3445 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3446 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3447
3448 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3449 IEM_MC_REF_EFLAGS(pEFlags);
3450 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3451
3452 IEM_MC_ADVANCE_RIP();
3453 IEM_MC_END();
3454 break;
3455 }
3456 }
3457 }
3458 else
3459 {
3460 /*
3461 * Memory target.
3462 */
3463 uint32_t fAccess;
3464 if (pImpl->pfnLockedU16)
3465 fAccess = IEM_ACCESS_DATA_RW;
3466 else /* CMP */
3467 fAccess = IEM_ACCESS_DATA_R;
3468
3469 switch (pVCpu->iem.s.enmEffOpSize)
3470 {
3471 case IEMMODE_16BIT:
3472 {
3473 IEM_MC_BEGIN(3, 2);
3474 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3475 IEM_MC_ARG(uint16_t, u16Src, 1);
3476 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3477 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3478
3479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3480 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3481 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3482 if (pImpl->pfnLockedU16)
3483 IEMOP_HLP_DONE_DECODING();
3484 else
3485 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3486 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3487 IEM_MC_FETCH_EFLAGS(EFlags);
3488 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3489 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3490 else
3491 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3492
3493 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3494 IEM_MC_COMMIT_EFLAGS(EFlags);
3495 IEM_MC_ADVANCE_RIP();
3496 IEM_MC_END();
3497 break;
3498 }
3499
3500 case IEMMODE_32BIT:
3501 {
3502 IEM_MC_BEGIN(3, 2);
3503 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3504 IEM_MC_ARG(uint32_t, u32Src, 1);
3505 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3507
3508 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3509 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3510 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3511 if (pImpl->pfnLockedU32)
3512 IEMOP_HLP_DONE_DECODING();
3513 else
3514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3515 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3516 IEM_MC_FETCH_EFLAGS(EFlags);
3517 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3518 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3519 else
3520 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3521
3522 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3523 IEM_MC_COMMIT_EFLAGS(EFlags);
3524 IEM_MC_ADVANCE_RIP();
3525 IEM_MC_END();
3526 break;
3527 }
3528
3529 case IEMMODE_64BIT:
3530 {
3531 IEM_MC_BEGIN(3, 2);
3532 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3533 IEM_MC_ARG(uint64_t, u64Src, 1);
3534 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3535 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3536
3537 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3538 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3539 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3540 if (pImpl->pfnLockedU64)
3541 IEMOP_HLP_DONE_DECODING();
3542 else
3543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3544 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3545 IEM_MC_FETCH_EFLAGS(EFlags);
3546 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3547 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3548 else
3549 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3550
3551 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3552 IEM_MC_COMMIT_EFLAGS(EFlags);
3553 IEM_MC_ADVANCE_RIP();
3554 IEM_MC_END();
3555 break;
3556 }
3557 }
3558 }
3559 return VINF_SUCCESS;
3560}
3561
3562
3563/**
3564 * @opcode 0x84
3565 */
3566FNIEMOP_DEF(iemOp_test_Eb_Gb)
3567{
3568 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3569 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3570 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3571}
3572
3573
3574/**
3575 * @opcode 0x85
3576 */
3577FNIEMOP_DEF(iemOp_test_Ev_Gv)
3578{
3579 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3580 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3581 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3582}
3583
3584
3585/**
3586 * @opcode 0x86
3587 */
3588FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3589{
3590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3591 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3592
3593 /*
3594 * If rm is denoting a register, no more instruction bytes.
3595 */
3596 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3597 {
3598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3599
3600 IEM_MC_BEGIN(0, 2);
3601 IEM_MC_LOCAL(uint8_t, uTmp1);
3602 IEM_MC_LOCAL(uint8_t, uTmp2);
3603
3604 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3605 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3606 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3607 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3608
3609 IEM_MC_ADVANCE_RIP();
3610 IEM_MC_END();
3611 }
3612 else
3613 {
3614 /*
3615 * We're accessing memory.
3616 */
3617/** @todo the register must be committed separately! */
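    /* Architectural background (general x86 behaviour, not derived from this
       code): XCHG with a memory operand is implicitly locked even without a
       LOCK prefix, which would explain why this path doesn't branch on
       IEM_OP_PRF_LOCK the way the Grp1 memory operators do. */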
3618 IEM_MC_BEGIN(2, 2);
3619 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3620 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3621 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3622
3623 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3624 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3625 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3626 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
3627 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3628
3629 IEM_MC_ADVANCE_RIP();
3630 IEM_MC_END();
3631 }
3632 return VINF_SUCCESS;
3633}
3634
3635
3636/**
3637 * @opcode 0x87
3638 */
3639FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3640{
3641 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3642 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3643
3644 /*
3645 * If rm is denoting a register, no more instruction bytes.
3646 */
3647 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3648 {
3649 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3650
3651 switch (pVCpu->iem.s.enmEffOpSize)
3652 {
3653 case IEMMODE_16BIT:
3654 IEM_MC_BEGIN(0, 2);
3655 IEM_MC_LOCAL(uint16_t, uTmp1);
3656 IEM_MC_LOCAL(uint16_t, uTmp2);
3657
3658 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3659 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3660 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3661 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3662
3663 IEM_MC_ADVANCE_RIP();
3664 IEM_MC_END();
3665 return VINF_SUCCESS;
3666
3667 case IEMMODE_32BIT:
3668 IEM_MC_BEGIN(0, 2);
3669 IEM_MC_LOCAL(uint32_t, uTmp1);
3670 IEM_MC_LOCAL(uint32_t, uTmp2);
3671
3672 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3673 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3674 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3675 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3676
3677 IEM_MC_ADVANCE_RIP();
3678 IEM_MC_END();
3679 return VINF_SUCCESS;
3680
3681 case IEMMODE_64BIT:
3682 IEM_MC_BEGIN(0, 2);
3683 IEM_MC_LOCAL(uint64_t, uTmp1);
3684 IEM_MC_LOCAL(uint64_t, uTmp2);
3685
3686 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3687 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3688 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3689 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3690
3691 IEM_MC_ADVANCE_RIP();
3692 IEM_MC_END();
3693 return VINF_SUCCESS;
3694
3695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3696 }
3697 }
3698 else
3699 {
3700 /*
3701 * We're accessing memory.
3702 */
3703 switch (pVCpu->iem.s.enmEffOpSize)
3704 {
3705/** @todo the register must be committed separately! */
3706 case IEMMODE_16BIT:
3707 IEM_MC_BEGIN(2, 2);
3708 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3709 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3710 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3711
3712 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3713 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3714 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3715 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
3716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3717
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 return VINF_SUCCESS;
3721
3722 case IEMMODE_32BIT:
3723 IEM_MC_BEGIN(2, 2);
3724 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3725 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3726 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3727
3728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3729 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3730 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3731 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
3732 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3733
3734 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3735 IEM_MC_ADVANCE_RIP();
3736 IEM_MC_END();
3737 return VINF_SUCCESS;
3738
3739 case IEMMODE_64BIT:
3740 IEM_MC_BEGIN(2, 2);
3741 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3742 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3744
3745 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3746 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3747 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3748 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3749 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3750
3751 IEM_MC_ADVANCE_RIP();
3752 IEM_MC_END();
3753 return VINF_SUCCESS;
3754
3755 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3756 }
3757 }
3758}
3759
3760
3761/**
3762 * @opcode 0x88
3763 */
3764FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3765{
3766 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3767
3768 uint8_t bRm;
3769 IEM_OPCODE_GET_NEXT_U8(&bRm);
3770
3771 /*
3772 * If rm is denoting a register, no more instruction bytes.
3773 */
3774 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3775 {
3776 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3777 IEM_MC_BEGIN(0, 1);
3778 IEM_MC_LOCAL(uint8_t, u8Value);
3779 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3780 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3781 IEM_MC_ADVANCE_RIP();
3782 IEM_MC_END();
3783 }
3784 else
3785 {
3786 /*
3787 * We're writing a register to memory.
3788 */
3789 IEM_MC_BEGIN(0, 2);
3790 IEM_MC_LOCAL(uint8_t, u8Value);
3791 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3792 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3793 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3794 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3795 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3796 IEM_MC_ADVANCE_RIP();
3797 IEM_MC_END();
3798 }
3799 return VINF_SUCCESS;
3801}
3802
3803
3804/**
3805 * @opcode 0x89
3806 */
3807FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3808{
3809 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3810
3811 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3812
3813 /*
3814 * If rm is denoting a register, no more instruction bytes.
3815 */
3816 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3817 {
3818 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3819 switch (pVCpu->iem.s.enmEffOpSize)
3820 {
3821 case IEMMODE_16BIT:
3822 IEM_MC_BEGIN(0, 1);
3823 IEM_MC_LOCAL(uint16_t, u16Value);
3824 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3825 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3826 IEM_MC_ADVANCE_RIP();
3827 IEM_MC_END();
3828 break;
3829
3830 case IEMMODE_32BIT:
3831 IEM_MC_BEGIN(0, 1);
3832 IEM_MC_LOCAL(uint32_t, u32Value);
3833 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3834 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3835 IEM_MC_ADVANCE_RIP();
3836 IEM_MC_END();
3837 break;
3838
3839 case IEMMODE_64BIT:
3840 IEM_MC_BEGIN(0, 1);
3841 IEM_MC_LOCAL(uint64_t, u64Value);
3842 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3843 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3844 IEM_MC_ADVANCE_RIP();
3845 IEM_MC_END();
3846 break;
3847 }
3848 }
3849 else
3850 {
3851 /*
3852 * We're writing a register to memory.
3853 */
3854 switch (pVCpu->iem.s.enmEffOpSize)
3855 {
3856 case IEMMODE_16BIT:
3857 IEM_MC_BEGIN(0, 2);
3858 IEM_MC_LOCAL(uint16_t, u16Value);
3859 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3863 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3864 IEM_MC_ADVANCE_RIP();
3865 IEM_MC_END();
3866 break;
3867
3868 case IEMMODE_32BIT:
3869 IEM_MC_BEGIN(0, 2);
3870 IEM_MC_LOCAL(uint32_t, u32Value);
3871 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3875 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3876 IEM_MC_ADVANCE_RIP();
3877 IEM_MC_END();
3878 break;
3879
3880 case IEMMODE_64BIT:
3881 IEM_MC_BEGIN(0, 2);
3882 IEM_MC_LOCAL(uint64_t, u64Value);
3883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3887 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3888 IEM_MC_ADVANCE_RIP();
3889 IEM_MC_END();
3890 break;
3891 }
3892 }
3893 return VINF_SUCCESS;
3894}
3895
3896
3897/**
3898 * @opcode 0x8a
3899 */
3900FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3901{
3902 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3903
3904 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3905
3906 /*
3907 * If rm is denoting a register, no more instruction bytes.
3908 */
3909 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3910 {
3911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3912 IEM_MC_BEGIN(0, 1);
3913 IEM_MC_LOCAL(uint8_t, u8Value);
3914 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3915 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3916 IEM_MC_ADVANCE_RIP();
3917 IEM_MC_END();
3918 }
3919 else
3920 {
3921 /*
3922 * We're loading a register from memory.
3923 */
3924 IEM_MC_BEGIN(0, 2);
3925 IEM_MC_LOCAL(uint8_t, u8Value);
3926 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3927 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3929 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3930 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3931 IEM_MC_ADVANCE_RIP();
3932 IEM_MC_END();
3933 }
3934 return VINF_SUCCESS;
3935}
3936
3937
3938/**
3939 * @opcode 0x8b
3940 */
3941FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3942{
3943 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3944
3945 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3946
3947 /*
3948 * If rm is denoting a register, no more instruction bytes.
3949 */
3950 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3951 {
3952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3953 switch (pVCpu->iem.s.enmEffOpSize)
3954 {
3955 case IEMMODE_16BIT:
3956 IEM_MC_BEGIN(0, 1);
3957 IEM_MC_LOCAL(uint16_t, u16Value);
3958 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3959 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3960 IEM_MC_ADVANCE_RIP();
3961 IEM_MC_END();
3962 break;
3963
3964 case IEMMODE_32BIT:
3965 IEM_MC_BEGIN(0, 1);
3966 IEM_MC_LOCAL(uint32_t, u32Value);
3967 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3968 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3969 IEM_MC_ADVANCE_RIP();
3970 IEM_MC_END();
3971 break;
3972
3973 case IEMMODE_64BIT:
3974 IEM_MC_BEGIN(0, 1);
3975 IEM_MC_LOCAL(uint64_t, u64Value);
3976 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3977 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3978 IEM_MC_ADVANCE_RIP();
3979 IEM_MC_END();
3980 break;
3981 }
3982 }
3983 else
3984 {
3985 /*
3986 * We're loading a register from memory.
3987 */
3988 switch (pVCpu->iem.s.enmEffOpSize)
3989 {
3990 case IEMMODE_16BIT:
3991 IEM_MC_BEGIN(0, 2);
3992 IEM_MC_LOCAL(uint16_t, u16Value);
3993 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3994 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3995 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3996 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3997 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3998 IEM_MC_ADVANCE_RIP();
3999 IEM_MC_END();
4000 break;
4001
4002 case IEMMODE_32BIT:
4003 IEM_MC_BEGIN(0, 2);
4004 IEM_MC_LOCAL(uint32_t, u32Value);
4005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4007 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4008 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4009 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
4010 IEM_MC_ADVANCE_RIP();
4011 IEM_MC_END();
4012 break;
4013
4014 case IEMMODE_64BIT:
4015 IEM_MC_BEGIN(0, 2);
4016 IEM_MC_LOCAL(uint64_t, u64Value);
4017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4021 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
4022 IEM_MC_ADVANCE_RIP();
4023 IEM_MC_END();
4024 break;
4025 }
4026 }
4027 return VINF_SUCCESS;
4028}
4029
4030
4031/**
4032 * opcode 0x63
4033 * @todo Table fixme
4034 */
4035FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4036{
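    /* Outside 64-bit mode this is ARPL. In 64-bit mode it is MOVSXD; without
       REX.W there is nothing to sign extend and it behaves like a plain MOV,
       so we can reuse that decoder for the non-64-bit operand sizes. */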
4037 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4038 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4039 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4040 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4041 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4042}
4043
4044
4045/**
4046 * @opcode 0x8c
4047 */
4048FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4049{
4050 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4051
4052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4053
4054 /*
4055 * Check that the source segment register is valid. The REX.R prefix is ignored.
4056 */
4057 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4058 if ( iSegReg > X86_SREG_GS)
4059 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4060
4061 /*
4062 * If rm is denoting a register, no more instruction bytes.
4063 * In that case, the operand size is respected and the upper bits are
4064 * cleared (starting with some Pentium models).
4065 */
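    /* E.g. with a 32-bit operand size, 'mov eax, ss' stores the 16-bit selector
       zero extended to all 32 bits of eax on such CPUs. */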
4066 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4067 {
4068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4069 switch (pVCpu->iem.s.enmEffOpSize)
4070 {
4071 case IEMMODE_16BIT:
4072 IEM_MC_BEGIN(0, 1);
4073 IEM_MC_LOCAL(uint16_t, u16Value);
4074 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4075 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
4076 IEM_MC_ADVANCE_RIP();
4077 IEM_MC_END();
4078 break;
4079
4080 case IEMMODE_32BIT:
4081 IEM_MC_BEGIN(0, 1);
4082 IEM_MC_LOCAL(uint32_t, u32Value);
4083 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4084 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
4085 IEM_MC_ADVANCE_RIP();
4086 IEM_MC_END();
4087 break;
4088
4089 case IEMMODE_64BIT:
4090 IEM_MC_BEGIN(0, 1);
4091 IEM_MC_LOCAL(uint64_t, u64Value);
4092 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4093 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
4094 IEM_MC_ADVANCE_RIP();
4095 IEM_MC_END();
4096 break;
4097 }
4098 }
4099 else
4100 {
4101 /*
4102 * We're saving the register to memory. The access is word-sized
4103 * regardless of operand size prefixes.
4104 */
4105#if 0 /* not necessary */
4106 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4107#endif
4108 IEM_MC_BEGIN(0, 2);
4109 IEM_MC_LOCAL(uint16_t, u16Value);
4110 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4111 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4113 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4114 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4115 IEM_MC_ADVANCE_RIP();
4116 IEM_MC_END();
4117 }
4118 return VINF_SUCCESS;
4119}
4120
4121
4122
4123
4124/**
4125 * @opcode 0x8d
4126 */
4127FNIEMOP_DEF(iemOp_lea_Gv_M)
4128{
4129 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4130 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4131 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4132 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4133
4134 switch (pVCpu->iem.s.enmEffOpSize)
4135 {
4136 case IEMMODE_16BIT:
4137 IEM_MC_BEGIN(0, 2);
4138 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4139 IEM_MC_LOCAL(uint16_t, u16Cast);
4140 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4141 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4142 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4143 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
4144 IEM_MC_ADVANCE_RIP();
4145 IEM_MC_END();
4146 return VINF_SUCCESS;
4147
4148 case IEMMODE_32BIT:
4149 IEM_MC_BEGIN(0, 2);
4150 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4151 IEM_MC_LOCAL(uint32_t, u32Cast);
4152 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4154 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4155 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
4156 IEM_MC_ADVANCE_RIP();
4157 IEM_MC_END();
4158 return VINF_SUCCESS;
4159
4160 case IEMMODE_64BIT:
4161 IEM_MC_BEGIN(0, 1);
4162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4163 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4164 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4165 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
4166 IEM_MC_ADVANCE_RIP();
4167 IEM_MC_END();
4168 return VINF_SUCCESS;
4169 }
4170 AssertFailedReturn(VERR_IEM_IPE_7);
4171}
4172
4173
4174/**
4175 * @opcode 0x8e
4176 */
4177FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4178{
4179 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4180
4181 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4182
4183 /*
4184 * The practical operand size is 16-bit.
4185 */
4186#if 0 /* not necessary */
4187 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4188#endif
4189
4190 /*
4191 * Check that the destination register exists and can be used with this
4192 * instruction. The REX.R prefix is ignored.
4193 */
4194 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4195 if ( iSegReg == X86_SREG_CS
4196 || iSegReg > X86_SREG_GS)
4197 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4198
4199 /*
4200 * If rm is denoting a register, no more instruction bytes.
4201 */
4202 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4203 {
4204 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4205 IEM_MC_BEGIN(2, 0);
4206 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4207 IEM_MC_ARG(uint16_t, u16Value, 1);
4208 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4209 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4210 IEM_MC_END();
4211 }
4212 else
4213 {
4214 /*
4215 * We're loading the register from memory. The access is word-sized
4216 * regardless of operand size prefixes.
4217 */
4218 IEM_MC_BEGIN(2, 1);
4219 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4220 IEM_MC_ARG(uint16_t, u16Value, 1);
4221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4224 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4225 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4226 IEM_MC_END();
4227 }
4228 return VINF_SUCCESS;
4229}
4230
4231
4232/** Opcode 0x8f /0. */
4233FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
4234{
4235 /* This bugger is rather annoying as it requires rSP to be updated before
4236 doing the effective address calculations. Will eventually require a
4237 split between the R/M+SIB decoding and the effective address
4238 calculation - which is something that is required for any attempt at
4239 reusing this code for a recompiler. It may also be good to have if we
4240 need to delay #UD exception caused by invalid lock prefixes.
4241
4242 For now, we'll do a mostly safe interpreter-only implementation here. */
4243 /** @todo What's the deal with the 'reg' field and pop Ev? Ignoring it for
4244 * now until tests show it's checked. */
4245 IEMOP_MNEMONIC(pop_Ev, "pop Ev");
4246
4247 /* Register access is relatively easy and can share code. */
4248 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4249 return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4250
4251 /*
4252 * Memory target.
4253 *
4254 * Intel says that RSP is incremented before it's used in any effective
4255 * address calculations. This means some serious extra annoyance here since
4256 * we decode and calculate the effective address in one step and like to
4257 * delay committing registers till everything is done.
4258 *
4259 * So, we'll decode and calculate the effective address twice. This will
4260 * require some recoding if turned into a recompiler.
4261 */
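#if 0 /* Illustrative sketch (not built): what the pre-increment means for
         'pop qword [rsp]' in 64-bit mode - the load uses the old RSP, but the
         effective address is calculated with the new one. */
    uint64_t auStack[2] = { UINT64_C(0x1122334455667788), 0 };
    uint64_t uRsp       = 0;                 /* standing in for RSP = 0x1000 */
    uint64_t const uVal = auStack[uRsp / 8]; /* pop reads at the old top of stack... */
    uRsp += 8;                               /* ...then RSP is incremented... */
    auStack[uRsp / 8] = uVal;                /* ...and [rsp] resolves with the new RSP (0x1008). */
#endif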
4262 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */
4263
4264#ifndef TST_IEM_CHECK_MC
4265 /* Calc effective address with modified ESP. */
4266/** @todo testcase */
4267 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
4268 RTGCPTR GCPtrEff;
4269 VBOXSTRICTRC rcStrict;
4270 switch (pVCpu->iem.s.enmEffOpSize)
4271 {
4272 case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
4273 case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
4274 case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
4275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4276 }
4277 if (rcStrict != VINF_SUCCESS)
4278 return rcStrict;
4279 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4280
4281 /* Perform the operation - this should be CImpl. */
4282 RTUINT64U TmpRsp;
4283 TmpRsp.u = pCtx->rsp;
4284 switch (pVCpu->iem.s.enmEffOpSize)
4285 {
4286 case IEMMODE_16BIT:
4287 {
4288 uint16_t u16Value;
4289 rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
4290 if (rcStrict == VINF_SUCCESS)
4291 rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
4292 break;
4293 }
4294
4295 case IEMMODE_32BIT:
4296 {
4297 uint32_t u32Value;
4298 rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
4299 if (rcStrict == VINF_SUCCESS)
4300 rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
4301 break;
4302 }
4303
4304 case IEMMODE_64BIT:
4305 {
4306 uint64_t u64Value;
4307 rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
4308 if (rcStrict == VINF_SUCCESS)
4309 rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
4310 break;
4311 }
4312
4313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4314 }
4315 if (rcStrict == VINF_SUCCESS)
4316 {
4317 pCtx->rsp = TmpRsp.u;
4318 iemRegUpdateRipAndClearRF(pVCpu);
4319 }
4320 return rcStrict;
4321
4322#else
4323 return VERR_IEM_IPE_2;
4324#endif
4325}
4326
4327
4328/**
4329 * @opcode 0x8f
4330 */
4331FNIEMOP_DEF(iemOp_Grp1A__xop)
4332{
4333 /*
4334 * AMD has defined /1 thru /7 as the XOP prefix. The prefix is similar to the
4335 * three-byte VEX prefix, except that the mmmmm field cannot have the values
4336 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4337 */
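#if 0 /* Illustrative sketch (not built) of the XOP prefix fields as decoded below.
         Byte 1 (bRm here): ~R(7), ~X(6), ~B(5), mmmmm(4:0) selecting the opcode map;
         byte 2 (bXop2):    W(7), ~vvvv(6:3), L(2), pp(1:0). */
# define XOP_MMMMM(a_bXop1) ((a_bXop1) & 0x1f)        /* opcode map; only 8, 9 and 10 are defined */
# define XOP_VVVV(a_bXop2)  ((~(a_bXop2) >> 3) & 0xf) /* extra register operand, ones' complement encoded */
# define XOP_PP(a_bXop2)    ((a_bXop2) & 3)           /* implied prefix: 0=none, 1=66h, 2=F3h, 3=F2h */
#endif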
4338 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4339 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4340 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4341
4342 IEMOP_MNEMONIC(xop, "xop");
4343 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4344 {
4345 /** @todo Test when exactly the XOP conformance checks kick in during
4346 * instruction decoding and fetching (using \#PF). */
4347 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4348 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4349 if ( ( pVCpu->iem.s.fPrefixes
4350 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4351 == 0)
4352 {
4353 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4354 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4355 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4356 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4357 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4358 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4359 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4360 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4361 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4362
4363 /** @todo XOP: Just use new tables and decoders. */
4364 switch (bRm & 0x1f)
4365 {
4366 case 8: /* xop opcode map 8. */
4367 IEMOP_BITCH_ABOUT_STUB();
4368 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4369
4370 case 9: /* xop opcode map 9. */
4371 IEMOP_BITCH_ABOUT_STUB();
4372 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4373
4374 case 10: /* xop opcode map 10. */
4375 IEMOP_BITCH_ABOUT_STUB();
4376 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4377
4378 default:
4379 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4380 return IEMOP_RAISE_INVALID_OPCODE();
4381 }
4382 }
4383 else
4384 Log(("XOP: Invalid prefix mix!\n"));
4385 }
4386 else
4387 Log(("XOP: XOP support disabled!\n"));
4388 return IEMOP_RAISE_INVALID_OPCODE();
4389}
4390
4391
4392/**
4393 * Common 'xchg reg,rAX' helper.
4394 */
4395FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4396{
4397 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4398
4399 iReg |= pVCpu->iem.s.uRexB;
4400 switch (pVCpu->iem.s.enmEffOpSize)
4401 {
4402 case IEMMODE_16BIT:
4403 IEM_MC_BEGIN(0, 2);
4404 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4405 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4406 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4407 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4408 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4409 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4410 IEM_MC_ADVANCE_RIP();
4411 IEM_MC_END();
4412 return VINF_SUCCESS;
4413
4414 case IEMMODE_32BIT:
4415 IEM_MC_BEGIN(0, 2);
4416 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4417 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4418 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4419 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4420 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4421 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4422 IEM_MC_ADVANCE_RIP();
4423 IEM_MC_END();
4424 return VINF_SUCCESS;
4425
4426 case IEMMODE_64BIT:
4427 IEM_MC_BEGIN(0, 2);
4428 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4429 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4430 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4431 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4432 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4433 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4434 IEM_MC_ADVANCE_RIP();
4435 IEM_MC_END();
4436 return VINF_SUCCESS;
4437
4438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4439 }
4440}
4441
4442
4443/**
4444 * @opcode 0x90
4445 */
4446FNIEMOP_DEF(iemOp_nop)
4447{
4448 /* With REX.B this is not a NOP: R8/R8D and RAX/EAX really are exchanged. */
4449 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4450 {
4451 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4452 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4453 }
4454
4455 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4456 {
4457 IEMOP_MNEMONIC(pause, "pause");
4458#ifdef VBOX_WITH_NESTED_HWVIRT
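        /* The pause filter in short: each PAUSE decrements the filter counter and
           the intercept is only checked once the counter reaches zero. With the
           threshold extension the counter is refilled whenever two PAUSEs are
           further apart than the configured TSC delta, so only tight spin loops
           end up triggering the #VMEXIT. */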
4459 bool fCheckIntercept = true;
4460 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvmPauseFilter)
4461 {
4462 /* TSC based pause-filter thresholding. */
4463 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
4464 if ( IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvmPauseFilterThreshold
4465 && pCtx->hwvirt.svm.cPauseFilterThreshold > 0)
4466 {
4467 uint64_t const uTick = TMCpuTickGet(pVCpu);
4468 if (uTick - pCtx->hwvirt.svm.uPrevPauseTick > pCtx->hwvirt.svm.cPauseFilterThreshold)
4469 pCtx->hwvirt.svm.cPauseFilter = IEM_GET_SVM_PAUSE_FILTER_COUNT(pVCpu);
4470 pCtx->hwvirt.svm.uPrevPauseTick = uTick;
4471 }
4472
4473 /* Simple pause-filter counter. */
4474 if (pCtx->hwvirt.svm.cPauseFilter > 0)
4475 {
4476 --pCtx->hwvirt.svm.cPauseFilter;
4477 fCheckIntercept = false;
4478 }
4479 }
4480 if (fCheckIntercept)
4481 IEMOP_HLP_SVM_INSTR_INTERCEPT_AND_NRIP(pVCpu, SVM_CTRL_INTERCEPT_PAUSE, SVM_EXIT_PAUSE, 0, 0);
4482#endif
4483 }
4484 else
4485 IEMOP_MNEMONIC(nop, "nop");
4486 IEM_MC_BEGIN(0, 0);
4487 IEM_MC_ADVANCE_RIP();
4488 IEM_MC_END();
4489 return VINF_SUCCESS;
4490}
4491
4492
4493/**
4494 * @opcode 0x91
4495 */
4496FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4497{
4498 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4499 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4500}
4501
4502
4503/**
4504 * @opcode 0x92
4505 */
4506FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4507{
4508 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4509 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4510}
4511
4512
4513/**
4514 * @opcode 0x93
4515 */
4516FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4517{
4518 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4519 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4520}
4521
4522
4523/**
4524 * @opcode 0x94
4525 */
4526FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4527{
4528 IEMOP_MNEMONIC(xchg_rSP_rAX, "xchg rSP,rAX");
4529 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4530}
4531
4532
4533/**
4534 * @opcode 0x95
4535 */
4536FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4537{
4538 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4539 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4540}
4541
4542
4543/**
4544 * @opcode 0x96
4545 */
4546FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4547{
4548 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4549 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4550}
4551
4552
4553/**
4554 * @opcode 0x97
4555 */
4556FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4557{
4558 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4559 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4560}
4561
4562
4563/**
4564 * @opcode 0x98
4565 */
4566FNIEMOP_DEF(iemOp_cbw)
4567{
4568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
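    /* This is the rAX sign extension family: cbw does AX = (int16_t)(int8_t)AL,
       cwde EAX = (int32_t)(int16_t)AX and cdqe RAX = (int64_t)(int32_t)EAX,
       implemented below by testing the sign bit and or'ing/and'ing the upper half. */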
4569 switch (pVCpu->iem.s.enmEffOpSize)
4570 {
4571 case IEMMODE_16BIT:
4572 IEMOP_MNEMONIC(cbw, "cbw");
4573 IEM_MC_BEGIN(0, 1);
4574 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4575 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4576 } IEM_MC_ELSE() {
4577 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4578 } IEM_MC_ENDIF();
4579 IEM_MC_ADVANCE_RIP();
4580 IEM_MC_END();
4581 return VINF_SUCCESS;
4582
4583 case IEMMODE_32BIT:
4584 IEMOP_MNEMONIC(cwde, "cwde");
4585 IEM_MC_BEGIN(0, 1);
4586 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4587 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4588 } IEM_MC_ELSE() {
4589 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4590 } IEM_MC_ENDIF();
4591 IEM_MC_ADVANCE_RIP();
4592 IEM_MC_END();
4593 return VINF_SUCCESS;
4594
4595 case IEMMODE_64BIT:
4596 IEMOP_MNEMONIC(cdqe, "cdqe");
4597 IEM_MC_BEGIN(0, 1);
4598 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4599 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4600 } IEM_MC_ELSE() {
4601 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4602 } IEM_MC_ENDIF();
4603 IEM_MC_ADVANCE_RIP();
4604 IEM_MC_END();
4605 return VINF_SUCCESS;
4606
4607 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4608 }
4609}
4610
4611
4612/**
4613 * @opcode 0x99
4614 */
4615FNIEMOP_DEF(iemOp_cwd)
4616{
4617 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
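    /* This family sign extends rAX into rDX: cwd sets DX to 0xffff or 0 depending
       on AX bit 15, while cdq and cqo do the same for EDX/RDX from EAX/RAX. */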
4618 switch (pVCpu->iem.s.enmEffOpSize)
4619 {
4620 case IEMMODE_16BIT:
4621 IEMOP_MNEMONIC(cwd, "cwd");
4622 IEM_MC_BEGIN(0, 1);
4623 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4624 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4625 } IEM_MC_ELSE() {
4626 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4627 } IEM_MC_ENDIF();
4628 IEM_MC_ADVANCE_RIP();
4629 IEM_MC_END();
4630 return VINF_SUCCESS;
4631
4632 case IEMMODE_32BIT:
4633 IEMOP_MNEMONIC(cdq, "cdq");
4634 IEM_MC_BEGIN(0, 1);
4635 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4636 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4637 } IEM_MC_ELSE() {
4638 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4639 } IEM_MC_ENDIF();
4640 IEM_MC_ADVANCE_RIP();
4641 IEM_MC_END();
4642 return VINF_SUCCESS;
4643
4644 case IEMMODE_64BIT:
4645 IEMOP_MNEMONIC(cqo, "cqo");
4646 IEM_MC_BEGIN(0, 1);
4647 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4648 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4649 } IEM_MC_ELSE() {
4650 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4651 } IEM_MC_ENDIF();
4652 IEM_MC_ADVANCE_RIP();
4653 IEM_MC_END();
4654 return VINF_SUCCESS;
4655
4656 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4657 }
4658}
4659
4660
4661/**
4662 * @opcode 0x9a
4663 */
4664FNIEMOP_DEF(iemOp_call_Ap)
4665{
4666 IEMOP_MNEMONIC(call_Ap, "call Ap");
4667 IEMOP_HLP_NO_64BIT();
4668
4669 /* Decode the far pointer address and pass it on to the far call C implementation. */
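    /* The far pointer is encoded offset first, then selector, e.g. with a 16-bit
       operand size the bytes 9A 78 56 34 12 decode as 'call 1234h:5678h'. */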
4670 uint32_t offSeg;
4671 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4672 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4673 else
4674 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4675 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4676 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4677 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4678}
4679
4680
4681/** Opcode 0x9b. (aka fwait) */
4682FNIEMOP_DEF(iemOp_wait)
4683{
4684 IEMOP_MNEMONIC(wait, "wait");
4685 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4686
4687 IEM_MC_BEGIN(0, 0);
4688 IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
4689 IEM_MC_MAYBE_RAISE_FPU_XCPT();
4690 IEM_MC_ADVANCE_RIP();
4691 IEM_MC_END();
4692 return VINF_SUCCESS;
4693}
4694
4695
4696/**
4697 * @opcode 0x9c
4698 */
4699FNIEMOP_DEF(iemOp_pushf_Fv)
4700{
4701 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4702 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4703 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4704}
4705
4706
4707/**
4708 * @opcode 0x9d
4709 */
4710FNIEMOP_DEF(iemOp_popf_Fv)
4711{
4712 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4713 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4714 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4715}
4716
4717
4718/**
4719 * @opcode 0x9e
4720 */
4721FNIEMOP_DEF(iemOp_sahf)
4722{
4723 IEMOP_MNEMONIC(sahf, "sahf");
4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4725 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4726 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4727 return IEMOP_RAISE_INVALID_OPCODE();
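    /* AH carries the low EFLAGS byte: SF(7), ZF(6), 0, AF(4), 0, PF(2), 1, CF(0).
       Only those five status flags are taken over; bit 1 is forced to one below. */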
4728 IEM_MC_BEGIN(0, 2);
4729 IEM_MC_LOCAL(uint32_t, u32Flags);
4730 IEM_MC_LOCAL(uint32_t, EFlags);
4731 IEM_MC_FETCH_EFLAGS(EFlags);
4732 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4733 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4734 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4735 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4736 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4737 IEM_MC_COMMIT_EFLAGS(EFlags);
4738 IEM_MC_ADVANCE_RIP();
4739 IEM_MC_END();
4740 return VINF_SUCCESS;
4741}
4742
4743
4744/**
4745 * @opcode 0x9f
4746 */
4747FNIEMOP_DEF(iemOp_lahf)
4748{
4749 IEMOP_MNEMONIC(lahf, "lahf");
4750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4751 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4752 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4753 return IEMOP_RAISE_INVALID_OPCODE();
4754 IEM_MC_BEGIN(0, 1);
4755 IEM_MC_LOCAL(uint8_t, u8Flags);
4756 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4757 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4758 IEM_MC_ADVANCE_RIP();
4759 IEM_MC_END();
4760 return VINF_SUCCESS;
4761}
4762
4763
4764/**
4765 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4766 * iemOp_mov_Ov_rAX to fetch the moffsXX bytes of the opcode and fend off lock
4767 * prefixes. Will return on failures.
4768 * @param a_GCPtrMemOff The variable to store the offset in.
4769 */
4770#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4771 do \
4772 { \
4773 switch (pVCpu->iem.s.enmEffAddrMode) \
4774 { \
4775 case IEMMODE_16BIT: \
4776 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4777 break; \
4778 case IEMMODE_32BIT: \
4779 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4780 break; \
4781 case IEMMODE_64BIT: \
4782 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4783 break; \
4784 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4785 } \
4786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4787 } while (0)
4788
4789/**
4790 * @opcode 0xa0
4791 */
4792FNIEMOP_DEF(iemOp_mov_AL_Ob)
4793{
4794 /*
4795 * Get the offset and fend off lock prefixes.
4796 */
4797 RTGCPTR GCPtrMemOff;
4798 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4799
4800 /*
4801 * Fetch AL.
4802 */
4803 IEM_MC_BEGIN(0,1);
4804 IEM_MC_LOCAL(uint8_t, u8Tmp);
4805 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4806 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4807 IEM_MC_ADVANCE_RIP();
4808 IEM_MC_END();
4809 return VINF_SUCCESS;
4810}
4811
4812
4813/**
4814 * @opcode 0xa1
4815 */
4816FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4817{
4818 /*
4819 * Get the offset and fend off lock prefixes.
4820 */
4821 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4822 RTGCPTR GCPtrMemOff;
4823 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4824
4825 /*
4826 * Fetch rAX.
4827 */
4828 switch (pVCpu->iem.s.enmEffOpSize)
4829 {
4830 case IEMMODE_16BIT:
4831 IEM_MC_BEGIN(0,1);
4832 IEM_MC_LOCAL(uint16_t, u16Tmp);
4833 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4834 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4835 IEM_MC_ADVANCE_RIP();
4836 IEM_MC_END();
4837 return VINF_SUCCESS;
4838
4839 case IEMMODE_32BIT:
4840 IEM_MC_BEGIN(0,1);
4841 IEM_MC_LOCAL(uint32_t, u32Tmp);
4842 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4843 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4844 IEM_MC_ADVANCE_RIP();
4845 IEM_MC_END();
4846 return VINF_SUCCESS;
4847
4848 case IEMMODE_64BIT:
4849 IEM_MC_BEGIN(0,1);
4850 IEM_MC_LOCAL(uint64_t, u64Tmp);
4851 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4852 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4853 IEM_MC_ADVANCE_RIP();
4854 IEM_MC_END();
4855 return VINF_SUCCESS;
4856
4857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4858 }
4859}
4860
4861
4862/**
4863 * @opcode 0xa2
4864 */
4865FNIEMOP_DEF(iemOp_mov_Ob_AL)
4866{
4867 /*
4868 * Get the offset and fend off lock prefixes.
4869 */
4870 RTGCPTR GCPtrMemOff;
4871 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4872
4873 /*
4874 * Store AL.
4875 */
4876 IEM_MC_BEGIN(0,1);
4877 IEM_MC_LOCAL(uint8_t, u8Tmp);
4878 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4879 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4880 IEM_MC_ADVANCE_RIP();
4881 IEM_MC_END();
4882 return VINF_SUCCESS;
4883}
4884
4885
4886/**
4887 * @opcode 0xa3
4888 */
4889FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4890{
4891 /*
4892 * Get the offset and fend off lock prefixes.
4893 */
4894 RTGCPTR GCPtrMemOff;
4895 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4896
4897 /*
4898 * Store rAX.
4899 */
4900 switch (pVCpu->iem.s.enmEffOpSize)
4901 {
4902 case IEMMODE_16BIT:
4903 IEM_MC_BEGIN(0,1);
4904 IEM_MC_LOCAL(uint16_t, u16Tmp);
4905 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4906 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4907 IEM_MC_ADVANCE_RIP();
4908 IEM_MC_END();
4909 return VINF_SUCCESS;
4910
4911 case IEMMODE_32BIT:
4912 IEM_MC_BEGIN(0,1);
4913 IEM_MC_LOCAL(uint32_t, u32Tmp);
4914 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4915 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4916 IEM_MC_ADVANCE_RIP();
4917 IEM_MC_END();
4918 return VINF_SUCCESS;
4919
4920 case IEMMODE_64BIT:
4921 IEM_MC_BEGIN(0,1);
4922 IEM_MC_LOCAL(uint64_t, u64Tmp);
4923 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4924 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4925 IEM_MC_ADVANCE_RIP();
4926 IEM_MC_END();
4927 return VINF_SUCCESS;
4928
4929 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4930 }
4931}
4932
4933/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv */
4934#define IEM_MOVS_CASE(ValBits, AddrBits) \
4935 IEM_MC_BEGIN(0, 2); \
4936 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
4937 IEM_MC_LOCAL(RTGCPTR, uAddr); \
4938 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
4939 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
4940 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
4941 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
4942 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
4943 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
4944 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
4945 } IEM_MC_ELSE() { \
4946 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
4947 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
4948 } IEM_MC_ENDIF(); \
4949 IEM_MC_ADVANCE_RIP(); \
4950 IEM_MC_END();
4951
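/* All the string instructions below share this shape: read via DS:xSI (segment
   overridable) and/or write via ES:xDI (not overridable), then step the index
   registers by the element size - downwards when EFLAGS.DF is set, upwards
   otherwise. */
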
4952/**
4953 * @opcode 0xa4
4954 */
4955FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4956{
4957 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4958
4959 /*
4960 * Use the C implementation if a repeat prefix is encountered.
4961 */
4962 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4963 {
4964 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4965 switch (pVCpu->iem.s.enmEffAddrMode)
4966 {
4967 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4968 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4969 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4970 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4971 }
4972 }
4973 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4974
4975 /*
4976 * Sharing case implementation with movs[wdq] below.
4977 */
4978 switch (pVCpu->iem.s.enmEffAddrMode)
4979 {
4980 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4981 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4982 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4983 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4984 }
4985 return VINF_SUCCESS;
4986}
4987
4988
4989/**
4990 * @opcode 0xa5
4991 */
4992FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4993{
4994 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4995
4996 /*
4997 * Use the C implementation if a repeat prefix is encountered.
4998 */
4999 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5000 {
5001 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5002 switch (pVCpu->iem.s.enmEffOpSize)
5003 {
5004 case IEMMODE_16BIT:
5005 switch (pVCpu->iem.s.enmEffAddrMode)
5006 {
5007 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5008 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5009 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5010 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5011 }
5012 break;
5013 case IEMMODE_32BIT:
5014 switch (pVCpu->iem.s.enmEffAddrMode)
5015 {
5016 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5017 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5018 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5019 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5020 }
5021 case IEMMODE_64BIT:
5022 switch (pVCpu->iem.s.enmEffAddrMode)
5023 {
5024 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5025 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5026 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5027 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5028 }
5029 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5030 }
5031 }
5032 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5033
5034 /*
5035 * Annoying double switch here.
5036 * Using ugly macro for implementing the cases, sharing it with movsb.
5037 */
5038 switch (pVCpu->iem.s.enmEffOpSize)
5039 {
5040 case IEMMODE_16BIT:
5041 switch (pVCpu->iem.s.enmEffAddrMode)
5042 {
5043 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5044 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5045 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5046 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5047 }
5048 break;
5049
5050 case IEMMODE_32BIT:
5051 switch (pVCpu->iem.s.enmEffAddrMode)
5052 {
5053 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5054 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5055 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5056 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5057 }
5058 break;
5059
5060 case IEMMODE_64BIT:
5061 switch (pVCpu->iem.s.enmEffAddrMode)
5062 {
5063 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5064 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5065 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5066 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5067 }
5068 break;
5069 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5070 }
5071 return VINF_SUCCESS;
5072}
5073
5074#undef IEM_MOVS_CASE
5075
5076/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv */
5077#define IEM_CMPS_CASE(ValBits, AddrBits) \
5078 IEM_MC_BEGIN(3, 3); \
5079 IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
5080 IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
5081 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5082 IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
5083 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5084 \
5085 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
5086 IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
5087 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5088 IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
5089 IEM_MC_REF_LOCAL(puValue1, uValue1); \
5090 IEM_MC_REF_EFLAGS(pEFlags); \
5091 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
5092 \
5093 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5094 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5095 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5096 } IEM_MC_ELSE() { \
5097 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5098 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5099 } IEM_MC_ENDIF(); \
5100 IEM_MC_ADVANCE_RIP(); \
5101 IEM_MC_END();
5102
5103/**
5104 * @opcode 0xa6
5105 */
5106FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5107{
5108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5109
5110 /*
5111 * Use the C implementation if a repeat prefix is encountered.
5112 */
5113 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5114 {
5115 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5116 switch (pVCpu->iem.s.enmEffAddrMode)
5117 {
5118 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5119 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5120 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5121 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5122 }
5123 }
5124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5125 {
5126 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5127 switch (pVCpu->iem.s.enmEffAddrMode)
5128 {
5129 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5130 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5131 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5133 }
5134 }
5135 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5136
5137 /*
5138 * Sharing case implementation with cmps[wdq] below.
5139 */
5140 switch (pVCpu->iem.s.enmEffAddrMode)
5141 {
5142 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5143 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5144 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5146 }
5147 return VINF_SUCCESS;
5149}
5150
5151
5152/**
5153 * @opcode 0xa7
5154 */
5155FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5156{
5157 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5158
5159 /*
5160 * Use the C implementation if a repeat prefix is encountered.
5161 */
5162 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5163 {
5164 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5165 switch (pVCpu->iem.s.enmEffOpSize)
5166 {
5167 case IEMMODE_16BIT:
5168 switch (pVCpu->iem.s.enmEffAddrMode)
5169 {
5170 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5171 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5172 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5173 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5174 }
5175 break;
5176 case IEMMODE_32BIT:
5177 switch (pVCpu->iem.s.enmEffAddrMode)
5178 {
5179 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5180 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5181 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5183 }
5184 case IEMMODE_64BIT:
5185 switch (pVCpu->iem.s.enmEffAddrMode)
5186 {
5187 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5188 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5189 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5190 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5191 }
5192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5193 }
5194 }
5195
5196 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5197 {
5198 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5199 switch (pVCpu->iem.s.enmEffOpSize)
5200 {
5201 case IEMMODE_16BIT:
5202 switch (pVCpu->iem.s.enmEffAddrMode)
5203 {
5204 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5205 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5206 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5207 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5208 }
5209 break;
5210 case IEMMODE_32BIT:
5211 switch (pVCpu->iem.s.enmEffAddrMode)
5212 {
5213 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5214 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5215 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5216 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5217 }
5218 case IEMMODE_64BIT:
5219 switch (pVCpu->iem.s.enmEffAddrMode)
5220 {
5221 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5222 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5223 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5225 }
5226 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5227 }
5228 }
5229
5230 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5231
5232 /*
5233 * Annoying double switch here.
5234 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5235 */
5236 switch (pVCpu->iem.s.enmEffOpSize)
5237 {
5238 case IEMMODE_16BIT:
5239 switch (pVCpu->iem.s.enmEffAddrMode)
5240 {
5241 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5242 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5243 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5244 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5245 }
5246 break;
5247
5248 case IEMMODE_32BIT:
5249 switch (pVCpu->iem.s.enmEffAddrMode)
5250 {
5251 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5252 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5253 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5254 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5255 }
5256 break;
5257
5258 case IEMMODE_64BIT:
5259 switch (pVCpu->iem.s.enmEffAddrMode)
5260 {
5261 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5262 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5263 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5265 }
5266 break;
5267 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5268 }
5269 return VINF_SUCCESS;
5271}
5272
5273#undef IEM_CMPS_CASE
5274
5275/**
5276 * @opcode 0xa8
5277 */
5278FNIEMOP_DEF(iemOp_test_AL_Ib)
5279{
5280 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5281 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5282 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5283}
5284
5285
5286/**
5287 * @opcode 0xa9
5288 */
5289FNIEMOP_DEF(iemOp_test_eAX_Iz)
5290{
5291 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5293 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5294}
5295
5296
5297/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX */
5298#define IEM_STOS_CASE(ValBits, AddrBits) \
5299 IEM_MC_BEGIN(0, 2); \
5300 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5301 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5302 IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
5303 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5304 IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
5305 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5306 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5307 } IEM_MC_ELSE() { \
5308 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5309 } IEM_MC_ENDIF(); \
5310 IEM_MC_ADVANCE_RIP(); \
5311 IEM_MC_END();
5312
5313/**
5314 * @opcode 0xaa
5315 */
5316FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5317{
5318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5319
5320 /*
5321 * Use the C implementation if a repeat prefix is encountered.
5322 */
5323 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5324 {
5325 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5326 switch (pVCpu->iem.s.enmEffAddrMode)
5327 {
5328 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5329 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5330 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5331 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5332 }
5333 }
5334 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5335
5336 /*
5337 * Sharing case implementation with stos[wdq] below.
5338 */
5339 switch (pVCpu->iem.s.enmEffAddrMode)
5340 {
5341 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5342 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5343 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5345 }
5346 return VINF_SUCCESS;
5347}
5348
5349
5350/**
5351 * @opcode 0xab
5352 */
5353FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5354{
5355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5356
5357 /*
5358 * Use the C implementation if a repeat prefix is encountered.
5359 */
5360 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5361 {
5362 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5363 switch (pVCpu->iem.s.enmEffOpSize)
5364 {
5365 case IEMMODE_16BIT:
5366 switch (pVCpu->iem.s.enmEffAddrMode)
5367 {
5368 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5369 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5370 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5372 }
5373 break;
5374 case IEMMODE_32BIT:
5375 switch (pVCpu->iem.s.enmEffAddrMode)
5376 {
5377 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5378 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5379 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5381 }
5382 case IEMMODE_64BIT:
5383 switch (pVCpu->iem.s.enmEffAddrMode)
5384 {
5385 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5386 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5387 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5389 }
5390 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5391 }
5392 }
5393 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5394
5395 /*
5396 * Annoying double switch here.
5397 * Using ugly macro for implementing the cases, sharing it with stosb.
5398 */
5399 switch (pVCpu->iem.s.enmEffOpSize)
5400 {
5401 case IEMMODE_16BIT:
5402 switch (pVCpu->iem.s.enmEffAddrMode)
5403 {
5404 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5405 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5406 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5408 }
5409 break;
5410
5411 case IEMMODE_32BIT:
5412 switch (pVCpu->iem.s.enmEffAddrMode)
5413 {
5414 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5415 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5416 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5417 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5418 }
5419 break;
5420
5421 case IEMMODE_64BIT:
5422 switch (pVCpu->iem.s.enmEffAddrMode)
5423 {
5424 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5425 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5426 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5428 }
5429 break;
5430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5431 }
5432 return VINF_SUCCESS;
5433}
5434
5435#undef IEM_STOS_CASE
5436
5437/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv */
5438#define IEM_LODS_CASE(ValBits, AddrBits) \
5439 IEM_MC_BEGIN(0, 2); \
5440 IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
5441 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5442 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
5443 IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
5444 IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
5445 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5446 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5447 } IEM_MC_ELSE() { \
5448 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
5449 } IEM_MC_ENDIF(); \
5450 IEM_MC_ADVANCE_RIP(); \
5451 IEM_MC_END();
5452
5453/**
5454 * @opcode 0xac
5455 */
5456FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5457{
5458 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5459
5460 /*
5461 * Use the C implementation if a repeat prefix is encountered.
5462 */
5463 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5464 {
5465 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5466 switch (pVCpu->iem.s.enmEffAddrMode)
5467 {
5468 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5469 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5470 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5471 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5472 }
5473 }
5474 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5475
5476 /*
5477 * Sharing case implementation with lods[wdq] below.
5478 */
5479 switch (pVCpu->iem.s.enmEffAddrMode)
5480 {
5481 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5482 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5483 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5484 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5485 }
5486 return VINF_SUCCESS;
5487}
5488
5489
5490/**
5491 * @opcode 0xad
5492 */
5493FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5494{
5495 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5496
5497 /*
5498 * Use the C implementation if a repeat prefix is encountered.
5499 */
5500 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5501 {
5502 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5503 switch (pVCpu->iem.s.enmEffOpSize)
5504 {
5505 case IEMMODE_16BIT:
5506 switch (pVCpu->iem.s.enmEffAddrMode)
5507 {
5508 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5509 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5510 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5512 }
5513 break;
5514 case IEMMODE_32BIT:
5515 switch (pVCpu->iem.s.enmEffAddrMode)
5516 {
5517 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5518 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5519 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5520 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5521 }
5522 case IEMMODE_64BIT:
5523 switch (pVCpu->iem.s.enmEffAddrMode)
5524 {
5525 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5526 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5527 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5528 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5529 }
5530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5531 }
5532 }
5533 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5534
5535 /*
5536 * Annoying double switch here.
5538      * Using an ugly macro to implement the cases, sharing it with lodsb.
5538 */
5539 switch (pVCpu->iem.s.enmEffOpSize)
5540 {
5541 case IEMMODE_16BIT:
5542 switch (pVCpu->iem.s.enmEffAddrMode)
5543 {
5544 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5545 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5546 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5547 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5548 }
5549 break;
5550
5551 case IEMMODE_32BIT:
5552 switch (pVCpu->iem.s.enmEffAddrMode)
5553 {
5554 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5555 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5556 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5558 }
5559 break;
5560
5561 case IEMMODE_64BIT:
5562 switch (pVCpu->iem.s.enmEffAddrMode)
5563 {
5564 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5565 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5566 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5567 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5568 }
5569 break;
5570 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5571 }
5572 return VINF_SUCCESS;
5573}
5574
5575#undef IEM_LODS_CASE
5576
5577/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv */
5578#define IEM_SCAS_CASE(ValBits, AddrBits) \
5579 IEM_MC_BEGIN(3, 2); \
5580 IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
5581 IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
5582 IEM_MC_ARG(uint32_t *, pEFlags, 2); \
5583 IEM_MC_LOCAL(RTGCPTR, uAddr); \
5584 \
5585 IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
5586 IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
5587 IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
5588 IEM_MC_REF_EFLAGS(pEFlags); \
5589 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
5590 \
5591 IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
5592 IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5593 } IEM_MC_ELSE() { \
5594 IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
5595 } IEM_MC_ENDIF(); \
5596 IEM_MC_ADVANCE_RIP(); \
5597 IEM_MC_END();
5598
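/*
 * Illustration only, never compiled: assuming the IEM_MC_* macros behave as
 * named (read_u8 and cmp_u8 are made-up helpers), IEM_SCAS_CASE(8, 64)
 * amounts to roughly:
 *
 *      uint8_t uValue = read_u8(ES, RDI);     // scas always uses ES:xDI, no override
 *      cmp_u8(&AL, uValue, &EFLAGS);          // compare only, AL is not modified
 *      RDI           += EFLAGS.DF ? -1 : +1;
 */
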
5599/**
5600 * @opcode 0xae
5601 */
5602FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5603{
5604 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5605
5606 /*
5607 * Use the C implementation if a repeat prefix is encountered.
5608 */
5609 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5610 {
5611 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5612 switch (pVCpu->iem.s.enmEffAddrMode)
5613 {
5614 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5615 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5616 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5617 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5618 }
5619 }
5620 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5621 {
5622         IEMOP_MNEMONIC(repne_scasb_AL_Xb, "repne scasb AL,Xb");
5623 switch (pVCpu->iem.s.enmEffAddrMode)
5624 {
5625 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5626 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5627 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5629 }
5630 }
5631 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5632
5633 /*
5634      * Sharing the case implementation with scas[wdq] below.
5635 */
5636 switch (pVCpu->iem.s.enmEffAddrMode)
5637 {
5638 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5639 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5640 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5641 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5642 }
5643 return VINF_SUCCESS;
5644}
5645
5646
5647/**
5648 * @opcode 0xaf
5649 */
5650FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5651{
5652 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5653
5654 /*
5655 * Use the C implementation if a repeat prefix is encountered.
5656 */
5657 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5658 {
5659 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5660 switch (pVCpu->iem.s.enmEffOpSize)
5661 {
5662 case IEMMODE_16BIT:
5663 switch (pVCpu->iem.s.enmEffAddrMode)
5664 {
5665 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5666 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5667 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5668 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5669 }
5670 break;
5671 case IEMMODE_32BIT:
5672 switch (pVCpu->iem.s.enmEffAddrMode)
5673 {
5674 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5675 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5676 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5677 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5678 }
5679 case IEMMODE_64BIT:
5680 switch (pVCpu->iem.s.enmEffAddrMode)
5681 {
5682                     case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo Is this wrong? We can do 32-bit addressing in 64-bit mode, but not 16-bit, right? */
5683 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5684 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5685 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5686 }
5687 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5688 }
5689 }
5690 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5691 {
5692 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5693 switch (pVCpu->iem.s.enmEffOpSize)
5694 {
5695 case IEMMODE_16BIT:
5696 switch (pVCpu->iem.s.enmEffAddrMode)
5697 {
5698 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5699 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5700 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5701 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5702 }
5703 break;
5704 case IEMMODE_32BIT:
5705 switch (pVCpu->iem.s.enmEffAddrMode)
5706 {
5707 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5708 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5709 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5711 }
5712 case IEMMODE_64BIT:
5713 switch (pVCpu->iem.s.enmEffAddrMode)
5714 {
5715 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5716 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5717 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5718 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5719 }
5720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5721 }
5722 }
5723 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5724
5725 /*
5726 * Annoying double switch here.
5728      * Using an ugly macro to implement the cases, sharing it with scasb.
5728 */
5729 switch (pVCpu->iem.s.enmEffOpSize)
5730 {
5731 case IEMMODE_16BIT:
5732 switch (pVCpu->iem.s.enmEffAddrMode)
5733 {
5734 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5735 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5736 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5737 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5738 }
5739 break;
5740
5741 case IEMMODE_32BIT:
5742 switch (pVCpu->iem.s.enmEffAddrMode)
5743 {
5744 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5745 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5746 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5747 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5748 }
5749 break;
5750
5751 case IEMMODE_64BIT:
5752 switch (pVCpu->iem.s.enmEffAddrMode)
5753 {
5754 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5755 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5756 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5757 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5758 }
5759 break;
5760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5761 }
5762 return VINF_SUCCESS;
5763}
5764
5765#undef IEM_SCAS_CASE
5766
5767/**
5768 * Common 'mov r8, imm8' helper.
5769 */
5770FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5771{
5772 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5773 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5774
5775 IEM_MC_BEGIN(0, 1);
5776 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5777 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5778 IEM_MC_ADVANCE_RIP();
5779 IEM_MC_END();
5780
5781 return VINF_SUCCESS;
5782}
5783
5784
5785/**
5786 * @opcode 0xb0
5787 */
5788FNIEMOP_DEF(iemOp_mov_AL_Ib)
5789{
5790 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5791 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5792}
5793
5794
5795/**
5796 * @opcode 0xb1
5797 */
5798FNIEMOP_DEF(iemOp_CL_Ib)
5799{
5800 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5801 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5802}
5803
5804
5805/**
5806 * @opcode 0xb2
5807 */
5808FNIEMOP_DEF(iemOp_DL_Ib)
5809{
5810 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5811 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5812}
5813
5814
5815/**
5816 * @opcode 0xb3
5817 */
5818FNIEMOP_DEF(iemOp_BL_Ib)
5819{
5820 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5821 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5822}
5823
5824
5825/**
5826 * @opcode 0xb4
5827 */
5828FNIEMOP_DEF(iemOp_mov_AH_Ib)
5829{
5830 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
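    /* Note: without a REX prefix, 8-bit register index 4 encodes AH; with any
       REX prefix it encodes SPL instead, which is why X86_GREG_xSP is passed
       on to the common worker here (likewise for CH/DH/BH below). */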
5831 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5832}
5833
5834
5835/**
5836 * @opcode 0xb5
5837 */
5838FNIEMOP_DEF(iemOp_CH_Ib)
5839{
5840 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5841 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5842}
5843
5844
5845/**
5846 * @opcode 0xb6
5847 */
5848FNIEMOP_DEF(iemOp_DH_Ib)
5849{
5850 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5851 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5852}
5853
5854
5855/**
5856 * @opcode 0xb7
5857 */
5858FNIEMOP_DEF(iemOp_BH_Ib)
5859{
5860 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5861 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5862}
5863
5864
5865/**
5866 * Common 'mov regX,immX' helper.
5867 */
5868FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5869{
5870 switch (pVCpu->iem.s.enmEffOpSize)
5871 {
5872 case IEMMODE_16BIT:
5873 {
5874 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5876
5877 IEM_MC_BEGIN(0, 1);
5878 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5879 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5880 IEM_MC_ADVANCE_RIP();
5881 IEM_MC_END();
5882 break;
5883 }
5884
5885 case IEMMODE_32BIT:
5886 {
5887 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5889
5890 IEM_MC_BEGIN(0, 1);
5891 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5892 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5893 IEM_MC_ADVANCE_RIP();
5894 IEM_MC_END();
5895 break;
5896 }
5897 case IEMMODE_64BIT:
5898 {
5899 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
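            /* This (REX.W + B8+rd) is the only x86 instruction carrying a full
               8-byte immediate, e.g. 48 b8 ef be ad de 00 00 00 00 is
               mov rax, 0xdeadbeef. */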
5900 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5901
5902 IEM_MC_BEGIN(0, 1);
5903 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5904 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5905 IEM_MC_ADVANCE_RIP();
5906 IEM_MC_END();
5907 break;
5908 }
5909 }
5910
5911 return VINF_SUCCESS;
5912}
5913
5914
5915/**
5916 * @opcode 0xb8
5917 */
5918FNIEMOP_DEF(iemOp_eAX_Iv)
5919{
5920 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5921 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5922}
5923
5924
5925/**
5926 * @opcode 0xb9
5927 */
5928FNIEMOP_DEF(iemOp_eCX_Iv)
5929{
5930 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5931 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5932}
5933
5934
5935/**
5936 * @opcode 0xba
5937 */
5938FNIEMOP_DEF(iemOp_eDX_Iv)
5939{
5940 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5941 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5942}
5943
5944
5945/**
5946 * @opcode 0xbb
5947 */
5948FNIEMOP_DEF(iemOp_eBX_Iv)
5949{
5950 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5951 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5952}
5953
5954
5955/**
5956 * @opcode 0xbc
5957 */
5958FNIEMOP_DEF(iemOp_eSP_Iv)
5959{
5960 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5961 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5962}
5963
5964
5965/**
5966 * @opcode 0xbd
5967 */
5968FNIEMOP_DEF(iemOp_eBP_Iv)
5969{
5970 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5971 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5972}
5973
5974
5975/**
5976 * @opcode 0xbe
5977 */
5978FNIEMOP_DEF(iemOp_eSI_Iv)
5979{
5980 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5981 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5982}
5983
5984
5985/**
5986 * @opcode 0xbf
5987 */
5988FNIEMOP_DEF(iemOp_eDI_Iv)
5989{
5990 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5991 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5992}
5993
5994
5995/**
5996 * @opcode 0xc0
5997 */
5998FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5999{
6000 IEMOP_HLP_MIN_186();
6001 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6002 PCIEMOPSHIFTSIZES pImpl;
6003 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6004 {
6005 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6006 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6007 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6008 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6009 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6010 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6011 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6012 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6013 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6014 }
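    /* Example: c0 e0 04 is shl AL,4 - mod=3, reg=/4 (SHL), rm=0 (AL), imm8=4. */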
6015 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6016
6017 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6018 {
6019 /* register */
6020 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6021 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6022 IEM_MC_BEGIN(3, 0);
6023 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6024 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6025 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6026 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6027 IEM_MC_REF_EFLAGS(pEFlags);
6028 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6029 IEM_MC_ADVANCE_RIP();
6030 IEM_MC_END();
6031 }
6032 else
6033 {
6034 /* memory */
6035 IEM_MC_BEGIN(3, 2);
6036 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6037 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6038 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6040
6041 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6042 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6043 IEM_MC_ASSIGN(cShiftArg, cShift);
6044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6045 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6046 IEM_MC_FETCH_EFLAGS(EFlags);
6047 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6048
6049 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6050 IEM_MC_COMMIT_EFLAGS(EFlags);
6051 IEM_MC_ADVANCE_RIP();
6052 IEM_MC_END();
6053 }
6054 return VINF_SUCCESS;
6055}
6056
6057
6058/**
6059 * @opcode 0xc1
6060 */
6061FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6062{
6063 IEMOP_HLP_MIN_186();
6064 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6065 PCIEMOPSHIFTSIZES pImpl;
6066 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6067 {
6068 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6069 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6070 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6071 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6072 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6073 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6074 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6075 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6076 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6077 }
6078 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6079
6080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6081 {
6082 /* register */
6083 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6085 switch (pVCpu->iem.s.enmEffOpSize)
6086 {
6087 case IEMMODE_16BIT:
6088 IEM_MC_BEGIN(3, 0);
6089 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6090 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6091 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6092 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6093 IEM_MC_REF_EFLAGS(pEFlags);
6094 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6095 IEM_MC_ADVANCE_RIP();
6096 IEM_MC_END();
6097 return VINF_SUCCESS;
6098
6099 case IEMMODE_32BIT:
6100 IEM_MC_BEGIN(3, 0);
6101 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6102 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6103 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6104 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6105 IEM_MC_REF_EFLAGS(pEFlags);
6106 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6107 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6108 IEM_MC_ADVANCE_RIP();
6109 IEM_MC_END();
6110 return VINF_SUCCESS;
6111
6112 case IEMMODE_64BIT:
6113 IEM_MC_BEGIN(3, 0);
6114 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6115 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6116 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6117 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6118 IEM_MC_REF_EFLAGS(pEFlags);
6119 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6120 IEM_MC_ADVANCE_RIP();
6121 IEM_MC_END();
6122 return VINF_SUCCESS;
6123
6124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6125 }
6126 }
6127 else
6128 {
6129 /* memory */
6130 switch (pVCpu->iem.s.enmEffOpSize)
6131 {
6132 case IEMMODE_16BIT:
6133 IEM_MC_BEGIN(3, 2);
6134 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6135 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6136 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6138
6139 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6140 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6141 IEM_MC_ASSIGN(cShiftArg, cShift);
6142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6143 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6144 IEM_MC_FETCH_EFLAGS(EFlags);
6145 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6146
6147 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6148 IEM_MC_COMMIT_EFLAGS(EFlags);
6149 IEM_MC_ADVANCE_RIP();
6150 IEM_MC_END();
6151 return VINF_SUCCESS;
6152
6153 case IEMMODE_32BIT:
6154 IEM_MC_BEGIN(3, 2);
6155 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6156 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6157 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6159
6160 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6161 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6162 IEM_MC_ASSIGN(cShiftArg, cShift);
6163 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6164 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6165 IEM_MC_FETCH_EFLAGS(EFlags);
6166 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6167
6168 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6169 IEM_MC_COMMIT_EFLAGS(EFlags);
6170 IEM_MC_ADVANCE_RIP();
6171 IEM_MC_END();
6172 return VINF_SUCCESS;
6173
6174 case IEMMODE_64BIT:
6175 IEM_MC_BEGIN(3, 2);
6176 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6177 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6178 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6180
6181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6182 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6183 IEM_MC_ASSIGN(cShiftArg, cShift);
6184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6185 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6186 IEM_MC_FETCH_EFLAGS(EFlags);
6187 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6188
6189 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6190 IEM_MC_COMMIT_EFLAGS(EFlags);
6191 IEM_MC_ADVANCE_RIP();
6192 IEM_MC_END();
6193 return VINF_SUCCESS;
6194
6195 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6196 }
6197 }
6198}
6199
6200
6201/**
6202 * @opcode 0xc2
6203 */
6204FNIEMOP_DEF(iemOp_retn_Iw)
6205{
6206 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
6207 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
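    /* The immediate is the number of stack bytes to release after popping the
       return address, e.g. c2 08 00 is 'ret 8' (callee pops two dword args). */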
6208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6209 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6210 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
6211}
6212
6213
6214/**
6215 * @opcode 0xc3
6216 */
6217FNIEMOP_DEF(iemOp_retn)
6218{
6219 IEMOP_MNEMONIC(retn, "retn");
6220 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6221 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6222 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
6223}
6224
6225
6226/**
6227 * @opcode 0xc4
6228 */
6229FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
6230{
6231     /* The LES instruction is invalid in 64-bit mode. In legacy and
6232        compatibility mode it is invalid with MOD=3.
6233 The use as a VEX prefix is made possible by assigning the inverted
6234 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
6235 outside of 64-bit mode. VEX is not available in real or v86 mode. */
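    /*
     * For reference, the 3-byte VEX prefix layout which the code below picks
     * apart (Intel SDM vol. 2, section 2.3):
     *
     *      byte 0: 0xc4
     *      byte 1: ~R.~X.~B.m-mmmm  - inverted REX.R/X/B bits + 5-bit opcode map.
     *      byte 2:  W.~vvvv.L.pp    - REX.W-like bit, inverted extra register
     *                                 specifier, vector length, implied prefix.
     */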
6236 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6237 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6238 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
6239 {
6240 IEMOP_MNEMONIC(vex3_prefix, "vex3");
6241 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6242 {
6243 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6244 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6245 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
6246 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6247 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6248 if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
6249 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
6250 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6251 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
6252 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
6253 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6254 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6255 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6256
6257 switch (bRm & 0x1f)
6258 {
6259 case 1: /* 0x0f lead opcode byte. */
6260#ifdef IEM_WITH_VEX
6261 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6262#else
6263 IEMOP_BITCH_ABOUT_STUB();
6264 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6265#endif
6266
6267 case 2: /* 0x0f 0x38 lead opcode bytes. */
6268#ifdef IEM_WITH_VEX
6269 return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6270#else
6271 IEMOP_BITCH_ABOUT_STUB();
6272 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6273#endif
6274
6275 case 3: /* 0x0f 0x3a lead opcode bytes. */
6276#ifdef IEM_WITH_VEX
6277 return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6278#else
6279 IEMOP_BITCH_ABOUT_STUB();
6280 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6281#endif
6282
6283 default:
6284                 Log(("VEX3: Invalid m-mmmm value: %#x!\n", bRm & 0x1f));
6285 return IEMOP_RAISE_INVALID_OPCODE();
6286 }
6287 }
6288 Log(("VEX3: AVX support disabled!\n"));
6289 return IEMOP_RAISE_INVALID_OPCODE();
6290 }
6291
6292 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
6293 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
6294}
6295
6296
6297/**
6298 * @opcode 0xc5
6299 */
6300FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
6301{
6302     /* The LDS instruction is invalid in 64-bit mode. In legacy and
6303        compatibility mode it is invalid with MOD=3.
6304        The use as a VEX prefix is made possible by assigning the inverted
6305        REX.R to the top MOD bit, and the top bit of the inverted register
6306        specifier to the bottom MOD bit, thereby effectively limiting 32-bit
6307        code to accessing registers 0..7 in this VEX form. */
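    /*
     * For reference, the 2-byte VEX prefix layout:
     *
     *      byte 0: 0xc5
     *      byte 1: ~R.~vvvv.L.pp - inverted REX.R, inverted extra register
     *                              specifier, vector length, implied prefix.
     */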
6308 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6309 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
6310 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6311 {
6312 IEMOP_MNEMONIC(vex2_prefix, "vex2");
6313 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
6314 {
6315 /* Note! The real mode, v8086 mode and invalid prefix checks are done once
6316 the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
6317 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
6318 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
6319 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
6320 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
6321 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
6322 pVCpu->iem.s.idxPrefix = bRm & 0x3;
6323
6324#ifdef IEM_WITH_VEX
6325 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6326#else
6327 IEMOP_BITCH_ABOUT_STUB();
6328 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6329#endif
6330 }
6331
6332 /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
6333 Log(("VEX2: AVX support disabled!\n"));
6334 return IEMOP_RAISE_INVALID_OPCODE();
6335 }
6336
6337 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
6338 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
6339}
6340
6341
6342/**
6343 * @opcode 0xc6
6344 */
6345FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6346{
6347 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6348 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6349 return IEMOP_RAISE_INVALID_OPCODE();
6350 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6351
6352 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6353 {
6354 /* register access */
6355 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6357 IEM_MC_BEGIN(0, 0);
6358 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6359 IEM_MC_ADVANCE_RIP();
6360 IEM_MC_END();
6361 }
6362 else
6363 {
6364 /* memory access. */
6365 IEM_MC_BEGIN(0, 1);
6366 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6367 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6368 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6370 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6371 IEM_MC_ADVANCE_RIP();
6372 IEM_MC_END();
6373 }
6374 return VINF_SUCCESS;
6375}
6376
6377
6378/**
6379 * @opcode 0xc7
6380 */
6381FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6382{
6383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6384     if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
6385 return IEMOP_RAISE_INVALID_OPCODE();
6386 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6387
6388 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6389 {
6390 /* register access */
6391 switch (pVCpu->iem.s.enmEffOpSize)
6392 {
6393 case IEMMODE_16BIT:
6394 IEM_MC_BEGIN(0, 0);
6395 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6397 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6398 IEM_MC_ADVANCE_RIP();
6399 IEM_MC_END();
6400 return VINF_SUCCESS;
6401
6402 case IEMMODE_32BIT:
6403 IEM_MC_BEGIN(0, 0);
6404 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6405 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6406 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6407 IEM_MC_ADVANCE_RIP();
6408 IEM_MC_END();
6409 return VINF_SUCCESS;
6410
6411 case IEMMODE_64BIT:
6412 IEM_MC_BEGIN(0, 0);
6413 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6414 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6415 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6416 IEM_MC_ADVANCE_RIP();
6417 IEM_MC_END();
6418 return VINF_SUCCESS;
6419
6420 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6421 }
6422 }
6423 else
6424 {
6425 /* memory access. */
6426 switch (pVCpu->iem.s.enmEffOpSize)
6427 {
6428 case IEMMODE_16BIT:
6429 IEM_MC_BEGIN(0, 1);
6430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6432 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6434 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6435 IEM_MC_ADVANCE_RIP();
6436 IEM_MC_END();
6437 return VINF_SUCCESS;
6438
6439 case IEMMODE_32BIT:
6440 IEM_MC_BEGIN(0, 1);
6441 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6443 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6445 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6446 IEM_MC_ADVANCE_RIP();
6447 IEM_MC_END();
6448 return VINF_SUCCESS;
6449
6450 case IEMMODE_64BIT:
6451 IEM_MC_BEGIN(0, 1);
6452 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6453 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6454 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6455 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6456 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6457 IEM_MC_ADVANCE_RIP();
6458 IEM_MC_END();
6459 return VINF_SUCCESS;
6460
6461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6462 }
6463 }
6464}
6465
6466
6467
6468
6469/**
6470 * @opcode 0xc8
6471 */
6472FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6473{
6474 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6475 IEMOP_HLP_MIN_186();
6476 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6477 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6478 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6479 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
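    /* With a nesting level of 0 this amounts to: push rBP; mov rBP, rSP;
       sub rSP, cbFrame. E.g. c8 20 00 00 is 'enter 0x20,0', reserving 0x20
       bytes for locals. */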
6480 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6481}
6482
6483
6484/**
6485 * @opcode 0xc9
6486 */
6487FNIEMOP_DEF(iemOp_leave)
6488{
6489 IEMOP_MNEMONIC(leave, "leave");
6490 IEMOP_HLP_MIN_186();
6491 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6493 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6494}
6495
6496
6497/**
6498 * @opcode 0xca
6499 */
6500FNIEMOP_DEF(iemOp_retf_Iw)
6501{
6502 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6503 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6505 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6506 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6507}
6508
6509
6510/**
6511 * @opcode 0xcb
6512 */
6513FNIEMOP_DEF(iemOp_retf)
6514{
6515 IEMOP_MNEMONIC(retf, "retf");
6516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6517 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6518 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6519}
6520
6521
6522/**
6523 * @opcode 0xcc
6524 */
6525FNIEMOP_DEF(iemOp_int3)
6526{
6527 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6528 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6529}
6530
6531
6532/**
6533 * @opcode 0xcd
6534 */
6535FNIEMOP_DEF(iemOp_int_Ib)
6536{
6537 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6539 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
6540}
6541
6542
6543/**
6544 * @opcode 0xce
6545 */
6546FNIEMOP_DEF(iemOp_into)
6547{
6548 IEMOP_MNEMONIC(into, "into");
6549 IEMOP_HLP_NO_64BIT();
6550
6551 IEM_MC_BEGIN(2, 0);
6552 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6553 IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
6554 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
6555 IEM_MC_END();
6556 return VINF_SUCCESS;
6557}
6558
6559
6560/**
6561 * @opcode 0xcf
6562 */
6563FNIEMOP_DEF(iemOp_iret)
6564{
6565 IEMOP_MNEMONIC(iret, "iret");
6566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6567 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6568}
6569
6570
6571/**
6572 * @opcode 0xd0
6573 */
6574FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6575{
6576 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6577 PCIEMOPSHIFTSIZES pImpl;
6578 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6579 {
6580 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6581 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6582 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6583 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6584 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6585 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6586 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6587 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6588 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6589 }
6590 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6591
6592 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6593 {
6594 /* register */
6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6596 IEM_MC_BEGIN(3, 0);
6597 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6598 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6599 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6600 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6601 IEM_MC_REF_EFLAGS(pEFlags);
6602 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6603 IEM_MC_ADVANCE_RIP();
6604 IEM_MC_END();
6605 }
6606 else
6607 {
6608 /* memory */
6609 IEM_MC_BEGIN(3, 2);
6610 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6611 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6612 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6613 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6614
6615 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6617 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6618 IEM_MC_FETCH_EFLAGS(EFlags);
6619 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6620
6621 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6622 IEM_MC_COMMIT_EFLAGS(EFlags);
6623 IEM_MC_ADVANCE_RIP();
6624 IEM_MC_END();
6625 }
6626 return VINF_SUCCESS;
6627}
6628
6629
6630
6631/**
6632 * @opcode 0xd1
6633 */
6634FNIEMOP_DEF(iemOp_Grp2_Ev_1)
6635{
6636 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6637 PCIEMOPSHIFTSIZES pImpl;
6638 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6639 {
6640 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
6641 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
6642 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
6643 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
6644 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
6645 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
6646 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
6647 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6648 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6649 }
6650 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6651
6652 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6653 {
6654 /* register */
6655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6656 switch (pVCpu->iem.s.enmEffOpSize)
6657 {
6658 case IEMMODE_16BIT:
6659 IEM_MC_BEGIN(3, 0);
6660 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6661 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6662 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6663 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6664 IEM_MC_REF_EFLAGS(pEFlags);
6665 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6666 IEM_MC_ADVANCE_RIP();
6667 IEM_MC_END();
6668 return VINF_SUCCESS;
6669
6670 case IEMMODE_32BIT:
6671 IEM_MC_BEGIN(3, 0);
6672 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6673 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6674 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6675 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6676 IEM_MC_REF_EFLAGS(pEFlags);
6677 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6678 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6679 IEM_MC_ADVANCE_RIP();
6680 IEM_MC_END();
6681 return VINF_SUCCESS;
6682
6683 case IEMMODE_64BIT:
6684 IEM_MC_BEGIN(3, 0);
6685 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6686 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6687 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6688 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6689 IEM_MC_REF_EFLAGS(pEFlags);
6690 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6691 IEM_MC_ADVANCE_RIP();
6692 IEM_MC_END();
6693 return VINF_SUCCESS;
6694
6695 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6696 }
6697 }
6698 else
6699 {
6700 /* memory */
6701 switch (pVCpu->iem.s.enmEffOpSize)
6702 {
6703 case IEMMODE_16BIT:
6704 IEM_MC_BEGIN(3, 2);
6705 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6706 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6707 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6708 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6709
6710 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6711 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6712 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6713 IEM_MC_FETCH_EFLAGS(EFlags);
6714 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6715
6716 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6717 IEM_MC_COMMIT_EFLAGS(EFlags);
6718 IEM_MC_ADVANCE_RIP();
6719 IEM_MC_END();
6720 return VINF_SUCCESS;
6721
6722 case IEMMODE_32BIT:
6723 IEM_MC_BEGIN(3, 2);
6724 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6725 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6726 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6728
6729 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6730 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6731 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6732 IEM_MC_FETCH_EFLAGS(EFlags);
6733 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6734
6735 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6736 IEM_MC_COMMIT_EFLAGS(EFlags);
6737 IEM_MC_ADVANCE_RIP();
6738 IEM_MC_END();
6739 return VINF_SUCCESS;
6740
6741 case IEMMODE_64BIT:
6742 IEM_MC_BEGIN(3, 2);
6743 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6744 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
6745 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6746 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6747
6748 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6749 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6750 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6751 IEM_MC_FETCH_EFLAGS(EFlags);
6752 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6753
6754 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6755 IEM_MC_COMMIT_EFLAGS(EFlags);
6756 IEM_MC_ADVANCE_RIP();
6757 IEM_MC_END();
6758 return VINF_SUCCESS;
6759
6760 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6761 }
6762 }
6763}
6764
6765
6766/**
6767 * @opcode 0xd2
6768 */
6769FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
6770{
6771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6772 PCIEMOPSHIFTSIZES pImpl;
6773 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6774 {
6775 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
6776 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
6777 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
6778 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
6779 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
6780 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
6781 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
6782 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6783 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
6784 }
6785 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6786
6787 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6788 {
6789 /* register */
6790 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6791 IEM_MC_BEGIN(3, 0);
6792 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6793 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6794 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6795 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6796 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6797 IEM_MC_REF_EFLAGS(pEFlags);
6798 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6799 IEM_MC_ADVANCE_RIP();
6800 IEM_MC_END();
6801 }
6802 else
6803 {
6804 /* memory */
6805 IEM_MC_BEGIN(3, 2);
6806 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6807 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6808 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6809 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6810
6811 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6812 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6813 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6814 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6815 IEM_MC_FETCH_EFLAGS(EFlags);
6816 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6817
6818 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6819 IEM_MC_COMMIT_EFLAGS(EFlags);
6820 IEM_MC_ADVANCE_RIP();
6821 IEM_MC_END();
6822 }
6823 return VINF_SUCCESS;
6824}
6825
6826
6827/**
6828 * @opcode 0xd3
6829 */
6830FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
6831{
6832 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6833 PCIEMOPSHIFTSIZES pImpl;
6834 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6835 {
6836 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
6837 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
6838 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
6839 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
6840 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
6841 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
6842 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
6843 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6844 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6845 }
6846 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6847
6848 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6849 {
6850 /* register */
6851 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6852 switch (pVCpu->iem.s.enmEffOpSize)
6853 {
6854 case IEMMODE_16BIT:
6855 IEM_MC_BEGIN(3, 0);
6856 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6857 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6858 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6859 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6860 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6861 IEM_MC_REF_EFLAGS(pEFlags);
6862 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6863 IEM_MC_ADVANCE_RIP();
6864 IEM_MC_END();
6865 return VINF_SUCCESS;
6866
6867 case IEMMODE_32BIT:
6868 IEM_MC_BEGIN(3, 0);
6869 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6870 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6871 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6872 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6873 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6874 IEM_MC_REF_EFLAGS(pEFlags);
6875 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6876 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6877 IEM_MC_ADVANCE_RIP();
6878 IEM_MC_END();
6879 return VINF_SUCCESS;
6880
6881 case IEMMODE_64BIT:
6882 IEM_MC_BEGIN(3, 0);
6883 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6884 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6885 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6886 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6887 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6888 IEM_MC_REF_EFLAGS(pEFlags);
6889 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6890 IEM_MC_ADVANCE_RIP();
6891 IEM_MC_END();
6892 return VINF_SUCCESS;
6893
6894 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6895 }
6896 }
6897 else
6898 {
6899 /* memory */
6900 switch (pVCpu->iem.s.enmEffOpSize)
6901 {
6902 case IEMMODE_16BIT:
6903 IEM_MC_BEGIN(3, 2);
6904 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6905 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6906 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6908
6909 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6910 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6911 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6912 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6913 IEM_MC_FETCH_EFLAGS(EFlags);
6914 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6915
6916 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6917 IEM_MC_COMMIT_EFLAGS(EFlags);
6918 IEM_MC_ADVANCE_RIP();
6919 IEM_MC_END();
6920 return VINF_SUCCESS;
6921
6922 case IEMMODE_32BIT:
6923 IEM_MC_BEGIN(3, 2);
6924 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6925 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6926 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6927 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6928
6929 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6930 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6931 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6932 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6933 IEM_MC_FETCH_EFLAGS(EFlags);
6934 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6935
6936 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6937 IEM_MC_COMMIT_EFLAGS(EFlags);
6938 IEM_MC_ADVANCE_RIP();
6939 IEM_MC_END();
6940 return VINF_SUCCESS;
6941
6942 case IEMMODE_64BIT:
6943 IEM_MC_BEGIN(3, 2);
6944 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6945 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6946 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6948
6949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6951 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
6952 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6953 IEM_MC_FETCH_EFLAGS(EFlags);
6954 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6955
6956 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6957 IEM_MC_COMMIT_EFLAGS(EFlags);
6958 IEM_MC_ADVANCE_RIP();
6959 IEM_MC_END();
6960 return VINF_SUCCESS;
6961
6962 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6963 }
6964 }
6965}
6966
6967/**
6968 * @opcode 0xd4
6969 */
6970FNIEMOP_DEF(iemOp_aam_Ib)
6971{
6972 IEMOP_MNEMONIC(aam_Ib, "aam Ib");
6973 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6975 IEMOP_HLP_NO_64BIT();
6976 if (!bImm)
6977 return IEMOP_RAISE_DIVIDE_ERROR();
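    /* aam splits AL into 'digits': AH = AL / bImm, AL = AL % bImm;
       e.g. d4 0a is the plain base-10 'aam' form. */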
6978 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
6979}
6980
6981
6982/**
6983 * @opcode 0xd5
6984 */
6985FNIEMOP_DEF(iemOp_aad_Ib)
6986{
6987 IEMOP_MNEMONIC(aad_Ib, "aad Ib");
6988 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6990 IEMOP_HLP_NO_64BIT();
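    /* aad recombines the 'digits': AL = (AL + AH * bImm) & 0xff, AH = 0;
       e.g. d5 0a is the plain base-10 'aad' form. */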
6991 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
6992}
6993
6994
6995/**
6996 * @opcode 0xd6
6997 */
6998FNIEMOP_DEF(iemOp_salc)
6999{
7000 IEMOP_MNEMONIC(salc, "salc");
7001     IEMOP_HLP_MIN_286(); /* (undocumented at the time) */
7003 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7004 IEMOP_HLP_NO_64BIT();
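    /* Undocumented single-byte 'set AL from carry' with no operand bytes:
       AL = CF ? 0xff : 0x00. */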
7005
7006 IEM_MC_BEGIN(0, 0);
7007 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
7008 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
7009 } IEM_MC_ELSE() {
7010 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
7011 } IEM_MC_ENDIF();
7012 IEM_MC_ADVANCE_RIP();
7013 IEM_MC_END();
7014 return VINF_SUCCESS;
7015}
7016
7017
7018/**
7019 * @opcode 0xd7
7020 */
7021FNIEMOP_DEF(iemOp_xlat)
7022{
7023 IEMOP_MNEMONIC(xlat, "xlat");
7024 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
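    /* xlat: AL = [iEffSeg : xBX + zero-extended AL]; the three cases below
       differ only in the width of the xBX-based effective address. */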
7025 switch (pVCpu->iem.s.enmEffAddrMode)
7026 {
7027 case IEMMODE_16BIT:
7028 IEM_MC_BEGIN(2, 0);
7029 IEM_MC_LOCAL(uint8_t, u8Tmp);
7030 IEM_MC_LOCAL(uint16_t, u16Addr);
7031 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
7032 IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
7033 IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
7034 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7035 IEM_MC_ADVANCE_RIP();
7036 IEM_MC_END();
7037 return VINF_SUCCESS;
7038
7039 case IEMMODE_32BIT:
7040 IEM_MC_BEGIN(2, 0);
7041 IEM_MC_LOCAL(uint8_t, u8Tmp);
7042 IEM_MC_LOCAL(uint32_t, u32Addr);
7043 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
7044 IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
7045 IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
7046 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7047 IEM_MC_ADVANCE_RIP();
7048 IEM_MC_END();
7049 return VINF_SUCCESS;
7050
7051 case IEMMODE_64BIT:
7052 IEM_MC_BEGIN(2, 0);
7053 IEM_MC_LOCAL(uint8_t, u8Tmp);
7054 IEM_MC_LOCAL(uint64_t, u64Addr);
7055 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
7056 IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
7057 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
7058 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
7059 IEM_MC_ADVANCE_RIP();
7060 IEM_MC_END();
7061 return VINF_SUCCESS;
7062
7063 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7064 }
7065}
7066
7067
7068/**
7069 * Common worker for FPU instructions working on ST0 and STn, and storing the
7070 * result in ST0.
7071 *
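 * @param   bRm         The ModRM byte; the RM field picks STn.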
7072 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7073 */
7074FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7075{
7076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7077
7078 IEM_MC_BEGIN(3, 1);
7079 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7080 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7081 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7082 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7083
7084 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7085 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7086 IEM_MC_PREPARE_FPU_USAGE();
7087 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7088 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7089 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7090 IEM_MC_ELSE()
7091 IEM_MC_FPU_STACK_UNDERFLOW(0);
7092 IEM_MC_ENDIF();
7093 IEM_MC_ADVANCE_RIP();
7094
7095 IEM_MC_END();
7096 return VINF_SUCCESS;
7097}
7098
7099
7100/**
7101 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7102 * flags.
7103 *
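 * @param   bRm         The ModRM byte; the RM field picks STn.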
7104 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7105 */
7106FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7107{
7108 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7109
7110 IEM_MC_BEGIN(3, 1);
7111 IEM_MC_LOCAL(uint16_t, u16Fsw);
7112 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7113 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7114 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7115
7116 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7117 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7118 IEM_MC_PREPARE_FPU_USAGE();
7119 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7120 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7121 IEM_MC_UPDATE_FSW(u16Fsw);
7122 IEM_MC_ELSE()
7123 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7124 IEM_MC_ENDIF();
7125 IEM_MC_ADVANCE_RIP();
7126
7127 IEM_MC_END();
7128 return VINF_SUCCESS;
7129}
7130
7131
7132/**
7133 * Common worker for FPU instructions working on ST0 and STn, only affecting
7134 * flags, and popping when done.
7135 *
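 * @param   bRm         The ModRM byte; the RM field picks STn.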
7136 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7137 */
7138FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7139{
7140 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7141
7142 IEM_MC_BEGIN(3, 1);
7143 IEM_MC_LOCAL(uint16_t, u16Fsw);
7144 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7145 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7146 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7147
7148 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7149 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7150 IEM_MC_PREPARE_FPU_USAGE();
7151 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7152 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7153 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7154 IEM_MC_ELSE()
7155 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7156 IEM_MC_ENDIF();
7157 IEM_MC_ADVANCE_RIP();
7158
7159 IEM_MC_END();
7160 return VINF_SUCCESS;
7161}
7162
7163
7164/** Opcode 0xd8 11/0. */
7165FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
7166{
7167 IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
7168 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
7169}
7170
7171
7172/** Opcode 0xd8 11/1. */
7173FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
7174{
7175 IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
7176 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
7177}
7178
7179
7180/** Opcode 0xd8 11/2. */
7181FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
7182{
7183 IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
7184 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
7185}
7186
7187
7188/** Opcode 0xd8 11/3. */
7189FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
7190{
7191 IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
7192 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
7193}
7194
7195
7196/** Opcode 0xd8 11/4. */
7197FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
7198{
7199 IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
7200 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
7201}
7202
7203
7204/** Opcode 0xd8 11/5. */
7205FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
7206{
7207 IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
7208 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
7209}
7210
7211
7212/** Opcode 0xd8 11/6. */
7213FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
7214{
7215 IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
7216 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
7217}
7218
7219
7220/** Opcode 0xd8 11/7. */
7221FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
7222{
7223 IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
7224 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
7225}
7226
7227
7228/**
7229 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7230 * the result in ST0.
7231 *
7232 * @param bRm The ModR/M byte; encodes the memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7233 */
7234FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7235{
7236 IEM_MC_BEGIN(3, 3);
7237 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7238 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7239 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7240 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7241 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7242 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7243
7244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7246
7247 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7248 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7249 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7250
7251 IEM_MC_PREPARE_FPU_USAGE();
7252 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7253 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7254 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7255 IEM_MC_ELSE()
7256 IEM_MC_FPU_STACK_UNDERFLOW(0);
7257 IEM_MC_ENDIF();
7258 IEM_MC_ADVANCE_RIP();
7259
7260 IEM_MC_END();
7261 return VINF_SUCCESS;
7262}
7263
7264
7265/** Opcode 0xd8 !11/0. */
7266FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
7267{
7268 IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
7269 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
7270}
7271
7272
7273/** Opcode 0xd8 !11/1. */
7274FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
7275{
7276 IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
7277 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
7278}
7279
7280
7281/** Opcode 0xd8 !11/2. */
7282FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
7283{
7284 IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");
7285
7286 IEM_MC_BEGIN(3, 3);
7287 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7288 IEM_MC_LOCAL(uint16_t, u16Fsw);
7289 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7290 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7291 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7292 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7293
7294 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7296
7297 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7298 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7299 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7300
7301 IEM_MC_PREPARE_FPU_USAGE();
7302 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7303 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
7304 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7305 IEM_MC_ELSE()
7306 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7307 IEM_MC_ENDIF();
7308 IEM_MC_ADVANCE_RIP();
7309
7310 IEM_MC_END();
7311 return VINF_SUCCESS;
7312}
7313
7314
7315/** Opcode 0xd8 !11/3. */
7316FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
7317{
7318 IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");
7319
7320 IEM_MC_BEGIN(3, 3);
7321 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7322 IEM_MC_LOCAL(uint16_t, u16Fsw);
7323 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7324 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7325 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7326 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7327
7328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7330
7331 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7332 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7333 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7334
7335 IEM_MC_PREPARE_FPU_USAGE();
7336 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7337 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
7338 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7339 IEM_MC_ELSE()
7340 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7341 IEM_MC_ENDIF();
7342 IEM_MC_ADVANCE_RIP();
7343
7344 IEM_MC_END();
7345 return VINF_SUCCESS;
7346}
7347
7348
7349/** Opcode 0xd8 !11/4. */
7350FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
7351{
7352 IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
7353 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
7354}
7355
7356
7357/** Opcode 0xd8 !11/5. */
7358FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
7359{
7360 IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
7361 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
7362}
7363
7364
7365/** Opcode 0xd8 !11/6. */
7366FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
7367{
7368 IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
7369 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
7370}
7371
7372
7373/** Opcode 0xd8 !11/7. */
7374FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
7375{
7376 IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
7377 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
7378}
7379
7380
7381/**
7382 * @opcode 0xd8
7383 */
7384FNIEMOP_DEF(iemOp_EscF0)
7385{
7386 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7387 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7388
7389 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7390 {
7391 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7392 {
7393 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7394 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7395 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7396 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7397 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7398 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7399 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7400 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7402 }
7403 }
7404 else
7405 {
7406 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7407 {
7408 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7409 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7410 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7411 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7412 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7413 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7414 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7415 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7417 }
7418 }
7419}
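
/*
 * Decode sketch for the dispatch above (standard ModR/M field layout):
 *   mod = (bRm >> 6) & 3;   reg = (bRm >> 3) & 7;   rm = bRm & 7;
 * With mod == 3 the reg field selects the operation and rm selects ST(i),
 * e.g. bRm = 0xc1 decodes as 'fadd st0,st1'. Any other mod value takes the
 * memory form, where IEM_MC_CALC_RM_EFF_ADDR fetches the remaining
 * SIB/displacement bytes and computes the effective address.
 */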
7420
7421
7422/** Opcode 0xd9 !11/0 mem32real
7423 * @sa iemOp_fld_m64r */
7424FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
7425{
7426 IEMOP_MNEMONIC(fld_m32r, "fld m32r");
7427
7428 IEM_MC_BEGIN(2, 3);
7429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7430 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7431 IEM_MC_LOCAL(RTFLOAT32U, r32Val);
7432 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7433 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);
7434
7435 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7436 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7437
7438 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7439 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7440 IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7441
7442 IEM_MC_PREPARE_FPU_USAGE();
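    /* A push decrements TOP first, so the register that becomes the new ST(0)
       is the current ST(7); the load only proceeds when it is empty. */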
7443 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7444 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
7445 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7446 IEM_MC_ELSE()
7447 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7448 IEM_MC_ENDIF();
7449 IEM_MC_ADVANCE_RIP();
7450
7451 IEM_MC_END();
7452 return VINF_SUCCESS;
7453}
7454
7455
7456/** Opcode 0xd9 !11/2 mem32real */
7457FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
7458{
7459 IEMOP_MNEMONIC(fst_m32r, "fst m32r");
7460 IEM_MC_BEGIN(3, 2);
7461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7462 IEM_MC_LOCAL(uint16_t, u16Fsw);
7463 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7464 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
7465 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
7466
7467 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7468 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7469 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7470 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7471
7472 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
7473 IEM_MC_PREPARE_FPU_USAGE();
7474 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7475 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
7476 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
7477 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7478 IEM_MC_ELSE()
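        /* ST(0) is empty: with FCW.IM masked we store the real-indefinite QNaN
           and commit; otherwise memory is left untouched and the stack
           underflow helper arranges for #MF. */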
7479 IEM_MC_IF_FCW_IM()
7480 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
7481 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
7482 IEM_MC_ENDIF();
7483 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7484 IEM_MC_ENDIF();
7485 IEM_MC_ADVANCE_RIP();
7486
7487 IEM_MC_END();
7488 return VINF_SUCCESS;
7489}
7490
7491
7492/** Opcode 0xd9 !11/3 */
7493FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
7494{
7495 IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
7496 IEM_MC_BEGIN(3, 2);
7497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7498 IEM_MC_LOCAL(uint16_t, u16Fsw);
7499 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7500 IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
7501 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
7502
7503 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7504 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7505 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7506 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7507
7508 IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
7509 IEM_MC_PREPARE_FPU_USAGE();
7510 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7511 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
7512 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
7513 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7514 IEM_MC_ELSE()
7515 IEM_MC_IF_FCW_IM()
7516 IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
7517 IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
7518 IEM_MC_ENDIF();
7519 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
7520 IEM_MC_ENDIF();
7521 IEM_MC_ADVANCE_RIP();
7522
7523 IEM_MC_END();
7524 return VINF_SUCCESS;
7525}
7526
7527
7528/** Opcode 0xd9 !11/4 */
7529FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
7530{
7531 IEMOP_MNEMONIC(fldenv, "fldenv m14/m28byte");
7532 IEM_MC_BEGIN(3, 0);
7533 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7534 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7535 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
7536 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7537 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7538 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7539 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7540 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7541 IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
7542 IEM_MC_END();
7543 return VINF_SUCCESS;
7544}
7545
7546
7547/** Opcode 0xd9 !11/5 */
7548FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7549{
7550 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7551 IEM_MC_BEGIN(1, 1);
7552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7553 IEM_MC_ARG(uint16_t, u16Fcw, 0);
7554 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7556 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7557 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7558 IEM_MC_FETCH_MEM_U16(u16Fcw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7559 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fcw);
7560 IEM_MC_END();
7561 return VINF_SUCCESS;
7562}
7563
7564
7565/** Opcode 0xd9 !11/6 */
7566FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
7567{
7568 IEMOP_MNEMONIC(fnstenv, "fnstenv m14/m28byte");
7569 IEM_MC_BEGIN(3, 0);
7570 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
7571 IEM_MC_ARG(uint8_t, iEffSeg, 1);
7572 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
7573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7575 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7576 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7577 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
7578 IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
7579 IEM_MC_END();
7580 return VINF_SUCCESS;
7581}
7582
7583
7584/** Opcode 0xd9 !11/7 */
7585FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
7586{
7587 IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
7588 IEM_MC_BEGIN(2, 0);
7589 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
7590 IEM_MC_LOCAL(uint16_t, u16Fcw);
7591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
7592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7593 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7594 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
7595 IEM_MC_FETCH_FCW(u16Fcw);
7596 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
7597 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
7598 IEM_MC_END();
7599 return VINF_SUCCESS;
7600}
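
/*
 * Note! The WAIT-prefixed forms (fstcw/fstenv) are simply a 9Bh FWAIT
 * instruction followed by these opcodes; IEM decodes that prefix as a
 * separate instruction, so only the no-wait forms appear here.
 */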
7601
7602
7603/** Opcode 0xd9 0xd0. */
7604FNIEMOP_DEF(iemOp_fnop)
7605{
7606 IEMOP_MNEMONIC(fnop, "fnop");
7607 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7608
7609 IEM_MC_BEGIN(0, 0);
7610 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7611 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7612 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7613 /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
7614 * intel optimizations. Investigate. */
7615 IEM_MC_UPDATE_FPU_OPCODE_IP();
7616 IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined; we leave them unmodified. */
7617 IEM_MC_END();
7618 return VINF_SUCCESS;
7619}
7620
7621
7622/** Opcode 0xd9 11/0 stN */
7623FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
7624{
7625 IEMOP_MNEMONIC(fld_stN, "fld stN");
7626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7627
7628 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation
7629 * doesn't mention it; AMD's indicates that it does. */
7630 IEM_MC_BEGIN(0, 2);
7631 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
7632 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7633 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7634 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7635
7636 IEM_MC_PREPARE_FPU_USAGE();
7637 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
7638 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
7639 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7640 IEM_MC_ELSE()
7641 IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
7642 IEM_MC_ENDIF();
7643
7644 IEM_MC_ADVANCE_RIP();
7645 IEM_MC_END();
7646
7647 return VINF_SUCCESS;
7648}
7649
7650
7651/** Opcode 0xd9 11/3 stN */
7652FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
7653{
7654 IEMOP_MNEMONIC(fxch_stN, "fxch stN");
7655 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7656
7657 /** @todo Testcase: Check whether this raises \#MF. Intel's documentation
7658 * doesn't mention it; AMD's indicates that it does. */
7659 IEM_MC_BEGIN(1, 3);
7660 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
7661 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
7662 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7663 IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
7664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7666
7667 IEM_MC_PREPARE_FPU_USAGE();
7668 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7669 IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
7670 IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
7671 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7672 IEM_MC_ELSE()
7673 IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
7674 IEM_MC_ENDIF();
7675
7676 IEM_MC_ADVANCE_RIP();
7677 IEM_MC_END();
7678
7679 return VINF_SUCCESS;
7680}
7681
7682
7683/** Opcode 0xd9 11/4, 0xdd 11/2. */
7684FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
7685{
7686 IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
7687 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7688
7689 /* fstp st0, st0 is frequently used as an officially documented alternative to 'ffreep st0'. */
7690 uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
7691 if (!iDstReg)
7692 {
7693 IEM_MC_BEGIN(0, 1);
7694 IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
7695 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7696 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7697
7698 IEM_MC_PREPARE_FPU_USAGE();
7699 IEM_MC_IF_FPUREG_NOT_EMPTY(0)
7700 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7701 IEM_MC_ELSE()
7702 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
7703 IEM_MC_ENDIF();
7704
7705 IEM_MC_ADVANCE_RIP();
7706 IEM_MC_END();
7707 }
7708 else
7709 {
7710 IEM_MC_BEGIN(0, 2);
7711 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
7712 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7713 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7714 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7715
7716 IEM_MC_PREPARE_FPU_USAGE();
7717 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7718 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
7719 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
7720 IEM_MC_ELSE()
7721 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
7722 IEM_MC_ENDIF();
7723
7724 IEM_MC_ADVANCE_RIP();
7725 IEM_MC_END();
7726 }
7727 return VINF_SUCCESS;
7728}
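
/*
 * Note! The iDstReg == 0 path above stores nothing (copying ST(0) onto
 * itself would be pointless); it just updates FSW and pops, giving the
 * 'ffreep st0' effect mentioned above.
 */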
7729
7730
7731/**
7732 * Common worker for FPU instructions working on ST0, replacing it with the
7733 * result, i.e. unary operators.
7734 *
7735 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7736 */
7737FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7738{
7739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7740
7741 IEM_MC_BEGIN(2, 1);
7742 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7743 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7744 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7745
7746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7748 IEM_MC_PREPARE_FPU_USAGE();
7749 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7750 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7751 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7752 IEM_MC_ELSE()
7753 IEM_MC_FPU_STACK_UNDERFLOW(0);
7754 IEM_MC_ENDIF();
7755 IEM_MC_ADVANCE_RIP();
7756
7757 IEM_MC_END();
7758 return VINF_SUCCESS;
7759}
7760
7761
7762/** Opcode 0xd9 0xe0. */
7763FNIEMOP_DEF(iemOp_fchs)
7764{
7765 IEMOP_MNEMONIC(fchs_st0, "fchs st0");
7766 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
7767}
7768
7769
7770/** Opcode 0xd9 0xe1. */
7771FNIEMOP_DEF(iemOp_fabs)
7772{
7773 IEMOP_MNEMONIC(fabs_st0, "fabs st0");
7774 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
7775}
7776
7777
7778/**
7779 * Common worker for FPU instructions working on ST0 and only returning FSW.
7780 *
7781 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7782 */
7783FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7784{
7785 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7786
7787 IEM_MC_BEGIN(2, 1);
7788 IEM_MC_LOCAL(uint16_t, u16Fsw);
7789 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7790 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7791
7792 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7793 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7794 IEM_MC_PREPARE_FPU_USAGE();
7795 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7796 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7797 IEM_MC_UPDATE_FSW(u16Fsw);
7798 IEM_MC_ELSE()
7799 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7800 IEM_MC_ENDIF();
7801 IEM_MC_ADVANCE_RIP();
7802
7803 IEM_MC_END();
7804 return VINF_SUCCESS;
7805}
7806
7807
7808/** Opcode 0xd9 0xe4. */
7809FNIEMOP_DEF(iemOp_ftst)
7810{
7811 IEMOP_MNEMONIC(ftst_st0, "ftst st0");
7812 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
7813}
7814
7815
7816/** Opcode 0xd9 0xe5. */
7817FNIEMOP_DEF(iemOp_fxam)
7818{
7819 IEMOP_MNEMONIC(fxam_st0, "fxam st0");
7820 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
7821}
7822
7823
7824/**
7825 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7826 *
7827 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7828 */
7829FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7830{
7831 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7832
7833 IEM_MC_BEGIN(1, 1);
7834 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7835 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7836
7837 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7838 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7839 IEM_MC_PREPARE_FPU_USAGE();
7840 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7841 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7842 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7843 IEM_MC_ELSE()
7844 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7845 IEM_MC_ENDIF();
7846 IEM_MC_ADVANCE_RIP();
7847
7848 IEM_MC_END();
7849 return VINF_SUCCESS;
7850}
7851
7852
7853/** Opcode 0xd9 0xe8. */
7854FNIEMOP_DEF(iemOp_fld1)
7855{
7856 IEMOP_MNEMONIC(fld1, "fld1");
7857 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
7858}
7859
7860
7861/** Opcode 0xd9 0xe9. */
7862FNIEMOP_DEF(iemOp_fldl2t)
7863{
7864 IEMOP_MNEMONIC(fldl2t, "fldl2t");
7865 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
7866}
7867
7868
7869/** Opcode 0xd9 0xea. */
7870FNIEMOP_DEF(iemOp_fldl2e)
7871{
7872 IEMOP_MNEMONIC(fldl2e, "fldl2e");
7873 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
7874}
7875
7876/** Opcode 0xd9 0xeb. */
7877FNIEMOP_DEF(iemOp_fldpi)
7878{
7879 IEMOP_MNEMONIC(fldpi, "fldpi");
7880 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
7881}
7882
7883
7884/** Opcode 0xd9 0xec. */
7885FNIEMOP_DEF(iemOp_fldlg2)
7886{
7887 IEMOP_MNEMONIC(fldlg2, "fldlg2");
7888 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
7889}
7890
7891/** Opcode 0xd9 0xed. */
7892FNIEMOP_DEF(iemOp_fldln2)
7893{
7894 IEMOP_MNEMONIC(fldln2, "fldln2");
7895 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
7896}
7897
7898
7899/** Opcode 0xd9 0xee. */
7900FNIEMOP_DEF(iemOp_fldz)
7901{
7902 IEMOP_MNEMONIC(fldz, "fldz");
7903 return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
7904}
7905
7906
7907/** Opcode 0xd9 0xf0. */
7908FNIEMOP_DEF(iemOp_f2xm1)
7909{
7910 IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
7911 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
7912}
7913
7914
7915/**
7916 * Common worker for FPU instructions working on STn and ST0, storing the result
7917 * in STn, and popping the stack unless IE, DE or ZE was raised.
7918 *
7919 * @param bRm The ModR/M byte; the R/M field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7920 */
7921FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7922{
7923 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7924
7925 IEM_MC_BEGIN(3, 1);
7926 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7927 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7928 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7929 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7930
7931 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7932 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7933
7934 IEM_MC_PREPARE_FPU_USAGE();
7935 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7936 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7937 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7938 IEM_MC_ELSE()
7939 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7940 IEM_MC_ENDIF();
7941 IEM_MC_ADVANCE_RIP();
7942
7943 IEM_MC_END();
7944 return VINF_SUCCESS;
7945}
7946
7947
7948/** Opcode 0xd9 0xf1. */
7949FNIEMOP_DEF(iemOp_fyl2x)
7950{
7951 IEMOP_MNEMONIC(fyl2x_st1_st0, "fyl2x st1,st0");
7952 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
7953}
7954
7955
7956/**
7957 * Common worker for FPU instructions working on ST0 and having two outputs, one
7958 * replacing ST0 and one pushed onto the stack.
7959 *
7960 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7961 */
7962FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7963{
7964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7965
7966 IEM_MC_BEGIN(2, 1);
7967 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7968 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7969 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7970
7971 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7972 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7973 IEM_MC_PREPARE_FPU_USAGE();
7974 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7975 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7976 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7977 IEM_MC_ELSE()
7978 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7979 IEM_MC_ENDIF();
7980 IEM_MC_ADVANCE_RIP();
7981
7982 IEM_MC_END();
7983 return VINF_SUCCESS;
7984}
7985
7986
7987/** Opcode 0xd9 0xf2. */
7988FNIEMOP_DEF(iemOp_fptan)
7989{
7990 IEMOP_MNEMONIC(fptan_st0, "fptan st0");
7991 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
7992}
7993
7994
7995/** Opcode 0xd9 0xf3. */
7996FNIEMOP_DEF(iemOp_fpatan)
7997{
7998 IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
7999 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
8000}
8001
8002
8003/** Opcode 0xd9 0xf4. */
8004FNIEMOP_DEF(iemOp_fxtract)
8005{
8006 IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
8007 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
8008}
8009
8010
8011/** Opcode 0xd9 0xf5. */
8012FNIEMOP_DEF(iemOp_fprem1)
8013{
8014 IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
8015 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
8016}
8017
8018
8019/** Opcode 0xd9 0xf6. */
8020FNIEMOP_DEF(iemOp_fdecstp)
8021{
8022 IEMOP_MNEMONIC(fdecstp, "fdecstp");
8023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8024 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
8025 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
8026 * FINCSTP and FDECSTP. */
8027
8028 IEM_MC_BEGIN(0,0);
8029
8030 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8031 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8032
8033 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8034 IEM_MC_FPU_STACK_DEC_TOP();
8035 IEM_MC_UPDATE_FSW_CONST(0);
8036
8037 IEM_MC_ADVANCE_RIP();
8038 IEM_MC_END();
8039 return VINF_SUCCESS;
8040}
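
/*
 * Note! FDECSTP and FINCSTP only rotate FSW.TOP (modulo 8); no tag checks
 * are performed and no registers are moved, which is why these workers
 * merely adjust TOP and clear the condition codes.
 */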
8041
8042
8043/** Opcode 0xd9 0xf7. */
8044FNIEMOP_DEF(iemOp_fincstp)
8045{
8046 IEMOP_MNEMONIC(fincstp, "fincstp");
8047 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8048 /* Note! C0, C2 and C3 are documented as undefined; we clear them. */
8049 /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
8050 * FINCSTP and FDECSTP. */
8051
8052 IEM_MC_BEGIN(0,0);
8053
8054 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8055 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8056
8057 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8058 IEM_MC_FPU_STACK_INC_TOP();
8059 IEM_MC_UPDATE_FSW_CONST(0);
8060
8061 IEM_MC_ADVANCE_RIP();
8062 IEM_MC_END();
8063 return VINF_SUCCESS;
8064}
8065
8066
8067/** Opcode 0xd9 0xf8. */
8068FNIEMOP_DEF(iemOp_fprem)
8069{
8070 IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
8071 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
8072}
8073
8074
8075/** Opcode 0xd9 0xf9. */
8076FNIEMOP_DEF(iemOp_fyl2xp1)
8077{
8078 IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
8079 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
8080}
8081
8082
8083/** Opcode 0xd9 0xfa. */
8084FNIEMOP_DEF(iemOp_fsqrt)
8085{
8086 IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
8087 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
8088}
8089
8090
8091/** Opcode 0xd9 0xfb. */
8092FNIEMOP_DEF(iemOp_fsincos)
8093{
8094 IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
8095 return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
8096}
8097
8098
8099/** Opcode 0xd9 0xfc. */
8100FNIEMOP_DEF(iemOp_frndint)
8101{
8102 IEMOP_MNEMONIC(frndint_st0, "frndint st0");
8103 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
8104}
8105
8106
8107/** Opcode 0xd9 0xfd. */
8108FNIEMOP_DEF(iemOp_fscale)
8109{
8110 IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
8111 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
8112}
8113
8114
8115/** Opcode 0xd9 0xfe. */
8116FNIEMOP_DEF(iemOp_fsin)
8117{
8118 IEMOP_MNEMONIC(fsin_st0, "fsin st0");
8119 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
8120}
8121
8122
8123/** Opcode 0xd9 0xff. */
8124FNIEMOP_DEF(iemOp_fcos)
8125{
8126 IEMOP_MNEMONIC(fcos_st0, "fcos st0");
8127 return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
8128}
8129
8130
8131/** Used by iemOp_EscF1. */
8132IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
8133{
8134 /* 0xe0 */ iemOp_fchs,
8135 /* 0xe1 */ iemOp_fabs,
8136 /* 0xe2 */ iemOp_Invalid,
8137 /* 0xe3 */ iemOp_Invalid,
8138 /* 0xe4 */ iemOp_ftst,
8139 /* 0xe5 */ iemOp_fxam,
8140 /* 0xe6 */ iemOp_Invalid,
8141 /* 0xe7 */ iemOp_Invalid,
8142 /* 0xe8 */ iemOp_fld1,
8143 /* 0xe9 */ iemOp_fldl2t,
8144 /* 0xea */ iemOp_fldl2e,
8145 /* 0xeb */ iemOp_fldpi,
8146 /* 0xec */ iemOp_fldlg2,
8147 /* 0xed */ iemOp_fldln2,
8148 /* 0xee */ iemOp_fldz,
8149 /* 0xef */ iemOp_Invalid,
8150 /* 0xf0 */ iemOp_f2xm1,
8151 /* 0xf1 */ iemOp_fyl2x,
8152 /* 0xf2 */ iemOp_fptan,
8153 /* 0xf3 */ iemOp_fpatan,
8154 /* 0xf4 */ iemOp_fxtract,
8155 /* 0xf5 */ iemOp_fprem1,
8156 /* 0xf6 */ iemOp_fdecstp,
8157 /* 0xf7 */ iemOp_fincstp,
8158 /* 0xf8 */ iemOp_fprem,
8159 /* 0xf9 */ iemOp_fyl2xp1,
8160 /* 0xfa */ iemOp_fsqrt,
8161 /* 0xfb */ iemOp_fsincos,
8162 /* 0xfc */ iemOp_frndint,
8163 /* 0xfd */ iemOp_fscale,
8164 /* 0xfe */ iemOp_fsin,
8165 /* 0xff */ iemOp_fcos
8166};
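
/*
 * Decode example: for mod == 3 with reg 4..7 the dispatcher below indexes
 * this table with bRm - 0xe0, e.g. bRm = 0xee yields index 0x0e and thus
 * iemOp_fldz.
 */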
8167
8168
8169/**
8170 * @opcode 0xd9
8171 */
8172FNIEMOP_DEF(iemOp_EscF1)
8173{
8174 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8175 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8176
8177 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8178 {
8179 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8180 {
8181 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8182 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8183 case 2:
8184 if (bRm == 0xd0)
8185 return FNIEMOP_CALL(iemOp_fnop);
8186 return IEMOP_RAISE_INVALID_OPCODE();
8187 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8188 case 4:
8189 case 5:
8190 case 6:
8191 case 7:
8192 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8193 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8194 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8195 }
8196 }
8197 else
8198 {
8199 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8200 {
8201 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8202 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8203 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8204 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8205 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8206 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8207 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8208 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8209 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8210 }
8211 }
8212}
8213
8214
8215/** Opcode 0xda 11/0. */
8216FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
8217{
8218 IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
8219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8220
8221 IEM_MC_BEGIN(0, 1);
8222 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8223
8224 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8225 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8226
8227 IEM_MC_PREPARE_FPU_USAGE();
8228 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8229 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
8230 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8231 IEM_MC_ENDIF();
8232 IEM_MC_UPDATE_FPU_OPCODE_IP();
8233 IEM_MC_ELSE()
8234 IEM_MC_FPU_STACK_UNDERFLOW(0);
8235 IEM_MC_ENDIF();
8236 IEM_MC_ADVANCE_RIP();
8237
8238 IEM_MC_END();
8239 return VINF_SUCCESS;
8240}
8241
8242
8243/** Opcode 0xda 11/1. */
8244FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
8245{
8246 IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
8247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8248
8249 IEM_MC_BEGIN(0, 1);
8250 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8251
8252 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8253 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8254
8255 IEM_MC_PREPARE_FPU_USAGE();
8256 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8257 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
8258 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8259 IEM_MC_ENDIF();
8260 IEM_MC_UPDATE_FPU_OPCODE_IP();
8261 IEM_MC_ELSE()
8262 IEM_MC_FPU_STACK_UNDERFLOW(0);
8263 IEM_MC_ENDIF();
8264 IEM_MC_ADVANCE_RIP();
8265
8266 IEM_MC_END();
8267 return VINF_SUCCESS;
8268}
8269
8270
8271/** Opcode 0xda 11/2. */
8272FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
8273{
8274 IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
8275 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8276
8277 IEM_MC_BEGIN(0, 1);
8278 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8279
8280 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8281 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8282
8283 IEM_MC_PREPARE_FPU_USAGE();
8284 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8285 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
8286 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8287 IEM_MC_ENDIF();
8288 IEM_MC_UPDATE_FPU_OPCODE_IP();
8289 IEM_MC_ELSE()
8290 IEM_MC_FPU_STACK_UNDERFLOW(0);
8291 IEM_MC_ENDIF();
8292 IEM_MC_ADVANCE_RIP();
8293
8294 IEM_MC_END();
8295 return VINF_SUCCESS;
8296}
8297
8298
8299/** Opcode 0xda 11/3. */
8300FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
8301{
8302 IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
8303 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8304
8305 IEM_MC_BEGIN(0, 1);
8306 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8307
8308 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8309 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8310
8311 IEM_MC_PREPARE_FPU_USAGE();
8312 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8313 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
8314 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8315 IEM_MC_ENDIF();
8316 IEM_MC_UPDATE_FPU_OPCODE_IP();
8317 IEM_MC_ELSE()
8318 IEM_MC_FPU_STACK_UNDERFLOW(0);
8319 IEM_MC_ENDIF();
8320 IEM_MC_ADVANCE_RIP();
8321
8322 IEM_MC_END();
8323 return VINF_SUCCESS;
8324}
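
/*
 * Note! The four FCMOV predicates above match the integer CMOVcc tests on
 * the EFLAGS output of FCOMI/FUCOMI: B=CF, E=ZF, BE=CF|ZF and U=PF
 * (unordered). The 0xdb 11/0..11/3 forms implement the negated conditions.
 */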
8325
8326
8327/**
8328 * Common worker for FPU instructions working on ST0 and STn, only affecting
8329 * flags, and popping twice when done.
8330 *
8331 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8332 */
8333FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8334{
8335 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8336
8337 IEM_MC_BEGIN(3, 1);
8338 IEM_MC_LOCAL(uint16_t, u16Fsw);
8339 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8340 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8341 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8342
8343 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8344 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8345
8346 IEM_MC_PREPARE_FPU_USAGE();
8347 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8348 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8349 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8350 IEM_MC_ELSE()
8351 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8352 IEM_MC_ENDIF();
8353 IEM_MC_ADVANCE_RIP();
8354
8355 IEM_MC_END();
8356 return VINF_SUCCESS;
8357}
8358
8359
8360/** Opcode 0xda 0xe9. */
8361FNIEMOP_DEF(iemOp_fucompp)
8362{
8363 IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
8364 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
8365}
8366
8367
8368/**
8369 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8370 * the result in ST0.
8371 *
8372 * @param bRm The ModR/M byte; encodes the memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8373 */
8374FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8375{
8376 IEM_MC_BEGIN(3, 3);
8377 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8378 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8379 IEM_MC_LOCAL(int32_t, i32Val2);
8380 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8381 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8382 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8383
8384 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8386
8387 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8388 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8389 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8390
8391 IEM_MC_PREPARE_FPU_USAGE();
8392 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8393 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8394 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8395 IEM_MC_ELSE()
8396 IEM_MC_FPU_STACK_UNDERFLOW(0);
8397 IEM_MC_ENDIF();
8398 IEM_MC_ADVANCE_RIP();
8399
8400 IEM_MC_END();
8401 return VINF_SUCCESS;
8402}
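
/*
 * Note! The 32-bit operand is fetched as a signed integer; the conversion
 * to 80-bit real happens inside the assembly worker and is exact, so no
 * precision is lost before the operation.
 */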
8403
8404
8405/** Opcode 0xda !11/0. */
8406FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
8407{
8408 IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
8409 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
8410}
8411
8412
8413/** Opcode 0xda !11/1. */
8414FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
8415{
8416 IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
8417 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
8418}
8419
8420
8421/** Opcode 0xda !11/2. */
8422FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
8423{
8424 IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");
8425
8426 IEM_MC_BEGIN(3, 3);
8427 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8428 IEM_MC_LOCAL(uint16_t, u16Fsw);
8429 IEM_MC_LOCAL(int32_t, i32Val2);
8430 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8431 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8432 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8433
8434 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8435 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8436
8437 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8438 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8439 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8440
8441 IEM_MC_PREPARE_FPU_USAGE();
8442 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8443 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8444 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8445 IEM_MC_ELSE()
8446 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8447 IEM_MC_ENDIF();
8448 IEM_MC_ADVANCE_RIP();
8449
8450 IEM_MC_END();
8451 return VINF_SUCCESS;
8452}
8453
8454
8455/** Opcode 0xda !11/3. */
8456FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
8457{
8458 IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");
8459
8460 IEM_MC_BEGIN(3, 3);
8461 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8462 IEM_MC_LOCAL(uint16_t, u16Fsw);
8463 IEM_MC_LOCAL(int32_t, i32Val2);
8464 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8465 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8466 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8467
8468 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8470
8471 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8472 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8473 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8474
8475 IEM_MC_PREPARE_FPU_USAGE();
8476 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8477 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
8478 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8479 IEM_MC_ELSE()
8480 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8481 IEM_MC_ENDIF();
8482 IEM_MC_ADVANCE_RIP();
8483
8484 IEM_MC_END();
8485 return VINF_SUCCESS;
8486}
8487
8488
8489/** Opcode 0xda !11/4. */
8490FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
8491{
8492 IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
8493 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
8494}
8495
8496
8497/** Opcode 0xda !11/5. */
8498FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
8499{
8500 IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
8501 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
8502}
8503
8504
8505/** Opcode 0xda !11/6. */
8506FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
8507{
8508 IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
8509 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
8510}
8511
8512
8513/** Opcode 0xda !11/7. */
8514FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
8515{
8516 IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
8517 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
8518}
8519
8520
8521/**
8522 * @opcode 0xda
8523 */
8524FNIEMOP_DEF(iemOp_EscF2)
8525{
8526 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8527 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8528 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8529 {
8530 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8531 {
8532 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8533 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8534 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8535 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8536 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8537 case 5:
8538 if (bRm == 0xe9)
8539 return FNIEMOP_CALL(iemOp_fucompp);
8540 return IEMOP_RAISE_INVALID_OPCODE();
8541 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8542 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8543 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8544 }
8545 }
8546 else
8547 {
8548 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8549 {
8550 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8551 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8552 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8553 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8554 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8555 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8556 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8557 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8559 }
8560 }
8561}
8562
8563
8564/** Opcode 0xdb !11/0. */
8565FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
8566{
8567 IEMOP_MNEMONIC(fild_m32i, "fild m32i");
8568
8569 IEM_MC_BEGIN(2, 3);
8570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8571 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8572 IEM_MC_LOCAL(int32_t, i32Val);
8573 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8574 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);
8575
8576 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8577 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8578
8579 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8580 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8581 IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8582
8583 IEM_MC_PREPARE_FPU_USAGE();
8584 IEM_MC_IF_FPUREG_IS_EMPTY(7)
8585 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
8586 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8587 IEM_MC_ELSE()
8588 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8589 IEM_MC_ENDIF();
8590 IEM_MC_ADVANCE_RIP();
8591
8592 IEM_MC_END();
8593 return VINF_SUCCESS;
8594}
8595
8596
8597/** Opcode 0xdb !11/1. */
8598FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
8599{
8600 IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
8601 IEM_MC_BEGIN(3, 2);
8602 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8603 IEM_MC_LOCAL(uint16_t, u16Fsw);
8604 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8605 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8606 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8607
8608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8610 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8611 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8612
8613 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8614 IEM_MC_PREPARE_FPU_USAGE();
8615 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8616 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8617 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8618 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8619 IEM_MC_ELSE()
8620 IEM_MC_IF_FCW_IM()
8621 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8622 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8623 IEM_MC_ENDIF();
8624 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8625 IEM_MC_ENDIF();
8626 IEM_MC_ADVANCE_RIP();
8627
8628 IEM_MC_END();
8629 return VINF_SUCCESS;
8630}
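
/*
 * Note! FISTTP (introduced with SSE3) always converts with truncation
 * toward zero regardless of FCW.RC, unlike FIST/FISTP below which honour
 * the rounding control; on a masked invalid operation the integer
 * indefinite (INT32_MIN) is written instead.
 */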
8631
8632
8633/** Opcode 0xdb !11/2. */
8634FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
8635{
8636 IEMOP_MNEMONIC(fist_m32i, "fist m32i");
8637 IEM_MC_BEGIN(3, 2);
8638 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8639 IEM_MC_LOCAL(uint16_t, u16Fsw);
8640 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8641 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8642 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8643
8644 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8646 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8647 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8648
8649 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8650 IEM_MC_PREPARE_FPU_USAGE();
8651 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8652 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8653 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8654 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8655 IEM_MC_ELSE()
8656 IEM_MC_IF_FCW_IM()
8657 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8658 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8659 IEM_MC_ENDIF();
8660 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8661 IEM_MC_ENDIF();
8662 IEM_MC_ADVANCE_RIP();
8663
8664 IEM_MC_END();
8665 return VINF_SUCCESS;
8666}
8667
8668
8669/** Opcode 0xdb !11/3. */
8670FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
8671{
8672 IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
8673 IEM_MC_BEGIN(3, 2);
8674 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8675 IEM_MC_LOCAL(uint16_t, u16Fsw);
8676 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8677 IEM_MC_ARG(int32_t *, pi32Dst, 1);
8678 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8679
8680 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8682 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8683 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8684
8685 IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8686 IEM_MC_PREPARE_FPU_USAGE();
8687 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8688 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
8689 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
8690 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8691 IEM_MC_ELSE()
8692 IEM_MC_IF_FCW_IM()
8693 IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
8694 IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
8695 IEM_MC_ENDIF();
8696 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8697 IEM_MC_ENDIF();
8698 IEM_MC_ADVANCE_RIP();
8699
8700 IEM_MC_END();
8701 return VINF_SUCCESS;
8702}
8703
8704
8705/** Opcode 0xdb !11/5. */
8706FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
8707{
8708 IEMOP_MNEMONIC(fld_m80r, "fld m80r");
8709
8710 IEM_MC_BEGIN(2, 3);
8711 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8712 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8713 IEM_MC_LOCAL(RTFLOAT80U, r80Val);
8714 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8715 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);
8716
8717 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8718 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8719
8720 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8721 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8722 IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8723
8724 IEM_MC_PREPARE_FPU_USAGE();
8725 IEM_MC_IF_FPUREG_IS_EMPTY(7)
8726 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
8727 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8728 IEM_MC_ELSE()
8729 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8730 IEM_MC_ENDIF();
8731 IEM_MC_ADVANCE_RIP();
8732
8733 IEM_MC_END();
8734 return VINF_SUCCESS;
8735}
8736
8737
8738/** Opcode 0xdb !11/7. */
8739FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
8740{
8741 IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
8742 IEM_MC_BEGIN(3, 2);
8743 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
8744 IEM_MC_LOCAL(uint16_t, u16Fsw);
8745 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8746 IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
8747 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
8748
8749 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
8750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8751 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8752 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8753
8754 IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
8755 IEM_MC_PREPARE_FPU_USAGE();
8756 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
8757 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
8758 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
8759 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8760 IEM_MC_ELSE()
8761 IEM_MC_IF_FCW_IM()
8762 IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
8763 IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
8764 IEM_MC_ENDIF();
8765 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
8766 IEM_MC_ENDIF();
8767 IEM_MC_ADVANCE_RIP();
8768
8769 IEM_MC_END();
8770 return VINF_SUCCESS;
8771}
8772
8773
8774/** Opcode 0xdb 11/0. */
8775FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
8776{
8777 IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
8778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8779
8780 IEM_MC_BEGIN(0, 1);
8781 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8782
8783 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8784 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8785
8786 IEM_MC_PREPARE_FPU_USAGE();
8787 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8788 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
8789 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8790 IEM_MC_ENDIF();
8791 IEM_MC_UPDATE_FPU_OPCODE_IP();
8792 IEM_MC_ELSE()
8793 IEM_MC_FPU_STACK_UNDERFLOW(0);
8794 IEM_MC_ENDIF();
8795 IEM_MC_ADVANCE_RIP();
8796
8797 IEM_MC_END();
8798 return VINF_SUCCESS;
8799}
8800
8801
8802/** Opcode 0xdb 11/1. */
8803FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
8804{
8805 IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
8806 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8807
8808 IEM_MC_BEGIN(0, 1);
8809 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8810
8811 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8812 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8813
8814 IEM_MC_PREPARE_FPU_USAGE();
8815 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8816 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
8817 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8818 IEM_MC_ENDIF();
8819 IEM_MC_UPDATE_FPU_OPCODE_IP();
8820 IEM_MC_ELSE()
8821 IEM_MC_FPU_STACK_UNDERFLOW(0);
8822 IEM_MC_ENDIF();
8823 IEM_MC_ADVANCE_RIP();
8824
8825 IEM_MC_END();
8826 return VINF_SUCCESS;
8827}
8828
8829
8830/** Opcode 0xdb 11/2. */
8831FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
8832{
8833 IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
8834 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8835
8836 IEM_MC_BEGIN(0, 1);
8837 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8838
8839 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8840 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8841
8842 IEM_MC_PREPARE_FPU_USAGE();
8843 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8844 IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
8845 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8846 IEM_MC_ENDIF();
8847 IEM_MC_UPDATE_FPU_OPCODE_IP();
8848 IEM_MC_ELSE()
8849 IEM_MC_FPU_STACK_UNDERFLOW(0);
8850 IEM_MC_ENDIF();
8851 IEM_MC_ADVANCE_RIP();
8852
8853 IEM_MC_END();
8854 return VINF_SUCCESS;
8855}
8856
8857
8858/** Opcode 0xdb 11/3. */
8859FNIEMOP_DEF_1(iemOp_fcmovnu_stN, uint8_t, bRm)
8860{
8861 IEMOP_MNEMONIC(fcmovnu_st0_stN, "fcmovnu st0,stN");
8862 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8863
8864 IEM_MC_BEGIN(0, 1);
8865 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
8866
8867 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8868 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8869
8870 IEM_MC_PREPARE_FPU_USAGE();
8871 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
8872 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
8873 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
8874 IEM_MC_ENDIF();
8875 IEM_MC_UPDATE_FPU_OPCODE_IP();
8876 IEM_MC_ELSE()
8877 IEM_MC_FPU_STACK_UNDERFLOW(0);
8878 IEM_MC_ENDIF();
8879 IEM_MC_ADVANCE_RIP();
8880
8881 IEM_MC_END();
8882 return VINF_SUCCESS;
8883}
8884
8885
8886/** Opcode 0xdb 0xe0. */
8887FNIEMOP_DEF(iemOp_fneni)
8888{
8889 IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
8890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8891 IEM_MC_BEGIN(0,0);
8892 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8893 IEM_MC_ADVANCE_RIP();
8894 IEM_MC_END();
8895 return VINF_SUCCESS;
8896}
8897
8898
8899/** Opcode 0xdb 0xe1. */
8900FNIEMOP_DEF(iemOp_fndisi)
8901{
8902 IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
8903 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8904 IEM_MC_BEGIN(0,0);
8905 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8906 IEM_MC_ADVANCE_RIP();
8907 IEM_MC_END();
8908 return VINF_SUCCESS;
8909}
8910
8911
8912/** Opcode 0xdb 0xe2. */
8913FNIEMOP_DEF(iemOp_fnclex)
8914{
8915 IEMOP_MNEMONIC(fnclex, "fnclex");
8916 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8917
8918 IEM_MC_BEGIN(0,0);
8919 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8920 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
8921 IEM_MC_CLEAR_FSW_EX();
8922 IEM_MC_ADVANCE_RIP();
8923 IEM_MC_END();
8924 return VINF_SUCCESS;
8925}
8926
8927
8928/** Opcode 0xdb 0xe3. */
8929FNIEMOP_DEF(iemOp_fninit)
8930{
8931 IEMOP_MNEMONIC(fninit, "fninit");
8932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8933 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
8934}
8935
8936
8937/** Opcode 0xdb 0xe4. */
8938FNIEMOP_DEF(iemOp_fnsetpm)
8939{
8940 IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
8941 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8942 IEM_MC_BEGIN(0,0);
8943 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8944 IEM_MC_ADVANCE_RIP();
8945 IEM_MC_END();
8946 return VINF_SUCCESS;
8947}
8948
8949
8950/** Opcode 0xdb 0xe5. */
8951FNIEMOP_DEF(iemOp_frstpm)
8952{
8953 IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
8954#if 0 /* #UDs on newer CPUs */
8955 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8956 IEM_MC_BEGIN(0,0);
8957 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8958 IEM_MC_ADVANCE_RIP();
8959 IEM_MC_END();
8960 return VINF_SUCCESS;
8961#else
8962 return IEMOP_RAISE_INVALID_OPCODE();
8963#endif
8964}
8965
8966
8967/** Opcode 0xdb 11/5. */
8968FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
8969{
8970 IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
8971 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
8972}
8973
8974
8975/** Opcode 0xdb 11/6. */
8976FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
8977{
8978 IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
8979 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
8980}
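
/*
 * Note! FCOMI/FUCOMI report the comparison result in EFLAGS.ZF/PF/CF
 * instead of the FSW condition codes, hence both defer to a C
 * implementation that can update the guest EFLAGS. They differ only in
 * NaN handling: FCOMI raises #IA on any NaN operand, FUCOMI only on SNaN.
 */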
8981
8982
8983/**
8984 * @opcode 0xdb
8985 */
8986FNIEMOP_DEF(iemOp_EscF3)
8987{
8988 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8989 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8990 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8991 {
8992 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8993 {
8994 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8995 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8996 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8997 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8998 case 4:
8999 switch (bRm)
9000 {
9001 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9002 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9003 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9004 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9005 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9006 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9007 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9008 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9009 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9010 }
9011 break;
9012 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9013 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9014 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9015 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9016 }
9017 }
9018 else
9019 {
9020 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9021 {
9022 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i, bRm);
9024 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9025 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9026 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9027 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9028 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9029 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9031 }
9032 }
9033}
9034
9035
9036/**
9037 * Common worker for FPU instructions working on STn and ST0, and storing the
9038 * result in STn unless IE, DE or ZE was raised.
9039 *
 * @param bRm The ModR/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9041 */
9042FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
9043{
9044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9045
9046 IEM_MC_BEGIN(3, 1);
9047 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9048 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9049 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9050 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
9051
9052 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9053 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9054
9055 IEM_MC_PREPARE_FPU_USAGE();
9056 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
9057 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
9058 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9059 IEM_MC_ELSE()
9060 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9061 IEM_MC_ENDIF();
9062 IEM_MC_ADVANCE_RIP();
9063
9064 IEM_MC_END();
9065 return VINF_SUCCESS;
9066}
9067
9068
9069/** Opcode 0xdc 11/0. */
9070FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
9071{
9072 IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
9073 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
9074}
9075
9076
9077/** Opcode 0xdc 11/1. */
9078FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
9079{
9080 IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
9081 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
9082}
9083
9084
9085/** Opcode 0xdc 11/4. */
9086FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
9087{
9088 IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
9089 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
9090}
9091
9092
9093/** Opcode 0xdc 11/5. */
9094FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
9095{
9096 IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
9097 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
9098}
9099
9100
9101/** Opcode 0xdc 11/6. */
9102FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
9103{
9104 IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
9105 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
9106}
9107
9108
9109/** Opcode 0xdc 11/7. */
9110FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
9111{
9112 IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
9113 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
9114}
9115
9116
9117/**
9118 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
9119 * memory operand, and storing the result in ST0.
9120 *
 * @param bRm The ModR/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnAImpl)
9124{
9125 IEM_MC_BEGIN(3, 3);
9126 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9127 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9128 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
9129 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9130 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
9131 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
9132
9133 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9134 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9135 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9136 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9137
9138 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9139 IEM_MC_PREPARE_FPU_USAGE();
9140 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Factor1, pr64Factor2);
9142 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9143 IEM_MC_ELSE()
9144 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9145 IEM_MC_ENDIF();
9146 IEM_MC_ADVANCE_RIP();
9147
9148 IEM_MC_END();
9149 return VINF_SUCCESS;
9150}
9151
9152
9153/** Opcode 0xdc !11/0. */
9154FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
9155{
9156 IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
9157 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
9158}
9159
9160
9161/** Opcode 0xdc !11/1. */
9162FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
9163{
9164 IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
9165 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
9166}
9167
9168
9169/** Opcode 0xdc !11/2. */
9170FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
9171{
9172 IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
9173
9174 IEM_MC_BEGIN(3, 3);
9175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9176 IEM_MC_LOCAL(uint16_t, u16Fsw);
9177 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
9178 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9179 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9180 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
9181
9182 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9183 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9184
9185 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9186 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9187 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9188
9189 IEM_MC_PREPARE_FPU_USAGE();
9190 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9191 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
9192 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9193 IEM_MC_ELSE()
9194 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9195 IEM_MC_ENDIF();
9196 IEM_MC_ADVANCE_RIP();
9197
9198 IEM_MC_END();
9199 return VINF_SUCCESS;
9200}
9201
9202
9203/** Opcode 0xdc !11/3. */
9204FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
9205{
9206 IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
9207
9208 IEM_MC_BEGIN(3, 3);
9209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9210 IEM_MC_LOCAL(uint16_t, u16Fsw);
9211 IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
9212 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9213 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9214 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);
9215
9216 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9217 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9218
9219 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9220 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9221 IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9222
9223 IEM_MC_PREPARE_FPU_USAGE();
9224 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9225 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
9226 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9227 IEM_MC_ELSE()
9228 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9229 IEM_MC_ENDIF();
9230 IEM_MC_ADVANCE_RIP();
9231
9232 IEM_MC_END();
9233 return VINF_SUCCESS;
9234}
9235
9236
9237/** Opcode 0xdc !11/4. */
9238FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
9239{
9240 IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
9241 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
9242}
9243
9244
9245/** Opcode 0xdc !11/5. */
9246FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
9247{
9248 IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
9249 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
9250}
9251
9252
9253/** Opcode 0xdc !11/6. */
9254FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
9255{
9256 IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
9257 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
9258}
9259
9260
9261/** Opcode 0xdc !11/7. */
9262FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
9263{
9264 IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
9265 return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
9266}
9267
9268
9269/**
9270 * @opcode 0xdc
9271 */
9272FNIEMOP_DEF(iemOp_EscF4)
9273{
9274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9275 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
9276 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9277 {
9278 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9279 {
9280 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
9281 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
9282 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
9283 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
9284 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
9285 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
9286 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
9287 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
9288 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9289 }
9290 }
9291 else
9292 {
9293 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9294 {
9295 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
9296 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
9297 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
9298 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
9299 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
9300 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
9301 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
9302 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
9303 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9304 }
9305 }
9306}
9307
9308
9309/** Opcode 0xdd !11/0.
9310 * @sa iemOp_fld_m32r */
9311FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
9312{
9313 IEMOP_MNEMONIC(fld_m64r, "fld m64r");
9314
9315 IEM_MC_BEGIN(2, 3);
9316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9317 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9318 IEM_MC_LOCAL(RTFLOAT64U, r64Val);
9319 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9320 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);
9321
9322 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9323 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9324 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9325 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9326
9327 IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9328 IEM_MC_PREPARE_FPU_USAGE();
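    /* A push makes the old ST(7) the new top of stack, so that register must
       be empty or we have a stack overflow. */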
9329 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9330 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
9331 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9332 IEM_MC_ELSE()
9333 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9334 IEM_MC_ENDIF();
9335 IEM_MC_ADVANCE_RIP();
9336
9337 IEM_MC_END();
9338 return VINF_SUCCESS;
9339}
9340
9341
/** Opcode 0xdd !11/1. */
9343FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
9344{
9345 IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
9346 IEM_MC_BEGIN(3, 2);
9347 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9348 IEM_MC_LOCAL(uint16_t, u16Fsw);
9349 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9350 IEM_MC_ARG(int64_t *, pi64Dst, 1);
9351 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9352
9353 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9354 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9355 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9356 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9357
9358 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
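    /* (The destination is mapped before any FPU state is touched so that a
       possible #PF is raised before the FSW or the register stack change.) */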
9359 IEM_MC_PREPARE_FPU_USAGE();
9360 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9361 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
9362 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9363 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9364 IEM_MC_ELSE()
9365 IEM_MC_IF_FCW_IM()
9366 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
9367 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
9368 IEM_MC_ENDIF();
9369 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9370 IEM_MC_ENDIF();
9371 IEM_MC_ADVANCE_RIP();
9372
9373 IEM_MC_END();
9374 return VINF_SUCCESS;
9375}
9376
9377
/** Opcode 0xdd !11/2. */
9379FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
9380{
9381 IEMOP_MNEMONIC(fst_m64r, "fst m64r");
9382 IEM_MC_BEGIN(3, 2);
9383 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9384 IEM_MC_LOCAL(uint16_t, u16Fsw);
9385 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9386 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9387 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9388
9389 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9390 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9391 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9392 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9393
9394 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9395 IEM_MC_PREPARE_FPU_USAGE();
9396 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9397 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9398 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9399 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9400 IEM_MC_ELSE()
9401 IEM_MC_IF_FCW_IM()
9402 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9403 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9404 IEM_MC_ENDIF();
9405 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9406 IEM_MC_ENDIF();
9407 IEM_MC_ADVANCE_RIP();
9408
9409 IEM_MC_END();
9410 return VINF_SUCCESS;
9411}
9412
9413
9414
9415
9416/** Opcode 0xdd !11/0. */
9417FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
9418{
9419 IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
9420 IEM_MC_BEGIN(3, 2);
9421 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9422 IEM_MC_LOCAL(uint16_t, u16Fsw);
9423 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9424 IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
9425 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9426
9427 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9429 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9430 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9431
9432 IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9433 IEM_MC_PREPARE_FPU_USAGE();
9434 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9435 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
9436 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9437 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9438 IEM_MC_ELSE()
9439 IEM_MC_IF_FCW_IM()
9440 IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
9441 IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
9442 IEM_MC_ENDIF();
9443 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9444 IEM_MC_ENDIF();
9445 IEM_MC_ADVANCE_RIP();
9446
9447 IEM_MC_END();
9448 return VINF_SUCCESS;
9449}
9450
9451
/** Opcode 0xdd !11/4. */
9453FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
9454{
9455 IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
9456 IEM_MC_BEGIN(3, 0);
9457 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9458 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9459 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
9460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9462 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9463 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9464 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9465 IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
9466 IEM_MC_END();
9467 return VINF_SUCCESS;
9468}
9469
9470
/** Opcode 0xdd !11/6. */
9472FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
9473{
9474 IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
9475 IEM_MC_BEGIN(3, 0);
9476 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
9477 IEM_MC_ARG(uint8_t, iEffSeg, 1);
9478 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
9479 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9481 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9482 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9483 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
9484 IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
9485 IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0xdd !11/7. */
9491FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
9492{
9493 IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
9494
9495 IEM_MC_BEGIN(0, 2);
9496 IEM_MC_LOCAL(uint16_t, u16Tmp);
9497 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9498
9499 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9500 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9501 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9502
9503 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9504 IEM_MC_FETCH_FSW(u16Tmp);
9505 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
9506 IEM_MC_ADVANCE_RIP();
9507
9508/** @todo Debug / drop a hint to the verifier that things may differ
9509 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
9510 * NT4SP1. (X86_FSW_PE) */
9511 IEM_MC_END();
9512 return VINF_SUCCESS;
9513}
9514
9515
9516/** Opcode 0xdd 11/0. */
9517FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
9518{
9519 IEMOP_MNEMONIC(ffree_stN, "ffree stN");
9520 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined; we leave them
       unmodified. */
9523
9524 IEM_MC_BEGIN(0, 0);
9525
9526 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9527 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9528
9529 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9530 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
9531 IEM_MC_UPDATE_FPU_OPCODE_IP();
9532
9533 IEM_MC_ADVANCE_RIP();
9534 IEM_MC_END();
9535 return VINF_SUCCESS;
9536}
9537
9538
/** Opcode 0xdd 11/2. */
9540FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
9541{
9542 IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
9543 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9544
9545 IEM_MC_BEGIN(0, 2);
9546 IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
9547 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9548 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9549 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9550
9551 IEM_MC_PREPARE_FPU_USAGE();
9552 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9553 IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
9554 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
9555 IEM_MC_ELSE()
9556 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
9557 IEM_MC_ENDIF();
9558
9559 IEM_MC_ADVANCE_RIP();
9560 IEM_MC_END();
9561 return VINF_SUCCESS;
9562}
9563
9564
/** Opcode 0xdd 11/4. */
9566FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
9567{
9568 IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
9569 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
9570}
9571
9572
/** Opcode 0xdd 11/5. */
9574FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
9575{
9576 IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
9577 return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
9578}
9579
9580
9581/**
9582 * @opcode 0xdd
9583 */
9584FNIEMOP_DEF(iemOp_EscF5)
9585{
9586 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9587 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9589 {
9590 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9591 {
9592 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
        case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
9594 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9595 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
        case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0, bRm);
9597 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9598 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9599 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9600 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9601 }
9602 }
9603 else
9604 {
9605 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9606 {
9607 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9608 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9609 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9610 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9611 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9612 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9613 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9614 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9615 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9616 }
9617 }
9618}
9619
9620
9621/** Opcode 0xde 11/0. */
9622FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
9623{
9624 IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
9625 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
9626}
9627
9628
/** Opcode 0xde 11/1. */
9630FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
9631{
9632 IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
9633 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
9634}
9635
9636
9637/** Opcode 0xde 0xd9. */
9638FNIEMOP_DEF(iemOp_fcompp)
9639{
9640 IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
9641 return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
9642}
9643
9644
9645/** Opcode 0xde 11/4. */
9646FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
9647{
9648 IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
9649 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
9650}
9651
9652
9653/** Opcode 0xde 11/5. */
9654FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
9655{
9656 IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
9657 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
9658}
9659
9660
9661/** Opcode 0xde 11/6. */
9662FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
9663{
9664 IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
9665 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
9666}
9667
9668
9669/** Opcode 0xde 11/7. */
9670FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
9671{
9672 IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
9673 return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
9674}
9675
9676
9677/**
9678 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9679 * the result in ST0.
9680 *
 * @param bRm The ModR/M byte.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9682 */
9683FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9684{
9685 IEM_MC_BEGIN(3, 3);
9686 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9687 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9688 IEM_MC_LOCAL(int16_t, i16Val2);
9689 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9690 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9691 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9692
9693 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9694 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9695
9696 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9697 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9698 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9699
9700 IEM_MC_PREPARE_FPU_USAGE();
9701 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9702 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9703 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9704 IEM_MC_ELSE()
9705 IEM_MC_FPU_STACK_UNDERFLOW(0);
9706 IEM_MC_ENDIF();
9707 IEM_MC_ADVANCE_RIP();
9708
9709 IEM_MC_END();
9710 return VINF_SUCCESS;
9711}
9712
9713
9714/** Opcode 0xde !11/0. */
9715FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
9716{
9717 IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
9718 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
9719}
9720
9721
9722/** Opcode 0xde !11/1. */
9723FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
9724{
9725 IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
9726 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
9727}
9728
9729
9730/** Opcode 0xde !11/2. */
9731FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
9732{
9733 IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
9734
9735 IEM_MC_BEGIN(3, 3);
9736 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9737 IEM_MC_LOCAL(uint16_t, u16Fsw);
9738 IEM_MC_LOCAL(int16_t, i16Val2);
9739 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9740 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9741 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9742
9743 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9745
9746 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9747 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9748 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9749
9750 IEM_MC_PREPARE_FPU_USAGE();
9751 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9752 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9753 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9754 IEM_MC_ELSE()
9755 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9756 IEM_MC_ENDIF();
9757 IEM_MC_ADVANCE_RIP();
9758
9759 IEM_MC_END();
9760 return VINF_SUCCESS;
9761}
9762
9763
9764/** Opcode 0xde !11/3. */
9765FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
9766{
9767 IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
9768
9769 IEM_MC_BEGIN(3, 3);
9770 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9771 IEM_MC_LOCAL(uint16_t, u16Fsw);
9772 IEM_MC_LOCAL(int16_t, i16Val2);
9773 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9774 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9775 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9776
9777 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9779
9780 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9781 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9782 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9783
9784 IEM_MC_PREPARE_FPU_USAGE();
9785 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9786 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
9787 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9788 IEM_MC_ELSE()
9789 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9790 IEM_MC_ENDIF();
9791 IEM_MC_ADVANCE_RIP();
9792
9793 IEM_MC_END();
9794 return VINF_SUCCESS;
9795}
9796
9797
9798/** Opcode 0xde !11/4. */
9799FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
9800{
9801 IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
9802 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
9803}
9804
9805
9806/** Opcode 0xde !11/5. */
9807FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
9808{
9809 IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
9810 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
9811}
9812
9813
9814/** Opcode 0xde !11/6. */
9815FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
9816{
9817 IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
9818 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
9819}
9820
9821
9822/** Opcode 0xde !11/7. */
9823FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
9824{
9825 IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
9826 return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
9827}
9828
9829
9830/**
9831 * @opcode 0xde
9832 */
9833FNIEMOP_DEF(iemOp_EscF6)
9834{
9835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9836 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9837 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9838 {
9839 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9840 {
9841 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9842 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9843 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9844 case 3: if (bRm == 0xd9)
9845 return FNIEMOP_CALL(iemOp_fcompp);
9846 return IEMOP_RAISE_INVALID_OPCODE();
9847 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9848 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9849 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9850 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9851 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9852 }
9853 }
9854 else
9855 {
9856 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9857 {
9858 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9859 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9860 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9861 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9862 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9863 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9864 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9865 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9867 }
9868 }
9869}
9870
9871
9872/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE followed by FINCSTP. */
9874FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
9875{
9876 IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
9877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9878
9879 IEM_MC_BEGIN(0, 0);
9880
9881 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9882 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9883
9884 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
9885 IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
9886 IEM_MC_FPU_STACK_INC_TOP();
9887 IEM_MC_UPDATE_FPU_OPCODE_IP();
9888
9889 IEM_MC_ADVANCE_RIP();
9890 IEM_MC_END();
9891 return VINF_SUCCESS;
9892}
9893
9894
9895/** Opcode 0xdf 0xe0. */
9896FNIEMOP_DEF(iemOp_fnstsw_ax)
9897{
9898 IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
9899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9900
9901 IEM_MC_BEGIN(0, 1);
9902 IEM_MC_LOCAL(uint16_t, u16Tmp);
9903 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9904 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
9905 IEM_MC_FETCH_FSW(u16Tmp);
9906 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
9907 IEM_MC_ADVANCE_RIP();
9908 IEM_MC_END();
9909 return VINF_SUCCESS;
9910}
9911
9912
9913/** Opcode 0xdf 11/5. */
9914FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
9915{
9916 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, true /*fPop*/);
9918}
9919
9920
9921/** Opcode 0xdf 11/6. */
9922FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
9923{
9924 IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
9925 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
9926}
9927
9928
9929/** Opcode 0xdf !11/0. */
9930FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
9931{
9932 IEMOP_MNEMONIC(fild_m16i, "fild m16i");
9933
9934 IEM_MC_BEGIN(2, 3);
9935 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9936 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9937 IEM_MC_LOCAL(int16_t, i16Val);
9938 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9939 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
9940
9941 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9942 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9943
9944 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9945 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9946 IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9947
9948 IEM_MC_PREPARE_FPU_USAGE();
9949 IEM_MC_IF_FPUREG_IS_EMPTY(7)
9950 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
9951 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9952 IEM_MC_ELSE()
9953 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9954 IEM_MC_ENDIF();
9955 IEM_MC_ADVANCE_RIP();
9956
9957 IEM_MC_END();
9958 return VINF_SUCCESS;
9959}
9960
9961
9962/** Opcode 0xdf !11/1. */
9963FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
9964{
9965 IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
9966 IEM_MC_BEGIN(3, 2);
9967 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9968 IEM_MC_LOCAL(uint16_t, u16Fsw);
9969 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9970 IEM_MC_ARG(int16_t *, pi16Dst, 1);
9971 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9972
9973 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9975 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9976 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9977
9978 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9979 IEM_MC_PREPARE_FPU_USAGE();
9980 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9981 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9982 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9983 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9984 IEM_MC_ELSE()
9985 IEM_MC_IF_FCW_IM()
9986 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9987 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9988 IEM_MC_ENDIF();
9989 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9990 IEM_MC_ENDIF();
9991 IEM_MC_ADVANCE_RIP();
9992
9993 IEM_MC_END();
9994 return VINF_SUCCESS;
9995}
9996
9997
9998/** Opcode 0xdf !11/2. */
9999FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
10000{
10001 IEMOP_MNEMONIC(fist_m16i, "fist m16i");
10002 IEM_MC_BEGIN(3, 2);
10003 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10004 IEM_MC_LOCAL(uint16_t, u16Fsw);
10005 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10006 IEM_MC_ARG(int16_t *, pi16Dst, 1);
10007 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10008
10009 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10011 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10012 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10013
10014 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10015 IEM_MC_PREPARE_FPU_USAGE();
10016 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10017 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
10018 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
10019 IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10020 IEM_MC_ELSE()
10021 IEM_MC_IF_FCW_IM()
10022 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
10023 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
10024 IEM_MC_ENDIF();
10025 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10026 IEM_MC_ENDIF();
10027 IEM_MC_ADVANCE_RIP();
10028
10029 IEM_MC_END();
10030 return VINF_SUCCESS;
10031}
10032
10033
10034/** Opcode 0xdf !11/3. */
10035FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
10036{
10037 IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
10038 IEM_MC_BEGIN(3, 2);
10039 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10040 IEM_MC_LOCAL(uint16_t, u16Fsw);
10041 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10042 IEM_MC_ARG(int16_t *, pi16Dst, 1);
10043 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10044
10045 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10046 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10047 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10048 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10049
10050 IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10051 IEM_MC_PREPARE_FPU_USAGE();
10052 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10053 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
10054 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
10055 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10056 IEM_MC_ELSE()
10057 IEM_MC_IF_FCW_IM()
10058 IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
10059 IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
10060 IEM_MC_ENDIF();
10061 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10062 IEM_MC_ENDIF();
10063 IEM_MC_ADVANCE_RIP();
10064
10065 IEM_MC_END();
10066 return VINF_SUCCESS;
10067}
10068
10069
10070/** Opcode 0xdf !11/4. */
10071FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10072
10073
10074/** Opcode 0xdf !11/5. */
10075FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
10076{
10077 IEMOP_MNEMONIC(fild_m64i, "fild m64i");
10078
10079 IEM_MC_BEGIN(2, 3);
10080 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10081 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
10082 IEM_MC_LOCAL(int64_t, i64Val);
10083 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
10084 IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
10085
10086 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10088
10089 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10090 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10091 IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10092
10093 IEM_MC_PREPARE_FPU_USAGE();
10094 IEM_MC_IF_FPUREG_IS_EMPTY(7)
10095 IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
10096 IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10097 IEM_MC_ELSE()
10098 IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10099 IEM_MC_ENDIF();
10100 IEM_MC_ADVANCE_RIP();
10101
10102 IEM_MC_END();
10103 return VINF_SUCCESS;
10104}
10105
10106
10107/** Opcode 0xdf !11/6. */
10108FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
10109
10110
10111/** Opcode 0xdf !11/7. */
10112FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
10113{
10114 IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
10115 IEM_MC_BEGIN(3, 2);
10116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10117 IEM_MC_LOCAL(uint16_t, u16Fsw);
10118 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
10119 IEM_MC_ARG(int64_t *, pi64Dst, 1);
10120 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
10121
10122 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10124 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
10125 IEM_MC_MAYBE_RAISE_FPU_XCPT();
10126
10127 IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
10128 IEM_MC_PREPARE_FPU_USAGE();
10129 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
10130 IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
10131 IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
10132 IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10133 IEM_MC_ELSE()
10134 IEM_MC_IF_FCW_IM()
10135 IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
10136 IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
10137 IEM_MC_ENDIF();
10138 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10139 IEM_MC_ENDIF();
10140 IEM_MC_ADVANCE_RIP();
10141
10142 IEM_MC_END();
10143 return VINF_SUCCESS;
10144}
10145
10146
10147/**
10148 * @opcode 0xdf
10149 */
10150FNIEMOP_DEF(iemOp_EscF7)
10151{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdf & 0x7);
10153 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10154 {
10155 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10156 {
        case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* FFREE + pop afterwards; according to AMD it has always behaved this way. */
10158 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10159 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10160 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10161 case 4: if (bRm == 0xe0)
10162 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10163 return IEMOP_RAISE_INVALID_OPCODE();
10164 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10165 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10166 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10167 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10168 }
10169 }
10170 else
10171 {
10172 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10173 {
10174 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10175 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10176 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10177 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10178 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10179 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10180 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10181 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10183 }
10184 }
10185}
10186
10187
10188/**
10189 * @opcode 0xe0
10190 */
10191FNIEMOP_DEF(iemOp_loopne_Jb)
10192{
10193 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10194 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10196 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10197
10198 switch (pVCpu->iem.s.enmEffAddrMode)
10199 {
10200 case IEMMODE_16BIT:
10201 IEM_MC_BEGIN(0,0);
10202 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10203 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10204 IEM_MC_REL_JMP_S8(i8Imm);
10205 } IEM_MC_ELSE() {
10206 IEM_MC_ADVANCE_RIP();
10207 } IEM_MC_ENDIF();
10208 IEM_MC_END();
10209 return VINF_SUCCESS;
10210
10211 case IEMMODE_32BIT:
10212 IEM_MC_BEGIN(0,0);
10213 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10214 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10215 IEM_MC_REL_JMP_S8(i8Imm);
10216 } IEM_MC_ELSE() {
10217 IEM_MC_ADVANCE_RIP();
10218 } IEM_MC_ENDIF();
10219 IEM_MC_END();
10220 return VINF_SUCCESS;
10221
10222 case IEMMODE_64BIT:
10223 IEM_MC_BEGIN(0,0);
10224 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10225 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10226 IEM_MC_REL_JMP_S8(i8Imm);
10227 } IEM_MC_ELSE() {
10228 IEM_MC_ADVANCE_RIP();
10229 } IEM_MC_ENDIF();
10230 IEM_MC_END();
10231 return VINF_SUCCESS;
10232
10233 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10234 }
10235}
10236
10237
10238/**
10239 * @opcode 0xe1
10240 */
10241FNIEMOP_DEF(iemOp_loope_Jb)
10242{
10243 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10244 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10245 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10246 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10247
10248 switch (pVCpu->iem.s.enmEffAddrMode)
10249 {
10250 case IEMMODE_16BIT:
10251 IEM_MC_BEGIN(0,0);
10252 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10253 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10254 IEM_MC_REL_JMP_S8(i8Imm);
10255 } IEM_MC_ELSE() {
10256 IEM_MC_ADVANCE_RIP();
10257 } IEM_MC_ENDIF();
10258 IEM_MC_END();
10259 return VINF_SUCCESS;
10260
10261 case IEMMODE_32BIT:
10262 IEM_MC_BEGIN(0,0);
10263 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10264 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10265 IEM_MC_REL_JMP_S8(i8Imm);
10266 } IEM_MC_ELSE() {
10267 IEM_MC_ADVANCE_RIP();
10268 } IEM_MC_ENDIF();
10269 IEM_MC_END();
10270 return VINF_SUCCESS;
10271
10272 case IEMMODE_64BIT:
10273 IEM_MC_BEGIN(0,0);
10274 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10275 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10276 IEM_MC_REL_JMP_S8(i8Imm);
10277 } IEM_MC_ELSE() {
10278 IEM_MC_ADVANCE_RIP();
10279 } IEM_MC_ENDIF();
10280 IEM_MC_END();
10281 return VINF_SUCCESS;
10282
10283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10284 }
10285}
10286
10287
10288/**
10289 * @opcode 0xe2
10290 */
10291FNIEMOP_DEF(iemOp_loop_Jb)
10292{
10293 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10294 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10295 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10297
10298 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10299 * using the 32-bit operand size override. How can that be restarted? See
10300 * weird pseudo code in intel manual. */
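    /* When the disp8 equals minus the instruction length the branch target is
       the LOOP instruction itself; instead of iterating CX/ECX/RCX times we
       clear the counter and move straight on. */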
10301 switch (pVCpu->iem.s.enmEffAddrMode)
10302 {
10303 case IEMMODE_16BIT:
10304 IEM_MC_BEGIN(0,0);
10305 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10306 {
10307 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10308 IEM_MC_IF_CX_IS_NZ() {
10309 IEM_MC_REL_JMP_S8(i8Imm);
10310 } IEM_MC_ELSE() {
10311 IEM_MC_ADVANCE_RIP();
10312 } IEM_MC_ENDIF();
10313 }
10314 else
10315 {
10316 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10317 IEM_MC_ADVANCE_RIP();
10318 }
10319 IEM_MC_END();
10320 return VINF_SUCCESS;
10321
10322 case IEMMODE_32BIT:
10323 IEM_MC_BEGIN(0,0);
10324 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10325 {
10326 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10327 IEM_MC_IF_ECX_IS_NZ() {
10328 IEM_MC_REL_JMP_S8(i8Imm);
10329 } IEM_MC_ELSE() {
10330 IEM_MC_ADVANCE_RIP();
10331 } IEM_MC_ENDIF();
10332 }
10333 else
10334 {
10335 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10336 IEM_MC_ADVANCE_RIP();
10337 }
10338 IEM_MC_END();
10339 return VINF_SUCCESS;
10340
10341 case IEMMODE_64BIT:
10342 IEM_MC_BEGIN(0,0);
10343 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10344 {
10345 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10346 IEM_MC_IF_RCX_IS_NZ() {
10347 IEM_MC_REL_JMP_S8(i8Imm);
10348 } IEM_MC_ELSE() {
10349 IEM_MC_ADVANCE_RIP();
10350 } IEM_MC_ENDIF();
10351 }
10352 else
10353 {
10354 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10355 IEM_MC_ADVANCE_RIP();
10356 }
10357 IEM_MC_END();
10358 return VINF_SUCCESS;
10359
10360 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10361 }
10362}
10363
10364
10365/**
10366 * @opcode 0xe3
10367 */
10368FNIEMOP_DEF(iemOp_jecxz_Jb)
10369{
10370 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10371 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10373 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10374
10375 switch (pVCpu->iem.s.enmEffAddrMode)
10376 {
10377 case IEMMODE_16BIT:
10378 IEM_MC_BEGIN(0,0);
10379 IEM_MC_IF_CX_IS_NZ() {
10380 IEM_MC_ADVANCE_RIP();
10381 } IEM_MC_ELSE() {
10382 IEM_MC_REL_JMP_S8(i8Imm);
10383 } IEM_MC_ENDIF();
10384 IEM_MC_END();
10385 return VINF_SUCCESS;
10386
10387 case IEMMODE_32BIT:
10388 IEM_MC_BEGIN(0,0);
10389 IEM_MC_IF_ECX_IS_NZ() {
10390 IEM_MC_ADVANCE_RIP();
10391 } IEM_MC_ELSE() {
10392 IEM_MC_REL_JMP_S8(i8Imm);
10393 } IEM_MC_ENDIF();
10394 IEM_MC_END();
10395 return VINF_SUCCESS;
10396
10397 case IEMMODE_64BIT:
10398 IEM_MC_BEGIN(0,0);
10399 IEM_MC_IF_RCX_IS_NZ() {
10400 IEM_MC_ADVANCE_RIP();
10401 } IEM_MC_ELSE() {
10402 IEM_MC_REL_JMP_S8(i8Imm);
10403 } IEM_MC_ENDIF();
10404 IEM_MC_END();
10405 return VINF_SUCCESS;
10406
10407 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10408 }
10409}
10410
10411
10412/** Opcode 0xe4 */
10413FNIEMOP_DEF(iemOp_in_AL_Ib)
10414{
10415 IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
10416 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10417 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10418 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
10419}
10420
10421
10422/** Opcode 0xe5 */
10423FNIEMOP_DEF(iemOp_in_eAX_Ib)
10424{
10425 IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
10426 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10428 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10429}
10430
10431
10432/** Opcode 0xe6 */
10433FNIEMOP_DEF(iemOp_out_Ib_AL)
10434{
10435 IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
10436 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10437 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10438 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
10439}
10440
10441
10442/** Opcode 0xe7 */
10443FNIEMOP_DEF(iemOp_out_Ib_eAX)
10444{
10445 IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
10446 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10447 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10448 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10449}
10450
10451
10452/**
10453 * @opcode 0xe8
10454 */
10455FNIEMOP_DEF(iemOp_call_Jv)
10456{
10457 IEMOP_MNEMONIC(call_Jv, "call Jv");
10458 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10459 switch (pVCpu->iem.s.enmEffOpSize)
10460 {
10461 case IEMMODE_16BIT:
10462 {
10463 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10464 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10465 }
10466
10467 case IEMMODE_32BIT:
10468 {
10469 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10470 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10471 }
10472
10473 case IEMMODE_64BIT:
10474 {
10475 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
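            /* Note: even in 64-bit mode the displacement is a 32-bit
               immediate, sign-extended to 64 bits. */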
10476 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10477 }
10478
10479 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10480 }
10481}
10482
10483
10484/**
10485 * @opcode 0xe9
10486 */
10487FNIEMOP_DEF(iemOp_jmp_Jv)
10488{
10489 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10490 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10491 switch (pVCpu->iem.s.enmEffOpSize)
10492 {
10493 case IEMMODE_16BIT:
10494 {
10495 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10496 IEM_MC_BEGIN(0, 0);
10497 IEM_MC_REL_JMP_S16(i16Imm);
10498 IEM_MC_END();
10499 return VINF_SUCCESS;
10500 }
10501
10502 case IEMMODE_64BIT:
10503 case IEMMODE_32BIT:
10504 {
10505 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10506 IEM_MC_BEGIN(0, 0);
10507 IEM_MC_REL_JMP_S32(i32Imm);
10508 IEM_MC_END();
10509 return VINF_SUCCESS;
10510 }
10511
10512 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10513 }
10514}
10515
10516
10517/**
10518 * @opcode 0xea
10519 */
10520FNIEMOP_DEF(iemOp_jmp_Ap)
10521{
10522 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10523 IEMOP_HLP_NO_64BIT();
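    /* Direct far jumps (0xea with a ptr16:16/ptr16:32 operand) are invalid in
       64-bit mode, hence the check above. */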
10524
    /* Decode the far pointer address and pass it on to the far jump C implementation. */
10526 uint32_t offSeg;
10527 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10528 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10529 else
10530 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10531 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10532 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10533 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10534}
10535
10536
10537/**
10538 * @opcode 0xeb
10539 */
10540FNIEMOP_DEF(iemOp_jmp_Jb)
10541{
10542 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10543 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10544 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10545 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10546
10547 IEM_MC_BEGIN(0, 0);
10548 IEM_MC_REL_JMP_S8(i8Imm);
10549 IEM_MC_END();
10550 return VINF_SUCCESS;
10551}
10552
10553
10554/** Opcode 0xec */
10555FNIEMOP_DEF(iemOp_in_AL_DX)
10556{
10557 IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
10558 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10559 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
10560}
10561
10562
10563/** Opcode 0xed */
10564FNIEMOP_DEF(iemOp_eAX_DX)
10565{
10566 IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
10567 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10568 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10569}
10570
10571
10572/** Opcode 0xee */
10573FNIEMOP_DEF(iemOp_out_DX_AL)
10574{
10575 IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
10576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10577 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
10578}
10579
10580
10581/** Opcode 0xef */
10582FNIEMOP_DEF(iemOp_out_DX_eAX)
10583{
10584 IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
10585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10586 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
10587}
10588
10589
10590/**
10591 * @opcode 0xf0
10592 */
10593FNIEMOP_DEF(iemOp_lock)
10594{
10595 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10596 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
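    /* Only the prefix is recorded here; instructions that do not allow LOCK
       raise #UD later, e.g. via IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(). */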
10597
10598 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10599 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10600}
10601
10602
10603/**
10604 * @opcode 0xf1
10605 */
10606FNIEMOP_DEF(iemOp_int1)
10607{
10608 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10609 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10610 /** @todo testcase! */
10611 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10612}
10613
10614
10615/**
10616 * @opcode 0xf2
10617 */
10618FNIEMOP_DEF(iemOp_repne)
10619{
10620 /* This overrides any previous REPE prefix. */
10621 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10622 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10623 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10624
10625 /* For the 4 entry opcode tables, REPNZ overrides any previous
10626 REPZ and operand size prefixes. */
10627 pVCpu->iem.s.idxPrefix = 3;
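    /* (The table index convention used here: 0 = no prefix, 1 = 0x66,
       2 = 0xf3, 3 = 0xf2.) */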
10628
10629 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10630 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10631}
10632
10633
10634/**
10635 * @opcode 0xf3
10636 */
10637FNIEMOP_DEF(iemOp_repe)
10638{
10639 /* This overrides any previous REPNE prefix. */
10640 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10641 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10642 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10643
    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
10646 pVCpu->iem.s.idxPrefix = 2;
10647
10648 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10649 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10650}
10651
10652
10653/**
10654 * @opcode 0xf4
10655 */
10656FNIEMOP_DEF(iemOp_hlt)
10657{
10658 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10659 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10660}
10661
10662
10663/**
10664 * @opcode 0xf5
10665 */
10666FNIEMOP_DEF(iemOp_cmc)
10667{
10668 IEMOP_MNEMONIC(cmc, "cmc");
10669 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10670 IEM_MC_BEGIN(0, 0);
10671 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10672 IEM_MC_ADVANCE_RIP();
10673 IEM_MC_END();
10674 return VINF_SUCCESS;
10675}
10676
10677
10678/**
10679 * Common implementation of 'inc/dec/not/neg Eb'.
10680 *
10681 * @param bRm The RM byte.
10682 * @param pImpl The instruction implementation.
10683 */
10684FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10685{
10686 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10687 {
10688 /* register access */
10689 IEM_MC_BEGIN(2, 0);
10690 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10691 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10692 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10693 IEM_MC_REF_EFLAGS(pEFlags);
10694 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10695 IEM_MC_ADVANCE_RIP();
10696 IEM_MC_END();
10697 }
10698 else
10699 {
10700 /* memory access. */
10701 IEM_MC_BEGIN(2, 2);
10702 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10703 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10704 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10705
10706 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10707 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10708 IEM_MC_FETCH_EFLAGS(EFlags);
10709 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10710 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10711 else
10712 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10713
10714 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10715 IEM_MC_COMMIT_EFLAGS(EFlags);
10716 IEM_MC_ADVANCE_RIP();
10717 IEM_MC_END();
10718 }
10719 return VINF_SUCCESS;
10720}
10721
10722
10723/**
10724 * Common implementation of 'inc/dec/not/neg Ev'.
10725 *
10726 * @param bRm The RM byte.
10727 * @param pImpl The instruction implementation.
10728 */
10729FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10730{
10731 /* Registers are handled by a common worker. */
10732 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10733 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10734
10735 /* Memory we do here. */
10736 switch (pVCpu->iem.s.enmEffOpSize)
10737 {
10738 case IEMMODE_16BIT:
10739 IEM_MC_BEGIN(2, 2);
10740 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10741 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10742 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10743
10744 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10745 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10746 IEM_MC_FETCH_EFLAGS(EFlags);
10747 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10748 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10749 else
10750 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10751
10752 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10753 IEM_MC_COMMIT_EFLAGS(EFlags);
10754 IEM_MC_ADVANCE_RIP();
10755 IEM_MC_END();
10756 return VINF_SUCCESS;
10757
10758 case IEMMODE_32BIT:
10759 IEM_MC_BEGIN(2, 2);
10760 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10761 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10762 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10763
10764 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10765 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10766 IEM_MC_FETCH_EFLAGS(EFlags);
10767 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10768 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10769 else
10770 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10771
10772 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10773 IEM_MC_COMMIT_EFLAGS(EFlags);
10774 IEM_MC_ADVANCE_RIP();
10775 IEM_MC_END();
10776 return VINF_SUCCESS;
10777
10778 case IEMMODE_64BIT:
10779 IEM_MC_BEGIN(2, 2);
10780 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10781 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10782 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10783
10784 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10785 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10786 IEM_MC_FETCH_EFLAGS(EFlags);
10787 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10788 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10789 else
10790 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10791
10792 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10793 IEM_MC_COMMIT_EFLAGS(EFlags);
10794 IEM_MC_ADVANCE_RIP();
10795 IEM_MC_END();
10796 return VINF_SUCCESS;
10797
10798 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10799 }
10800}
10801
10802
10803/** Opcode 0xf6 /0. */
10804FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
10805{
10806 IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
10807 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10808
10809 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10810 {
10811 /* register access */
10812 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10814
10815 IEM_MC_BEGIN(3, 0);
10816 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10817 IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
10818 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10819 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10820 IEM_MC_REF_EFLAGS(pEFlags);
10821 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10822 IEM_MC_ADVANCE_RIP();
10823 IEM_MC_END();
10824 }
10825 else
10826 {
10827 /* memory access. */
10828 IEM_MC_BEGIN(3, 2);
10829 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10830 IEM_MC_ARG(uint8_t, u8Src, 1);
10831 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10832 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10833
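/* Note: the last argument to IEM_MC_CALC_RM_EFF_ADDR below is the number of
   immediate bytes still to be fetched (1 here); RIP-relative addressing in
   64-bit mode needs it to find the end of the instruction. */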
10834 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
10835 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
10836 IEM_MC_ASSIGN(u8Src, u8Imm);
10837 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10838 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10839 IEM_MC_FETCH_EFLAGS(EFlags);
10840 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
10841
10842 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
10843 IEM_MC_COMMIT_EFLAGS(EFlags);
10844 IEM_MC_ADVANCE_RIP();
10845 IEM_MC_END();
10846 }
10847 return VINF_SUCCESS;
10848}
10849
10850
10851/** Opcode 0xf7 /0. */
10852FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
10853{
10854 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
10855 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10856
10857 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10858 {
10859 /* register access */
10860 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10861 switch (pVCpu->iem.s.enmEffOpSize)
10862 {
10863 case IEMMODE_16BIT:
10864 {
10865 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10866 IEM_MC_BEGIN(3, 0);
10867 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10868 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
10869 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10870 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10871 IEM_MC_REF_EFLAGS(pEFlags);
10872 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10873 IEM_MC_ADVANCE_RIP();
10874 IEM_MC_END();
10875 return VINF_SUCCESS;
10876 }
10877
10878 case IEMMODE_32BIT:
10879 {
10880 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10881 IEM_MC_BEGIN(3, 0);
10882 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10883 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
10884 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10885 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10886 IEM_MC_REF_EFLAGS(pEFlags);
10887 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
10888 /* No clearing the high dword here - test doesn't write back the result. */
10889 IEM_MC_ADVANCE_RIP();
10890 IEM_MC_END();
10891 return VINF_SUCCESS;
10892 }
10893
10894 case IEMMODE_64BIT:
10895 {
10896 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10897 IEM_MC_BEGIN(3, 0);
10898 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10899 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
10900 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10901 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10902 IEM_MC_REF_EFLAGS(pEFlags);
10903 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
10904 IEM_MC_ADVANCE_RIP();
10905 IEM_MC_END();
10906 return VINF_SUCCESS;
10907 }
10908
10909 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10910 }
10911 }
10912 else
10913 {
10914 /* memory access. */
10915 switch (pVCpu->iem.s.enmEffOpSize)
10916 {
10917 case IEMMODE_16BIT:
10918 {
10919 IEM_MC_BEGIN(3, 2);
10920 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10921 IEM_MC_ARG(uint16_t, u16Src, 1);
10922 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10923 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10924
10925 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10926 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10927 IEM_MC_ASSIGN(u16Src, u16Imm);
10928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10929 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10930 IEM_MC_FETCH_EFLAGS(EFlags);
10931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10932
10933 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
10934 IEM_MC_COMMIT_EFLAGS(EFlags);
10935 IEM_MC_ADVANCE_RIP();
10936 IEM_MC_END();
10937 return VINF_SUCCESS;
10938 }
10939
10940 case IEMMODE_32BIT:
10941 {
10942 IEM_MC_BEGIN(3, 2);
10943 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10944 IEM_MC_ARG(uint32_t, u32Src, 1);
10945 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10947
10948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10949 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10950 IEM_MC_ASSIGN(u32Src, u32Imm);
10951 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10952 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10953 IEM_MC_FETCH_EFLAGS(EFlags);
10954 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
10955
10956 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
10957 IEM_MC_COMMIT_EFLAGS(EFlags);
10958 IEM_MC_ADVANCE_RIP();
10959 IEM_MC_END();
10960 return VINF_SUCCESS;
10961 }
10962
10963 case IEMMODE_64BIT:
10964 {
10965 IEM_MC_BEGIN(3, 2);
10966 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10967 IEM_MC_ARG(uint64_t, u64Src, 1);
10968 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10969 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10970
10971 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
10972 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10973 IEM_MC_ASSIGN(u64Src, u64Imm);
10974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10975 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10976 IEM_MC_FETCH_EFLAGS(EFlags);
10977 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
10978
10979 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
10980 IEM_MC_COMMIT_EFLAGS(EFlags);
10981 IEM_MC_ADVANCE_RIP();
10982 IEM_MC_END();
10983 return VINF_SUCCESS;
10984 }
10985
10986 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10987 }
10988 }
10989}
10990
10991
10992/** Opcode 0xf6 /4, /5, /6 and /7. */
10993FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
10994{
10995 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10996 {
10997 /* register access */
10998 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10999 IEM_MC_BEGIN(3, 1);
11000 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11001 IEM_MC_ARG(uint8_t, u8Value, 1);
11002 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11003 IEM_MC_LOCAL(int32_t, rc);
11004
11005 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11006 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11007 IEM_MC_REF_EFLAGS(pEFlags);
11008 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
11009 IEM_MC_IF_LOCAL_IS_Z(rc) {
11010 IEM_MC_ADVANCE_RIP();
11011 } IEM_MC_ELSE() {
11012 IEM_MC_RAISE_DIVIDE_ERROR();
11013 } IEM_MC_ENDIF();
11014
11015 IEM_MC_END();
11016 }
11017 else
11018 {
11019 /* memory access. */
11020 IEM_MC_BEGIN(3, 2);
11021 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11022 IEM_MC_ARG(uint8_t, u8Value, 1);
11023 IEM_MC_ARG(uint32_t *, pEFlags, 2);
11024 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11025 IEM_MC_LOCAL(int32_t, rc);
11026
11027 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11029 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11030 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11031 IEM_MC_REF_EFLAGS(pEFlags);
11032 IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
11033 IEM_MC_IF_LOCAL_IS_Z(rc) {
11034 IEM_MC_ADVANCE_RIP();
11035 } IEM_MC_ELSE() {
11036 IEM_MC_RAISE_DIVIDE_ERROR();
11037 } IEM_MC_ENDIF();
11038
11039 IEM_MC_END();
11040 }
11041 return VINF_SUCCESS;
11042}
11043
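/* Illustrative sketch of the PFNIEMAIMPLMULDIVU8 contract relied upon above:
   the worker updates AX in place and returns zero on success or non-zero to
   request a #DE.  A minimal unsigned 8-bit multiply under that contract --
   the function name is hypothetical, not the real iemAImpl_mul_u8: */
#if 0
static int iemExampleAImpl_mul_u8(uint16_t *pu16AX, uint8_t u8Value, uint32_t *pfEFlags)
{
    uint16_t const uResult = (uint16_t)(uint8_t)*pu16AX * u8Value; /* AL * r/m8 */
    *pu16AX = uResult;                          /* AH:AL receives the product. */
    if (uResult >> 8)                           /* CF/OF set when AH is non-zero, */
        *pfEFlags |= X86_EFL_CF | X86_EFL_OF;   /* SF/ZF/AF/PF left undefined.    */
    else
        *pfEFlags &= ~(uint32_t)(X86_EFL_CF | X86_EFL_OF);
    return 0;                                   /* MUL never raises #DE. */
}
#endif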
11044
11045/** Opcode 0xf7 /4, /5, /6 and /7. */
11046FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11047{
11048 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11049
11050 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11051 {
11052 /* register access */
11053 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11054 switch (pVCpu->iem.s.enmEffOpSize)
11055 {
11056 case IEMMODE_16BIT:
11057 {
11059 IEM_MC_BEGIN(4, 1);
11060 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11061 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11062 IEM_MC_ARG(uint16_t, u16Value, 2);
11063 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11064 IEM_MC_LOCAL(int32_t, rc);
11065
11066 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11067 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11068 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11069 IEM_MC_REF_EFLAGS(pEFlags);
11070 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11071 IEM_MC_IF_LOCAL_IS_Z(rc) {
11072 IEM_MC_ADVANCE_RIP();
11073 } IEM_MC_ELSE() {
11074 IEM_MC_RAISE_DIVIDE_ERROR();
11075 } IEM_MC_ENDIF();
11076
11077 IEM_MC_END();
11078 return VINF_SUCCESS;
11079 }
11080
11081 case IEMMODE_32BIT:
11082 {
11084 IEM_MC_BEGIN(4, 1);
11085 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11086 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11087 IEM_MC_ARG(uint32_t, u32Value, 2);
11088 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11089 IEM_MC_LOCAL(int32_t, rc);
11090
11091 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11092 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11093 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11094 IEM_MC_REF_EFLAGS(pEFlags);
11095 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11096 IEM_MC_IF_LOCAL_IS_Z(rc) {
11097 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11098 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11099 IEM_MC_ADVANCE_RIP();
11100 } IEM_MC_ELSE() {
11101 IEM_MC_RAISE_DIVIDE_ERROR();
11102 } IEM_MC_ENDIF();
11103
11104 IEM_MC_END();
11105 return VINF_SUCCESS;
11106 }
11107
11108 case IEMMODE_64BIT:
11109 {
11111 IEM_MC_BEGIN(4, 1);
11112 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11113 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11114 IEM_MC_ARG(uint64_t, u64Value, 2);
11115 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11116 IEM_MC_LOCAL(int32_t, rc);
11117
11118 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11119 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11120 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11121 IEM_MC_REF_EFLAGS(pEFlags);
11122 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11123 IEM_MC_IF_LOCAL_IS_Z(rc) {
11124 IEM_MC_ADVANCE_RIP();
11125 } IEM_MC_ELSE() {
11126 IEM_MC_RAISE_DIVIDE_ERROR();
11127 } IEM_MC_ENDIF();
11128
11129 IEM_MC_END();
11130 return VINF_SUCCESS;
11131 }
11132
11133 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11134 }
11135 }
11136 else
11137 {
11138 /* memory access. */
11139 switch (pVCpu->iem.s.enmEffOpSize)
11140 {
11141 case IEMMODE_16BIT:
11142 {
11143 IEM_MC_BEGIN(4, 2);
11144 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11145 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11146 IEM_MC_ARG(uint16_t, u16Value, 2);
11147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11149 IEM_MC_LOCAL(int32_t, rc);
11150
11151 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11153 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11154 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11155 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11156 IEM_MC_REF_EFLAGS(pEFlags);
11157 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11158 IEM_MC_IF_LOCAL_IS_Z(rc) {
11159 IEM_MC_ADVANCE_RIP();
11160 } IEM_MC_ELSE() {
11161 IEM_MC_RAISE_DIVIDE_ERROR();
11162 } IEM_MC_ENDIF();
11163
11164 IEM_MC_END();
11165 return VINF_SUCCESS;
11166 }
11167
11168 case IEMMODE_32BIT:
11169 {
11170 IEM_MC_BEGIN(4, 2);
11171 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11172 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11173 IEM_MC_ARG(uint32_t, u32Value, 2);
11174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11175 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11176 IEM_MC_LOCAL(int32_t, rc);
11177
11178 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11179 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11180 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11181 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11182 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11183 IEM_MC_REF_EFLAGS(pEFlags);
11184 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11185 IEM_MC_IF_LOCAL_IS_Z(rc) {
11186 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11187 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11188 IEM_MC_ADVANCE_RIP();
11189 } IEM_MC_ELSE() {
11190 IEM_MC_RAISE_DIVIDE_ERROR();
11191 } IEM_MC_ENDIF();
11192
11193 IEM_MC_END();
11194 return VINF_SUCCESS;
11195 }
11196
11197 case IEMMODE_64BIT:
11198 {
11199 IEM_MC_BEGIN(4, 2);
11200 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11201 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11202 IEM_MC_ARG(uint64_t, u64Value, 2);
11203 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11204 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11205 IEM_MC_LOCAL(int32_t, rc);
11206
11207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11209 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11210 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11211 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11212 IEM_MC_REF_EFLAGS(pEFlags);
11213 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11214 IEM_MC_IF_LOCAL_IS_Z(rc) {
11215 IEM_MC_ADVANCE_RIP();
11216 } IEM_MC_ELSE() {
11217 IEM_MC_RAISE_DIVIDE_ERROR();
11218 } IEM_MC_ENDIF();
11219
11220 IEM_MC_END();
11221 return VINF_SUCCESS;
11222 }
11223
11224 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11225 }
11226 }
11227}
11228
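/* Illustrative counterpart for the division workers dispatched through the
   pImpl tables above: a #DE is requested (non-zero return) both for
   divide-by-zero and for quotient overflow.  Sketch of an unsigned 8-bit
   divide under the same contract -- hypothetical name, not the real
   iemAImpl_div_u8: */
#if 0
static int iemExampleAImpl_div_u8(uint16_t *pu16AX, uint8_t u8Value, uint32_t *pfEFlags)
{
    if (!u8Value)
        return -1;                              /* #DE: divide by zero. */
    uint16_t const uDividend = *pu16AX;
    uint16_t const uQuotient = uDividend / u8Value;
    if (uQuotient > 0xff)
        return -1;                              /* #DE: quotient overflow. */
    *pu16AX = (uint16_t)((uDividend % u8Value) << 8) | (uint8_t)uQuotient; /* AH=rem, AL=quot. */
    RT_NOREF(pfEFlags);                         /* all arithmetic flags are undefined. */
    return 0;
}
#endif
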
11229/**
11230 * @opcode 0xf6
11231 */
11232FNIEMOP_DEF(iemOp_Grp3_Eb)
11233{
11234 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11235 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11236 {
11237 case 0:
11238 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
11239 case 1:
11240/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11241 return IEMOP_RAISE_INVALID_OPCODE();
11242 case 2:
11243 IEMOP_MNEMONIC(not_Eb, "not Eb");
11244 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
11245 case 3:
11246 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
11247 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
11248 case 4:
11249 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
11250 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11251 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
11252 case 5:
11253 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
11254 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11255 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
11256 case 6:
11257 IEMOP_MNEMONIC(div_Eb, "div Eb");
11258 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11259 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
11260 case 7:
11261 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
11262 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11263 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
11264 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11265 }
11266}
11267
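/* Worked example of the reg-field dispatch above: for opcode 0xf6 with
   bRm = 0xd9 (binary 11 011 001), mod=3 selects a register operand, reg=3
   selects 'neg' and rm=1 names CL, so the sequence decodes as 'neg cl'.
   The locals below are illustrative only. */
#if 0
    uint8_t const  bRmExample = 0xd9;
    unsigned const iReg = (bRmExample >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK; /* = 3 -> neg */
    unsigned const iRm  = bRmExample & X86_MODRM_RM_MASK;                            /* = 1 -> CL  */
#endif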
11268
11269/**
11270 * @opcode 0xf7
11271 */
11272FNIEMOP_DEF(iemOp_Grp3_Ev)
11273{
11274 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11275 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11276 {
11277 case 0:
11278 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
11279 case 1:
11280/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
11281 return IEMOP_RAISE_INVALID_OPCODE();
11282 case 2:
11283 IEMOP_MNEMONIC(not_Ev, "not Ev");
11284 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
11285 case 3:
11286 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
11287 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
11288 case 4:
11289 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
11290 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11291 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
11292 case 5:
11293 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
11294 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11295 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
11296 case 6:
11297 IEMOP_MNEMONIC(div_Ev, "div Ev");
11298 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11299 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
11300 case 7:
11301 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
11302 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
11303 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
11304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11305 }
11306}
11307
11308
11309/**
11310 * @opcode 0xf8
11311 */
11312FNIEMOP_DEF(iemOp_clc)
11313{
11314 IEMOP_MNEMONIC(clc, "clc");
11315 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11316 IEM_MC_BEGIN(0, 0);
11317 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11318 IEM_MC_ADVANCE_RIP();
11319 IEM_MC_END();
11320 return VINF_SUCCESS;
11321}
11322
11323
11324/**
11325 * @opcode 0xf9
11326 */
11327FNIEMOP_DEF(iemOp_stc)
11328{
11329 IEMOP_MNEMONIC(stc, "stc");
11330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11331 IEM_MC_BEGIN(0, 0);
11332 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11333 IEM_MC_ADVANCE_RIP();
11334 IEM_MC_END();
11335 return VINF_SUCCESS;
11336}
11337
11338
11339/**
11340 * @opcode 0xfa
11341 */
11342FNIEMOP_DEF(iemOp_cli)
11343{
11344 IEMOP_MNEMONIC(cli, "cli");
11345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11346 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11347}
11348
11349
/**
 * @opcode 0xfb
 */
11350 FNIEMOP_DEF(iemOp_sti)
11351{
11352 IEMOP_MNEMONIC(sti, "sti");
11353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11354 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
11355}
11356
11357
11358/**
11359 * @opcode 0xfc
11360 */
11361FNIEMOP_DEF(iemOp_cld)
11362{
11363 IEMOP_MNEMONIC(cld, "cld");
11364 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11365 IEM_MC_BEGIN(0, 0);
11366 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11367 IEM_MC_ADVANCE_RIP();
11368 IEM_MC_END();
11369 return VINF_SUCCESS;
11370}
11371
11372
11373/**
11374 * @opcode 0xfd
11375 */
11376FNIEMOP_DEF(iemOp_std)
11377{
11378 IEMOP_MNEMONIC(std, "std");
11379 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11380 IEM_MC_BEGIN(0, 0);
11381 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11382 IEM_MC_ADVANCE_RIP();
11383 IEM_MC_END();
11384 return VINF_SUCCESS;
11385}
11386
11387
11388/**
11389 * @opcode 0xfe
11390 */
11391FNIEMOP_DEF(iemOp_Grp4)
11392{
11393 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11394 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11395 {
11396 case 0:
11397 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11398 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11399 case 1:
11400 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11401 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11402 default:
11403 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11404 return IEMOP_RAISE_INVALID_OPCODE();
11405 }
11406}
11407
11408
11409/**
11410 * Opcode 0xff /2.
11411 * @param bRm The RM byte.
11412 */
11413FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11414{
11415 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11416 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11417
11418 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11419 {
11420 /* The new RIP is taken from a register. */
11421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11422 switch (pVCpu->iem.s.enmEffOpSize)
11423 {
11424 case IEMMODE_16BIT:
11425 IEM_MC_BEGIN(1, 0);
11426 IEM_MC_ARG(uint16_t, u16Target, 0);
11427 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11428 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11429 IEM_MC_END()
11430 return VINF_SUCCESS;
11431
11432 case IEMMODE_32BIT:
11433 IEM_MC_BEGIN(1, 0);
11434 IEM_MC_ARG(uint32_t, u32Target, 0);
11435 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11436 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11437 IEM_MC_END()
11438 return VINF_SUCCESS;
11439
11440 case IEMMODE_64BIT:
11441 IEM_MC_BEGIN(1, 0);
11442 IEM_MC_ARG(uint64_t, u64Target, 0);
11443 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11444 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11445 IEM_MC_END()
11446 return VINF_SUCCESS;
11447
11448 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11449 }
11450 }
11451 else
11452 {
11453 /* The new RIP is taken from a memory location. */
11454 switch (pVCpu->iem.s.enmEffOpSize)
11455 {
11456 case IEMMODE_16BIT:
11457 IEM_MC_BEGIN(1, 1);
11458 IEM_MC_ARG(uint16_t, u16Target, 0);
11459 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11460 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11461 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11462 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11463 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11464 IEM_MC_END()
11465 return VINF_SUCCESS;
11466
11467 case IEMMODE_32BIT:
11468 IEM_MC_BEGIN(1, 1);
11469 IEM_MC_ARG(uint32_t, u32Target, 0);
11470 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11471 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11472 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11473 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11474 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11475 IEM_MC_END()
11476 return VINF_SUCCESS;
11477
11478 case IEMMODE_64BIT:
11479 IEM_MC_BEGIN(1, 1);
11480 IEM_MC_ARG(uint64_t, u64Target, 0);
11481 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11482 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11483 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11484 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11485 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11486 IEM_MC_END()
11487 return VINF_SUCCESS;
11488
11489 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11490 }
11491 }
11492}
11493
11494typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11495
11496FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11497{
11498 /* Registers? How?? */
11499 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11500 { /* likely */ }
11501 else
11502 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11503
11504 /* Far pointer loaded from memory. */
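/* Memory layout: the offset comes first, followed by the 16-bit selector at
   +2, +4 or +8 for o16, o32 and o64 respectively -- hence the displaced
   selector fetches below. */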
11505 switch (pVCpu->iem.s.enmEffOpSize)
11506 {
11507 case IEMMODE_16BIT:
11508 IEM_MC_BEGIN(3, 1);
11509 IEM_MC_ARG(uint16_t, u16Sel, 0);
11510 IEM_MC_ARG(uint16_t, offSeg, 1);
11511 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11512 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11513 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11515 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11516 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11517 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11518 IEM_MC_END();
11519 return VINF_SUCCESS;
11520
11521 case IEMMODE_64BIT:
11522 /** @todo testcase: AMD does not seem to implement this case (see bs-cpu-xcpt-1)
11523 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11524 * and call far qword [rsp] encodings. */
11525 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11526 {
11527 IEM_MC_BEGIN(3, 1);
11528 IEM_MC_ARG(uint16_t, u16Sel, 0);
11529 IEM_MC_ARG(uint64_t, offSeg, 1);
11530 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_64BIT, 2);
11531 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11532 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11533 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11534 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11535 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11536 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11537 IEM_MC_END();
11538 return VINF_SUCCESS;
11539 }
11540 /* AMD falls thru. */
11541 RT_FALL_THRU();
11542
11543 case IEMMODE_32BIT:
11544 IEM_MC_BEGIN(3, 1);
11545 IEM_MC_ARG(uint16_t, u16Sel, 0);
11546 IEM_MC_ARG(uint32_t, offSeg, 1);
11547 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11548 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11550 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11551 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11552 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11553 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11554 IEM_MC_END();
11555 return VINF_SUCCESS;
11556
11557 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11558 }
11559}
11560
11561
11562/**
11563 * Opcode 0xff /3.
11564 * @param bRm The RM byte.
11565 */
11566FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11567{
11568 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11569 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11570}
11571
11572
11573/**
11574 * Opcode 0xff /4.
11575 * @param bRm The RM byte.
11576 */
11577FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11578{
11579 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11580 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11581
11582 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11583 {
11584 /* The new RIP is taken from a register. */
11585 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11586 switch (pVCpu->iem.s.enmEffOpSize)
11587 {
11588 case IEMMODE_16BIT:
11589 IEM_MC_BEGIN(0, 1);
11590 IEM_MC_LOCAL(uint16_t, u16Target);
11591 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11592 IEM_MC_SET_RIP_U16(u16Target);
11593 IEM_MC_END()
11594 return VINF_SUCCESS;
11595
11596 case IEMMODE_32BIT:
11597 IEM_MC_BEGIN(0, 1);
11598 IEM_MC_LOCAL(uint32_t, u32Target);
11599 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11600 IEM_MC_SET_RIP_U32(u32Target);
11601 IEM_MC_END()
11602 return VINF_SUCCESS;
11603
11604 case IEMMODE_64BIT:
11605 IEM_MC_BEGIN(0, 1);
11606 IEM_MC_LOCAL(uint64_t, u64Target);
11607 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11608 IEM_MC_SET_RIP_U64(u64Target);
11609 IEM_MC_END()
11610 return VINF_SUCCESS;
11611
11612 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11613 }
11614 }
11615 else
11616 {
11617 /* The new RIP is taken from a memory location. */
11618 switch (pVCpu->iem.s.enmEffOpSize)
11619 {
11620 case IEMMODE_16BIT:
11621 IEM_MC_BEGIN(0, 2);
11622 IEM_MC_LOCAL(uint16_t, u16Target);
11623 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11624 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11625 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11626 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11627 IEM_MC_SET_RIP_U16(u16Target);
11628 IEM_MC_END()
11629 return VINF_SUCCESS;
11630
11631 case IEMMODE_32BIT:
11632 IEM_MC_BEGIN(0, 2);
11633 IEM_MC_LOCAL(uint32_t, u32Target);
11634 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11636 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11637 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11638 IEM_MC_SET_RIP_U32(u32Target);
11639 IEM_MC_END()
11640 return VINF_SUCCESS;
11641
11642 case IEMMODE_64BIT:
11643 IEM_MC_BEGIN(0, 2);
11644 IEM_MC_LOCAL(uint64_t, u64Target);
11645 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11646 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11647 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11648 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11649 IEM_MC_SET_RIP_U64(u64Target);
11650 IEM_MC_END()
11651 return VINF_SUCCESS;
11652
11653 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11654 }
11655 }
11656}
11657
11658
11659/**
11660 * Opcode 0xff /5.
11661 * @param bRm The RM byte.
11662 */
11663FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11664{
11665 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11666 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11667}
11668
11669
11670/**
11671 * Opcode 0xff /6.
11672 * @param bRm The RM byte.
11673 */
11674FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11675{
11676 IEMOP_MNEMONIC(push_Ev, "push Ev");
11677
11678 /* Registers are handled by a common worker. */
11679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11680 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11681
11682 /* Memory we do here. */
11683 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11684 switch (pVCpu->iem.s.enmEffOpSize)
11685 {
11686 case IEMMODE_16BIT:
11687 IEM_MC_BEGIN(0, 2);
11688 IEM_MC_LOCAL(uint16_t, u16Src);
11689 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11690 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11691 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11692 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11693 IEM_MC_PUSH_U16(u16Src);
11694 IEM_MC_ADVANCE_RIP();
11695 IEM_MC_END();
11696 return VINF_SUCCESS;
11697
11698 case IEMMODE_32BIT:
11699 IEM_MC_BEGIN(0, 2);
11700 IEM_MC_LOCAL(uint32_t, u32Src);
11701 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11702 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11703 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11704 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11705 IEM_MC_PUSH_U32(u32Src);
11706 IEM_MC_ADVANCE_RIP();
11707 IEM_MC_END();
11708 return VINF_SUCCESS;
11709
11710 case IEMMODE_64BIT:
11711 IEM_MC_BEGIN(0, 2);
11712 IEM_MC_LOCAL(uint64_t, u64Src);
11713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11716 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11717 IEM_MC_PUSH_U64(u64Src);
11718 IEM_MC_ADVANCE_RIP();
11719 IEM_MC_END();
11720 return VINF_SUCCESS;
11721
11722 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11723 }
11724}
11725
11726
11727/**
11728 * @opcode 0xff
11729 */
11730FNIEMOP_DEF(iemOp_Grp5)
11731{
11732 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11733 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11734 {
11735 case 0:
11736 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11737 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11738 case 1:
11739 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11740 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11741 case 2:
11742 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11743 case 3:
11744 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11745 case 4:
11746 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11747 case 5:
11748 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11749 case 6:
11750 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11751 case 7:
11752 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11753 return IEMOP_RAISE_INVALID_OPCODE();
11754 }
11755 AssertFailedReturn(VERR_IEM_IPE_3);
11756}
11757
11758
11759
11760const PFNIEMOP g_apfnOneByteMap[256] =
11761{
11762 /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
11763 /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
11764 /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
11765 /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
11766 /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
11767 /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
11768 /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
11769 /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
11770 /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
11771 /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
11772 /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
11773 /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
11774 /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
11775 /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
11776 /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
11777 /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
11778 /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
11779 /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
11780 /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
11781 /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
11782 /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
11783 /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
11784 /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
11785 /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
11786 /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
11787 /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
11788 /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
11789 /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
11790 /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
11791 /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
11792 /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
11793 /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
11794 /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
11795 /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
11796 /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
11797 /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
11798 /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
11799 /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
11800 /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
11801 /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
11802 /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
11803 /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
11804 /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
11805 /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
11806 /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
11807 /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
11808 /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
11809 /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
11810 /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
11811 /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
11812 /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
11813 /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
11814 /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
11815 /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
11816 /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
11817 /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
11818 /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
11819 /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
11820 /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
11821 /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
11822 /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
11823 /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
11824 /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
11825 /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
11826};
11827
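/* Illustrative sketch (the real decode loop lives elsewhere and adds prefix,
   lock and exception handling): the essential use of the table above is one
   indexed call per opcode byte, exactly as the prefix handlers earlier in
   this file do. */
#if 0
    uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
    return FNIEMOP_CALL(g_apfnOneByteMap[bOpcode]);
#endif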
11828
11829/** @} */
11830