VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65897

Last change on this file since 65897 was 65894, checked in by vboxsync, 8 years ago

IEM: updates

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 375.9 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65894 2017-02-28 09:45:42Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
/** @defgroup og_gen General
 * @{
 */

/** @defgroup og_gen_arith Arithmetic
 * @{
 */
/** @defgroup og_gen_arith_bin Binary numbers */
/** @defgroup og_gen_arith_dec Decimal numbers */
/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
/* Instruction specification format - work in progress: */

/**
 * @opcode 0x00
 * @opmnemonic add
 * @op1 rm:Eb
 * @op2 reg:Gb
 * @opmaps one
 * @openc ModR/M
 * @opflmodify of,sf,zf,af,pf,cf
 * @ophints harmless ignores_op_size
 * @opstats add_Eb_Gb
 * @opgroup op_gen_arith_bin
 * @optest op1=1 op2=1 -> op1=2 efl=of,sf,zf,af
 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD Eb,Gb: decode and execute via the common r/m8,r8 binary-operator
       helper, passing the ADD implementation function table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}


/**
 * @opcode 0x01
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD Ev,Gv: common r/m,reg (operand-size sized) helper + ADD table. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}


/**
 * @opcode 0x02
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD Gb,Eb: register destination variant of the byte-sized ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x03
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD Gv,Ev: register destination variant of the sized ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}


/**
 * @opcode 0x04
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL,Ib: fixed-register immediate form. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}


/**
 * @opcode 0x05
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX,Iz: fixed-register immediate form, operand-size dependent. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
121
122
/**
 * @opcode 0x06
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES - invalid in 64-bit mode, hence IEMOP_HLP_NO_64BIT. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}


/**
 * @opcode 0x07
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES - invalid in 64-bit mode; defers to the C implementation of
       segment-register popping with the current effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
146
147
/**
 * @opcode 0x08
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR Eb,Gb: AF is architecturally undefined after OR, so tell the
       verification logic to ignore it. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}


/**
 * @opcode 0x09
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR Ev,Gv. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}


/**
 * @opcode 0x0a
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR Gb,Eb. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0b
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR Gv,Ev. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}


/**
 * @opcode 0x0c
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}


/**
 * @opcode 0x0d
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
236
237
/**
 * @opcode 0x0e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS - only exists pre-64-bit (0x0e is invalid in long mode, and on
       286+ 0x0f is the two-byte escape, see iemOp_2byteEscape). */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
248
249
250/**
251 * @opcode 0x0f
252 * @opmnemonic EscTwo0f
253 * @openc two0f
254 * @opdisenum OP_2B_ESC
255 * @ophints harmless
256 * @opgroup op_escapes
257 */
258FNIEMOP_DEF(iemOp_2byteEscape)
259{
260#ifdef VBOX_STRICT
261 /* Sanity check the table the first time around. */
262 static bool s_fTested = false;
263 if (RT_LIKELY(s_fTested)) { /* likely */ }
264 else
265 {
266 s_fTested = true;
267 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
268 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
269 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
270 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
271 }
272#endif
273
274 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
275 {
276 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
277 IEMOP_HLP_MIN_286();
278 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
279 }
280 /* @opdone */
281
282 /*
283 * On the 8086 this is a POP CS instruction.
284 * For the time being we don't specify this this.
285 */
286 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
287 IEMOP_HLP_NO_64BIT();
288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
289 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
290}
291
/**
 * @opcode 0x10
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=of,sf,zf,af
 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=of,sf,zf,af
 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC Eb,Gb: add-with-carry, consumes CF (see @opfltest above). */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}


/**
 * @opcode 0x11
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC Ev,Gv. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}


/**
 * @opcode 0x12
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC Gb,Eb. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x13
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC Gv,Ev. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}


/**
 * @opcode 0x14
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}


/**
 * @opcode 0x15
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
370
371
/**
 * @opcode 0x16
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
381
382
/**
 * @opcode 0x17
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS - invalid in 64-bit mode; marked IRQ-inhibiting so interrupts are
       suppressed for the following instruction (DISOPTYPE_INHIBIT_IRQS).
       NOTE(review): the old doc block carried adc-style tags (@opfltest cf,
       @opflmodify ...) apparently copy/pasted - POP SS does not touch EFLAGS,
       and nothing in this body references flags; tags corrected to match the
       other segment-register stack ops. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
396
397
/**
 * @opcode 0x18
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB Eb,Gb: subtract-with-borrow, consumes CF. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x19
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB Ev,Gv. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1a
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB Gb,Eb. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1b
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB Gv,Ev. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1c
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}


/**
 * @opcode 0x1d
 * @opgroup op_gen_arith_bin
 * @opfltest cf
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
474
475
/**
 * @opcode 0x1e
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS - invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}


/**
 * @opcode 0x1f
 * @opgroup op_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS - invalid in 64-bit mode; defers to the common segment-register
       pop with the current effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
499
500
/**
 * @opcode 0x20
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND Eb,Gb: AF is architecturally undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}


/**
 * @opcode 0x21
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND Ev,Gv. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}


/**
 * @opcode 0x22
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND Gb,Eb. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}


/**
 * @opcode 0x23
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND Gv,Ev. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
559
560
561/**
562 * @opcode 0x24
563 * @opgroup op_gen_arith_bin
564 * @opflmodify of,sf,zf,af,pf,cf
565 * @opflundef af
566 * @opflclear of,cf
567 */
568FNIEMOP_DEF(iemOp_and_Al_Ib)
569{
570 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
571 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
572 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
573}
574
575
/**
 * @opcode 0x25
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX,Iz: operand-size dependent immediate form. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
589
590
/**
 * @opcode 0x26
 * @opmnemonic SEG
 * @op1 ES
 * @opgroup op_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix and the effective
       segment, then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
609
610
/**
 * @opcode 0x27
 * @opfltest af,cf
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA - decimal adjust AL after addition; invalid in 64-bit mode.
       OF is architecturally undefined, hence the verification exemption. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
625
626
/**
 * @opcode 0x28
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB Eb,Gb: common r/m8,r8 helper with the SUB implementation table. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}


/**
 * @opcode 0x29
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB Ev,Gv. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2a
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB Gb,Eb. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2b
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB Gv,Ev. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2c
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}


/**
 * @opcode 0x2d
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX,Iz. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
697
698
/**
 * @opcode 0x2e
 * @opmnemonic SEG
 * @op1 CS
 * @opgroup op_prefix
 * @openc prefix
 * @opdisenum OP_SEG
 * @ophints harmless
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
717
718
/**
 * @opcode 0x2f
 * @opfltest af,cf
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef of
 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS - decimal adjust AL after subtraction; invalid in 64-bit mode.
       OF is architecturally undefined, hence the verification exemption. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
733
734
/**
 * @opcode 0x30
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR Eb,Gb: AF is architecturally undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}


/**
 * @opcode 0x31
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR Ev,Gv. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}


/**
 * @opcode 0x32
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR Gb,Eb. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x33
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR Gv,Ev. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}


/**
 * @opcode 0x34
 * @opgroup op_gen_arith_bin
 * @opflmodify of,sf,zf,af,pf,cf
 * @opflundef af
 * @opflclear of,cf
 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL,Ib. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
808
809
810/**
811 * @opcode 0x35
812 * @opgroup op_gen_arith_bin
813 * @opflmodify of,sf,zf,af,pf,cf
814 * @opflundef af
815 * @opflclear of,cf
816 */
817FNIEMOP_DEF(iemOp_xor_eAX_Iz)
818{
819 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
820 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
821 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
822}
823
824
/**
 * @opcode 0x36
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
837
838
/**
 * @opcode 0x37
 */
/* AAA (ASCII adjust AL after addition) - not implemented yet; FNIEMOP_STUB
   presumably expands to a placeholder that reports the missing
   implementation - TODO confirm against the macro definition. */
FNIEMOP_STUB(iemOp_aaa);
843
844
/**
 * @opcode 0x38
 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP Eb,Gb: same helper/table pattern as the other binary ops; the CMP
       table only updates flags (the helpers discard the result).
       NOTE(review): these six CMP decoders still use the old single-argument
       IEMOP_MNEMONIC style, unlike the IEMOP_MNEMONIC2 style above. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x39
 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP Ev,Gv. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3a
 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP Gb,Eb. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3b
 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP Gv,Ev. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3c
 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL,Ib. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}


/**
 * @opcode 0x3d
 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX,Iz. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
903
904
/**
 * @opcode 0x3e
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
917
918
/**
 * @opcode 0x3f
 */
/* AAS (ASCII adjust AL after subtraction) - not implemented yet, same stub
   mechanism as iemOp_aaa above. */
FNIEMOP_STUB(iemOp_aas);
923
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * References the given general-purpose register at the current effective
 * operand size, takes a reference to EFLAGS, and invokes the size-specific
 * worker from the supplied implementation table.
 *
 * @param   pImpl   Unary operation table with U16/U32/U64 workers.
 * @param   iReg    General register index the operation is applied to.
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* NOTE(review): no default case in the switch - an unexpected operand
       size silently falls through to this success return; consider an
       AssertFailed-style default instead. */
    return VINF_SUCCESS;
}
968
969
/**
 * @opcode 0x40
 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* Plain REX (no R/X/B/W bits): just record the prefix and continue
           decoding the next opcode byte. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* Outside 64-bit mode: INC eAX via the common unary-op helper. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}


/**
 * @opcode 0x41
 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.B: extends the r/m, base or opcode register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}


/**
 * @opcode 0x42
 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.X: extends the SIB index register field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}



/**
 * @opcode 0x43
 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.BX: both B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}


/**
 * @opcode 0x44
 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.R: extends the ModR/M reg field. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}


/**
 * @opcode 0x45
 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RB. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}


/**
 * @opcode 0x46
 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RX. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}


/**
 * @opcode 0x47
 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* REX.RBX: R, B and X extension bits. */
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1157
1158
1159/**
1160 * @opcode 0x48
1161 */
1162FNIEMOP_DEF(iemOp_dec_eAX)
1163{
1164 /*
1165 * This is a REX prefix in 64-bit mode.
1166 */
1167 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1168 {
1169 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
1170 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
1171 iemRecalEffOpSize(pVCpu);
1172
1173 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1174 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1175 }
1176
1177 IEMOP_MNEMONIC(dec_eAX, "dec eAX");
1178 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
1179}
1180
1181
1182/**
1183 * @opcode 0x49
1184 */
1185FNIEMOP_DEF(iemOp_dec_eCX)
1186{
1187 /*
1188 * This is a REX prefix in 64-bit mode.
1189 */
1190 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1191 {
1192 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
1193 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1194 pVCpu->iem.s.uRexB = 1 << 3;
1195 iemRecalEffOpSize(pVCpu);
1196
1197 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1198 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1199 }
1200
1201 IEMOP_MNEMONIC(dec_eCX, "dec eCX");
1202 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
1203}
1204
1205
1206/**
1207 * @opcode 0x4a
1208 */
1209FNIEMOP_DEF(iemOp_dec_eDX)
1210{
1211 /*
1212 * This is a REX prefix in 64-bit mode.
1213 */
1214 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1215 {
1216 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
1217 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1218 pVCpu->iem.s.uRexIndex = 1 << 3;
1219 iemRecalEffOpSize(pVCpu);
1220
1221 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1222 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1223 }
1224
1225 IEMOP_MNEMONIC(dec_eDX, "dec eDX");
1226 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
1227}
1228
1229
1230/**
1231 * @opcode 0x4b
1232 */
1233FNIEMOP_DEF(iemOp_dec_eBX)
1234{
1235 /*
1236 * This is a REX prefix in 64-bit mode.
1237 */
1238 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1239 {
1240 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
1241 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1242 pVCpu->iem.s.uRexB = 1 << 3;
1243 pVCpu->iem.s.uRexIndex = 1 << 3;
1244 iemRecalEffOpSize(pVCpu);
1245
1246 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1247 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1248 }
1249
1250 IEMOP_MNEMONIC(dec_eBX, "dec eBX");
1251 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
1252}
1253
1254
1255/**
1256 * @opcode 0x4c
1257 */
1258FNIEMOP_DEF(iemOp_dec_eSP)
1259{
1260 /*
1261 * This is a REX prefix in 64-bit mode.
1262 */
1263 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1264 {
1265 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
1266 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
1267 pVCpu->iem.s.uRexReg = 1 << 3;
1268 iemRecalEffOpSize(pVCpu);
1269
1270 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1271 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1272 }
1273
1274 IEMOP_MNEMONIC(dec_eSP, "dec eSP");
1275 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
1276}
1277
1278
1279/**
1280 * @opcode 0x4d
1281 */
1282FNIEMOP_DEF(iemOp_dec_eBP)
1283{
1284 /*
1285 * This is a REX prefix in 64-bit mode.
1286 */
1287 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1288 {
1289 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
1290 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
1291 pVCpu->iem.s.uRexReg = 1 << 3;
1292 pVCpu->iem.s.uRexB = 1 << 3;
1293 iemRecalEffOpSize(pVCpu);
1294
1295 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1296 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1297 }
1298
1299 IEMOP_MNEMONIC(dec_eBP, "dec eBP");
1300 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
1301}
1302
1303
1304/**
1305 * @opcode 0x4e
1306 */
1307FNIEMOP_DEF(iemOp_dec_eSI)
1308{
1309 /*
1310 * This is a REX prefix in 64-bit mode.
1311 */
1312 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1313 {
1314 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
1315 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1316 pVCpu->iem.s.uRexReg = 1 << 3;
1317 pVCpu->iem.s.uRexIndex = 1 << 3;
1318 iemRecalEffOpSize(pVCpu);
1319
1320 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1321 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1322 }
1323
1324 IEMOP_MNEMONIC(dec_eSI, "dec eSI");
1325 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
1326}
1327
1328
1329/**
1330 * @opcode 0x4f
1331 */
1332FNIEMOP_DEF(iemOp_dec_eDI)
1333{
1334 /*
1335 * This is a REX prefix in 64-bit mode.
1336 */
1337 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1338 {
1339 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
1340 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
1341 pVCpu->iem.s.uRexReg = 1 << 3;
1342 pVCpu->iem.s.uRexB = 1 << 3;
1343 pVCpu->iem.s.uRexIndex = 1 << 3;
1344 iemRecalEffOpSize(pVCpu);
1345
1346 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1347 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1348 }
1349
1350 IEMOP_MNEMONIC(dec_eDI, "dec eDI");
1351 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
1352}
1353
1354
1355/**
1356 * Common 'push register' helper.
1357 */
1358FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
1359{
1360 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1361 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1362 {
1363 iReg |= pVCpu->iem.s.uRexB;
1364 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1365 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1366 }
1367
1368 switch (pVCpu->iem.s.enmEffOpSize)
1369 {
1370 case IEMMODE_16BIT:
1371 IEM_MC_BEGIN(0, 1);
1372 IEM_MC_LOCAL(uint16_t, u16Value);
1373 IEM_MC_FETCH_GREG_U16(u16Value, iReg);
1374 IEM_MC_PUSH_U16(u16Value);
1375 IEM_MC_ADVANCE_RIP();
1376 IEM_MC_END();
1377 break;
1378
1379 case IEMMODE_32BIT:
1380 IEM_MC_BEGIN(0, 1);
1381 IEM_MC_LOCAL(uint32_t, u32Value);
1382 IEM_MC_FETCH_GREG_U32(u32Value, iReg);
1383 IEM_MC_PUSH_U32(u32Value);
1384 IEM_MC_ADVANCE_RIP();
1385 IEM_MC_END();
1386 break;
1387
1388 case IEMMODE_64BIT:
1389 IEM_MC_BEGIN(0, 1);
1390 IEM_MC_LOCAL(uint64_t, u64Value);
1391 IEM_MC_FETCH_GREG_U64(u64Value, iReg);
1392 IEM_MC_PUSH_U64(u64Value);
1393 IEM_MC_ADVANCE_RIP();
1394 IEM_MC_END();
1395 break;
1396 }
1397
1398 return VINF_SUCCESS;
1399}
1400
1401
1402/**
1403 * @opcode 0x50
1404 */
1405FNIEMOP_DEF(iemOp_push_eAX)
1406{
1407 IEMOP_MNEMONIC(push_rAX, "push rAX");
1408 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
1409}
1410
1411
1412/**
1413 * @opcode 0x51
1414 */
1415FNIEMOP_DEF(iemOp_push_eCX)
1416{
1417 IEMOP_MNEMONIC(push_rCX, "push rCX");
1418 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
1419}
1420
1421
1422/**
1423 * @opcode 0x52
1424 */
1425FNIEMOP_DEF(iemOp_push_eDX)
1426{
1427 IEMOP_MNEMONIC(push_rDX, "push rDX");
1428 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
1429}
1430
1431
1432/**
1433 * @opcode 0x53
1434 */
1435FNIEMOP_DEF(iemOp_push_eBX)
1436{
1437 IEMOP_MNEMONIC(push_rBX, "push rBX");
1438 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
1439}
1440
1441
1442/**
1443 * @opcode 0x54
1444 */
1445FNIEMOP_DEF(iemOp_push_eSP)
1446{
1447 IEMOP_MNEMONIC(push_rSP, "push rSP");
1448 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1449 {
1450 IEM_MC_BEGIN(0, 1);
1451 IEM_MC_LOCAL(uint16_t, u16Value);
1452 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1453 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1454 IEM_MC_PUSH_U16(u16Value);
1455 IEM_MC_ADVANCE_RIP();
1456 IEM_MC_END();
1457 }
1458 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1459}
1460
1461
1462/**
1463 * @opcode 0x55
1464 */
1465FNIEMOP_DEF(iemOp_push_eBP)
1466{
1467 IEMOP_MNEMONIC(push_rBP, "push rBP");
1468 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
1469}
1470
1471
1472/**
1473 * @opcode 0x56
1474 */
1475FNIEMOP_DEF(iemOp_push_eSI)
1476{
1477 IEMOP_MNEMONIC(push_rSI, "push rSI");
1478 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
1479}
1480
1481
1482/**
1483 * @opcode 0x57
1484 */
1485FNIEMOP_DEF(iemOp_push_eDI)
1486{
1487 IEMOP_MNEMONIC(push_rDI, "push rDI");
1488 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
1489}
1490
1491
1492/**
1493 * Common 'pop register' helper.
1494 */
1495FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
1496{
1497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1498 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1499 {
1500 iReg |= pVCpu->iem.s.uRexB;
1501 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1502 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1503 }
1504
1505 switch (pVCpu->iem.s.enmEffOpSize)
1506 {
1507 case IEMMODE_16BIT:
1508 IEM_MC_BEGIN(0, 1);
1509 IEM_MC_LOCAL(uint16_t *, pu16Dst);
1510 IEM_MC_REF_GREG_U16(pu16Dst, iReg);
1511 IEM_MC_POP_U16(pu16Dst);
1512 IEM_MC_ADVANCE_RIP();
1513 IEM_MC_END();
1514 break;
1515
1516 case IEMMODE_32BIT:
1517 IEM_MC_BEGIN(0, 1);
1518 IEM_MC_LOCAL(uint32_t *, pu32Dst);
1519 IEM_MC_REF_GREG_U32(pu32Dst, iReg);
1520 IEM_MC_POP_U32(pu32Dst);
1521 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
1522 IEM_MC_ADVANCE_RIP();
1523 IEM_MC_END();
1524 break;
1525
1526 case IEMMODE_64BIT:
1527 IEM_MC_BEGIN(0, 1);
1528 IEM_MC_LOCAL(uint64_t *, pu64Dst);
1529 IEM_MC_REF_GREG_U64(pu64Dst, iReg);
1530 IEM_MC_POP_U64(pu64Dst);
1531 IEM_MC_ADVANCE_RIP();
1532 IEM_MC_END();
1533 break;
1534 }
1535
1536 return VINF_SUCCESS;
1537}
1538
1539
1540/**
1541 * @opcode 0x58
1542 */
1543FNIEMOP_DEF(iemOp_pop_eAX)
1544{
1545 IEMOP_MNEMONIC(pop_rAX, "pop rAX");
1546 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
1547}
1548
1549
1550/**
1551 * @opcode 0x59
1552 */
1553FNIEMOP_DEF(iemOp_pop_eCX)
1554{
1555 IEMOP_MNEMONIC(pop_rCX, "pop rCX");
1556 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
1557}
1558
1559
1560/**
1561 * @opcode 0x5a
1562 */
1563FNIEMOP_DEF(iemOp_pop_eDX)
1564{
1565 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1566 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1567}
1568
1569
1570/**
1571 * @opcode 0x5b
1572 */
1573FNIEMOP_DEF(iemOp_pop_eBX)
1574{
1575 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1576 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1577}
1578
1579
1580/**
1581 * @opcode 0x5c
1582 */
1583FNIEMOP_DEF(iemOp_pop_eSP)
1584{
1585 IEMOP_MNEMONIC(pop_rSP, "pop rSP");
1586 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
1587 {
1588 if (pVCpu->iem.s.uRexB)
1589 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
1590 pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
1591 pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
1592 }
1593
1594 IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
1595 DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
1596 /** @todo add testcase for this instruction. */
1597 switch (pVCpu->iem.s.enmEffOpSize)
1598 {
1599 case IEMMODE_16BIT:
1600 IEM_MC_BEGIN(0, 1);
1601 IEM_MC_LOCAL(uint16_t, u16Dst);
1602 IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
1603 IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
1604 IEM_MC_ADVANCE_RIP();
1605 IEM_MC_END();
1606 break;
1607
1608 case IEMMODE_32BIT:
1609 IEM_MC_BEGIN(0, 1);
1610 IEM_MC_LOCAL(uint32_t, u32Dst);
1611 IEM_MC_POP_U32(&u32Dst);
1612 IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
1613 IEM_MC_ADVANCE_RIP();
1614 IEM_MC_END();
1615 break;
1616
1617 case IEMMODE_64BIT:
1618 IEM_MC_BEGIN(0, 1);
1619 IEM_MC_LOCAL(uint64_t, u64Dst);
1620 IEM_MC_POP_U64(&u64Dst);
1621 IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
1622 IEM_MC_ADVANCE_RIP();
1623 IEM_MC_END();
1624 break;
1625 }
1626
1627 return VINF_SUCCESS;
1628}
1629
1630
1631/**
1632 * @opcode 0x5d
1633 */
1634FNIEMOP_DEF(iemOp_pop_eBP)
1635{
1636 IEMOP_MNEMONIC(pop_rBP, "pop rBP");
1637 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
1638}
1639
1640
1641/**
1642 * @opcode 0x5e
1643 */
1644FNIEMOP_DEF(iemOp_pop_eSI)
1645{
1646 IEMOP_MNEMONIC(pop_rSI, "pop rSI");
1647 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
1648}
1649
1650
1651/**
1652 * @opcode 0x5f
1653 */
1654FNIEMOP_DEF(iemOp_pop_eDI)
1655{
1656 IEMOP_MNEMONIC(pop_rDI, "pop rDI");
1657 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
1658}
1659
1660
1661/**
1662 * @opcode 0x60
1663 */
1664FNIEMOP_DEF(iemOp_pusha)
1665{
1666 IEMOP_MNEMONIC(pusha, "pusha");
1667 IEMOP_HLP_MIN_186();
1668 IEMOP_HLP_NO_64BIT();
1669 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1670 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
1671 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1672 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
1673}
1674
1675
1676/**
1677 * @opcode 0x61
1678 */
1679FNIEMOP_DEF(iemOp_popa__mvex)
1680{
1681 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
1682 {
1683 IEMOP_MNEMONIC(popa, "popa");
1684 IEMOP_HLP_MIN_186();
1685 IEMOP_HLP_NO_64BIT();
1686 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
1687 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
1688 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
1689 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
1690 }
1691 IEMOP_MNEMONIC(mvex, "mvex");
1692 Log(("mvex prefix is not supported!\n"));
1693 return IEMOP_RAISE_INVALID_OPCODE();
1694}
1695
1696
1697/**
1698 * @opcode 0x62
1699 * @opmnemonic bound
1700 * @op1 Gv
1701 * @op2 Ma
1702 * @opmincpu 80186
1703 * @ophints harmless invalid_64
1704 */
1705FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
1706// IEMOP_HLP_MIN_186();
1707
1708
/** Opcode 0x63 - non-64-bit modes.
 * arpl Ew,Gw - adjust RPL field of a selector.  286+, not valid in real or
 * V86 mode.  Register and memory destination forms; the memory form maps the
 * destination read/write and commits EFLAGS explicitly. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* No REX in these modes, so the raw ModR/M fields index the registers. */
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1758
1759
1760/**
1761 * @opcode 0x63
1762 *
1763 * @note This is a weird one. It works like a regular move instruction if
1764 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
1765 * @todo This definitely needs a testcase to verify the odd cases. */
1766FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
1767{
1768 Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */
1769
1770 IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
1771 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1772
1773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1774 {
1775 /*
1776 * Register to register.
1777 */
1778 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1779 IEM_MC_BEGIN(0, 1);
1780 IEM_MC_LOCAL(uint64_t, u64Value);
1781 IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1782 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1783 IEM_MC_ADVANCE_RIP();
1784 IEM_MC_END();
1785 }
1786 else
1787 {
1788 /*
1789 * We're loading a register from memory.
1790 */
1791 IEM_MC_BEGIN(0, 2);
1792 IEM_MC_LOCAL(uint64_t, u64Value);
1793 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1794 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
1795 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1796 IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1797 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
1798 IEM_MC_ADVANCE_RIP();
1799 IEM_MC_END();
1800 }
1801 return VINF_SUCCESS;
1802}
1803
1804
1805/**
1806 * @opcode 0x64
1807 * @opmnemonic segfs
1808 * @opmincpu 80386
1809 * @opgroup op_prefixes
1810 */
1811FNIEMOP_DEF(iemOp_seg_FS)
1812{
1813 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
1814 IEMOP_HLP_MIN_386();
1815
1816 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
1817 pVCpu->iem.s.iEffSeg = X86_SREG_FS;
1818
1819 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1820 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1821}
1822
1823
1824/**
1825 * @opcode 0x65
1826 * @opmnemonic seggs
1827 * @opmincpu 80386
1828 * @opgroup op_prefixes
1829 */
1830FNIEMOP_DEF(iemOp_seg_GS)
1831{
1832 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
1833 IEMOP_HLP_MIN_386();
1834
1835 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
1836 pVCpu->iem.s.iEffSeg = X86_SREG_GS;
1837
1838 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1839 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1840}
1841
1842
1843/**
1844 * @opcode 0x66
1845 * @opmnemonic opsize
1846 * @openc prefix
1847 * @opmincpu 80386
1848 * @ophints harmless
1849 * @opgroup op_prefixes
1850 */
1851FNIEMOP_DEF(iemOp_op_size)
1852{
1853 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
1854 IEMOP_HLP_MIN_386();
1855
1856 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
1857 iemRecalEffOpSize(pVCpu);
1858
1859 /* For the 4 entry opcode tables, the operand prefix doesn't not count
1860 when REPZ or REPNZ are present. */
1861 if (pVCpu->iem.s.idxPrefix == 0)
1862 pVCpu->iem.s.idxPrefix = 1;
1863
1864 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1865 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1866}
1867
1868
1869/**
1870 * @opcode 0x67
1871 * @opmnemonic addrsize
1872 * @openc prefix
1873 * @opmincpu 80386
1874 * @ophints harmless
1875 * @opgroup op_prefixes
1876 */
1877FNIEMOP_DEF(iemOp_addr_size)
1878{
1879 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
1880 IEMOP_HLP_MIN_386();
1881
1882 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
1883 switch (pVCpu->iem.s.enmDefAddrMode)
1884 {
1885 case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1886 case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
1887 case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
1888 default: AssertFailed();
1889 }
1890
1891 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
1892 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
1893}
1894
1895
1896/**
1897 * @opcode 0x68
1898 */
1899FNIEMOP_DEF(iemOp_push_Iz)
1900{
1901 IEMOP_MNEMONIC(push_Iz, "push Iz");
1902 IEMOP_HLP_MIN_186();
1903 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
1904 switch (pVCpu->iem.s.enmEffOpSize)
1905 {
1906 case IEMMODE_16BIT:
1907 {
1908 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1910 IEM_MC_BEGIN(0,0);
1911 IEM_MC_PUSH_U16(u16Imm);
1912 IEM_MC_ADVANCE_RIP();
1913 IEM_MC_END();
1914 return VINF_SUCCESS;
1915 }
1916
1917 case IEMMODE_32BIT:
1918 {
1919 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
1920 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1921 IEM_MC_BEGIN(0,0);
1922 IEM_MC_PUSH_U32(u32Imm);
1923 IEM_MC_ADVANCE_RIP();
1924 IEM_MC_END();
1925 return VINF_SUCCESS;
1926 }
1927
1928 case IEMMODE_64BIT:
1929 {
1930 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
1931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1932 IEM_MC_BEGIN(0,0);
1933 IEM_MC_PUSH_U64(u64Imm);
1934 IEM_MC_ADVANCE_RIP();
1935 IEM_MC_END();
1936 return VINF_SUCCESS;
1937 }
1938
1939 IEM_NOT_REACHED_DEFAULT_CASE_RET();
1940 }
1941}
1942
1943
1944/**
1945 * @opcode 0x69
1946 */
1947FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
1948{
1949 IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
1950 IEMOP_HLP_MIN_186();
1951 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1952 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
1953
1954 switch (pVCpu->iem.s.enmEffOpSize)
1955 {
1956 case IEMMODE_16BIT:
1957 {
1958 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1959 {
1960 /* register operand */
1961 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1962 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1963
1964 IEM_MC_BEGIN(3, 1);
1965 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1966 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
1967 IEM_MC_ARG(uint32_t *, pEFlags, 2);
1968 IEM_MC_LOCAL(uint16_t, u16Tmp);
1969
1970 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1971 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
1972 IEM_MC_REF_EFLAGS(pEFlags);
1973 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
1974 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
1975
1976 IEM_MC_ADVANCE_RIP();
1977 IEM_MC_END();
1978 }
1979 else
1980 {
1981 /* memory operand */
1982 IEM_MC_BEGIN(3, 2);
1983 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
1984 IEM_MC_ARG(uint16_t, u16Src, 1);
1985 IEM_MC_ARG(uint32_t *, pEFlags, 2);
1986 IEM_MC_LOCAL(uint16_t, u16Tmp);
1987 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
1988
1989 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
1990 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
1991 IEM_MC_ASSIGN(u16Src, u16Imm);
1992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1993 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
1994 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
1995 IEM_MC_REF_EFLAGS(pEFlags);
1996 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
1997 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
1998
1999 IEM_MC_ADVANCE_RIP();
2000 IEM_MC_END();
2001 }
2002 return VINF_SUCCESS;
2003 }
2004
2005 case IEMMODE_32BIT:
2006 {
2007 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2008 {
2009 /* register operand */
2010 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2011 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2012
2013 IEM_MC_BEGIN(3, 1);
2014 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2015 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
2016 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2017 IEM_MC_LOCAL(uint32_t, u32Tmp);
2018
2019 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2020 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2021 IEM_MC_REF_EFLAGS(pEFlags);
2022 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2023 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2024
2025 IEM_MC_ADVANCE_RIP();
2026 IEM_MC_END();
2027 }
2028 else
2029 {
2030 /* memory operand */
2031 IEM_MC_BEGIN(3, 2);
2032 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2033 IEM_MC_ARG(uint32_t, u32Src, 1);
2034 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2035 IEM_MC_LOCAL(uint32_t, u32Tmp);
2036 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2037
2038 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2039 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2040 IEM_MC_ASSIGN(u32Src, u32Imm);
2041 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2042 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2043 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2044 IEM_MC_REF_EFLAGS(pEFlags);
2045 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2046 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2047
2048 IEM_MC_ADVANCE_RIP();
2049 IEM_MC_END();
2050 }
2051 return VINF_SUCCESS;
2052 }
2053
2054 case IEMMODE_64BIT:
2055 {
2056 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2057 {
2058 /* register operand */
2059 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2061
2062 IEM_MC_BEGIN(3, 1);
2063 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2064 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
2065 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2066 IEM_MC_LOCAL(uint64_t, u64Tmp);
2067
2068 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2069 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2070 IEM_MC_REF_EFLAGS(pEFlags);
2071 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2072 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2073
2074 IEM_MC_ADVANCE_RIP();
2075 IEM_MC_END();
2076 }
2077 else
2078 {
2079 /* memory operand */
2080 IEM_MC_BEGIN(3, 2);
2081 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2082 IEM_MC_ARG(uint64_t, u64Src, 1);
2083 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2084 IEM_MC_LOCAL(uint64_t, u64Tmp);
2085 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2086
2087 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
2088 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
2089 IEM_MC_ASSIGN(u64Src, u64Imm);
2090 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2091 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2092 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2093 IEM_MC_REF_EFLAGS(pEFlags);
2094 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2095 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2096
2097 IEM_MC_ADVANCE_RIP();
2098 IEM_MC_END();
2099 }
2100 return VINF_SUCCESS;
2101 }
2102 }
2103 AssertFailedReturn(VERR_IEM_IPE_9);
2104}
2105
2106
2107/**
2108 * @opcode 0x6a
2109 */
2110FNIEMOP_DEF(iemOp_push_Ib)
2111{
2112 IEMOP_MNEMONIC(push_Ib, "push Ib");
2113 IEMOP_HLP_MIN_186();
2114 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2117
2118 IEM_MC_BEGIN(0,0);
2119 switch (pVCpu->iem.s.enmEffOpSize)
2120 {
2121 case IEMMODE_16BIT:
2122 IEM_MC_PUSH_U16(i8Imm);
2123 break;
2124 case IEMMODE_32BIT:
2125 IEM_MC_PUSH_U32(i8Imm);
2126 break;
2127 case IEMMODE_64BIT:
2128 IEM_MC_PUSH_U64(i8Imm);
2129 break;
2130 }
2131 IEM_MC_ADVANCE_RIP();
2132 IEM_MC_END();
2133 return VINF_SUCCESS;
2134}
2135
2136
2137/**
2138 * @opcode 0x6b
2139 */
2140FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
2141{
2142 IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Iz; */
2143 IEMOP_HLP_MIN_186();
2144 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2145 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
2146
2147 switch (pVCpu->iem.s.enmEffOpSize)
2148 {
2149 case IEMMODE_16BIT:
2150 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2151 {
2152 /* register operand */
2153 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2155
2156 IEM_MC_BEGIN(3, 1);
2157 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2158 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
2159 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2160 IEM_MC_LOCAL(uint16_t, u16Tmp);
2161
2162 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2163 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2164 IEM_MC_REF_EFLAGS(pEFlags);
2165 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2166 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2167
2168 IEM_MC_ADVANCE_RIP();
2169 IEM_MC_END();
2170 }
2171 else
2172 {
2173 /* memory operand */
2174 IEM_MC_BEGIN(3, 2);
2175 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2176 IEM_MC_ARG(uint16_t, u16Src, 1);
2177 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2178 IEM_MC_LOCAL(uint16_t, u16Tmp);
2179 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2180
2181 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2182 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
2183 IEM_MC_ASSIGN(u16Src, u16Imm);
2184 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2185 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2186 IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
2187 IEM_MC_REF_EFLAGS(pEFlags);
2188 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
2189 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);
2190
2191 IEM_MC_ADVANCE_RIP();
2192 IEM_MC_END();
2193 }
2194 return VINF_SUCCESS;
2195
2196 case IEMMODE_32BIT:
2197 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2198 {
2199 /* register operand */
2200 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2202
2203 IEM_MC_BEGIN(3, 1);
2204 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2205 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
2206 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2207 IEM_MC_LOCAL(uint32_t, u32Tmp);
2208
2209 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2210 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2211 IEM_MC_REF_EFLAGS(pEFlags);
2212 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2213 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2214
2215 IEM_MC_ADVANCE_RIP();
2216 IEM_MC_END();
2217 }
2218 else
2219 {
2220 /* memory operand */
2221 IEM_MC_BEGIN(3, 2);
2222 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2223 IEM_MC_ARG(uint32_t, u32Src, 1);
2224 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2225 IEM_MC_LOCAL(uint32_t, u32Tmp);
2226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2227
2228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2229 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
2230 IEM_MC_ASSIGN(u32Src, u32Imm);
2231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2232 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2233 IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
2234 IEM_MC_REF_EFLAGS(pEFlags);
2235 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
2236 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);
2237
2238 IEM_MC_ADVANCE_RIP();
2239 IEM_MC_END();
2240 }
2241 return VINF_SUCCESS;
2242
2243 case IEMMODE_64BIT:
2244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2245 {
2246 /* register operand */
2247 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2249
2250 IEM_MC_BEGIN(3, 1);
2251 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2252 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
2253 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2254 IEM_MC_LOCAL(uint64_t, u64Tmp);
2255
2256 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2257 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2258 IEM_MC_REF_EFLAGS(pEFlags);
2259 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2260 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2261
2262 IEM_MC_ADVANCE_RIP();
2263 IEM_MC_END();
2264 }
2265 else
2266 {
2267 /* memory operand */
2268 IEM_MC_BEGIN(3, 2);
2269 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
2270 IEM_MC_ARG(uint64_t, u64Src, 1);
2271 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2272 IEM_MC_LOCAL(uint64_t, u64Tmp);
2273 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2274
2275 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2276 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
2277 IEM_MC_ASSIGN(u64Src, u64Imm);
2278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2279 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
2280 IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
2281 IEM_MC_REF_EFLAGS(pEFlags);
2282 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
2283 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);
2284
2285 IEM_MC_ADVANCE_RIP();
2286 IEM_MC_END();
2287 }
2288 return VINF_SUCCESS;
2289 }
2290 AssertFailedReturn(VERR_IEM_IPE_8);
2291}
2292
2293
2294/**
2295 * @opcode 0x6c
2296 */
2297FNIEMOP_DEF(iemOp_insb_Yb_DX)
2298{
2299 IEMOP_HLP_MIN_186();
2300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2301 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2302 {
2303 IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
2304 switch (pVCpu->iem.s.enmEffAddrMode)
2305 {
2306 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
2307 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
2308 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
2309 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2310 }
2311 }
2312 else
2313 {
2314 IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
2315 switch (pVCpu->iem.s.enmEffAddrMode)
2316 {
2317 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
2318 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
2319 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
2320 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2321 }
2322 }
2323}
2324
2325
2326/**
2327 * @opcode 0x6d
2328 */
2329FNIEMOP_DEF(iemOp_inswd_Yv_DX)
2330{
2331 IEMOP_HLP_MIN_186();
2332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2333 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2334 {
2335 IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
2336 switch (pVCpu->iem.s.enmEffOpSize)
2337 {
2338 case IEMMODE_16BIT:
2339 switch (pVCpu->iem.s.enmEffAddrMode)
2340 {
2341 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
2342 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
2343 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
2344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2345 }
2346 break;
2347 case IEMMODE_64BIT:
2348 case IEMMODE_32BIT:
2349 switch (pVCpu->iem.s.enmEffAddrMode)
2350 {
2351 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
2352 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
2353 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
2354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2355 }
2356 break;
2357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2358 }
2359 }
2360 else
2361 {
2362 IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
2363 switch (pVCpu->iem.s.enmEffOpSize)
2364 {
2365 case IEMMODE_16BIT:
2366 switch (pVCpu->iem.s.enmEffAddrMode)
2367 {
2368 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
2369 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
2370 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
2371 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2372 }
2373 break;
2374 case IEMMODE_64BIT:
2375 case IEMMODE_32BIT:
2376 switch (pVCpu->iem.s.enmEffAddrMode)
2377 {
2378 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
2379 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
2380 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
2381 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2382 }
2383 break;
2384 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2385 }
2386 }
2387}
2388
2389
2390/**
2391 * @opcode 0x6e
2392 */
2393FNIEMOP_DEF(iemOp_outsb_Yb_DX)
2394{
2395 IEMOP_HLP_MIN_186();
2396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2397 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
2398 {
2399 IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
2400 switch (pVCpu->iem.s.enmEffAddrMode)
2401 {
2402 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2403 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2404 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2405 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2406 }
2407 }
2408 else
2409 {
2410 IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
2411 switch (pVCpu->iem.s.enmEffAddrMode)
2412 {
2413 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
2414 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
2415 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
2416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2417 }
2418 }
2419}
2420
2421
2422/**
2423 * @opcode 0x6f
2424 */
2425FNIEMOP_DEF(iemOp_outswd_Yv_DX)
2426{
2427 IEMOP_HLP_MIN_186();
2428 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2429 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
2430 {
2431 IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
2432 switch (pVCpu->iem.s.enmEffOpSize)
2433 {
2434 case IEMMODE_16BIT:
2435 switch (pVCpu->iem.s.enmEffAddrMode)
2436 {
2437 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2438 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2439 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2441 }
2442 break;
2443 case IEMMODE_64BIT:
2444 case IEMMODE_32BIT:
2445 switch (pVCpu->iem.s.enmEffAddrMode)
2446 {
2447 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2448 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2449 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2451 }
2452 break;
2453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2454 }
2455 }
2456 else
2457 {
2458 IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
2459 switch (pVCpu->iem.s.enmEffOpSize)
2460 {
2461 case IEMMODE_16BIT:
2462 switch (pVCpu->iem.s.enmEffAddrMode)
2463 {
2464 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
2465 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
2466 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
2467 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2468 }
2469 break;
2470 case IEMMODE_64BIT:
2471 case IEMMODE_32BIT:
2472 switch (pVCpu->iem.s.enmEffAddrMode)
2473 {
2474 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
2475 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
2476 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
2477 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2478 }
2479 break;
2480 IEM_NOT_REACHED_DEFAULT_CASE_RET();
2481 }
2482 }
2483}
2484
2485
2486/**
2487 * @opcode 0x70
2488 */
2489FNIEMOP_DEF(iemOp_jo_Jb)
2490{
2491 IEMOP_MNEMONIC(jo_Jb, "jo Jb");
2492 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2494 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2495
2496 IEM_MC_BEGIN(0, 0);
2497 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2498 IEM_MC_REL_JMP_S8(i8Imm);
2499 } IEM_MC_ELSE() {
2500 IEM_MC_ADVANCE_RIP();
2501 } IEM_MC_ENDIF();
2502 IEM_MC_END();
2503 return VINF_SUCCESS;
2504}
2505
2506
2507/**
2508 * @opcode 0x71
2509 */
2510FNIEMOP_DEF(iemOp_jno_Jb)
2511{
2512 IEMOP_MNEMONIC(jno_Jb, "jno Jb");
2513 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2514 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2515 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2516
2517 IEM_MC_BEGIN(0, 0);
2518 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
2519 IEM_MC_ADVANCE_RIP();
2520 } IEM_MC_ELSE() {
2521 IEM_MC_REL_JMP_S8(i8Imm);
2522 } IEM_MC_ENDIF();
2523 IEM_MC_END();
2524 return VINF_SUCCESS;
2525}
2526
2527/**
2528 * @opcode 0x72
2529 */
2530FNIEMOP_DEF(iemOp_jc_Jb)
2531{
2532 IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
2533 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2534 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2535 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2536
2537 IEM_MC_BEGIN(0, 0);
2538 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2539 IEM_MC_REL_JMP_S8(i8Imm);
2540 } IEM_MC_ELSE() {
2541 IEM_MC_ADVANCE_RIP();
2542 } IEM_MC_ENDIF();
2543 IEM_MC_END();
2544 return VINF_SUCCESS;
2545}
2546
2547
2548/**
2549 * @opcode 0x73
2550 */
2551FNIEMOP_DEF(iemOp_jnc_Jb)
2552{
2553 IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
2554 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2556 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2557
2558 IEM_MC_BEGIN(0, 0);
2559 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
2560 IEM_MC_ADVANCE_RIP();
2561 } IEM_MC_ELSE() {
2562 IEM_MC_REL_JMP_S8(i8Imm);
2563 } IEM_MC_ENDIF();
2564 IEM_MC_END();
2565 return VINF_SUCCESS;
2566}
2567
2568
2569/**
2570 * @opcode 0x74
2571 */
2572FNIEMOP_DEF(iemOp_je_Jb)
2573{
2574 IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
2575 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2576 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2577 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2578
2579 IEM_MC_BEGIN(0, 0);
2580 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2581 IEM_MC_REL_JMP_S8(i8Imm);
2582 } IEM_MC_ELSE() {
2583 IEM_MC_ADVANCE_RIP();
2584 } IEM_MC_ENDIF();
2585 IEM_MC_END();
2586 return VINF_SUCCESS;
2587}
2588
2589
2590/**
2591 * @opcode 0x75
2592 */
2593FNIEMOP_DEF(iemOp_jne_Jb)
2594{
2595 IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
2596 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2598 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2599
2600 IEM_MC_BEGIN(0, 0);
2601 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
2602 IEM_MC_ADVANCE_RIP();
2603 } IEM_MC_ELSE() {
2604 IEM_MC_REL_JMP_S8(i8Imm);
2605 } IEM_MC_ENDIF();
2606 IEM_MC_END();
2607 return VINF_SUCCESS;
2608}
2609
2610
2611/**
2612 * @opcode 0x76
2613 */
2614FNIEMOP_DEF(iemOp_jbe_Jb)
2615{
2616 IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
2617 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2618 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2619 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2620
2621 IEM_MC_BEGIN(0, 0);
2622 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2623 IEM_MC_REL_JMP_S8(i8Imm);
2624 } IEM_MC_ELSE() {
2625 IEM_MC_ADVANCE_RIP();
2626 } IEM_MC_ENDIF();
2627 IEM_MC_END();
2628 return VINF_SUCCESS;
2629}
2630
2631
2632/**
2633 * @opcode 0x77
2634 */
2635FNIEMOP_DEF(iemOp_jnbe_Jb)
2636{
2637 IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
2638 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2639 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2640 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2641
2642 IEM_MC_BEGIN(0, 0);
2643 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
2644 IEM_MC_ADVANCE_RIP();
2645 } IEM_MC_ELSE() {
2646 IEM_MC_REL_JMP_S8(i8Imm);
2647 } IEM_MC_ENDIF();
2648 IEM_MC_END();
2649 return VINF_SUCCESS;
2650}
2651
2652
2653/**
2654 * @opcode 0x78
2655 */
2656FNIEMOP_DEF(iemOp_js_Jb)
2657{
2658 IEMOP_MNEMONIC(js_Jb, "js Jb");
2659 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2660 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2661 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2662
2663 IEM_MC_BEGIN(0, 0);
2664 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2665 IEM_MC_REL_JMP_S8(i8Imm);
2666 } IEM_MC_ELSE() {
2667 IEM_MC_ADVANCE_RIP();
2668 } IEM_MC_ENDIF();
2669 IEM_MC_END();
2670 return VINF_SUCCESS;
2671}
2672
2673
2674/**
2675 * @opcode 0x79
2676 */
2677FNIEMOP_DEF(iemOp_jns_Jb)
2678{
2679 IEMOP_MNEMONIC(jns_Jb, "jns Jb");
2680 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2681 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2682 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2683
2684 IEM_MC_BEGIN(0, 0);
2685 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
2686 IEM_MC_ADVANCE_RIP();
2687 } IEM_MC_ELSE() {
2688 IEM_MC_REL_JMP_S8(i8Imm);
2689 } IEM_MC_ENDIF();
2690 IEM_MC_END();
2691 return VINF_SUCCESS;
2692}
2693
2694
2695/**
2696 * @opcode 0x7a
2697 */
2698FNIEMOP_DEF(iemOp_jp_Jb)
2699{
2700 IEMOP_MNEMONIC(jp_Jb, "jp Jb");
2701 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2702 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2703 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2704
2705 IEM_MC_BEGIN(0, 0);
2706 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2707 IEM_MC_REL_JMP_S8(i8Imm);
2708 } IEM_MC_ELSE() {
2709 IEM_MC_ADVANCE_RIP();
2710 } IEM_MC_ENDIF();
2711 IEM_MC_END();
2712 return VINF_SUCCESS;
2713}
2714
2715
2716/**
2717 * @opcode 0x7b
2718 */
2719FNIEMOP_DEF(iemOp_jnp_Jb)
2720{
2721 IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
2722 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2723 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2724 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2725
2726 IEM_MC_BEGIN(0, 0);
2727 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
2728 IEM_MC_ADVANCE_RIP();
2729 } IEM_MC_ELSE() {
2730 IEM_MC_REL_JMP_S8(i8Imm);
2731 } IEM_MC_ENDIF();
2732 IEM_MC_END();
2733 return VINF_SUCCESS;
2734}
2735
2736
2737/**
2738 * @opcode 0x7c
2739 */
2740FNIEMOP_DEF(iemOp_jl_Jb)
2741{
2742 IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
2743 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2744 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2745 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2746
2747 IEM_MC_BEGIN(0, 0);
2748 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2749 IEM_MC_REL_JMP_S8(i8Imm);
2750 } IEM_MC_ELSE() {
2751 IEM_MC_ADVANCE_RIP();
2752 } IEM_MC_ENDIF();
2753 IEM_MC_END();
2754 return VINF_SUCCESS;
2755}
2756
2757
2758/**
2759 * @opcode 0x7d
2760 */
2761FNIEMOP_DEF(iemOp_jnl_Jb)
2762{
2763 IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
2764 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2765 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2766 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2767
2768 IEM_MC_BEGIN(0, 0);
2769 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
2770 IEM_MC_ADVANCE_RIP();
2771 } IEM_MC_ELSE() {
2772 IEM_MC_REL_JMP_S8(i8Imm);
2773 } IEM_MC_ENDIF();
2774 IEM_MC_END();
2775 return VINF_SUCCESS;
2776}
2777
2778
2779/**
2780 * @opcode 0x7e
2781 */
2782FNIEMOP_DEF(iemOp_jle_Jb)
2783{
2784 IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
2785 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2786 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2787 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2788
2789 IEM_MC_BEGIN(0, 0);
2790 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2791 IEM_MC_REL_JMP_S8(i8Imm);
2792 } IEM_MC_ELSE() {
2793 IEM_MC_ADVANCE_RIP();
2794 } IEM_MC_ENDIF();
2795 IEM_MC_END();
2796 return VINF_SUCCESS;
2797}
2798
2799
2800/**
2801 * @opcode 0x7f
2802 */
2803FNIEMOP_DEF(iemOp_jnle_Jb)
2804{
2805 IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
2806 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
2807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2808 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
2809
2810 IEM_MC_BEGIN(0, 0);
2811 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
2812 IEM_MC_ADVANCE_RIP();
2813 } IEM_MC_ELSE() {
2814 IEM_MC_REL_JMP_S8(i8Imm);
2815 } IEM_MC_ENDIF();
2816 IEM_MC_END();
2817 return VINF_SUCCESS;
2818}
2819
2820
2821/**
2822 * @opcode 0x80
2823 */
2824FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
2825{
2826 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2827 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2828 {
2829 case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
2830 case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
2831 case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
2832 case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
2833 case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
2834 case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
2835 case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
2836 case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
2837 }
2838 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2839
2840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2841 {
2842 /* register target */
2843 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2845 IEM_MC_BEGIN(3, 0);
2846 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2847 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2848 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2849
2850 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2851 IEM_MC_REF_EFLAGS(pEFlags);
2852 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2853
2854 IEM_MC_ADVANCE_RIP();
2855 IEM_MC_END();
2856 }
2857 else
2858 {
2859 /* memory target */
2860 uint32_t fAccess;
2861 if (pImpl->pfnLockedU8)
2862 fAccess = IEM_ACCESS_DATA_RW;
2863 else /* CMP */
2864 fAccess = IEM_ACCESS_DATA_R;
2865 IEM_MC_BEGIN(3, 2);
2866 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
2867 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2868 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2869
2870 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
2871 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
2872 IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
2873 if (pImpl->pfnLockedU8)
2874 IEMOP_HLP_DONE_DECODING();
2875 else
2876 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2877
2878 IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2879 IEM_MC_FETCH_EFLAGS(EFlags);
2880 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2881 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
2882 else
2883 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);
2884
2885 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
2886 IEM_MC_COMMIT_EFLAGS(EFlags);
2887 IEM_MC_ADVANCE_RIP();
2888 IEM_MC_END();
2889 }
2890 return VINF_SUCCESS;
2891}
2892
2893
2894/**
2895 * @opcode 0x81
2896 */
2897FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
2898{
2899 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
2900 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
2901 {
2902 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
2903 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
2904 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
2905 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
2906 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
2907 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
2908 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
2909 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
2910 }
2911 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
2912
2913 switch (pVCpu->iem.s.enmEffOpSize)
2914 {
2915 case IEMMODE_16BIT:
2916 {
2917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2918 {
2919 /* register target */
2920 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2921 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2922 IEM_MC_BEGIN(3, 0);
2923 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2924 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
2925 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2926
2927 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2928 IEM_MC_REF_EFLAGS(pEFlags);
2929 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2930
2931 IEM_MC_ADVANCE_RIP();
2932 IEM_MC_END();
2933 }
2934 else
2935 {
2936 /* memory target */
2937 uint32_t fAccess;
2938 if (pImpl->pfnLockedU16)
2939 fAccess = IEM_ACCESS_DATA_RW;
2940 else /* CMP, TEST */
2941 fAccess = IEM_ACCESS_DATA_R;
2942 IEM_MC_BEGIN(3, 2);
2943 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
2944 IEM_MC_ARG(uint16_t, u16Src, 1);
2945 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
2946 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
2947
2948 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
2949 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
2950 IEM_MC_ASSIGN(u16Src, u16Imm);
2951 if (pImpl->pfnLockedU16)
2952 IEMOP_HLP_DONE_DECODING();
2953 else
2954 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2955 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
2956 IEM_MC_FETCH_EFLAGS(EFlags);
2957 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
2958 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
2959 else
2960 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
2961
2962 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
2963 IEM_MC_COMMIT_EFLAGS(EFlags);
2964 IEM_MC_ADVANCE_RIP();
2965 IEM_MC_END();
2966 }
2967 break;
2968 }
2969
2970 case IEMMODE_32BIT:
2971 {
2972 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
2973 {
2974 /* register target */
2975 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
2976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
2977 IEM_MC_BEGIN(3, 0);
2978 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
2979 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
2980 IEM_MC_ARG(uint32_t *, pEFlags, 2);
2981
2982 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
2983 IEM_MC_REF_EFLAGS(pEFlags);
2984 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
2985 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
2986
2987 IEM_MC_ADVANCE_RIP();
2988 IEM_MC_END();
2989 }
2990 else
2991 {
2992 /* memory target */
2993 uint32_t fAccess;
2994 if (pImpl->pfnLockedU32)
2995 fAccess = IEM_ACCESS_DATA_RW;
2996 else /* CMP, TEST */
2997 fAccess = IEM_ACCESS_DATA_R;
2998 IEM_MC_BEGIN(3, 2);
2999 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3000 IEM_MC_ARG(uint32_t, u32Src, 1);
3001 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3003
3004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3005 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3006 IEM_MC_ASSIGN(u32Src, u32Imm);
3007 if (pImpl->pfnLockedU32)
3008 IEMOP_HLP_DONE_DECODING();
3009 else
3010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3011 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3012 IEM_MC_FETCH_EFLAGS(EFlags);
3013 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3014 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3015 else
3016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3017
3018 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3019 IEM_MC_COMMIT_EFLAGS(EFlags);
3020 IEM_MC_ADVANCE_RIP();
3021 IEM_MC_END();
3022 }
3023 break;
3024 }
3025
3026 case IEMMODE_64BIT:
3027 {
3028 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3029 {
3030 /* register target */
3031 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3033 IEM_MC_BEGIN(3, 0);
3034 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3035 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3037
3038 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3039 IEM_MC_REF_EFLAGS(pEFlags);
3040 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3041
3042 IEM_MC_ADVANCE_RIP();
3043 IEM_MC_END();
3044 }
3045 else
3046 {
3047 /* memory target */
3048 uint32_t fAccess;
3049 if (pImpl->pfnLockedU64)
3050 fAccess = IEM_ACCESS_DATA_RW;
3051 else /* CMP */
3052 fAccess = IEM_ACCESS_DATA_R;
3053 IEM_MC_BEGIN(3, 2);
3054 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3055 IEM_MC_ARG(uint64_t, u64Src, 1);
3056 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3058
3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3060 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3061 if (pImpl->pfnLockedU64)
3062 IEMOP_HLP_DONE_DECODING();
3063 else
3064 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3065 IEM_MC_ASSIGN(u64Src, u64Imm);
3066 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3067 IEM_MC_FETCH_EFLAGS(EFlags);
3068 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3069 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3070 else
3071 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3072
3073 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3074 IEM_MC_COMMIT_EFLAGS(EFlags);
3075 IEM_MC_ADVANCE_RIP();
3076 IEM_MC_END();
3077 }
3078 break;
3079 }
3080 }
3081 return VINF_SUCCESS;
3082}
3083
3084
3085/**
3086 * @opcode 0x82
3087 * @opmnemonic grp1_82
3088 * @opgroup op_groups
3089 */
3090FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3091{
3092 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3093 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3094}
3095
3096
3097/**
3098 * @opcode 0x83
3099 */
3100FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3101{
3102 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3103 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3104 {
3105 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3106 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3107 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3108 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3109 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3110 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3111 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3112 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3113 }
3114 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3115 to the 386 even if absent in the intel reference manuals and some
3116 3rd party opcode listings. */
3117 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3118
3119 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3120 {
3121 /*
3122 * Register target
3123 */
3124 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3125 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3126 switch (pVCpu->iem.s.enmEffOpSize)
3127 {
3128 case IEMMODE_16BIT:
3129 {
3130 IEM_MC_BEGIN(3, 0);
3131 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3132 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3133 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3134
3135 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3136 IEM_MC_REF_EFLAGS(pEFlags);
3137 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3138
3139 IEM_MC_ADVANCE_RIP();
3140 IEM_MC_END();
3141 break;
3142 }
3143
3144 case IEMMODE_32BIT:
3145 {
3146 IEM_MC_BEGIN(3, 0);
3147 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3148 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3149 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3150
3151 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3152 IEM_MC_REF_EFLAGS(pEFlags);
3153 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3154 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3155
3156 IEM_MC_ADVANCE_RIP();
3157 IEM_MC_END();
3158 break;
3159 }
3160
3161 case IEMMODE_64BIT:
3162 {
3163 IEM_MC_BEGIN(3, 0);
3164 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3165 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3166 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3167
3168 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3169 IEM_MC_REF_EFLAGS(pEFlags);
3170 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3171
3172 IEM_MC_ADVANCE_RIP();
3173 IEM_MC_END();
3174 break;
3175 }
3176 }
3177 }
3178 else
3179 {
3180 /*
3181 * Memory target.
3182 */
3183 uint32_t fAccess;
3184 if (pImpl->pfnLockedU16)
3185 fAccess = IEM_ACCESS_DATA_RW;
3186 else /* CMP */
3187 fAccess = IEM_ACCESS_DATA_R;
3188
3189 switch (pVCpu->iem.s.enmEffOpSize)
3190 {
3191 case IEMMODE_16BIT:
3192 {
3193 IEM_MC_BEGIN(3, 2);
3194 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3195 IEM_MC_ARG(uint16_t, u16Src, 1);
3196 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3198
3199 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3200 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3201 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3202 if (pImpl->pfnLockedU16)
3203 IEMOP_HLP_DONE_DECODING();
3204 else
3205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3206 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3207 IEM_MC_FETCH_EFLAGS(EFlags);
3208 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3209 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3210 else
3211 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3212
3213 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3214 IEM_MC_COMMIT_EFLAGS(EFlags);
3215 IEM_MC_ADVANCE_RIP();
3216 IEM_MC_END();
3217 break;
3218 }
3219
3220 case IEMMODE_32BIT:
3221 {
3222 IEM_MC_BEGIN(3, 2);
3223 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3224 IEM_MC_ARG(uint32_t, u32Src, 1);
3225 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3227
3228 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3229 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3230 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3231 if (pImpl->pfnLockedU32)
3232 IEMOP_HLP_DONE_DECODING();
3233 else
3234 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3235 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3236 IEM_MC_FETCH_EFLAGS(EFlags);
3237 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3238 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3239 else
3240 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3241
3242 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3243 IEM_MC_COMMIT_EFLAGS(EFlags);
3244 IEM_MC_ADVANCE_RIP();
3245 IEM_MC_END();
3246 break;
3247 }
3248
3249 case IEMMODE_64BIT:
3250 {
3251 IEM_MC_BEGIN(3, 2);
3252 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3253 IEM_MC_ARG(uint64_t, u64Src, 1);
3254 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3255 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3256
3257 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3258 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3259 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3260 if (pImpl->pfnLockedU64)
3261 IEMOP_HLP_DONE_DECODING();
3262 else
3263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3264 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3265 IEM_MC_FETCH_EFLAGS(EFlags);
3266 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3267 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3268 else
3269 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3270
3271 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3272 IEM_MC_COMMIT_EFLAGS(EFlags);
3273 IEM_MC_ADVANCE_RIP();
3274 IEM_MC_END();
3275 break;
3276 }
3277 }
3278 }
3279 return VINF_SUCCESS;
3280}
3281
3282
3283/**
3284 * @opcode 0x84
3285 */
3286FNIEMOP_DEF(iemOp_test_Eb_Gb)
3287{
3288 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3289 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3290 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3291}
3292
3293
3294/**
3295 * @opcode 0x85
3296 */
3297FNIEMOP_DEF(iemOp_test_Ev_Gv)
3298{
3299 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3300 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3301 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3302}
3303
3304
3305/**
3306 * @opcode 0x86
3307 */
3308FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3309{
3310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3311 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3312
3313 /*
3314 * If rm is denoting a register, no more instruction bytes.
3315 */
3316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3317 {
3318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3319
3320 IEM_MC_BEGIN(0, 2);
3321 IEM_MC_LOCAL(uint8_t, uTmp1);
3322 IEM_MC_LOCAL(uint8_t, uTmp2);
3323
3324 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3325 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3326 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3327 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3328
3329 IEM_MC_ADVANCE_RIP();
3330 IEM_MC_END();
3331 }
3332 else
3333 {
3334 /*
3335 * We're accessing memory.
3336 */
3337/** @todo the register must be committed separately! */
3338 IEM_MC_BEGIN(2, 2);
3339 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3340 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3341 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3342
3343 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3344 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3345 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3346 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
3347 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3348
3349 IEM_MC_ADVANCE_RIP();
3350 IEM_MC_END();
3351 }
3352 return VINF_SUCCESS;
3353}
3354
3355
3356/**
3357 * @opcode 0x87
3358 */
3359FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3360{
3361 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3362 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3363
3364 /*
3365 * If rm is denoting a register, no more instruction bytes.
3366 */
3367 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3368 {
3369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3370
3371 switch (pVCpu->iem.s.enmEffOpSize)
3372 {
3373 case IEMMODE_16BIT:
3374 IEM_MC_BEGIN(0, 2);
3375 IEM_MC_LOCAL(uint16_t, uTmp1);
3376 IEM_MC_LOCAL(uint16_t, uTmp2);
3377
3378 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3379 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3380 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3381 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3382
3383 IEM_MC_ADVANCE_RIP();
3384 IEM_MC_END();
3385 return VINF_SUCCESS;
3386
3387 case IEMMODE_32BIT:
3388 IEM_MC_BEGIN(0, 2);
3389 IEM_MC_LOCAL(uint32_t, uTmp1);
3390 IEM_MC_LOCAL(uint32_t, uTmp2);
3391
3392 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3393 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3394 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3395 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3396
3397 IEM_MC_ADVANCE_RIP();
3398 IEM_MC_END();
3399 return VINF_SUCCESS;
3400
3401 case IEMMODE_64BIT:
3402 IEM_MC_BEGIN(0, 2);
3403 IEM_MC_LOCAL(uint64_t, uTmp1);
3404 IEM_MC_LOCAL(uint64_t, uTmp2);
3405
3406 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3407 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3408 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3409 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3410
3411 IEM_MC_ADVANCE_RIP();
3412 IEM_MC_END();
3413 return VINF_SUCCESS;
3414
3415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3416 }
3417 }
3418 else
3419 {
3420 /*
3421 * We're accessing memory.
3422 */
3423 switch (pVCpu->iem.s.enmEffOpSize)
3424 {
3425/** @todo the register must be committed separately! */
3426 case IEMMODE_16BIT:
3427 IEM_MC_BEGIN(2, 2);
3428 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3429 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3430 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3431
3432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3433 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3434 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3435 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
3436 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3437
3438 IEM_MC_ADVANCE_RIP();
3439 IEM_MC_END();
3440 return VINF_SUCCESS;
3441
3442 case IEMMODE_32BIT:
3443 IEM_MC_BEGIN(2, 2);
3444 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3445 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3446 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3447
3448 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3449 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3450 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3451 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
3452 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3453
3454 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3455 IEM_MC_ADVANCE_RIP();
3456 IEM_MC_END();
3457 return VINF_SUCCESS;
3458
3459 case IEMMODE_64BIT:
3460 IEM_MC_BEGIN(2, 2);
3461 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3462 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3463 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3464
3465 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3466 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3467 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3468 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
3469 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3470
3471 IEM_MC_ADVANCE_RIP();
3472 IEM_MC_END();
3473 return VINF_SUCCESS;
3474
3475 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3476 }
3477 }
3478}
3479
3480
3481/**
3482 * @opcode 0x88
3483 */
3484FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3485{
3486 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3487
3488 uint8_t bRm;
3489 IEM_OPCODE_GET_NEXT_U8(&bRm);
3490
3491 /*
3492 * If rm is denoting a register, no more instruction bytes.
3493 */
3494 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3495 {
3496 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3497 IEM_MC_BEGIN(0, 1);
3498 IEM_MC_LOCAL(uint8_t, u8Value);
3499 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3500 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3501 IEM_MC_ADVANCE_RIP();
3502 IEM_MC_END();
3503 }
3504 else
3505 {
3506 /*
3507 * We're writing a register to memory.
3508 */
3509 IEM_MC_BEGIN(0, 2);
3510 IEM_MC_LOCAL(uint8_t, u8Value);
3511 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3512 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3513 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3514 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3515 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3516 IEM_MC_ADVANCE_RIP();
3517 IEM_MC_END();
3518 }
3519 return VINF_SUCCESS;
3520
3521}
3522
3523
3524/**
3525 * @opcode 0x89
3526 */
3527FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3528{
3529 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3530
3531 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3532
3533 /*
3534 * If rm is denoting a register, no more instruction bytes.
3535 */
3536 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3537 {
3538 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3539 switch (pVCpu->iem.s.enmEffOpSize)
3540 {
3541 case IEMMODE_16BIT:
3542 IEM_MC_BEGIN(0, 1);
3543 IEM_MC_LOCAL(uint16_t, u16Value);
3544 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3545 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3546 IEM_MC_ADVANCE_RIP();
3547 IEM_MC_END();
3548 break;
3549
3550 case IEMMODE_32BIT:
3551 IEM_MC_BEGIN(0, 1);
3552 IEM_MC_LOCAL(uint32_t, u32Value);
3553 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3554 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3555 IEM_MC_ADVANCE_RIP();
3556 IEM_MC_END();
3557 break;
3558
3559 case IEMMODE_64BIT:
3560 IEM_MC_BEGIN(0, 1);
3561 IEM_MC_LOCAL(uint64_t, u64Value);
3562 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3563 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3564 IEM_MC_ADVANCE_RIP();
3565 IEM_MC_END();
3566 break;
3567 }
3568 }
3569 else
3570 {
3571 /*
3572 * We're writing a register to memory.
3573 */
3574 switch (pVCpu->iem.s.enmEffOpSize)
3575 {
3576 case IEMMODE_16BIT:
3577 IEM_MC_BEGIN(0, 2);
3578 IEM_MC_LOCAL(uint16_t, u16Value);
3579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3580 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3582 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3583 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3584 IEM_MC_ADVANCE_RIP();
3585 IEM_MC_END();
3586 break;
3587
3588 case IEMMODE_32BIT:
3589 IEM_MC_BEGIN(0, 2);
3590 IEM_MC_LOCAL(uint32_t, u32Value);
3591 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3592 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3594 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3595 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3596 IEM_MC_ADVANCE_RIP();
3597 IEM_MC_END();
3598 break;
3599
3600 case IEMMODE_64BIT:
3601 IEM_MC_BEGIN(0, 2);
3602 IEM_MC_LOCAL(uint64_t, u64Value);
3603 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3604 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3605 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3606 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3607 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3608 IEM_MC_ADVANCE_RIP();
3609 IEM_MC_END();
3610 break;
3611 }
3612 }
3613 return VINF_SUCCESS;
3614}
3615
3616
3617/**
3618 * @opcode 0x8a
3619 */
3620FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3621{
3622 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3623
3624 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3625
3626 /*
3627 * If rm is denoting a register, no more instruction bytes.
3628 */
3629 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3630 {
3631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3632 IEM_MC_BEGIN(0, 1);
3633 IEM_MC_LOCAL(uint8_t, u8Value);
3634 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3635 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3636 IEM_MC_ADVANCE_RIP();
3637 IEM_MC_END();
3638 }
3639 else
3640 {
3641 /*
3642 * We're loading a register from memory.
3643 */
3644 IEM_MC_BEGIN(0, 2);
3645 IEM_MC_LOCAL(uint8_t, u8Value);
3646 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3647 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3648 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3649 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3650 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3651 IEM_MC_ADVANCE_RIP();
3652 IEM_MC_END();
3653 }
3654 return VINF_SUCCESS;
3655}
3656
3657
3658/**
3659 * @opcode 0x8b
3660 */
3661FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3662{
3663 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3664
3665 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3666
3667 /*
3668 * If rm is denoting a register, no more instruction bytes.
3669 */
3670 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3671 {
3672 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3673 switch (pVCpu->iem.s.enmEffOpSize)
3674 {
3675 case IEMMODE_16BIT:
3676 IEM_MC_BEGIN(0, 1);
3677 IEM_MC_LOCAL(uint16_t, u16Value);
3678 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3679 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3680 IEM_MC_ADVANCE_RIP();
3681 IEM_MC_END();
3682 break;
3683
3684 case IEMMODE_32BIT:
3685 IEM_MC_BEGIN(0, 1);
3686 IEM_MC_LOCAL(uint32_t, u32Value);
3687 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3688 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3689 IEM_MC_ADVANCE_RIP();
3690 IEM_MC_END();
3691 break;
3692
3693 case IEMMODE_64BIT:
3694 IEM_MC_BEGIN(0, 1);
3695 IEM_MC_LOCAL(uint64_t, u64Value);
3696 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3697 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3698 IEM_MC_ADVANCE_RIP();
3699 IEM_MC_END();
3700 break;
3701 }
3702 }
3703 else
3704 {
3705 /*
3706 * We're loading a register from memory.
3707 */
3708 switch (pVCpu->iem.s.enmEffOpSize)
3709 {
3710 case IEMMODE_16BIT:
3711 IEM_MC_BEGIN(0, 2);
3712 IEM_MC_LOCAL(uint16_t, u16Value);
3713 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3714 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3715 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3716 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3717 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3718 IEM_MC_ADVANCE_RIP();
3719 IEM_MC_END();
3720 break;
3721
3722 case IEMMODE_32BIT:
3723 IEM_MC_BEGIN(0, 2);
3724 IEM_MC_LOCAL(uint32_t, u32Value);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3726 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3727 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3728 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3729 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3730 IEM_MC_ADVANCE_RIP();
3731 IEM_MC_END();
3732 break;
3733
3734 case IEMMODE_64BIT:
3735 IEM_MC_BEGIN(0, 2);
3736 IEM_MC_LOCAL(uint64_t, u64Value);
3737 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3738 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3739 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3740 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3741 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
3742 IEM_MC_ADVANCE_RIP();
3743 IEM_MC_END();
3744 break;
3745 }
3746 }
3747 return VINF_SUCCESS;
3748}
3749
3750
3751/**
3752 * opcode 0x63
3753 * @todo Table fixme
3754 */
3755FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
3756{
3757 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
3758 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
3759 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
3760 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
3761 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
3762}
3763
3764
3765/**
3766 * @opcode 0x8c
3767 */
3768FNIEMOP_DEF(iemOp_mov_Ev_Sw)
3769{
3770 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
3771
3772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3773
3774 /*
3775 * Check that the destination register exists. The REX.R prefix is ignored.
3776 */
3777 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3778 if ( iSegReg > X86_SREG_GS)
3779 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3780
3781 /*
3782 * If rm is denoting a register, no more instruction bytes.
3783 * In that case, the operand size is respected and the upper bits are
3784 * cleared (starting with some pentium).
3785 */
3786 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3787 {
3788 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3789 switch (pVCpu->iem.s.enmEffOpSize)
3790 {
3791 case IEMMODE_16BIT:
3792 IEM_MC_BEGIN(0, 1);
3793 IEM_MC_LOCAL(uint16_t, u16Value);
3794 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3795 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3796 IEM_MC_ADVANCE_RIP();
3797 IEM_MC_END();
3798 break;
3799
3800 case IEMMODE_32BIT:
3801 IEM_MC_BEGIN(0, 1);
3802 IEM_MC_LOCAL(uint32_t, u32Value);
3803 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
3804 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3805 IEM_MC_ADVANCE_RIP();
3806 IEM_MC_END();
3807 break;
3808
3809 case IEMMODE_64BIT:
3810 IEM_MC_BEGIN(0, 1);
3811 IEM_MC_LOCAL(uint64_t, u64Value);
3812 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
3813 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3814 IEM_MC_ADVANCE_RIP();
3815 IEM_MC_END();
3816 break;
3817 }
3818 }
3819 else
3820 {
3821 /*
3822 * We're saving the register to memory. The access is word sized
3823 * regardless of operand size prefixes.
3824 */
3825#if 0 /* not necessary */
3826 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3827#endif
3828 IEM_MC_BEGIN(0, 2);
3829 IEM_MC_LOCAL(uint16_t, u16Value);
3830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3831 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3832 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3833 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
3834 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3835 IEM_MC_ADVANCE_RIP();
3836 IEM_MC_END();
3837 }
3838 return VINF_SUCCESS;
3839}
3840
3841
3842
3843
3844/**
3845 * @opcode 0x8d
3846 */
3847FNIEMOP_DEF(iemOp_lea_Gv_M)
3848{
3849 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
3850 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3851 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3852 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
3853
3854 switch (pVCpu->iem.s.enmEffOpSize)
3855 {
3856 case IEMMODE_16BIT:
3857 IEM_MC_BEGIN(0, 2);
3858 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3859 IEM_MC_LOCAL(uint16_t, u16Cast);
3860 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3861 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3862 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
3863 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
3864 IEM_MC_ADVANCE_RIP();
3865 IEM_MC_END();
3866 return VINF_SUCCESS;
3867
3868 case IEMMODE_32BIT:
3869 IEM_MC_BEGIN(0, 2);
3870 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3871 IEM_MC_LOCAL(uint32_t, u32Cast);
3872 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3873 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3874 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
3875 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
3876 IEM_MC_ADVANCE_RIP();
3877 IEM_MC_END();
3878 return VINF_SUCCESS;
3879
3880 case IEMMODE_64BIT:
3881 IEM_MC_BEGIN(0, 1);
3882 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
3883 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
3884 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3885 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
3886 IEM_MC_ADVANCE_RIP();
3887 IEM_MC_END();
3888 return VINF_SUCCESS;
3889 }
3890 AssertFailedReturn(VERR_IEM_IPE_7);
3891}
3892
3893
3894/**
3895 * @opcode 0x8e
3896 */
3897FNIEMOP_DEF(iemOp_mov_Sw_Ev)
3898{
3899 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
3900
3901 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3902
3903 /*
3904 * The practical operand size is 16-bit.
3905 */
3906#if 0 /* not necessary */
3907 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
3908#endif
3909
3910 /*
3911 * Check that the destination register exists and can be used with this
3912 * instruction. The REX.R prefix is ignored.
3913 */
3914 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
3915 if ( iSegReg == X86_SREG_CS
3916 || iSegReg > X86_SREG_GS)
3917 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
3918
3919 /*
3920 * If rm is denoting a register, no more instruction bytes.
3921 */
3922 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3923 {
3924 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3925 IEM_MC_BEGIN(2, 0);
3926 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3927 IEM_MC_ARG(uint16_t, u16Value, 1);
3928 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3929 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3930 IEM_MC_END();
3931 }
3932 else
3933 {
3934 /*
3935 * We're loading the register from memory. The access is word sized
3936 * regardless of operand size prefixes.
3937 */
3938 IEM_MC_BEGIN(2, 1);
3939 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
3940 IEM_MC_ARG(uint16_t, u16Value, 1);
3941 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3942 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3943 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3944 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3945 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
3946 IEM_MC_END();
3947 }
3948 return VINF_SUCCESS;
3949}
3950
3951
/** Opcode 0x8f /0.
 *
 * pop Ev - pop a word/dword/qword off the stack into a register or memory
 * operand.  Only modrm.reg == 0 reaches this; other values are XOP (see
 * iemOp_Grp1A__xop).
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument (2/4/8) is the amount rSP is biased by during the EA
       calculation, emulating the "RSP incremented first" rule. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy so nothing is committed if the store faults. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    /* Commit RSP and advance RIP only on full success. */
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4046
4047
4048/**
4049 * @opcode 0x8f
4050 */
4051FNIEMOP_DEF(iemOp_Grp1A__xop)
4052{
4053 /*
4054 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4055 * three byte VEX prefix, except that the mmmmm field cannot have the values
4056 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4057 */
4058 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4059 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4060 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4061
4062 IEMOP_MNEMONIC(xop, "xop");
4063 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4064 {
4065 /** @todo Test when exctly the XOP conformance checks kick in during
4066 * instruction decoding and fetching (using \#PF). */
4067 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4068 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4069 if ( ( pVCpu->iem.s.fPrefixes
4070 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4071 == 0)
4072 {
4073 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4074 if (bXop2 & 0x80 /* XOP.W */)
4075 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4076 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
4077 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
4078 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
4079 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4080 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4081 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4082
4083 /** @todo XOP: Just use new tables and decoders. */
4084 switch (bRm & 0x1f)
4085 {
4086 case 8: /* xop opcode map 8. */
4087 IEMOP_BITCH_ABOUT_STUB();
4088 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4089
4090 case 9: /* xop opcode map 9. */
4091 IEMOP_BITCH_ABOUT_STUB();
4092 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4093
4094 case 10: /* xop opcode map 10. */
4095 IEMOP_BITCH_ABOUT_STUB();
4096 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4097
4098 default:
4099 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4100 return IEMOP_RAISE_INVALID_OPCODE();
4101 }
4102 }
4103 else
4104 Log(("XOP: Invalid prefix mix!\n"));
4105 }
4106 else
4107 Log(("XOP: XOP support disabled!\n"));
4108 return IEMOP_RAISE_INVALID_OPCODE();
4109}
4110
4111
4112/**
4113 * Common 'xchg reg,rAX' helper.
4114 */
4115FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4116{
4117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4118
4119 iReg |= pVCpu->iem.s.uRexB;
4120 switch (pVCpu->iem.s.enmEffOpSize)
4121 {
4122 case IEMMODE_16BIT:
4123 IEM_MC_BEGIN(0, 2);
4124 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4125 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4126 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4127 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4128 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4129 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4130 IEM_MC_ADVANCE_RIP();
4131 IEM_MC_END();
4132 return VINF_SUCCESS;
4133
4134 case IEMMODE_32BIT:
4135 IEM_MC_BEGIN(0, 2);
4136 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4137 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4138 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4139 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4140 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4141 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4142 IEM_MC_ADVANCE_RIP();
4143 IEM_MC_END();
4144 return VINF_SUCCESS;
4145
4146 case IEMMODE_64BIT:
4147 IEM_MC_BEGIN(0, 2);
4148 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4149 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4150 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4151 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4152 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4153 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4154 IEM_MC_ADVANCE_RIP();
4155 IEM_MC_END();
4156 return VINF_SUCCESS;
4157
4158 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4159 }
4160}
4161
4162
4163/**
4164 * @opcode 0x90
4165 */
4166FNIEMOP_DEF(iemOp_nop)
4167{
4168 /* R8/R8D and RAX/EAX can be exchanged. */
4169 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4170 {
4171 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4172 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4173 }
4174
4175 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4176 IEMOP_MNEMONIC(pause, "pause");
4177 else
4178 IEMOP_MNEMONIC(nop, "nop");
4179 IEM_MC_BEGIN(0, 0);
4180 IEM_MC_ADVANCE_RIP();
4181 IEM_MC_END();
4182 return VINF_SUCCESS;
4183}
4184
4185
4186/**
4187 * @opcode 0x91
4188 */
4189FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4190{
4191 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4192 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4193}
4194
4195
4196/**
4197 * @opcode 0x92
4198 */
4199FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4200{
4201 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4202 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4203}
4204
4205
4206/**
4207 * @opcode 0x93
4208 */
4209FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4210{
4211 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4212 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4213}
4214
4215
4216/**
4217 * @opcode 0x94
4218 */
4219FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4220{
4221 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4222 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4223}
4224
4225
4226/**
4227 * @opcode 0x95
4228 */
4229FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4230{
4231 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4232 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4233}
4234
4235
4236/**
4237 * @opcode 0x96
4238 */
4239FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4240{
4241 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4242 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4243}
4244
4245
4246/**
4247 * @opcode 0x97
4248 */
4249FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4250{
4251 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4252 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4253}
4254
4255
4256/**
4257 * @opcode 0x98
4258 */
4259FNIEMOP_DEF(iemOp_cbw)
4260{
4261 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4262 switch (pVCpu->iem.s.enmEffOpSize)
4263 {
4264 case IEMMODE_16BIT:
4265 IEMOP_MNEMONIC(cbw, "cbw");
4266 IEM_MC_BEGIN(0, 1);
4267 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4268 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4269 } IEM_MC_ELSE() {
4270 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4271 } IEM_MC_ENDIF();
4272 IEM_MC_ADVANCE_RIP();
4273 IEM_MC_END();
4274 return VINF_SUCCESS;
4275
4276 case IEMMODE_32BIT:
4277 IEMOP_MNEMONIC(cwde, "cwde");
4278 IEM_MC_BEGIN(0, 1);
4279 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4280 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4281 } IEM_MC_ELSE() {
4282 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4283 } IEM_MC_ENDIF();
4284 IEM_MC_ADVANCE_RIP();
4285 IEM_MC_END();
4286 return VINF_SUCCESS;
4287
4288 case IEMMODE_64BIT:
4289 IEMOP_MNEMONIC(cdqe, "cdqe");
4290 IEM_MC_BEGIN(0, 1);
4291 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4292 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4293 } IEM_MC_ELSE() {
4294 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4295 } IEM_MC_ENDIF();
4296 IEM_MC_ADVANCE_RIP();
4297 IEM_MC_END();
4298 return VINF_SUCCESS;
4299
4300 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4301 }
4302}
4303
4304
4305/**
4306 * @opcode 0x99
4307 */
4308FNIEMOP_DEF(iemOp_cwd)
4309{
4310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4311 switch (pVCpu->iem.s.enmEffOpSize)
4312 {
4313 case IEMMODE_16BIT:
4314 IEMOP_MNEMONIC(cwd, "cwd");
4315 IEM_MC_BEGIN(0, 1);
4316 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4317 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4318 } IEM_MC_ELSE() {
4319 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4320 } IEM_MC_ENDIF();
4321 IEM_MC_ADVANCE_RIP();
4322 IEM_MC_END();
4323 return VINF_SUCCESS;
4324
4325 case IEMMODE_32BIT:
4326 IEMOP_MNEMONIC(cdq, "cdq");
4327 IEM_MC_BEGIN(0, 1);
4328 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4329 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4330 } IEM_MC_ELSE() {
4331 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4332 } IEM_MC_ENDIF();
4333 IEM_MC_ADVANCE_RIP();
4334 IEM_MC_END();
4335 return VINF_SUCCESS;
4336
4337 case IEMMODE_64BIT:
4338 IEMOP_MNEMONIC(cqo, "cqo");
4339 IEM_MC_BEGIN(0, 1);
4340 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4341 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4342 } IEM_MC_ELSE() {
4343 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4344 } IEM_MC_ENDIF();
4345 IEM_MC_ADVANCE_RIP();
4346 IEM_MC_END();
4347 return VINF_SUCCESS;
4348
4349 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4350 }
4351}
4352
4353
4354/**
4355 * @opcode 0x9a
4356 */
4357FNIEMOP_DEF(iemOp_call_Ap)
4358{
4359 IEMOP_MNEMONIC(call_Ap, "call Ap");
4360 IEMOP_HLP_NO_64BIT();
4361
4362 /* Decode the far pointer address and pass it on to the far call C implementation. */
4363 uint32_t offSeg;
4364 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4365 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4366 else
4367 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4368 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4369 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4370 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4371}
4372
4373
/** Opcode 0x9b. (aka fwait)
 *
 * wait/fwait - checks for pending FPU exceptions (may raise \#NM or \#MF via
 * the macros below) and otherwise does nothing.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4387
4388
4389/**
4390 * @opcode 0x9c
4391 */
4392FNIEMOP_DEF(iemOp_pushf_Fv)
4393{
4394 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4395 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4396 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4397}
4398
4399
4400/**
4401 * @opcode 0x9d
 * popf/popfd/popfq - pop into the flags register; deferred to the C
 * implementation (IOPL/VM86/privilege handling lives there). 64-bit mode
 * defaults to a 64-bit operand size.
4402 */
4403FNIEMOP_DEF(iemOp_popf_Fv)
4404{
4405    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4406    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4407    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4408}
4409
4410
4411/**
4412 * @opcode 0x9e
 * sahf - store AH into the low byte of EFLAGS (SF, ZF, AF, PF, CF).
 * In 64-bit mode this is only valid when CPUID reports LAHF/SAHF support.
4413 */
4414FNIEMOP_DEF(iemOp_sahf)
4415{
4416    IEMOP_MNEMONIC(sahf, "sahf");
4417    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4418    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4419        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4420        return IEMOP_RAISE_INVALID_OPCODE();
4421    IEM_MC_BEGIN(0, 2);
4422    IEM_MC_LOCAL(uint32_t, u32Flags);
4423    IEM_MC_LOCAL(uint32_t, EFlags);
4424    IEM_MC_FETCH_EFLAGS(EFlags);
    /* X86_GREG_xSP indexes AH here because the 8-bit register file maps
       indices 4-7 to AH/CH/DH/BH when no REX prefix is present. */
4425    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the status flags SAHF may set, preserve EFLAGS bits 8+,
       and force the always-one reserved bit 1 (X86_EFL_1). */
4426    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4427    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4428    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4429    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4430    IEM_MC_COMMIT_EFLAGS(EFlags);
4431    IEM_MC_ADVANCE_RIP();
4432    IEM_MC_END();
4433    return VINF_SUCCESS;
4434}
4435
4436
4437/**
4438 * @opcode 0x9f
 * lahf - load the low byte of EFLAGS into AH.
 * In 64-bit mode this is only valid when CPUID reports LAHF/SAHF support.
4439 */
4440FNIEMOP_DEF(iemOp_lahf)
4441{
4442    IEMOP_MNEMONIC(lahf, "lahf");
4443    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4444    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4445        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4446        return IEMOP_RAISE_INVALID_OPCODE();
4447    IEM_MC_BEGIN(0, 1);
4448    IEM_MC_LOCAL(uint8_t, u8Flags);
4449    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    /* X86_GREG_xSP addresses AH in the legacy (no-REX) 8-bit register map. */
4450    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4451    IEM_MC_ADVANCE_RIP();
4452    IEM_MC_END();
4453    return VINF_SUCCESS;
4454}
4455
4456
4457/**
4458 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4459 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
4460 * prefixes.  Will return on failures.
 *
 * The immediate offset width follows the effective *address* size (16/32/64
 * bits) and is always zero extended to 64 bits.  The lock-prefix check runs
 * after the immediate has been consumed so decoding stays in sync.
 *
4461 * @param a_GCPtrMemOff The variable to store the offset in.
4462 */
4463#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4464    do \
4465    { \
4466        switch (pVCpu->iem.s.enmEffAddrMode) \
4467        { \
4468            case IEMMODE_16BIT: \
4469                IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4470                break; \
4471            case IEMMODE_32BIT: \
4472                IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4473                break; \
4474            case IEMMODE_64BIT: \
4475                IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4476                break; \
4477            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4478        } \
4479        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4480    } while (0)
4481
4482/**
4483 * @opcode 0xa0
 * mov AL,Ob - load AL from a moffs8 absolute memory operand in the
 * effective segment (DS unless overridden).
4484 */
4485FNIEMOP_DEF(iemOp_mov_AL_Ob)
4486{
4487    /*
4488     * Get the offset and fend of lock prefixes.
4489     */
4490    RTGCPTR GCPtrMemOff;
4491    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4492
4493    /*
4494     * Fetch AL.
4495     */
4496    IEM_MC_BEGIN(0,1);
4497    IEM_MC_LOCAL(uint8_t, u8Tmp);
4498    IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4499    IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4500    IEM_MC_ADVANCE_RIP();
4501    IEM_MC_END();
4502    return VINF_SUCCESS;
4503}
4504
4505
4506/**
4507 * @opcode 0xa1
 * mov rAX,Ov - load AX/EAX/RAX from a moffs absolute memory operand,
 * switching on the effective operand size.
4508 */
4509FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4510{
4511    /*
4512     * Get the offset and fend of lock prefixes.
4513     */
4514    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4515    RTGCPTR GCPtrMemOff;
4516    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4517
4518    /*
4519     * Fetch rAX.
4520     */
4521    switch (pVCpu->iem.s.enmEffOpSize)
4522    {
4523        case IEMMODE_16BIT:
4524            IEM_MC_BEGIN(0,1);
4525            IEM_MC_LOCAL(uint16_t, u16Tmp);
4526            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4527            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4528            IEM_MC_ADVANCE_RIP();
4529            IEM_MC_END();
4530            return VINF_SUCCESS;
4531
4532        case IEMMODE_32BIT:
4533            IEM_MC_BEGIN(0,1);
4534            IEM_MC_LOCAL(uint32_t, u32Tmp);
4535            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4536            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4537            IEM_MC_ADVANCE_RIP();
4538            IEM_MC_END();
4539            return VINF_SUCCESS;
4540
4541        case IEMMODE_64BIT:
4542            IEM_MC_BEGIN(0,1);
4543            IEM_MC_LOCAL(uint64_t, u64Tmp);
4544            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4545            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4546            IEM_MC_ADVANCE_RIP();
4547            IEM_MC_END();
4548            return VINF_SUCCESS;
4549
4550        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4551    }
4552}
4553
4554
4555/**
4556 * @opcode 0xa2
 * mov Ob,AL - store AL to a moffs8 absolute memory operand in the
 * effective segment (DS unless overridden).
4557 */
4558FNIEMOP_DEF(iemOp_mov_Ob_AL)
4559{
4560    /*
4561     * Get the offset and fend of lock prefixes.
4562     */
4563    RTGCPTR GCPtrMemOff;
4564    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4565
4566    /*
4567     * Store AL.
4568     */
4569    IEM_MC_BEGIN(0,1);
4570    IEM_MC_LOCAL(uint8_t, u8Tmp);
4571    IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4572    IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4573    IEM_MC_ADVANCE_RIP();
4574    IEM_MC_END();
4575    return VINF_SUCCESS;
4576}
4577
4578
4579/**
4580 * @opcode 0xa3
 * mov Ov,rAX - store AX/EAX/RAX to a moffs absolute memory operand,
 * switching on the effective operand size.
4581 */
4582FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4583{
4584    /*
4585     * Get the offset and fend of lock prefixes.
4586     */
4587    RTGCPTR GCPtrMemOff;
4588    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4589
4590    /*
4591     * Store rAX.
4592     */
4593    switch (pVCpu->iem.s.enmEffOpSize)
4594    {
4595        case IEMMODE_16BIT:
4596            IEM_MC_BEGIN(0,1);
4597            IEM_MC_LOCAL(uint16_t, u16Tmp);
4598            IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4599            IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4600            IEM_MC_ADVANCE_RIP();
4601            IEM_MC_END();
4602            return VINF_SUCCESS;
4603
4604        case IEMMODE_32BIT:
4605            IEM_MC_BEGIN(0,1);
4606            IEM_MC_LOCAL(uint32_t, u32Tmp);
4607            IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4608            IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4609            IEM_MC_ADVANCE_RIP();
4610            IEM_MC_END();
4611            return VINF_SUCCESS;
4612
4613        case IEMMODE_64BIT:
4614            IEM_MC_BEGIN(0,1);
4615            IEM_MC_LOCAL(uint64_t, u64Tmp);
4616            IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4617            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4618            IEM_MC_ADVANCE_RIP();
4619            IEM_MC_END();
4620            return VINF_SUCCESS;
4621
4622        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4623    }
4624}
4625
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-repeated MOVS step: load ValBits bits from [seg:rSI], store
 * them to [ES:rDI], then advance (or, with EFLAGS.DF set, retreat) both rSI
 * and rDI by the operand byte count.  Addresses are zero extended from the
 * AddrBits-wide registers. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4644
4645/**
4646 * @opcode 0xa4
 * movsb - byte string move.  A rep/repne prefix routes to the looping C
 * implementation; otherwise one step is emitted via IEM_MOVS_CASE.
4647 */
4648FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4649{
4650    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4651
4652    /*
4653     * Use the C implementation if a repeat prefix is encountered.
4654     */
4655    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4656    {
4657        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4658        switch (pVCpu->iem.s.enmEffAddrMode)
4659        {
4660            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4661            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4662            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4663            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4664        }
4665    }
4666    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4667
4668    /*
4669     * Sharing case implementation with movs[wdq] below.
4670     */
4671    switch (pVCpu->iem.s.enmEffAddrMode)
4672    {
4673        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4674        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4675        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4676        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4677    }
4678    return VINF_SUCCESS;
4679}
4680
4681
4682/**
4683 * @opcode 0xa5
 * movsw/movsd/movsq - word/dword/qword string move.  A rep/repne prefix
 * routes to the looping C implementation chosen by operand and address
 * size; otherwise one step is emitted via IEM_MOVS_CASE.
4684 */
4685FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4686{
4687    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4688
4689    /*
4690     * Use the C implementation if a repeat prefix is encountered.
4691     */
4692    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4693    {
4694        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4695        switch (pVCpu->iem.s.enmEffOpSize)
4696        {
4697            case IEMMODE_16BIT:
4698                switch (pVCpu->iem.s.enmEffAddrMode)
4699                {
4700                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4701                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4702                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4703                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4704                }
4705                break;
4706            case IEMMODE_32BIT:
4707                switch (pVCpu->iem.s.enmEffAddrMode)
4708                {
4709                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4710                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4711                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4712                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4713                }
                /* no break: every path in the switch above returns. */
4714            case IEMMODE_64BIT:
4715                switch (pVCpu->iem.s.enmEffAddrMode)
4716                {
                    /* 16-bit addressing cannot be encoded in 64-bit mode. */
4717                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4718                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4719                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4720                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4721                }
4722            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4723        }
4724    }
4725    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4726
4727    /*
4728     * Annoying double switch here.
4729     * Using ugly macro for implementing the cases, sharing it with movsb.
4730     */
4731    switch (pVCpu->iem.s.enmEffOpSize)
4732    {
4733        case IEMMODE_16BIT:
4734            switch (pVCpu->iem.s.enmEffAddrMode)
4735            {
4736                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4737                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4738                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4739                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4740            }
4741            break;
4742
4743        case IEMMODE_32BIT:
4744            switch (pVCpu->iem.s.enmEffAddrMode)
4745            {
4746                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4747                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4748                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4749                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4750            }
4751            break;
4752
4753        case IEMMODE_64BIT:
4754            switch (pVCpu->iem.s.enmEffAddrMode)
4755            {
4756                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4757                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4758                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4759                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4760            }
4761            break;
4762        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4763    }
4764    return VINF_SUCCESS;
4765}
4766
4767#undef IEM_MOVS_CASE
4768
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits one non-repeated CMPS step: fetch ValBits bits from [seg:rSI] and
 * [ES:rDI], compare them via iemAImpl_cmp_uNN (updating EFLAGS only), then
 * advance or, with EFLAGS.DF set, retreat both index registers by the
 * operand byte count. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
4796/**
4797 * @opcode 0xa6
 * cmpsb - byte string compare.  repe and repne prefixes route to separate
 * looping C implementations; otherwise one step via IEM_CMPS_CASE.
4798 */
4799FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
4800{
4801    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4802
4803    /*
4804     * Use the C implementation if a repeat prefix is encountered.
4805     */
4806    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4807    {
4808        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
4809        switch (pVCpu->iem.s.enmEffAddrMode)
4810        {
4811            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4812            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4813            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4814            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4815        }
4816    }
4817    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4818    {
4819        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
4820        switch (pVCpu->iem.s.enmEffAddrMode)
4821        {
4822            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
4823            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
4824            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
4825            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4826        }
4827    }
4828    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
4829
4830    /*
4831     * Sharing case implementation with cmps[wdq] below.
4832     */
4833    switch (pVCpu->iem.s.enmEffAddrMode)
4834    {
4835        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
4836        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
4837        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
4838        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4839    }
4840    return VINF_SUCCESS;
4841
4842}
4843
4844
4845/**
4846 * @opcode 0xa7
 * cmpsw/cmpsd/cmpsq - word/dword/qword string compare.  repe and repne
 * prefixes route to separate looping C implementations chosen by operand
 * and address size; otherwise one step via IEM_CMPS_CASE.
4847 */
4848FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4849{
4850    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4851
4852    /*
4853     * Use the C implementation if a repeat prefix is encountered.
4854     */
4855    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4856    {
4857        IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4858        switch (pVCpu->iem.s.enmEffOpSize)
4859        {
4860            case IEMMODE_16BIT:
4861                switch (pVCpu->iem.s.enmEffAddrMode)
4862                {
4863                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4864                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4865                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4866                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4867                }
4868                break;
4869            case IEMMODE_32BIT:
4870                switch (pVCpu->iem.s.enmEffAddrMode)
4871                {
4872                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4873                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4874                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4875                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4876                }
                /* no break: every path in the switch above returns. */
4877            case IEMMODE_64BIT:
4878                switch (pVCpu->iem.s.enmEffAddrMode)
4879                {
                    /* 16-bit addressing cannot be encoded in 64-bit mode. */
4880                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4881                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4882                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4883                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4884                }
4885            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4886        }
4887    }
4888
4889    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4890    {
4891        IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4892        switch (pVCpu->iem.s.enmEffOpSize)
4893        {
4894            case IEMMODE_16BIT:
4895                switch (pVCpu->iem.s.enmEffAddrMode)
4896                {
4897                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4898                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4899                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4900                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4901                }
4902                break;
4903            case IEMMODE_32BIT:
4904                switch (pVCpu->iem.s.enmEffAddrMode)
4905                {
4906                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4907                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4908                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4909                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4910                }
                /* no break: every path in the switch above returns. */
4911            case IEMMODE_64BIT:
4912                switch (pVCpu->iem.s.enmEffAddrMode)
4913                {
                    /* 16-bit addressing cannot be encoded in 64-bit mode. */
4914                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4915                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4916                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4917                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
4918                }
4919            IEM_NOT_REACHED_DEFAULT_CASE_RET();
4920        }
4921    }
4922
4923    IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4924
4925    /*
4926     * Annoying double switch here.
4927     * Using ugly macro for implementing the cases, sharing it with cmpsb.
4928     */
4929    switch (pVCpu->iem.s.enmEffOpSize)
4930    {
4931        case IEMMODE_16BIT:
4932            switch (pVCpu->iem.s.enmEffAddrMode)
4933            {
4934                case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4935                case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4936                case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4937                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4938            }
4939            break;
4940
4941        case IEMMODE_32BIT:
4942            switch (pVCpu->iem.s.enmEffAddrMode)
4943            {
4944                case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4945                case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4946                case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4947                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4948            }
4949            break;
4950
4951        case IEMMODE_64BIT:
4952            switch (pVCpu->iem.s.enmEffAddrMode)
4953            {
4954                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4955                case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4956                case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4957                IEM_NOT_REACHED_DEFAULT_CASE_RET();
4958            }
4959            break;
4960        IEM_NOT_REACHED_DEFAULT_CASE_RET();
4961    }
4962    return VINF_SUCCESS;
4963
4964}
4965
4966#undef IEM_CMPS_CASE
4967
4968/**
4969 * @opcode 0xa8
 * test AL,Ib - AND AL with an immediate byte, setting flags only.
 * AF is undefined per the architecture, hence the verification exemption.
4970 */
4971FNIEMOP_DEF(iemOp_test_AL_Ib)
4972{
4973    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
4974    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4975    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
4976}
4977
4978
4979/**
4980 * @opcode 0xa9
 * test rAX,Iz - AND AX/EAX/RAX with an immediate, setting flags only.
 * AF is undefined per the architecture, hence the verification exemption.
4981 */
4982FNIEMOP_DEF(iemOp_test_eAX_Iz)
4983{
4984    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
4985    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
4986    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
4987}
4988
4989
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits one non-repeated STOS step: store the low ValBits bits of rAX to
 * [ES:rDI], then advance (or, with EFLAGS.DF set, retreat) rDI by the
 * operand byte count.  No segment override applies to STOS. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5005
5006/**
5007 * @opcode 0xaa
 * stosb - byte string store.  A rep/repne prefix routes to the looping C
 * implementation; otherwise one step via IEM_STOS_CASE.
5008 */
5009FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5010{
5011    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5012
5013    /*
5014     * Use the C implementation if a repeat prefix is encountered.
5015     */
5016    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5017    {
5018        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5019        switch (pVCpu->iem.s.enmEffAddrMode)
5020        {
5021            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5022            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5023            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5024            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5025        }
5026    }
5027    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5028
5029    /*
5030     * Sharing case implementation with stos[wdq] below.
5031     */
5032    switch (pVCpu->iem.s.enmEffAddrMode)
5033    {
5034        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5035        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5036        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5037        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5038    }
5039    return VINF_SUCCESS;
5040}
5041
5042
5043/**
5044 * @opcode 0xab
 * stosw/stosd/stosq - word/dword/qword string store.  A rep/repne prefix
 * routes to the looping C implementation chosen by operand and address
 * size; otherwise one step via IEM_STOS_CASE.
5045 */
5046FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5047{
5048    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5049
5050    /*
5051     * Use the C implementation if a repeat prefix is encountered.
5052     */
5053    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5054    {
5055        IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5056        switch (pVCpu->iem.s.enmEffOpSize)
5057        {
5058            case IEMMODE_16BIT:
5059                switch (pVCpu->iem.s.enmEffAddrMode)
5060                {
5061                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5062                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5063                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5064                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5065                }
5066                break;
5067            case IEMMODE_32BIT:
5068                switch (pVCpu->iem.s.enmEffAddrMode)
5069                {
5070                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5071                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5072                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5073                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5074                }
                /* no break: every path in the switch above returns. */
5075            case IEMMODE_64BIT:
5076                switch (pVCpu->iem.s.enmEffAddrMode)
5077                {
                    /* 16-bit addressing cannot be encoded in 64-bit mode. */
5078                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5079                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5080                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5081                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5082                }
5083            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5084        }
5085    }
5086    IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5087
5088    /*
5089     * Annoying double switch here.
5090     * Using ugly macro for implementing the cases, sharing it with stosb.
5091     */
5092    switch (pVCpu->iem.s.enmEffOpSize)
5093    {
5094        case IEMMODE_16BIT:
5095            switch (pVCpu->iem.s.enmEffAddrMode)
5096            {
5097                case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5098                case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5099                case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5100                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5101            }
5102            break;
5103
5104        case IEMMODE_32BIT:
5105            switch (pVCpu->iem.s.enmEffAddrMode)
5106            {
5107                case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5108                case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5109                case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5110                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5111            }
5112            break;
5113
5114        case IEMMODE_64BIT:
5115            switch (pVCpu->iem.s.enmEffAddrMode)
5116            {
5117                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5118                case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5119                case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5120                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5121            }
5122            break;
5123        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5124    }
5125    return VINF_SUCCESS;
5126}
5127
5128#undef IEM_STOS_CASE
5129
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits one non-repeated LODS step: load ValBits bits from [seg:rSI] into
 * the low ValBits of rAX, then advance (or, with EFLAGS.DF set, retreat)
 * rSI by the operand byte count. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5145
5146/**
5147 * @opcode 0xac
 * lodsb - byte string load into AL.  A rep/repne prefix routes to the
 * looping C implementation; otherwise one step via IEM_LODS_CASE.
5148 */
5149FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5150{
5151    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5152
5153    /*
5154     * Use the C implementation if a repeat prefix is encountered.
5155     */
5156    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5157    {
5158        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5159        switch (pVCpu->iem.s.enmEffAddrMode)
5160        {
5161            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5162            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5163            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5164            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5165        }
5166    }
5167    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5168
5169    /*
5170     * Sharing case implementation with lods[wdq] below.
5171     */
5172    switch (pVCpu->iem.s.enmEffAddrMode)
5173    {
5174        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5175        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5176        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5177        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5178    }
5179    return VINF_SUCCESS;
5180}
5181
5182
5183/**
5184 * @opcode 0xad
 * lodsw/lodsd/lodsq - word/dword/qword string load into rAX.  A rep/repne
 * prefix routes to the looping C implementation chosen by operand and
 * address size; otherwise one step via IEM_LODS_CASE.
5185 */
5186FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5187{
5188    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5189
5190    /*
5191     * Use the C implementation if a repeat prefix is encountered.
5192     */
5193    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5194    {
5195        IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5196        switch (pVCpu->iem.s.enmEffOpSize)
5197        {
5198            case IEMMODE_16BIT:
5199                switch (pVCpu->iem.s.enmEffAddrMode)
5200                {
5201                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5202                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5203                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5204                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5205                }
5206                break;
5207            case IEMMODE_32BIT:
5208                switch (pVCpu->iem.s.enmEffAddrMode)
5209                {
5210                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5211                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5212                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5213                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5214                }
                /* no break: every path in the switch above returns. */
5215            case IEMMODE_64BIT:
5216                switch (pVCpu->iem.s.enmEffAddrMode)
5217                {
                    /* 16-bit addressing cannot be encoded in 64-bit mode. */
5218                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5219                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5220                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5221                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
5222                }
5223            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5224        }
5225    }
5226    IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5227
5228    /*
5229     * Annoying double switch here.
5230     * Using ugly macro for implementing the cases, sharing it with lodsb.
5231     */
5232    switch (pVCpu->iem.s.enmEffOpSize)
5233    {
5234        case IEMMODE_16BIT:
5235            switch (pVCpu->iem.s.enmEffAddrMode)
5236            {
5237                case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5238                case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5239                case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5240                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5241            }
5242            break;
5243
5244        case IEMMODE_32BIT:
5245            switch (pVCpu->iem.s.enmEffAddrMode)
5246            {
5247                case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5248                case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5249                case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5250                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5251            }
5252            break;
5253
5254        case IEMMODE_64BIT:
5255            switch (pVCpu->iem.s.enmEffAddrMode)
5256            {
5257                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5258                case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5259                case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5260                IEM_NOT_REACHED_DEFAULT_CASE_RET();
5261            }
5262            break;
5263        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5264    }
5265    return VINF_SUCCESS;
5266}
5267
5268#undef IEM_LODS_CASE
5269
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits one non-repeated SCAS step: compare the low ValBits of rAX against
 * ValBits bits fetched from [ES:rDI] via iemAImpl_cmp_uNN (EFLAGS only),
 * then advance (or, with EFLAGS.DF set, retreat) rDI by the operand byte
 * count.  No segment override applies to SCAS. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5291
5292/**
5293 * @opcode 0xae
 * scasb - byte string scan comparing AL.  repe and repne prefixes route to
 * separate looping C implementations; otherwise one step via IEM_SCAS_CASE.
5294 */
5295FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5296{
5297    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5298
5299    /*
5300     * Use the C implementation if a repeat prefix is encountered.
5301     */
5302    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5303    {
5304        IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5305        switch (pVCpu->iem.s.enmEffAddrMode)
5306        {
5307            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5308            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5309            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5310            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5311        }
5312    }
5313    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5314    {
5315        IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5316        switch (pVCpu->iem.s.enmEffAddrMode)
5317        {
5318            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5319            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5320            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5321            IEM_NOT_REACHED_DEFAULT_CASE_RET();
5322        }
5323    }
5324    IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5325
5326    /*
5327     * Sharing case implementation with scas[wdq] below.
5328     */
5329    switch (pVCpu->iem.s.enmEffAddrMode)
5330    {
5331        case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5332        case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5333        case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5334        IEM_NOT_REACHED_DEFAULT_CASE_RET();
5335    }
5336    return VINF_SUCCESS;
5337}
5338
5339
5340/**
5341 * @opcode 0xaf
5342 */
5343FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5344{
5345 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5346
5347 /*
5348 * Use the C implementation if a repeat prefix is encountered.
5349 */
5350 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5351 {
5352 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5353 switch (pVCpu->iem.s.enmEffOpSize)
5354 {
5355 case IEMMODE_16BIT:
5356 switch (pVCpu->iem.s.enmEffAddrMode)
5357 {
5358 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5359 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5360 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5361 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5362 }
5363 break;
5364 case IEMMODE_32BIT:
5365 switch (pVCpu->iem.s.enmEffAddrMode)
5366 {
5367 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5368 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5369 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5370 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5371 }
5372 case IEMMODE_64BIT:
5373 switch (pVCpu->iem.s.enmEffAddrMode)
5374 {
5375 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5376 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5377 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5379 }
5380 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5381 }
5382 }
5383 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5384 {
5385 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5386 switch (pVCpu->iem.s.enmEffOpSize)
5387 {
5388 case IEMMODE_16BIT:
5389 switch (pVCpu->iem.s.enmEffAddrMode)
5390 {
5391 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5392 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5393 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5394 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5395 }
5396 break;
5397 case IEMMODE_32BIT:
5398 switch (pVCpu->iem.s.enmEffAddrMode)
5399 {
5400 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5401 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5402 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5403 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5404 }
5405 case IEMMODE_64BIT:
5406 switch (pVCpu->iem.s.enmEffAddrMode)
5407 {
5408 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5409 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5410 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5411 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5412 }
5413 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5414 }
5415 }
5416 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5417
5418 /*
5419 * Annoying double switch here.
5420 * Using ugly macro for implementing the cases, sharing it with scasb.
5421 */
5422 switch (pVCpu->iem.s.enmEffOpSize)
5423 {
5424 case IEMMODE_16BIT:
5425 switch (pVCpu->iem.s.enmEffAddrMode)
5426 {
5427 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5428 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5429 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5430 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5431 }
5432 break;
5433
5434 case IEMMODE_32BIT:
5435 switch (pVCpu->iem.s.enmEffAddrMode)
5436 {
5437 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5438 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5439 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5440 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5441 }
5442 break;
5443
5444 case IEMMODE_64BIT:
5445 switch (pVCpu->iem.s.enmEffAddrMode)
5446 {
5447 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5448 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5449 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5450 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5451 }
5452 break;
5453 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5454 }
5455 return VINF_SUCCESS;
5456}
5457
5458#undef IEM_SCAS_CASE
5459
5460/**
5461 * Common 'mov r8, imm8' helper.
5462 */
5463FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5464{
5465 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5466 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5467
5468 IEM_MC_BEGIN(0, 1);
5469 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5470 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5471 IEM_MC_ADVANCE_RIP();
5472 IEM_MC_END();
5473
5474 return VINF_SUCCESS;
5475}
5476
5477
5478/**
5479 * @opcode 0xb0
5480 */
5481FNIEMOP_DEF(iemOp_mov_AL_Ib)
5482{
5483 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5484 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5485}
5486
5487
5488/**
5489 * @opcode 0xb1
5490 */
5491FNIEMOP_DEF(iemOp_CL_Ib)
5492{
5493 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5494 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5495}
5496
5497
5498/**
5499 * @opcode 0xb2
5500 */
5501FNIEMOP_DEF(iemOp_DL_Ib)
5502{
5503 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5504 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5505}
5506
5507
5508/**
5509 * @opcode 0xb3
5510 */
5511FNIEMOP_DEF(iemOp_BL_Ib)
5512{
5513 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5514 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5515}
5516
5517
5518/**
5519 * @opcode 0xb4
5520 */
5521FNIEMOP_DEF(iemOp_mov_AH_Ib)
5522{
5523 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5524 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5525}
5526
5527
5528/**
5529 * @opcode 0xb5
5530 */
5531FNIEMOP_DEF(iemOp_CH_Ib)
5532{
5533 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5534 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5535}
5536
5537
5538/**
5539 * @opcode 0xb6
5540 */
5541FNIEMOP_DEF(iemOp_DH_Ib)
5542{
5543 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5544 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5545}
5546
5547
5548/**
5549 * @opcode 0xb7
5550 */
5551FNIEMOP_DEF(iemOp_BH_Ib)
5552{
5553 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5554 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5555}
5556
5557
5558/**
5559 * Common 'mov regX,immX' helper.
5560 */
5561FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5562{
5563 switch (pVCpu->iem.s.enmEffOpSize)
5564 {
5565 case IEMMODE_16BIT:
5566 {
5567 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5568 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5569
5570 IEM_MC_BEGIN(0, 1);
5571 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5572 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5573 IEM_MC_ADVANCE_RIP();
5574 IEM_MC_END();
5575 break;
5576 }
5577
5578 case IEMMODE_32BIT:
5579 {
5580 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5581 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5582
5583 IEM_MC_BEGIN(0, 1);
5584 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5585 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5586 IEM_MC_ADVANCE_RIP();
5587 IEM_MC_END();
5588 break;
5589 }
5590 case IEMMODE_64BIT:
5591 {
5592 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5593 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5594
5595 IEM_MC_BEGIN(0, 1);
5596 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5597 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5598 IEM_MC_ADVANCE_RIP();
5599 IEM_MC_END();
5600 break;
5601 }
5602 }
5603
5604 return VINF_SUCCESS;
5605}
5606
5607
5608/**
5609 * @opcode 0xb8
5610 */
5611FNIEMOP_DEF(iemOp_eAX_Iv)
5612{
5613 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5614 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5615}
5616
5617
5618/**
5619 * @opcode 0xb9
5620 */
5621FNIEMOP_DEF(iemOp_eCX_Iv)
5622{
5623 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5624 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5625}
5626
5627
5628/**
5629 * @opcode 0xba
5630 */
5631FNIEMOP_DEF(iemOp_eDX_Iv)
5632{
5633 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5634 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5635}
5636
5637
5638/**
5639 * @opcode 0xbb
5640 */
5641FNIEMOP_DEF(iemOp_eBX_Iv)
5642{
5643 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5644 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5645}
5646
5647
5648/**
5649 * @opcode 0xbc
5650 */
5651FNIEMOP_DEF(iemOp_eSP_Iv)
5652{
5653 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5654 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5655}
5656
5657
5658/**
5659 * @opcode 0xbd
5660 */
5661FNIEMOP_DEF(iemOp_eBP_Iv)
5662{
5663 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5664 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5665}
5666
5667
5668/**
5669 * @opcode 0xbe
5670 */
5671FNIEMOP_DEF(iemOp_eSI_Iv)
5672{
5673 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5674 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5675}
5676
5677
5678/**
5679 * @opcode 0xbf
5680 */
5681FNIEMOP_DEF(iemOp_eDI_Iv)
5682{
5683 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
5684 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5685}
5686
5687
5688/**
5689 * @opcode 0xc0
5690 */
5691FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
5692{
5693 IEMOP_HLP_MIN_186();
5694 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5695 PCIEMOPSHIFTSIZES pImpl;
5696 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5697 {
5698 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
5699 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
5700 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
5701 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
5702 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
5703 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
5704 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
5705 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5706 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5707 }
5708 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5709
5710 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5711 {
5712 /* register */
5713 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5714 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5715 IEM_MC_BEGIN(3, 0);
5716 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5717 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5718 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5719 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5720 IEM_MC_REF_EFLAGS(pEFlags);
5721 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5722 IEM_MC_ADVANCE_RIP();
5723 IEM_MC_END();
5724 }
5725 else
5726 {
5727 /* memory */
5728 IEM_MC_BEGIN(3, 2);
5729 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
5730 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5731 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5732 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5733
5734 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5735 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5736 IEM_MC_ASSIGN(cShiftArg, cShift);
5737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5738 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5739 IEM_MC_FETCH_EFLAGS(EFlags);
5740 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
5741
5742 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
5743 IEM_MC_COMMIT_EFLAGS(EFlags);
5744 IEM_MC_ADVANCE_RIP();
5745 IEM_MC_END();
5746 }
5747 return VINF_SUCCESS;
5748}
5749
5750
5751/**
5752 * @opcode 0xc1
5753 */
5754FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
5755{
5756 IEMOP_HLP_MIN_186();
5757 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5758 PCIEMOPSHIFTSIZES pImpl;
5759 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
5760 {
5761 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
5762 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
5763 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
5764 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
5765 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
5766 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
5767 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
5768 case 6: return IEMOP_RAISE_INVALID_OPCODE();
5769 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
5770 }
5771 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
5772
5773 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5774 {
5775 /* register */
5776 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5777 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5778 switch (pVCpu->iem.s.enmEffOpSize)
5779 {
5780 case IEMMODE_16BIT:
5781 IEM_MC_BEGIN(3, 0);
5782 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5783 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5784 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5785 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5786 IEM_MC_REF_EFLAGS(pEFlags);
5787 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5788 IEM_MC_ADVANCE_RIP();
5789 IEM_MC_END();
5790 return VINF_SUCCESS;
5791
5792 case IEMMODE_32BIT:
5793 IEM_MC_BEGIN(3, 0);
5794 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5795 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5796 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5797 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5798 IEM_MC_REF_EFLAGS(pEFlags);
5799 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5800 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
5801 IEM_MC_ADVANCE_RIP();
5802 IEM_MC_END();
5803 return VINF_SUCCESS;
5804
5805 case IEMMODE_64BIT:
5806 IEM_MC_BEGIN(3, 0);
5807 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5808 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
5809 IEM_MC_ARG(uint32_t *, pEFlags, 2);
5810 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
5811 IEM_MC_REF_EFLAGS(pEFlags);
5812 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5813 IEM_MC_ADVANCE_RIP();
5814 IEM_MC_END();
5815 return VINF_SUCCESS;
5816
5817 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5818 }
5819 }
5820 else
5821 {
5822 /* memory */
5823 switch (pVCpu->iem.s.enmEffOpSize)
5824 {
5825 case IEMMODE_16BIT:
5826 IEM_MC_BEGIN(3, 2);
5827 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
5828 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5829 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5830 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5831
5832 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5833 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5834 IEM_MC_ASSIGN(cShiftArg, cShift);
5835 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5836 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5837 IEM_MC_FETCH_EFLAGS(EFlags);
5838 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
5839
5840 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
5841 IEM_MC_COMMIT_EFLAGS(EFlags);
5842 IEM_MC_ADVANCE_RIP();
5843 IEM_MC_END();
5844 return VINF_SUCCESS;
5845
5846 case IEMMODE_32BIT:
5847 IEM_MC_BEGIN(3, 2);
5848 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
5849 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5850 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5851 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5852
5853 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5854 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5855 IEM_MC_ASSIGN(cShiftArg, cShift);
5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5857 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5858 IEM_MC_FETCH_EFLAGS(EFlags);
5859 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
5860
5861 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
5862 IEM_MC_COMMIT_EFLAGS(EFlags);
5863 IEM_MC_ADVANCE_RIP();
5864 IEM_MC_END();
5865 return VINF_SUCCESS;
5866
5867 case IEMMODE_64BIT:
5868 IEM_MC_BEGIN(3, 2);
5869 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
5870 IEM_MC_ARG(uint8_t, cShiftArg, 1);
5871 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
5872 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
5873
5874 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
5875 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
5876 IEM_MC_ASSIGN(cShiftArg, cShift);
5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5878 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
5879 IEM_MC_FETCH_EFLAGS(EFlags);
5880 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
5881
5882 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
5883 IEM_MC_COMMIT_EFLAGS(EFlags);
5884 IEM_MC_ADVANCE_RIP();
5885 IEM_MC_END();
5886 return VINF_SUCCESS;
5887
5888 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5889 }
5890 }
5891}
5892
5893
5894/**
5895 * @opcode 0xc2
5896 */
5897FNIEMOP_DEF(iemOp_retn_Iw)
5898{
5899 IEMOP_MNEMONIC(retn_Iw, "retn Iw");
5900 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5901 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5902 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5903 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
5904}
5905
5906
5907/**
5908 * @opcode 0xc3
5909 */
5910FNIEMOP_DEF(iemOp_retn)
5911{
5912 IEMOP_MNEMONIC(retn, "retn");
5913 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
5914 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5915 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
5916}
5917
5918
5919/**
5920 * @opcode 0xc4
5921 */
5922FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5923{
5924 /* The LES instruction is invalid 64-bit mode. In legacy and
5925 compatability mode it is invalid with MOD=3.
5926 The use as a VEX prefix is made possible by assigning the inverted
5927 REX.R to the top MOD bit, and the top bit in the inverted register
5928 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5929 to accessing registers 0..7 in this VEX form. */
5930 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5931 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5932 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5933 {
5934 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5935 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5936 {
5937 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5938 if ( ( pVCpu->iem.s.fPrefixes
5939 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5940 == 0)
5941 {
5942 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5943 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5944 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5945 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5946 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5947
5948 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5949 }
5950
5951 Log(("VEX2: Invalid prefix mix!\n"));
5952 }
5953 else
5954 Log(("VEX2: AVX support disabled!\n"));
5955
5956 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5957 return IEMOP_RAISE_INVALID_OPCODE();
5958 }
5959 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5960 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5961}
5962
5963
5964/**
5965 * @opcode 0xc5
5966 */
5967FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
5968{
5969 /* The LDS instruction is invalid 64-bit mode. In legacy and
5970 compatability mode it is invalid with MOD=3.
5971 The use as a VEX prefix is made possible by assigning the inverted
5972 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5973 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5974 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5975 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5976 {
5977 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5978 {
5979 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5980 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5981 }
5982 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5983 }
5984
5985 IEMOP_MNEMONIC(vex3_prefix, "vex3");
5986 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5987 {
5988 /** @todo Test when exctly the VEX conformance checks kick in during
5989 * instruction decoding and fetching (using \#PF). */
5990 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5991 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5992 if ( ( pVCpu->iem.s.fPrefixes
5993 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5994 == 0)
5995 {
5996 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5997 if (bVex2 & 0x80 /* VEX.W */)
5998 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5999 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
6000 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
6001 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
6002 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
6003 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
6004 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
6005
6006 switch (bRm & 0x1f)
6007 {
6008 case 1: /* 0x0f lead opcode byte. */
6009 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
6010
6011 case 2: /* 0x0f 0x38 lead opcode bytes. */
6012 /** @todo VEX: Just use new tables and decoders. */
6013 IEMOP_BITCH_ABOUT_STUB();
6014 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6015
6016 case 3: /* 0x0f 0x3a lead opcode bytes. */
6017 /** @todo VEX: Just use new tables and decoders. */
6018 IEMOP_BITCH_ABOUT_STUB();
6019 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
6020
6021 default:
6022 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
6023 return IEMOP_RAISE_INVALID_OPCODE();
6024 }
6025 }
6026 else
6027 Log(("VEX3: Invalid prefix mix!\n"));
6028 }
6029 else
6030 Log(("VEX3: AVX support disabled!\n"));
6031 return IEMOP_RAISE_INVALID_OPCODE();
6032}
6033
6034
6035/**
6036 * @opcode 0xc6
6037 */
6038FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
6039{
6040 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6041 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6042 return IEMOP_RAISE_INVALID_OPCODE();
6043 IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");
6044
6045 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6046 {
6047 /* register access */
6048 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6050 IEM_MC_BEGIN(0, 0);
6051 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
6052 IEM_MC_ADVANCE_RIP();
6053 IEM_MC_END();
6054 }
6055 else
6056 {
6057 /* memory access. */
6058 IEM_MC_BEGIN(0, 1);
6059 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6060 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6061 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
6062 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6063 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
6064 IEM_MC_ADVANCE_RIP();
6065 IEM_MC_END();
6066 }
6067 return VINF_SUCCESS;
6068}
6069
6070
6071/**
6072 * @opcode 0xc7
6073 */
6074FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
6075{
6076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6077 if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
6078 return IEMOP_RAISE_INVALID_OPCODE();
6079 IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");
6080
6081 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6082 {
6083 /* register access */
6084 switch (pVCpu->iem.s.enmEffOpSize)
6085 {
6086 case IEMMODE_16BIT:
6087 IEM_MC_BEGIN(0, 0);
6088 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6089 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6090 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
6091 IEM_MC_ADVANCE_RIP();
6092 IEM_MC_END();
6093 return VINF_SUCCESS;
6094
6095 case IEMMODE_32BIT:
6096 IEM_MC_BEGIN(0, 0);
6097 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6098 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6099 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
6100 IEM_MC_ADVANCE_RIP();
6101 IEM_MC_END();
6102 return VINF_SUCCESS;
6103
6104 case IEMMODE_64BIT:
6105 IEM_MC_BEGIN(0, 0);
6106 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6107 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6108 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
6109 IEM_MC_ADVANCE_RIP();
6110 IEM_MC_END();
6111 return VINF_SUCCESS;
6112
6113 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6114 }
6115 }
6116 else
6117 {
6118 /* memory access. */
6119 switch (pVCpu->iem.s.enmEffOpSize)
6120 {
6121 case IEMMODE_16BIT:
6122 IEM_MC_BEGIN(0, 1);
6123 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6124 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
6125 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6126 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6127 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
6128 IEM_MC_ADVANCE_RIP();
6129 IEM_MC_END();
6130 return VINF_SUCCESS;
6131
6132 case IEMMODE_32BIT:
6133 IEM_MC_BEGIN(0, 1);
6134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6136 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
6137 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6138 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
6139 IEM_MC_ADVANCE_RIP();
6140 IEM_MC_END();
6141 return VINF_SUCCESS;
6142
6143 case IEMMODE_64BIT:
6144 IEM_MC_BEGIN(0, 1);
6145 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6146 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
6147 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
6148 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6149 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
6150 IEM_MC_ADVANCE_RIP();
6151 IEM_MC_END();
6152 return VINF_SUCCESS;
6153
6154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6155 }
6156 }
6157}
6158
6159
6160
6161
6162/**
6163 * @opcode 0xc8
6164 */
6165FNIEMOP_DEF(iemOp_enter_Iw_Ib)
6166{
6167 IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
6168 IEMOP_HLP_MIN_186();
6169 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6170 uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
6171 uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
6172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6173 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
6174}
6175
6176
6177/**
6178 * @opcode 0xc9
6179 */
6180FNIEMOP_DEF(iemOp_leave)
6181{
6182 IEMOP_MNEMONIC(leave, "leave");
6183 IEMOP_HLP_MIN_186();
6184 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6185 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6186 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
6187}
6188
6189
6190/**
6191 * @opcode 0xca
6192 */
6193FNIEMOP_DEF(iemOp_retf_Iw)
6194{
6195 IEMOP_MNEMONIC(retf_Iw, "retf Iw");
6196 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
6197 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6198 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6199 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
6200}
6201
6202
6203/**
6204 * @opcode 0xcb
6205 */
6206FNIEMOP_DEF(iemOp_retf)
6207{
6208 IEMOP_MNEMONIC(retf, "retf");
6209 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6210 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
6211 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
6212}
6213
6214
6215/**
6216 * @opcode 0xcc
6217 */
6218FNIEMOP_DEF(iemOp_int3)
6219{
6220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6221 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
6222}
6223
6224
6225/**
6226 * @opcode 0xcd
6227 */
6228FNIEMOP_DEF(iemOp_int_Ib)
6229{
6230 uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
6231 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6232 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
6233}
6234
6235
6236/**
6237 * @opcode 0xce
6238 */
6239FNIEMOP_DEF(iemOp_into)
6240{
6241 IEMOP_MNEMONIC(into, "into");
6242 IEMOP_HLP_NO_64BIT();
6243
6244 IEM_MC_BEGIN(2, 0);
6245 IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
6246 IEM_MC_ARG_CONST(bool, fIsBpInstr, /*=*/ false, 1);
6247 IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
6248 IEM_MC_END();
6249 return VINF_SUCCESS;
6250}
6251
6252
6253/**
6254 * @opcode 0xcf
6255 */
6256FNIEMOP_DEF(iemOp_iret)
6257{
6258 IEMOP_MNEMONIC(iret, "iret");
6259 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6260 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
6261}
6262
6263
6264/**
6265 * @opcode 0xd0
6266 */
6267FNIEMOP_DEF(iemOp_Grp2_Eb_1)
6268{
6269 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6270 PCIEMOPSHIFTSIZES pImpl;
6271 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6272 {
6273 case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
6274 case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
6275 case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
6276 case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
6277 case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
6278 case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
6279 case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
6280 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6281 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
6282 }
6283 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6284
6285 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6286 {
6287 /* register */
6288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6289 IEM_MC_BEGIN(3, 0);
6290 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6291 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6292 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6293 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6294 IEM_MC_REF_EFLAGS(pEFlags);
6295 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6296 IEM_MC_ADVANCE_RIP();
6297 IEM_MC_END();
6298 }
6299 else
6300 {
6301 /* memory */
6302 IEM_MC_BEGIN(3, 2);
6303 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6304 IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
6305 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6306 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6307
6308 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
6309 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6310 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6311 IEM_MC_FETCH_EFLAGS(EFlags);
6312 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6313
6314 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6315 IEM_MC_COMMIT_EFLAGS(EFlags);
6316 IEM_MC_ADVANCE_RIP();
6317 IEM_MC_END();
6318 }
6319 return VINF_SUCCESS;
6320}
6321
6322
6323
/**
 * @opcode 0xd1
 *
 * Group 2: rotate/shift Ev by an implicit count of 1.  The ModR/M reg field
 * selects the operation (rol/ror/rcl/rcr/shl/shr/sar); /6 is undefined and
 * raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation table (per-operand-size workers) from reg. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF are declared undefined for the output verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6457
6458
/**
 * @opcode 0xd2
 *
 * Group 2: rotate/shift Eb (byte operand) by the count in CL.  The ModR/M
 * reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation table from the ModR/M reg field. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF are declared undefined for the output verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6518
6519
/**
 * @opcode 0xd3
 *
 * Group 2: rotate/shift Ev (16/32/64-bit operand) by the count in CL.  The
 * ModR/M reg field selects the operation; /6 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* Pick the implementation table from the ModR/M reg field. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is not assigned */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF are declared undefined for the output verifier. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit GPR writes zero the high half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory: map the operand read-write, shift in place, then commit. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6659
/**
 * @opcode 0xd4
 *
 * AAM - ASCII adjust AX after multiply.  The immediate is the divisor
 * (normally 10); a zero immediate raises \#DE.  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* #UD in long mode, checked before the #DE below */
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM with 0 divides by zero -> #DE */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6673
6674
/**
 * @opcode 0xd5
 *
 * AAD - ASCII adjust AX before division.  The immediate is the multiplier
 * (normally 10).  Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT(); /* #UD in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6686
6687
6688/**
6689 * @opcode 0xd6
6690 */
6691FNIEMOP_DEF(iemOp_salc)
6692{
6693 IEMOP_MNEMONIC(salc, "salc");
6694 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6695 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6696 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6697 IEMOP_HLP_NO_64BIT();
6698
6699 IEM_MC_BEGIN(0, 0);
6700 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6701 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6702 } IEM_MC_ELSE() {
6703 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6704 } IEM_MC_ENDIF();
6705 IEM_MC_ADVANCE_RIP();
6706 IEM_MC_END();
6707 return VINF_SUCCESS;
6708}
6709
6710
/**
 * @opcode 0xd7
 *
 * XLAT - table lookup: loads AL from [seg:xBX + AL], with the address
 * width selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX); /* base = BX */
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6759
6760
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * Raises \#NM / pending \#MF first; if either register is tagged empty the
 * underflow path is taken instead of calling the worker.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result always goes to ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6791
6792
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * The worker returns an FSW value which is merged into the FPU status word;
 * no data register is written.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6823
6824
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN except the stack is popped after the
 * status word update (the *_THEN_POP variants), also on underflow.
 *
 * @param bRm The ModR/M byte; the rm field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX); /* UINT8_MAX = no destination register */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6855
6856
/** Opcode 0xd8 11/0.
 * fadd st0,stN: ST(0) = ST(0) + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6863
6864
/** Opcode 0xd8 11/1.
 * fmul st0,stN: ST(0) = ST(0) * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6871
6872
/** Opcode 0xd8 11/2.
 * fcom st0,stN: compare ST(0) with ST(i), setting FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6879
6880
/** Opcode 0xd8 11/3.
 * fcomp st0,stN: like fcom (shares the worker) but pops the stack afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6887
6888
/** Opcode 0xd8 11/4.
 * fsub st0,stN: ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6895
6896
/** Opcode 0xd8 11/5.
 * fsubr st0,stN: reversed subtract, ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6903
6904
/** Opcode 0xd8 11/6.
 * fdiv st0,stN: ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6911
6912
/** Opcode 0xd8 11/7.
 * fdivr st0,stN: reversed divide, ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6919
6920
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched from memory before exceptions/usage are
 * prepared; an empty ST(0) takes the underflow path instead of the worker.
 *
 * @param bRm The ModR/M byte, used for the effective address.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* result always goes to ST(0) */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6956
6957
/** Opcode 0xd8 !11/0.
 * fadd st0,m32r: ST(0) = ST(0) + m32 real operand. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6964
6965
/** Opcode 0xd8 !11/1.
 * fmul st0,m32r: ST(0) = ST(0) * m32 real operand. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6972
6973
/** Opcode 0xd8 !11/2.
 * fcom st0,m32r: compare ST(0) with an m32 real operand; only FSW condition
 * codes are updated, no data register is written. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* The *_WITH_MEM_OP variants also record FPUDP/FPUDS for the operand. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7006
7007
/** Opcode 0xd8 !11/3.
 * fcomp st0,m32r: like fcom st0,m32r but pops the stack afterwards
 * (the *_THEN_POP variants), also on underflow. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7040
7041
/** Opcode 0xd8 !11/4.
 * fsub st0,m32r: ST(0) = ST(0) - m32 real operand. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
7048
7049
/** Opcode 0xd8 !11/5.
 * fsubr st0,m32r: reversed subtract, ST(0) = m32 real operand - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
7056
7057
/** Opcode 0xd8 !11/6.
 * fdiv st0,m32r: ST(0) = ST(0) / m32 real operand. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
7064
7065
/** Opcode 0xd8 !11/7.
 * fdivr st0,m32r: reversed divide, ST(0) = m32 real operand / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7072
7073
/**
 * @opcode 0xd8
 *
 * First x87 escape opcode: dispatches on the ModR/M byte.  Register forms
 * (mod == 3) operate on ST(0)/ST(i); memory forms take an m32 real operand.
 */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 opcode bits + ModR/M) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7113
7114
/** Opcode 0xd9 /0 mem32real
 * fld m32r: convert an m32 real operand to 80-bit and push it onto the
 * FPU stack.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new top-of-stack; it must be free or we overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7147
7148
/** Opcode 0xd9 !11/2 mem32real
 * fst m32r: store ST(0) to memory as a 32-bit real.  On stack underflow with
 * \#IA masked (FCW.IM), a negative QNaN is stored instead. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit depends on the FSW the worker returned (unmasked exceptions
           must not write the destination). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7183
7184
/** Opcode 0xd9 !11/3
 * fstp m32r: like fst m32r but pops the stack afterwards
 * (the *_THEN_POP variants), also on underflow. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7219
7220
/** Opcode 0xd9 !11/4
 * fldenv m14/28byte: load the FPU environment (14 bytes with a 16-bit
 * operand size, 28 bytes otherwise); deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7238
7239
7240/** Opcode 0xd9 !11/5 */
7241FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7242{
7243 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7244 IEM_MC_BEGIN(1, 1);
7245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7246 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7247 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7248 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7249 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7250 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7251 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7252 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7253 IEM_MC_END();
7254 return VINF_SUCCESS;
7255}
7256
7257
/** Opcode 0xd9 !11/6
 * fnstenv m14/28byte: store the FPU environment to memory (no-wait form;
 * FSTENV is FWAIT + this opcode); deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7275
7276
/** Opcode 0xd9 !11/7
 * fnstcw m2byte: store the x87 FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7294
7295
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?. */
FNIEMOP_DEF(iemOp_fnop)
{
    /* FNOP - FPU no-operation.  Still updates the FPU opcode/IP bookkeeping
       and can raise #NM / pending FPU exceptions like other x87 insns. */
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7313
7314
/** Opcode 0xd9 11/0 stN */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    /* FLD ST(i) - push a copy of ST(i) onto the FPU stack.  If ST(i) is
       empty the push-underflow handling is invoked instead. */
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7342
7343
/** Opcode 0xd9 11/3 stN */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    /* FXCH ST(i) - exchange ST(0) and ST(i).  The non-empty path writes
       ST(i)'s value into ST(0) (via the FPU result, FSW C1 set) and ST(0)'s
       old value into ST(i); the empty case goes to a C-impl underflow helper. */
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_ARG_CONST(uint8_t,           iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7374
7375
/** Opcode 0xd9 11/4, 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    /* FSTP ST(i) - store ST(0) into ST(i), then pop the FPU stack. */
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST(0) itself: no copy needed, just pop (or signal
           underflow-and-pop when ST(0) is empty). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t,        u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST(0) to ST(iDstReg) and pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U,          pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7422
7423
7424/**
7425 * Common worker for FPU instructions working on ST0 and replaces it with the
7426 * result, i.e. unary operators.
7427 *
7428 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7429 */
7430FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7431{
7432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7433
7434 IEM_MC_BEGIN(2, 1);
7435 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7436 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7437 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7438
7439 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7440 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7441 IEM_MC_PREPARE_FPU_USAGE();
7442 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7443 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7444 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7445 IEM_MC_ELSE()
7446 IEM_MC_FPU_STACK_UNDERFLOW(0);
7447 IEM_MC_ENDIF();
7448 IEM_MC_ADVANCE_RIP();
7449
7450 IEM_MC_END();
7451 return VINF_SUCCESS;
7452}
7453
7454
/** Opcode 0xd9 0xe0.  FCHS - change the sign of ST(0); unary worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
7461
7462
/** Opcode 0xd9 0xe1.  FABS - absolute value of ST(0); unary worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7469
7470
7471/**
7472 * Common worker for FPU instructions working on ST0 and only returns FSW.
7473 *
7474 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7475 */
7476FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7477{
7478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7479
7480 IEM_MC_BEGIN(2, 1);
7481 IEM_MC_LOCAL(uint16_t, u16Fsw);
7482 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7483 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7484
7485 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7486 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7487 IEM_MC_PREPARE_FPU_USAGE();
7488 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7489 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7490 IEM_MC_UPDATE_FSW(u16Fsw);
7491 IEM_MC_ELSE()
7492 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7493 IEM_MC_ENDIF();
7494 IEM_MC_ADVANCE_RIP();
7495
7496 IEM_MC_END();
7497 return VINF_SUCCESS;
7498}
7499
7500
/** Opcode 0xd9 0xe4.  FTST - compare ST(0) with 0.0, FSW-only worker. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
7507
7508
/** Opcode 0xd9 0xe5.  FXAM - classify ST(0) into FSW condition codes. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7515
7516
7517/**
7518 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7519 *
7520 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7521 */
7522FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7523{
7524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7525
7526 IEM_MC_BEGIN(1, 1);
7527 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7528 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7529
7530 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7531 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7532 IEM_MC_PREPARE_FPU_USAGE();
7533 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7534 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7535 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7536 IEM_MC_ELSE()
7537 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7538 IEM_MC_ENDIF();
7539 IEM_MC_ADVANCE_RIP();
7540
7541 IEM_MC_END();
7542 return VINF_SUCCESS;
7543}
7544
7545
/** Opcode 0xd9 0xe8.  FLD1 - push +1.0; constant-push worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
7552
7553
/** Opcode 0xd9 0xe9.  FLDL2T - push log2(10); constant-push worker. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
7560
7561
/** Opcode 0xd9 0xea.  FLDL2E - push log2(e); constant-push worker. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
7568
/** Opcode 0xd9 0xeb.  FLDPI - push pi; constant-push worker. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
7575
7576
/** Opcode 0xd9 0xec.  FLDLG2 - push log10(2); constant-push worker. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
7583
/** Opcode 0xd9 0xed.  FLDLN2 - push ln(2); constant-push worker. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
7590
7591
/** Opcode 0xd9 0xee.  FLDZ - push +0.0; constant-push worker. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
7598
7599
/** Opcode 0xd9 0xf0.  F2XM1 - compute 2^ST(0)-1 in place; unary worker. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7606
7607
7608/**
7609 * Common worker for FPU instructions working on STn and ST0, storing the result
7610 * in STn, and popping the stack unless IE, DE or ZE was raised.
7611 *
7612 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7613 */
7614FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7615{
7616 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7617
7618 IEM_MC_BEGIN(3, 1);
7619 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7620 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7621 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7622 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7623
7624 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7625 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7626
7627 IEM_MC_PREPARE_FPU_USAGE();
7628 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7629 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7630 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7631 IEM_MC_ELSE()
7632 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7633 IEM_MC_ENDIF();
7634 IEM_MC_ADVANCE_RIP();
7635
7636 IEM_MC_END();
7637 return VINF_SUCCESS;
7638}
7639
7640
/** Opcode 0xd9 0xf1.  FYL2X - ST(1) := ST(1)*log2(ST(0)), then pop. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7647
7648
7649/**
7650 * Common worker for FPU instructions working on ST0 and having two outputs, one
7651 * replacing ST0 and one pushed onto the stack.
7652 *
7653 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7654 */
7655FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7656{
7657 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7658
7659 IEM_MC_BEGIN(2, 1);
7660 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7661 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7662 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7663
7664 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7665 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7666 IEM_MC_PREPARE_FPU_USAGE();
7667 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7668 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7669 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7670 IEM_MC_ELSE()
7671 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7672 IEM_MC_ENDIF();
7673 IEM_MC_ADVANCE_RIP();
7674
7675 IEM_MC_END();
7676 return VINF_SUCCESS;
7677}
7678
7679
/** Opcode 0xd9 0xf2.  FPTAN - two-output worker (replaces ST(0), pushes). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7686
7687
/** Opcode 0xd9 0xf3.  FPATAN - result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7694
7695
/** Opcode 0xd9 0xf4.  FXTRACT - two-output worker (exponent/significand). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7702
7703
/** Opcode 0xd9 0xf5.  FPREM1 - IEEE partial remainder, result to ST(0). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7710
7711
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* FDECSTP - decrement the FPU stack TOP pointer; no register content
       changes, FSW C1 cleared via the constant-FSW update. */
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7734
7735
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* FINCSTP - increment the FPU stack TOP pointer; mirror image of
       iemOp_fdecstp above. */
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7758
7759
/** Opcode 0xd9 0xf8.  FPREM - partial remainder (legacy), result to ST(0). */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7766
7767
/** Opcode 0xd9 0xf9.  FYL2XP1 - result to ST(1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7774
7775
/** Opcode 0xd9 0xfa.  FSQRT - square root of ST(0) in place; unary worker. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7782
7783
/** Opcode 0xd9 0xfb.  FSINCOS - two-output worker (replaces ST(0), pushes). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7790
7791
/** Opcode 0xd9 0xfc.  FRNDINT - round ST(0) to integer in place; unary worker. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7798
7799
/** Opcode 0xd9 0xfd.  FSCALE - scale ST(0) by ST(1), result to ST(0). */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7806
7807
/** Opcode 0xd9 0xfe.  FSIN - sine of ST(0) in place; unary worker. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7814
7815
/** Opcode 0xd9 0xff.  FCOS - cosine of ST(0) in place; unary worker. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7822
7823
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with ModR/M bytes 0xe0 thru 0xff (reg fields 4-7 in
 * register form); the holes in the encoding map to iemOp_Invalid. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
7860
7861
7862/**
7863 * @opcode 0xd9
7864 */
7865FNIEMOP_DEF(iemOp_EscF1)
7866{
7867 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7868 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
7869
7870 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7871 {
7872 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7873 {
7874 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
7875 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
7876 case 2:
7877 if (bRm == 0xd0)
7878 return FNIEMOP_CALL(iemOp_fnop);
7879 return IEMOP_RAISE_INVALID_OPCODE();
7880 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
7881 case 4:
7882 case 5:
7883 case 6:
7884 case 7:
7885 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
7886 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
7887 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7888 }
7889 }
7890 else
7891 {
7892 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7893 {
7894 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
7895 case 1: return IEMOP_RAISE_INVALID_OPCODE();
7896 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
7897 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
7898 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
7899 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
7900 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
7901 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
7902 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7903 }
7904 }
7905}
7906
7907
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB - copy ST(N) into ST(0) when EFLAGS.CF is set; underflow if
       either register is empty. */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7934
7935
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE - copy ST(N) into ST(0) when EFLAGS.ZF is set. */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7962
7963
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE - copy ST(N) into ST(0) when EFLAGS.CF or ZF is set. */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7990
7991
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU - copy ST(N) into ST(0) when EFLAGS.PF is set (unordered). */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U,      pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8018
8019
8020/**
8021 * Common worker for FPU instructions working on ST0 and STn, only affecting
8022 * flags, and popping twice when done.
8023 *
8024 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8025 */
8026FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8027{
8028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8029
8030 IEM_MC_BEGIN(3, 1);
8031 IEM_MC_LOCAL(uint16_t, u16Fsw);
8032 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8033 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8034 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8035
8036 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8037 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8038
8039 IEM_MC_PREPARE_FPU_USAGE();
8040 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8041 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8042 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8043 IEM_MC_ELSE()
8044 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8045 IEM_MC_ENDIF();
8046 IEM_MC_ADVANCE_RIP();
8047
8048 IEM_MC_END();
8049 return VINF_SUCCESS;
8050}
8051
8052
/** Opcode 0xda 0xe9.  FUCOMPP - unordered compare ST(0)/ST(1), pop twice. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8059
8060
8061/**
8062 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8063 * the result in ST0.
8064 *
8065 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8066 */
8067FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8068{
8069 IEM_MC_BEGIN(3, 3);
8070 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8071 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8072 IEM_MC_LOCAL(int32_t, i32Val2);
8073 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8074 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8075 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8076
8077 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8078 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8079
8080 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8081 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8082 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8083
8084 IEM_MC_PREPARE_FPU_USAGE();
8085 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8086 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8087 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8088 IEM_MC_ELSE()
8089 IEM_MC_FPU_STACK_UNDERFLOW(0);
8090 IEM_MC_ENDIF();
8091 IEM_MC_ADVANCE_RIP();
8092
8093 IEM_MC_END();
8094 return VINF_SUCCESS;
8095}
8096
8097
/** Opcode 0xda !11/0.  FIADD m32i - ST(0) += (int32), via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8104
8105
/** Opcode 0xda !11/1.  FIMUL m32i - ST(0) *= (int32), via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8112
8113
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32i - compare ST(0) with a 32-bit integer; FSW-only, no pop. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8146
8147
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32i - same as FICOM m32i above but pops ST(0) afterwards
       (note the *_THEN_POP FSW/underflow variants). */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,        u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,                 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2,     i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8180
8181
/** Opcode 0xda !11/4.  FISUB m32i - ST(0) -= (int32), via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8188
8189
/** Opcode 0xda !11/5.  FISUBR m32i - reversed subtract, via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8196
8197
/** Opcode 0xda !11/6.  FIDIV m32i - ST(0) /= (int32), via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8204
8205
/** Opcode 0xda !11/7.  FIDIVR m32i - reversed divide, via the m32i worker. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8212
8213
8214/**
8215 * @opcode 0xda
8216 */
8217FNIEMOP_DEF(iemOp_EscF2)
8218{
8219 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8220 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8221 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8222 {
8223 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8224 {
8225 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8226 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8227 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8228 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8229 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8230 case 5:
8231 if (bRm == 0xe9)
8232 return FNIEMOP_CALL(iemOp_fucompp);
8233 return IEMOP_RAISE_INVALID_OPCODE();
8234 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8235 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8236 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8237 }
8238 }
8239 else
8240 {
8241 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8242 {
8243 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8244 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8245 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8246 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8247 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8248 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8249 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8250 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8251 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8252 }
8253 }
8254}
8255
8256
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32i - push a 32-bit integer converted to R80; push-overflow when
       ST(7) (the receiving register) is occupied. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8288
8289
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32i - store ST(0) to memory as int32 with truncation, then pop.
       The destination is mapped for write up front; on stack underflow the
       integer-indefinite value is stored only if the IM mask bit permits. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8324
8325
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32i - store ST(0) to memory as int32 (current rounding mode),
       no pop.  Underflow path matches fisttp above but without the pop. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8360
8361
/** Opcode 0xdb !11/3. FISTP m32i - store ST(0) to memory as a signed 32-bit
 * integer using the rounding mode in FCW.RC, then pop the register stack.
 * Same as FIST m32i above except for the popping. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the assembly worker stores directly. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite value if #IA is
           masked, then report the underflow and pop regardless. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8396
8397
/** Opcode 0xdb !11/5. FLD m80r - push an 80-bit real from memory onto the
 * FPU register stack (no conversion needed, the value is copied as-is). */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that becomes the new ST(0), i.e. the
       current ST(7), to be empty; otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8429
8430
/** Opcode 0xdb !11/7. FSTP m80r - store ST(0) to memory as an 80-bit real
 * and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the assembly worker stores directly. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, then report the underflow and pop regardless. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8465
8466
/** Opcode 0xdb 11/0. FCMOVNB ST(0),ST(i) - copy ST(i) to ST(0) if CF=0
 * (not below). Both registers must be non-empty, else stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8493
8494
/** Opcode 0xdb 11/1. FCMOVNE ST(0),ST(i) - copy ST(i) to ST(0) if ZF=0
 * (not equal). Both registers must be non-empty, else stack underflow. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8521
8522
/** Opcode 0xdb 11/2. FCMOVNBE ST(0),ST(i) - copy ST(i) to ST(0) if CF=0 and
 * ZF=0 (not below or equal). Both registers must be non-empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8549
8550
/** Opcode 0xdb 11/3. FCMOVNU ST(0),ST(i) - copy ST(i) to ST(0) if PF=0
 * (not unordered). Both registers must be non-empty.
 * Note: the local name/mnemonic uses "fcmovnnu"; the canonical Intel
 * mnemonic is FCMOVNU. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8577
8578
/** Opcode 0xdb 0xe0. FNENI - 8087 "enable interrupts"; a no-op (ignored) on
 * later FPUs, emulated here as such (only the #NM check is performed). */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8590
8591
/** Opcode 0xdb 0xe1. FNDISI - 8087 "disable interrupts"; a no-op (ignored)
 * on later FPUs, emulated here as such (only the #NM check is performed). */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8603
8604
/** Opcode 0xdb 0xe2. FNCLEX - clear the FPU exception flags (and busy bit)
 * in FSW without checking for pending unmasked exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8619
8620
/** Opcode 0xdb 0xe3. FNINIT - reinitialize the FPU without checking for
 * pending unmasked exceptions; deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8628
8629
/** Opcode 0xdb 0xe4. FNSETPM - 80287 "set protected mode"; a no-op
 * (ignored) on later FPUs, emulated here as such. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)"); /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8641
8642
/** Opcode 0xdb 0xe5. FRSTPM - 80287XL "reset protected mode"; raises #UD
 * here since that is the behavior of newer CPUs (the ignore-variant is
 * kept under #if 0 for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8658
8659
/** Opcode 0xdb 11/5. FUCOMI ST(0),ST(i) - unordered compare setting EFLAGS;
 * no pop (fPop=false). Deferred to the shared fcomi/fucomi C worker. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8666
8667
/** Opcode 0xdb 11/6. FCOMI ST(0),ST(i) - ordered compare setting EFLAGS;
 * no pop (fPop=false). Deferred to the shared fcomi/fucomi C worker. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8674
8675
8676/**
8677 * @opcode 0xdb
8678 */
8679FNIEMOP_DEF(iemOp_EscF3)
8680{
8681 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8682 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
8683 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8684 {
8685 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8686 {
8687 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
8688 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
8689 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
8690 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
8691 case 4:
8692 switch (bRm)
8693 {
8694 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
8695 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
8696 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
8697 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
8698 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
8699 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
8700 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
8701 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
8702 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8703 }
8704 break;
8705 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
8706 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
8707 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8708 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8709 }
8710 }
8711 else
8712 {
8713 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8714 {
8715 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
8716 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
8717 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
8718 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
8719 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8720 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
8721 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8722 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
8723 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8724 }
8725 }
8726}
8727
8728
8729/**
8730 * Common worker for FPU instructions working on STn and ST0, and storing the
8731 * result in STn unless IE, DE or ZE was raised.
8732 *
8733 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8734 */
8735FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
8736{
8737 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8738
8739 IEM_MC_BEGIN(3, 1);
8740 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8741 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8742 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8743 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8744
8745 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8746 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8747
8748 IEM_MC_PREPARE_FPU_USAGE();
8749 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
8750 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
8751 IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
8752 IEM_MC_ELSE()
8753 IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
8754 IEM_MC_ENDIF();
8755 IEM_MC_ADVANCE_RIP();
8756
8757 IEM_MC_END();
8758 return VINF_SUCCESS;
8759}
8760
8761
/** Opcode 0xdc 11/0. FADD ST(i),ST(0) - ST(i) += ST(0). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8768
8769
/** Opcode 0xdc 11/1. FMUL ST(i),ST(0) - ST(i) *= ST(0). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8776
8777
/** Opcode 0xdc 11/4. FSUBR ST(i),ST(0) - ST(i) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8784
8785
/** Opcode 0xdc 11/5. FSUB ST(i),ST(0) - ST(i) -= ST(0). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8792
8793
/** Opcode 0xdc 11/6. FDIVR ST(i),ST(0) - ST(i) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8800
8801
/** Opcode 0xdc 11/7. FDIV ST(i),ST(0) - ST(i) /= ST(0). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8808
8809
8810/**
8811 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8812 * memory operand, and storing the result in ST0.
8813 *
8814 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8815 */
8816FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8817{
8818 IEM_MC_BEGIN(3, 3);
8819 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8820 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8821 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8822 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8823 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8824 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8825
8826 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8827 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8828 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8829 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8830
8831 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8832 IEM_MC_PREPARE_FPU_USAGE();
8833 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8834 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8835 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8836 IEM_MC_ELSE()
8837 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8838 IEM_MC_ENDIF();
8839 IEM_MC_ADVANCE_RIP();
8840
8841 IEM_MC_END();
8842 return VINF_SUCCESS;
8843}
8844
8845
/** Opcode 0xdc !11/0. FADD m64r - ST(0) += [m64 real]. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8852
8853
/** Opcode 0xdc !11/1. FMUL m64r - ST(0) *= [m64 real]. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8860
8861
/** Opcode 0xdc !11/2. FCOM m64r - compare ST(0) with a 64-bit real from
 * memory, setting C0/C2/C3 in FSW; no pop. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8894
8895
/** Opcode 0xdc !11/3. FCOMP m64r - compare ST(0) with a 64-bit real from
 * memory, setting C0/C2/C3 in FSW, then pop the stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8928
8929
/** Opcode 0xdc !11/4. FSUB m64r - ST(0) -= [m64 real]. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8936
8937
/** Opcode 0xdc !11/5. FSUBR m64r - ST(0) = [m64 real] - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8944
8945
/** Opcode 0xdc !11/6. FDIV m64r - ST(0) /= [m64 real]. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8952
8953
/** Opcode 0xdc !11/7. FDIVR m64r - ST(0) = [m64 real] / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8960
8961
8962/**
8963 * @opcode 0xdc
8964 */
8965FNIEMOP_DEF(iemOp_EscF4)
8966{
8967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8968 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
8969 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8970 {
8971 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8972 {
8973 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
8974 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
8975 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
8976 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
8977 case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
8978 case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
8979 case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
8980 case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
8981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8982 }
8983 }
8984 else
8985 {
8986 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8987 {
8988 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
8989 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
8990 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
8991 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
8992 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
8993 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
8994 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
8995 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
8996 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8997 }
8998 }
8999}
9000
9001
/** Opcode 0xdd !11/0. FLD m64r - convert a 64-bit real from memory to
 * 80-bit and push it onto the FPU register stack.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires the register that becomes the new ST(0), i.e. the
       current ST(7), to be empty; otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9033
9034
/** Opcode 0xdd !11/1. FISTTP m64i - store ST(0) to memory as a signed
 * 64-bit integer using truncation (round toward zero regardless of FCW.RC),
 * then pop. (Header previously said !11/0; the 0xdd dispatcher routes /1
 * here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the assembly worker stores directly. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store the integer indefinite value if #IA is
           masked, then report the underflow and pop regardless. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9069
9070
/** Opcode 0xdd !11/2. FST m64r - store ST(0) to memory as a 64-bit real;
 * no pop. (Header previously said !11/0; the 0xdd dispatcher routes /2
 * here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the assembly worker stores directly. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, then report the underflow (no pop for FST). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9105
9106
9107
9108
/** Opcode 0xdd !11/3. FSTP m64r - store ST(0) to memory as a 64-bit real
 * and pop the register stack. (Header previously said !11/0; the 0xdd
 * dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing so the assembly worker stores directly. */
    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: store negative QNaN (real indefinite) if #IA is
           masked, then report the underflow and pop regardless. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9143
9144
/** Opcode 0xdd !11/4. FRSTOR - restore the full FPU state (94/108 bytes
 * depending on operand size) from memory; deferred to the C implementation.
 * (Header previously said !11/0; the 0xdd dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9162
9163
/** Opcode 0xdd !11/6. FNSAVE - save the full FPU state (94/108 bytes
 * depending on operand size) to memory without exception checking; deferred
 * to the C implementation. (Header previously said !11/0; the 0xdd
 * dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9182
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory
 * without exception checking. (Header previously said !11/0; the 0xdd
 * dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9207
9208
/** Opcode 0xdd 11/0. FFREE ST(i) - mark register ST(i) as empty in the tag
 * word; the register contents are untouched. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9230
9231
/** Opcode 0xdd 11/2. FST ST(i) - copy ST(0) to ST(i); no pop. (Header
 * previously said 11/1; the 0xdd dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST(0) value in a result with a zero FSW delta and store
           it to the target register. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9256
9257
/** Opcode 0xdd 11/4. FUCOM ST(0),ST(i) - unordered compare, sets C0/C2/C3;
 * no pop. (Header previously said 11/3; the 0xdd dispatcher routes /4
 * here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9264
9265
/** Opcode 0xdd 11/5. FUCOMP ST(0),ST(i) - unordered compare, sets C0/C2/C3,
 * then pops. (Header previously said 11/4; the 0xdd dispatcher routes /5
 * here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9272
9273
9274/**
9275 * @opcode 0xdd
9276 */
9277FNIEMOP_DEF(iemOp_EscF5)
9278{
9279 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9280 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
9281 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9282 {
9283 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9284 {
9285 case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
9286 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
9287 case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
9288 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
9289 case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
9290 case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
9291 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9292 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9293 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9294 }
9295 }
9296 else
9297 {
9298 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9299 {
9300 case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
9301 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
9302 case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
9303 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
9304 case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
9305 case 5: return IEMOP_RAISE_INVALID_OPCODE();
9306 case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
9307 case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
9308 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9309 }
9310 }
9311}
9312
9313
/** Opcode 0xde 11/0.
 * FADDP ST(i),ST(0): add ST(0) into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9320
9321
/** Opcode 0xde 11/1.
 * FMULP ST(i),ST(0): multiply ST(i) by ST(0), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9328
9329
/** Opcode 0xde 0xd9.
 * FCOMPP: compare ST(0) with ST(1), then pop both. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xde 11/4.
 * FSUBRP ST(i),ST(0): reverse subtract into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xde 11/5.
 * FSUBP ST(i),ST(0): subtract ST(0) from ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xde 11/6.
 * FDIVRP ST(i),ST(0): reverse divide into ST(i), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}


/** Opcode 0xde 11/7.
 * FDIVP ST(i),ST(0): divide ST(i) by ST(0), then pop ST(0). */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9368
9369
9370/**
9371 * Common worker for FPU instructions working on ST0 and an m16i, and storing
9372 * the result in ST0.
9373 *
9374 * @param pfnAImpl Pointer to the instruction implementation (assembly).
9375 */
9376FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
9377{
9378 IEM_MC_BEGIN(3, 3);
9379 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9380 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9381 IEM_MC_LOCAL(int16_t, i16Val2);
9382 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9383 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
9384 IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);
9385
9386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9388
9389 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9390 IEM_MC_MAYBE_RAISE_FPU_XCPT();
9391 IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9392
9393 IEM_MC_PREPARE_FPU_USAGE();
9394 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
9395 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
9396 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
9397 IEM_MC_ELSE()
9398 IEM_MC_FPU_STACK_UNDERFLOW(0);
9399 IEM_MC_ENDIF();
9400 IEM_MC_ADVANCE_RIP();
9401
9402 IEM_MC_END();
9403 return VINF_SUCCESS;
9404}
9405
9406
/** Opcode 0xde !11/0.
 * FIADD m16int: add a 16-bit memory integer to ST(0). */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}


/** Opcode 0xde !11/1.
 * FIMUL m16int: multiply ST(0) by a 16-bit memory integer. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9421
9422
/** Opcode 0xde !11/2.
 * FICOM m16int: compare ST(0) with a 16-bit memory integer; only FSW is
 * updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Empty ST(0) records an underflow (with the memory operand for FDP/FDS). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9455
9456
/** Opcode 0xde !11/3.
 * FICOMP m16int: like FICOM m16int but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The _THEN_POP variants pop ST(0) after updating FSW / flagging underflow. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9489
9490
/** Opcode 0xde !11/4.
 * FISUB m16int: subtract a 16-bit memory integer from ST(0). */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}


/** Opcode 0xde !11/5.
 * FISUBR m16int: reverse subtract; ST(0) = m16int - ST(0). */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}


/** Opcode 0xde !11/6.
 * FIDIV m16int: divide ST(0) by a 16-bit memory integer. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}


/** Opcode 0xde !11/7.
 * FIDIVR m16int: reverse divide; ST(0) = m16int / ST(0). */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9521
9522
9523/**
9524 * @opcode 0xde
9525 */
9526FNIEMOP_DEF(iemOp_EscF6)
9527{
9528 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9529 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
9530 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9531 {
9532 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9533 {
9534 case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
9535 case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
9536 case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
9537 case 3: if (bRm == 0xd9)
9538 return FNIEMOP_CALL(iemOp_fcompp);
9539 return IEMOP_RAISE_INVALID_OPCODE();
9540 case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
9541 case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
9542 case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
9543 case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
9544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9545 }
9546 }
9547 else
9548 {
9549 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9550 {
9551 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
9552 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
9553 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
9554 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
9555 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
9556 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
9557 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
9558 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
9559 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9560 }
9561 }
9562}
9563
9564
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp: the
 * register is freed and the stack top is then incremented (i.e. popped
 * without storing anything). */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9586
9587
/** Opcode 0xdf 0xe0.
 * FNSTSW AX: copy the FPU status word into AX without checking for pending
 * FPU exceptions (the "no-wait" form). */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9604
9605
/** Opcode 0xdf 11/5.
 * FUCOMIP ST(0),ST(i): unordered compare into EFLAGS, then pop.
 *
 * NOTE(review): this passes the ordered-compare worker
 * (iemAImpl_fcomi_r80_by_r80), the same one fcomip uses below.  Real FUCOMIP
 * differs from FCOMIP only in its handling of QNaN operands (no \#IA for
 * unordered).  Confirm whether a dedicated fucomi worker exists/should be
 * used here. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9612
9613
/** Opcode 0xdf 11/6.
 * FCOMIP ST(0),ST(i): ordered compare into EFLAGS, then pop. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9620
9621
/** Opcode 0xdf !11/0.
 * FILD m16int: convert a 16-bit memory integer to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) (the register below the current top) to be free;
       otherwise this is a stack (push) overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9653
9654
/** Opcode 0xdf !11/1.
 * FISTTP m16int: store ST(0) to memory as a 16-bit integer using truncation
 * (round toward zero regardless of FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing before touching the FPU state. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): with the invalid-op exception masked, write the
           integer-indefinite value (INT16_MIN); then flag the underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9689
9690
/** Opcode 0xdf !11/2.
 * FIST m16int: store ST(0) to memory as a 16-bit integer (rounded per
 * FCW.RC) without popping. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if \#IA is masked, store integer indefinite (INT16_MIN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9725
9726
/** Opcode 0xdf !11/3.
 * FISTP m16int: like FIST m16int, but pops ST(0) afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if \#IA is masked, store integer indefinite (INT16_MIN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9761
9762
/** Opcode 0xdf !11/4.
 * FBLD m80bcd (load packed BCD) - implementation left as a stub. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9765
9766
/** Opcode 0xdf !11/5.
 * FILD m64int: convert a 64-bit memory integer to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Pushing requires ST(7) to be free; otherwise it is a push overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9798
9799
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd (store packed BCD and pop) - implementation left as a stub. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9802
9803
/** Opcode 0xdf !11/7.
 * FISTP m64int: store ST(0) to memory as a 64-bit integer (rounded per
 * FCW.RC), then pop. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): if \#IA is masked, store integer indefinite (INT64_MIN). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9838
9839
9840/**
9841 * @opcode 0xdf
9842 */
9843FNIEMOP_DEF(iemOp_EscF7)
9844{
9845 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9846 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9847 {
9848 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9849 {
9850 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9851 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9852 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9853 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9854 case 4: if (bRm == 0xe0)
9855 return FNIEMOP_CALL(iemOp_fnstsw_ax);
9856 return IEMOP_RAISE_INVALID_OPCODE();
9857 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9858 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9859 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9860 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9861 }
9862 }
9863 else
9864 {
9865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9866 {
9867 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9868 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9869 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9870 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9871 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9872 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9873 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9874 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9876 }
9877 }
9878}
9879
9880
9881/**
9882 * @opcode 0xe0
9883 */
9884FNIEMOP_DEF(iemOp_loopne_Jb)
9885{
9886 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9887 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9889 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9890
9891 switch (pVCpu->iem.s.enmEffAddrMode)
9892 {
9893 case IEMMODE_16BIT:
9894 IEM_MC_BEGIN(0,0);
9895 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9896 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9897 IEM_MC_REL_JMP_S8(i8Imm);
9898 } IEM_MC_ELSE() {
9899 IEM_MC_ADVANCE_RIP();
9900 } IEM_MC_ENDIF();
9901 IEM_MC_END();
9902 return VINF_SUCCESS;
9903
9904 case IEMMODE_32BIT:
9905 IEM_MC_BEGIN(0,0);
9906 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9907 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9908 IEM_MC_REL_JMP_S8(i8Imm);
9909 } IEM_MC_ELSE() {
9910 IEM_MC_ADVANCE_RIP();
9911 } IEM_MC_ENDIF();
9912 IEM_MC_END();
9913 return VINF_SUCCESS;
9914
9915 case IEMMODE_64BIT:
9916 IEM_MC_BEGIN(0,0);
9917 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9918 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9919 IEM_MC_REL_JMP_S8(i8Imm);
9920 } IEM_MC_ELSE() {
9921 IEM_MC_ADVANCE_RIP();
9922 } IEM_MC_ENDIF();
9923 IEM_MC_END();
9924 return VINF_SUCCESS;
9925
9926 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9927 }
9928}
9929
9930
9931/**
9932 * @opcode 0xe1
9933 */
9934FNIEMOP_DEF(iemOp_loope_Jb)
9935{
9936 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9937 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9938 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9939 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9940
9941 switch (pVCpu->iem.s.enmEffAddrMode)
9942 {
9943 case IEMMODE_16BIT:
9944 IEM_MC_BEGIN(0,0);
9945 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9946 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9947 IEM_MC_REL_JMP_S8(i8Imm);
9948 } IEM_MC_ELSE() {
9949 IEM_MC_ADVANCE_RIP();
9950 } IEM_MC_ENDIF();
9951 IEM_MC_END();
9952 return VINF_SUCCESS;
9953
9954 case IEMMODE_32BIT:
9955 IEM_MC_BEGIN(0,0);
9956 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9957 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9958 IEM_MC_REL_JMP_S8(i8Imm);
9959 } IEM_MC_ELSE() {
9960 IEM_MC_ADVANCE_RIP();
9961 } IEM_MC_ENDIF();
9962 IEM_MC_END();
9963 return VINF_SUCCESS;
9964
9965 case IEMMODE_64BIT:
9966 IEM_MC_BEGIN(0,0);
9967 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9968 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9969 IEM_MC_REL_JMP_S8(i8Imm);
9970 } IEM_MC_ELSE() {
9971 IEM_MC_ADVANCE_RIP();
9972 } IEM_MC_ENDIF();
9973 IEM_MC_END();
9974 return VINF_SUCCESS;
9975
9976 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9977 }
9978}
9979
9980
9981/**
9982 * @opcode 0xe2
9983 */
9984FNIEMOP_DEF(iemOp_loop_Jb)
9985{
9986 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
9987 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9988 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9989 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9990
9991 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
9992 * using the 32-bit operand size override. How can that be restarted? See
9993 * weird pseudo code in intel manual. */
9994 switch (pVCpu->iem.s.enmEffAddrMode)
9995 {
9996 case IEMMODE_16BIT:
9997 IEM_MC_BEGIN(0,0);
9998 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9999 {
10000 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10001 IEM_MC_IF_CX_IS_NZ() {
10002 IEM_MC_REL_JMP_S8(i8Imm);
10003 } IEM_MC_ELSE() {
10004 IEM_MC_ADVANCE_RIP();
10005 } IEM_MC_ENDIF();
10006 }
10007 else
10008 {
10009 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10010 IEM_MC_ADVANCE_RIP();
10011 }
10012 IEM_MC_END();
10013 return VINF_SUCCESS;
10014
10015 case IEMMODE_32BIT:
10016 IEM_MC_BEGIN(0,0);
10017 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10018 {
10019 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10020 IEM_MC_IF_ECX_IS_NZ() {
10021 IEM_MC_REL_JMP_S8(i8Imm);
10022 } IEM_MC_ELSE() {
10023 IEM_MC_ADVANCE_RIP();
10024 } IEM_MC_ENDIF();
10025 }
10026 else
10027 {
10028 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10029 IEM_MC_ADVANCE_RIP();
10030 }
10031 IEM_MC_END();
10032 return VINF_SUCCESS;
10033
10034 case IEMMODE_64BIT:
10035 IEM_MC_BEGIN(0,0);
10036 if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
10037 {
10038 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10039 IEM_MC_IF_RCX_IS_NZ() {
10040 IEM_MC_REL_JMP_S8(i8Imm);
10041 } IEM_MC_ELSE() {
10042 IEM_MC_ADVANCE_RIP();
10043 } IEM_MC_ENDIF();
10044 }
10045 else
10046 {
10047 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10048 IEM_MC_ADVANCE_RIP();
10049 }
10050 IEM_MC_END();
10051 return VINF_SUCCESS;
10052
10053 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10054 }
10055}
10056
10057
10058/**
10059 * @opcode 0xe3
10060 */
10061FNIEMOP_DEF(iemOp_jecxz_Jb)
10062{
10063 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10064 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10066 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10067
10068 switch (pVCpu->iem.s.enmEffAddrMode)
10069 {
10070 case IEMMODE_16BIT:
10071 IEM_MC_BEGIN(0,0);
10072 IEM_MC_IF_CX_IS_NZ() {
10073 IEM_MC_ADVANCE_RIP();
10074 } IEM_MC_ELSE() {
10075 IEM_MC_REL_JMP_S8(i8Imm);
10076 } IEM_MC_ENDIF();
10077 IEM_MC_END();
10078 return VINF_SUCCESS;
10079
10080 case IEMMODE_32BIT:
10081 IEM_MC_BEGIN(0,0);
10082 IEM_MC_IF_ECX_IS_NZ() {
10083 IEM_MC_ADVANCE_RIP();
10084 } IEM_MC_ELSE() {
10085 IEM_MC_REL_JMP_S8(i8Imm);
10086 } IEM_MC_ENDIF();
10087 IEM_MC_END();
10088 return VINF_SUCCESS;
10089
10090 case IEMMODE_64BIT:
10091 IEM_MC_BEGIN(0,0);
10092 IEM_MC_IF_RCX_IS_NZ() {
10093 IEM_MC_ADVANCE_RIP();
10094 } IEM_MC_ELSE() {
10095 IEM_MC_REL_JMP_S8(i8Imm);
10096 } IEM_MC_ENDIF();
10097 IEM_MC_END();
10098 return VINF_SUCCESS;
10099
10100 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10101 }
10102}
10103
10104
/** Opcode 0xe4.
 * IN AL,Ib: read one byte from the immediate-addressed I/O port into AL;
 * deferred to the C implementation. */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}


/** Opcode 0xe5.
 * IN eAX,Ib: read 2 or 4 bytes (by effective operand size) from the
 * immediate-addressed I/O port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xe6.
 * OUT Ib,AL: write AL to the immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}


/** Opcode 0xe7.
 * OUT Ib,eAX: write AX/EAX (2 or 4 bytes by effective operand size) to the
 * immediate-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10143
10144
10145/**
10146 * @opcode 0xe8
10147 */
10148FNIEMOP_DEF(iemOp_call_Jv)
10149{
10150 IEMOP_MNEMONIC(call_Jv, "call Jv");
10151 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10152 switch (pVCpu->iem.s.enmEffOpSize)
10153 {
10154 case IEMMODE_16BIT:
10155 {
10156 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10157 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10158 }
10159
10160 case IEMMODE_32BIT:
10161 {
10162 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10163 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10164 }
10165
10166 case IEMMODE_64BIT:
10167 {
10168 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10169 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10170 }
10171
10172 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10173 }
10174}
10175
10176
10177/**
10178 * @opcode 0xe9
10179 */
10180FNIEMOP_DEF(iemOp_jmp_Jv)
10181{
10182 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10183 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10184 switch (pVCpu->iem.s.enmEffOpSize)
10185 {
10186 case IEMMODE_16BIT:
10187 {
10188 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10189 IEM_MC_BEGIN(0, 0);
10190 IEM_MC_REL_JMP_S16(i16Imm);
10191 IEM_MC_END();
10192 return VINF_SUCCESS;
10193 }
10194
10195 case IEMMODE_64BIT:
10196 case IEMMODE_32BIT:
10197 {
10198 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10199 IEM_MC_BEGIN(0, 0);
10200 IEM_MC_REL_JMP_S32(i32Imm);
10201 IEM_MC_END();
10202 return VINF_SUCCESS;
10203 }
10204
10205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10206 }
10207}
10208
10209
10210/**
10211 * @opcode 0xea
10212 */
10213FNIEMOP_DEF(iemOp_jmp_Ap)
10214{
10215 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10216 IEMOP_HLP_NO_64BIT();
10217
10218 /* Decode the far pointer address and pass it on to the far call C implementation. */
10219 uint32_t offSeg;
10220 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10221 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10222 else
10223 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10224 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10225 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10226 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10227}
10228
10229
10230/**
10231 * @opcode 0xeb
10232 */
10233FNIEMOP_DEF(iemOp_jmp_Jb)
10234{
10235 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10236 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10237 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10238 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10239
10240 IEM_MC_BEGIN(0, 0);
10241 IEM_MC_REL_JMP_S8(i8Imm);
10242 IEM_MC_END();
10243 return VINF_SUCCESS;
10244}
10245
10246
/** Opcode 0xec.
 * IN AL,DX: read one byte from the DX-addressed I/O port into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}


/** Opcode 0xed.
 * IN eAX,DX: read 2 or 4 bytes (by effective operand size) from the
 * DX-addressed I/O port into AX/EAX.
 * NOTE(review): the function name lacks the "in_" prefix its siblings use
 * (iemOp_in_AL_DX etc.); renaming would touch the one-byte dispatch table,
 * so it is only flagged here. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}


/** Opcode 0xee.
 * OUT DX,AL: write AL to the DX-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}


/** Opcode 0xef.
 * OUT DX,eAX: write AX/EAX (2 or 4 bytes by effective operand size) to the
 * DX-addressed I/O port. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10281
10282
10283/**
10284 * @opcode 0xf0
10285 */
10286FNIEMOP_DEF(iemOp_lock)
10287{
10288 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10289 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10290
10291 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10292 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10293}
10294
10295
10296/**
10297 * @opcode 0xf1
10298 */
10299FNIEMOP_DEF(iemOp_int1)
10300{
10301 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10302 IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
10303 /** @todo testcase! */
10304 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
10305}
10306
10307
10308/**
10309 * @opcode 0xf2
10310 */
10311FNIEMOP_DEF(iemOp_repne)
10312{
10313 /* This overrides any previous REPE prefix. */
10314 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10315 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10316 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10317
10318 /* For the 4 entry opcode tables, REPNZ overrides any previous
10319 REPZ and operand size prefixes. */
10320 pVCpu->iem.s.idxPrefix = 3;
10321
10322 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10323 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10324}
10325
10326
10327/**
10328 * @opcode 0xf3
10329 */
10330FNIEMOP_DEF(iemOp_repe)
10331{
10332 /* This overrides any previous REPNE prefix. */
10333 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10334 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10335 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10336
10337 /* For the 4 entry opcode tables, REPNZ overrides any previous
10338 REPNZ and operand size prefixes. */
10339 pVCpu->iem.s.idxPrefix = 2;
10340
10341 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10342 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10343}
10344
10345
10346/**
10347 * @opcode 0xf4
10348 */
10349FNIEMOP_DEF(iemOp_hlt)
10350{
10351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10352 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10353}
10354
10355
10356/**
10357 * @opcode 0xf5
10358 */
10359FNIEMOP_DEF(iemOp_cmc)
10360{
10361 IEMOP_MNEMONIC(cmc, "cmc");
10362 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10363 IEM_MC_BEGIN(0, 0);
10364 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10365 IEM_MC_ADVANCE_RIP();
10366 IEM_MC_END();
10367 return VINF_SUCCESS;
10368}
10369
10370
10371/**
10372 * Common implementation of 'inc/dec/not/neg Eb'.
10373 *
10374 * @param bRm The RM byte.
10375 * @param pImpl The instruction implementation.
10376 */
10377FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10378{
10379 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10380 {
10381 /* register access */
10382 IEM_MC_BEGIN(2, 0);
10383 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10384 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10385 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10386 IEM_MC_REF_EFLAGS(pEFlags);
10387 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10388 IEM_MC_ADVANCE_RIP();
10389 IEM_MC_END();
10390 }
10391 else
10392 {
10393 /* memory access. */
10394 IEM_MC_BEGIN(2, 2);
10395 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10396 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10397 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10398
10399 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10400 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10401 IEM_MC_FETCH_EFLAGS(EFlags);
10402 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10403 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10404 else
10405 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10406
10407 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10408 IEM_MC_COMMIT_EFLAGS(EFlags);
10409 IEM_MC_ADVANCE_RIP();
10410 IEM_MC_END();
10411 }
10412 return VINF_SUCCESS;
10413}
10414
10415
10416/**
10417 * Common implementation of 'inc/dec/not/neg Ev'.
10418 *
10419 * @param bRm The RM byte.
10420 * @param pImpl The instruction implementation.
10421 */
10422FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10423{
10424 /* Registers are handled by a common worker. */
10425 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10426 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10427
10428 /* Memory we do here. */
10429 switch (pVCpu->iem.s.enmEffOpSize)
10430 {
10431 case IEMMODE_16BIT:
10432 IEM_MC_BEGIN(2, 2);
10433 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10434 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10435 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10436
10437 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10438 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10439 IEM_MC_FETCH_EFLAGS(EFlags);
10440 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10441 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10442 else
10443 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10444
10445 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10446 IEM_MC_COMMIT_EFLAGS(EFlags);
10447 IEM_MC_ADVANCE_RIP();
10448 IEM_MC_END();
10449 return VINF_SUCCESS;
10450
10451 case IEMMODE_32BIT:
10452 IEM_MC_BEGIN(2, 2);
10453 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10454 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10455 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10456
10457 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10458 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10459 IEM_MC_FETCH_EFLAGS(EFlags);
10460 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10461 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10462 else
10463 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10464
10465 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10466 IEM_MC_COMMIT_EFLAGS(EFlags);
10467 IEM_MC_ADVANCE_RIP();
10468 IEM_MC_END();
10469 return VINF_SUCCESS;
10470
10471 case IEMMODE_64BIT:
10472 IEM_MC_BEGIN(2, 2);
10473 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10474 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10475 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10476
10477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10478 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10479 IEM_MC_FETCH_EFLAGS(EFlags);
10480 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10481 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10482 else
10483 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10484
10485 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10486 IEM_MC_COMMIT_EFLAGS(EFlags);
10487 IEM_MC_ADVANCE_RIP();
10488 IEM_MC_END();
10489 return VINF_SUCCESS;
10490
10491 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10492 }
10493}
10494
10495
/** Opcode 0xf6 /0 - TEST Eb,Ib: AND the byte operand with an immediate,
 * updating only EFLAGS (the operand is not written back, hence the
 * read-only memory mapping below). */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src,/*=*/u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, u8Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing immediate (1 byte) must be accounted for in the EA calc. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10542
10543
/** Opcode 0xf7 /0 - TEST Ev,Iv: AND the word/dword/qword operand with an
 * immediate, updating only EFLAGS. The 64-bit immediate is a sign-extended
 * 32-bit value, and memory operands are mapped read-only since TEST never
 * writes its destination. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 2 trailing immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* 4 trailing immediate bytes follow the ModR/M encoding. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Still only 4 immediate bytes in 64-bit mode (sign-extended below). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10683
10684
/** Opcode 0xf6 /4, /5, /6 and /7 - byte MUL/IMUL/DIV/IDIV.
 * The byte operand combines with AL, the 16-bit result/remainder lives in AX.
 * The worker returns non-zero on a divide fault, which raises \#DE here.
 *
 * @param bRm The RM byte.
 * @param pfnU8 The byte mul/imul/div/idiv assembly worker.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10736
10737
/** Opcode 0xf7 /4, /5, /6 and /7 - word/dword/qword MUL/IMUL/DIV/IDIV.
 * Operates on xAX and xDX at the effective operand size; a non-zero worker
 * return code raises \#DE. The 32-bit paths explicitly clear the high dwords
 * of RAX/RDX on success, matching hardware zero-extension.
 *
 * NOTE(review): in the register path IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX()
 * is invoked both before the switch and again inside each case; this looks
 * redundant (the second call appears to be a harmless duplicate) - confirm
 * before cleaning up.
 *
 * @param bRm The RM byte.
 * @param pImpl The per-size mul/imul/div/idiv worker table.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
{
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero-extend; mirror that for both halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(4, 1);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint16_t *, pu16AX, 0);
                IEM_MC_ARG(uint16_t *, pu16DX, 1);
                IEM_MC_ARG(uint16_t, u16Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint32_t *, pu32AX, 0);
                IEM_MC_ARG(uint32_t *, pu32DX, 1);
                IEM_MC_ARG(uint32_t, u32Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    /* 32-bit GPR writes zero-extend; mirror that for both halves. */
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(4, 2);
                IEM_MC_ARG(uint64_t *, pu64AX, 0);
                IEM_MC_ARG(uint64_t *, pu64DX, 1);
                IEM_MC_ARG(uint64_t, u64Value, 2);
                IEM_MC_ARG(uint32_t *, pEFlags, 3);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_LOCAL(int32_t, rc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
                IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
                IEM_MC_IF_LOCAL_IS_Z(rc) {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ELSE() {
                    IEM_MC_RAISE_DIVIDE_ERROR();
                } IEM_MC_ENDIF();

                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10921
10922/**
10923 * @opcode 0xf6
10924 */
10925FNIEMOP_DEF(iemOp_Grp3_Eb)
10926{
10927 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10928 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10929 {
10930 case 0:
10931 return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
10932 case 1:
10933/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10934 return IEMOP_RAISE_INVALID_OPCODE();
10935 case 2:
10936 IEMOP_MNEMONIC(not_Eb, "not Eb");
10937 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
10938 case 3:
10939 IEMOP_MNEMONIC(neg_Eb, "neg Eb");
10940 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
10941 case 4:
10942 IEMOP_MNEMONIC(mul_Eb, "mul Eb");
10943 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10944 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
10945 case 5:
10946 IEMOP_MNEMONIC(imul_Eb, "imul Eb");
10947 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10948 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
10949 case 6:
10950 IEMOP_MNEMONIC(div_Eb, "div Eb");
10951 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10952 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
10953 case 7:
10954 IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
10955 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10956 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
10957 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10958 }
10959}
10960
10961
10962/**
10963 * @opcode 0xf7
10964 */
10965FNIEMOP_DEF(iemOp_Grp3_Ev)
10966{
10967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10968 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10969 {
10970 case 0:
10971 return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
10972 case 1:
10973/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
10974 return IEMOP_RAISE_INVALID_OPCODE();
10975 case 2:
10976 IEMOP_MNEMONIC(not_Ev, "not Ev");
10977 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
10978 case 3:
10979 IEMOP_MNEMONIC(neg_Ev, "neg Ev");
10980 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
10981 case 4:
10982 IEMOP_MNEMONIC(mul_Ev, "mul Ev");
10983 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10984 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
10985 case 5:
10986 IEMOP_MNEMONIC(imul_Ev, "imul Ev");
10987 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10988 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
10989 case 6:
10990 IEMOP_MNEMONIC(div_Ev, "div Ev");
10991 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10992 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
10993 case 7:
10994 IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
10995 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
10996 return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
10997 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10998 }
10999}
11000
11001
11002/**
11003 * @opcode 0xf8
11004 */
11005FNIEMOP_DEF(iemOp_clc)
11006{
11007 IEMOP_MNEMONIC(clc, "clc");
11008 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11009 IEM_MC_BEGIN(0, 0);
11010 IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
11011 IEM_MC_ADVANCE_RIP();
11012 IEM_MC_END();
11013 return VINF_SUCCESS;
11014}
11015
11016
11017/**
11018 * @opcode 0xf9
11019 */
11020FNIEMOP_DEF(iemOp_stc)
11021{
11022 IEMOP_MNEMONIC(stc, "stc");
11023 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11024 IEM_MC_BEGIN(0, 0);
11025 IEM_MC_SET_EFL_BIT(X86_EFL_CF);
11026 IEM_MC_ADVANCE_RIP();
11027 IEM_MC_END();
11028 return VINF_SUCCESS;
11029}
11030
11031
11032/**
11033 * @opcode 0xfa
11034 */
11035FNIEMOP_DEF(iemOp_cli)
11036{
11037 IEMOP_MNEMONIC(cli, "cli");
11038 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11039 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
11040}
11041
11042
/**
 * @opcode 0xfb
 *
 * STI: set the interrupt flag; privilege and shadow handling live in the C worker.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11049
11050
11051/**
11052 * @opcode 0xfc
11053 */
11054FNIEMOP_DEF(iemOp_cld)
11055{
11056 IEMOP_MNEMONIC(cld, "cld");
11057 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11058 IEM_MC_BEGIN(0, 0);
11059 IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
11060 IEM_MC_ADVANCE_RIP();
11061 IEM_MC_END();
11062 return VINF_SUCCESS;
11063}
11064
11065
11066/**
11067 * @opcode 0xfd
11068 */
11069FNIEMOP_DEF(iemOp_std)
11070{
11071 IEMOP_MNEMONIC(std, "std");
11072 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11073 IEM_MC_BEGIN(0, 0);
11074 IEM_MC_SET_EFL_BIT(X86_EFL_DF);
11075 IEM_MC_ADVANCE_RIP();
11076 IEM_MC_END();
11077 return VINF_SUCCESS;
11078}
11079
11080
11081/**
11082 * @opcode 0xfe
11083 */
11084FNIEMOP_DEF(iemOp_Grp4)
11085{
11086 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11087 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11088 {
11089 case 0:
11090 IEMOP_MNEMONIC(inc_Eb, "inc Eb");
11091 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
11092 case 1:
11093 IEMOP_MNEMONIC(dec_Eb, "dec Eb");
11094 return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
11095 default:
11096 IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
11097 return IEMOP_RAISE_INVALID_OPCODE();
11098 }
11099}
11100
11101
11102/**
11103 * Opcode 0xff /2.
11104 * @param bRm The RM byte.
11105 */
11106FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
11107{
11108 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
11109 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11110
11111 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11112 {
11113 /* The new RIP is taken from a register. */
11114 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11115 switch (pVCpu->iem.s.enmEffOpSize)
11116 {
11117 case IEMMODE_16BIT:
11118 IEM_MC_BEGIN(1, 0);
11119 IEM_MC_ARG(uint16_t, u16Target, 0);
11120 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11121 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11122 IEM_MC_END()
11123 return VINF_SUCCESS;
11124
11125 case IEMMODE_32BIT:
11126 IEM_MC_BEGIN(1, 0);
11127 IEM_MC_ARG(uint32_t, u32Target, 0);
11128 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11129 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11130 IEM_MC_END()
11131 return VINF_SUCCESS;
11132
11133 case IEMMODE_64BIT:
11134 IEM_MC_BEGIN(1, 0);
11135 IEM_MC_ARG(uint64_t, u64Target, 0);
11136 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11137 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11138 IEM_MC_END()
11139 return VINF_SUCCESS;
11140
11141 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11142 }
11143 }
11144 else
11145 {
11146 /* The new RIP is taken from a register. */
11147 switch (pVCpu->iem.s.enmEffOpSize)
11148 {
11149 case IEMMODE_16BIT:
11150 IEM_MC_BEGIN(1, 1);
11151 IEM_MC_ARG(uint16_t, u16Target, 0);
11152 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11153 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11155 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11156 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
11157 IEM_MC_END()
11158 return VINF_SUCCESS;
11159
11160 case IEMMODE_32BIT:
11161 IEM_MC_BEGIN(1, 1);
11162 IEM_MC_ARG(uint32_t, u32Target, 0);
11163 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11166 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11167 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
11168 IEM_MC_END()
11169 return VINF_SUCCESS;
11170
11171 case IEMMODE_64BIT:
11172 IEM_MC_BEGIN(1, 1);
11173 IEM_MC_ARG(uint64_t, u64Target, 0);
11174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11176 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11177 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11178 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
11179 IEM_MC_END()
11180 return VINF_SUCCESS;
11181
11182 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11183 }
11184 }
11185}
11186
11187typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11188
11189FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11190{
11191 /* Registers? How?? */
11192 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11193 { /* likely */ }
11194 else
11195 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11196
11197 /* Far pointer loaded from memory. */
11198 switch (pVCpu->iem.s.enmEffOpSize)
11199 {
11200 case IEMMODE_16BIT:
11201 IEM_MC_BEGIN(3, 1);
11202 IEM_MC_ARG(uint16_t, u16Sel, 0);
11203 IEM_MC_ARG(uint16_t, offSeg, 1);
11204 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11205 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11206 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11207 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11208 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11209 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11210 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11211 IEM_MC_END();
11212 return VINF_SUCCESS;
11213
11214 case IEMMODE_64BIT:
11215 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11216 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11217 * and call far qword [rsp] encodings. */
11218 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11219 {
11220 IEM_MC_BEGIN(3, 1);
11221 IEM_MC_ARG(uint16_t, u16Sel, 0);
11222 IEM_MC_ARG(uint64_t, offSeg, 1);
11223 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11224 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11225 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11226 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11227 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11228 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11229 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11230 IEM_MC_END();
11231 return VINF_SUCCESS;
11232 }
11233 /* AMD falls thru. */
11234 /* fall thru */
11235
11236 case IEMMODE_32BIT:
11237 IEM_MC_BEGIN(3, 1);
11238 IEM_MC_ARG(uint16_t, u16Sel, 0);
11239 IEM_MC_ARG(uint32_t, offSeg, 1);
11240 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11241 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11242 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11243 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11244 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11245 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11246 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11247 IEM_MC_END();
11248 return VINF_SUCCESS;
11249
11250 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11251 }
11252}
11253
11254
11255/**
11256 * Opcode 0xff /3.
11257 * @param bRm The RM byte.
11258 */
11259FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
11260{
11261 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
11262 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
11263}
11264
11265
11266/**
11267 * Opcode 0xff /4.
11268 * @param bRm The RM byte.
11269 */
11270FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
11271{
11272 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
11273 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11274
11275 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11276 {
11277 /* The new RIP is taken from a register. */
11278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11279 switch (pVCpu->iem.s.enmEffOpSize)
11280 {
11281 case IEMMODE_16BIT:
11282 IEM_MC_BEGIN(0, 1);
11283 IEM_MC_LOCAL(uint16_t, u16Target);
11284 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11285 IEM_MC_SET_RIP_U16(u16Target);
11286 IEM_MC_END()
11287 return VINF_SUCCESS;
11288
11289 case IEMMODE_32BIT:
11290 IEM_MC_BEGIN(0, 1);
11291 IEM_MC_LOCAL(uint32_t, u32Target);
11292 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11293 IEM_MC_SET_RIP_U32(u32Target);
11294 IEM_MC_END()
11295 return VINF_SUCCESS;
11296
11297 case IEMMODE_64BIT:
11298 IEM_MC_BEGIN(0, 1);
11299 IEM_MC_LOCAL(uint64_t, u64Target);
11300 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11301 IEM_MC_SET_RIP_U64(u64Target);
11302 IEM_MC_END()
11303 return VINF_SUCCESS;
11304
11305 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11306 }
11307 }
11308 else
11309 {
11310 /* The new RIP is taken from a memory location. */
11311 switch (pVCpu->iem.s.enmEffOpSize)
11312 {
11313 case IEMMODE_16BIT:
11314 IEM_MC_BEGIN(0, 2);
11315 IEM_MC_LOCAL(uint16_t, u16Target);
11316 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11317 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11318 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11319 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11320 IEM_MC_SET_RIP_U16(u16Target);
11321 IEM_MC_END()
11322 return VINF_SUCCESS;
11323
11324 case IEMMODE_32BIT:
11325 IEM_MC_BEGIN(0, 2);
11326 IEM_MC_LOCAL(uint32_t, u32Target);
11327 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11328 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11330 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11331 IEM_MC_SET_RIP_U32(u32Target);
11332 IEM_MC_END()
11333 return VINF_SUCCESS;
11334
11335 case IEMMODE_64BIT:
11336 IEM_MC_BEGIN(0, 2);
11337 IEM_MC_LOCAL(uint64_t, u64Target);
11338 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11339 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11340 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11341 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11342 IEM_MC_SET_RIP_U64(u64Target);
11343 IEM_MC_END()
11344 return VINF_SUCCESS;
11345
11346 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11347 }
11348 }
11349}
11350
11351
11352/**
11353 * Opcode 0xff /5.
11354 * @param bRm The RM byte.
11355 */
11356FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
11357{
11358 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
11359 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
11360}
11361
11362
11363/**
11364 * Opcode 0xff /6.
11365 * @param bRm The RM byte.
11366 */
11367FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
11368{
11369 IEMOP_MNEMONIC(push_Ev, "push Ev");
11370
11371 /* Registers are handled by a common worker. */
11372 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11373 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11374
11375 /* Memory we do here. */
11376 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
11377 switch (pVCpu->iem.s.enmEffOpSize)
11378 {
11379 case IEMMODE_16BIT:
11380 IEM_MC_BEGIN(0, 2);
11381 IEM_MC_LOCAL(uint16_t, u16Src);
11382 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11383 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11384 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11385 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11386 IEM_MC_PUSH_U16(u16Src);
11387 IEM_MC_ADVANCE_RIP();
11388 IEM_MC_END();
11389 return VINF_SUCCESS;
11390
11391 case IEMMODE_32BIT:
11392 IEM_MC_BEGIN(0, 2);
11393 IEM_MC_LOCAL(uint32_t, u32Src);
11394 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11397 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11398 IEM_MC_PUSH_U32(u32Src);
11399 IEM_MC_ADVANCE_RIP();
11400 IEM_MC_END();
11401 return VINF_SUCCESS;
11402
11403 case IEMMODE_64BIT:
11404 IEM_MC_BEGIN(0, 2);
11405 IEM_MC_LOCAL(uint64_t, u64Src);
11406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11407 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11409 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11410 IEM_MC_PUSH_U64(u64Src);
11411 IEM_MC_ADVANCE_RIP();
11412 IEM_MC_END();
11413 return VINF_SUCCESS;
11414
11415 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11416 }
11417}
11418
11419
11420/**
11421 * @opcode 0xff
11422 */
11423FNIEMOP_DEF(iemOp_Grp5)
11424{
11425 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
11426 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
11427 {
11428 case 0:
11429 IEMOP_MNEMONIC(inc_Ev, "inc Ev");
11430 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
11431 case 1:
11432 IEMOP_MNEMONIC(dec_Ev, "dec Ev");
11433 return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
11434 case 2:
11435 return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
11436 case 3:
11437 return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
11438 case 4:
11439 return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
11440 case 5:
11441 return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
11442 case 6:
11443 return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
11444 case 7:
11445 IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
11446 return IEMOP_RAISE_INVALID_OPCODE();
11447 }
11448 AssertFailedReturn(VERR_IEM_IPE_3);
11449}
11450
11451
11452
/**
 * Decoder dispatch table for the one-byte opcode map (0x00..0xff), one
 * FNIEMOP entry per opcode.  Declared extern at the top of this file so the
 * prefix/escape handlers can re-enter it.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11520
11521
11522/** @} */
11523
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette