VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65869

Last change on this file since 65869 was 65834, checked in by vboxsync, 8 years ago

IEMAllInstructionsPython.py: some more tinkering.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 366.2 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65834 2017-02-21 16:21:36Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/** @def og_gen General
25 * @{
26 */
27
28/** @def og_gen_arith Arithmetic
29 * @{
30 */
31/** @defgroup og_gen_arith_bin Binary numbers */
32/** @defgroup og_gen_arith_dec Decimal numbers */
33/** @} */
34
35
36
37/** @name One byte opcodes.
38 * @{
39 */
40
41/* Instruction specification format - work in progress: */
42/*
43 *
44 * @opmnemonic add
45 * @op1 reg:Eb
46 * @op2 rm:Gb
47 * @opmaps one
48 * @oppfx none
49 * @opcode 0x00
50 * @openc ModR/M
51 * @opfltest none
52 * @opflmodify of,sf,zf,af,pf,cf
53 * @opflundef none
54 * @opflset none
55 * @opflclear none
56 * @ophints harmless
57 * @opstats add_Eb_Gb
58 * @opgroup op_gen_arith_bin
59 * @optest op1=1 op2=1 -> op1=2 efl=of,sf,zf,af
60 * @optest o32 / op1=0xfffffffe op2=1 -> op1=0xffffffff efl=af,pe,up
61 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8 - byte add with r/m as destination (flags per @opflmodify above). */
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
67
68
/** Opcode 0x01 - add Ev,Gv: word/dword/qword ADD, r/m operand is the destination. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
75
76
/** Opcode 0x02 - add Gb,Eb: byte ADD, general register is the destination. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
83
84
/** Opcode 0x03 - add Gv,Ev: word/dword/qword ADD, general register is the destination. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
91
92
/** Opcode 0x04 - add al,Ib: AL += immediate byte. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
99
100
/** Opcode 0x05 - add rAX,Iz: accumulator += operand-size immediate. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
107
108
/** Opcode 0x06 - push es. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
115
116
117/** Opcode 0x07. */
118FNIEMOP_DEF(iemOp_pop_ES)
119{
120 IEMOP_MNEMONIC(pop_es, "pop es");
121 IEMOP_HLP_NO_64BIT();
122 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
123 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
124}
125
126
/** Opcode 0x08 - or Eb,Gb: byte OR, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
134
135
/** Opcode 0x09 - or Ev,Gv: word/dword/qword OR, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
143
144
/** Opcode 0x0a - or Gb,Eb: byte OR, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
152
153
/** Opcode 0x0b - or Gv,Ev: word/dword/qword OR, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
161
162
/** Opcode 0x0c - or al,Ib: AL |= immediate byte; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
170
171
/** Opcode 0x0d - or rAX,Iz: accumulator |= operand-size immediate; AF is left undefined. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
179
180
/** Opcode 0x0e - push cs. */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
187
188
/** Opcode 0x0f - escape byte introducing the two-byte opcode map. */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    /* One-shot sanity check (strict builds only): verify the two-byte map is
       laid out with 4 entries per opcode, indexed by the mandatory-prefix
       index (idxPrefix).  0x0fbc is a good probe because only its F3-prefixed
       slot (tzcnt) differs from the other three (bsf). */
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);   /* fetch the second opcode byte */

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    /* Dispatch into the two-byte map, 4 slots per opcode selected by prefix index. */
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
211
/** Opcode 0x10 - adc Eb,Gb: byte add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
218
219
/** Opcode 0x11 - adc Ev,Gv: word/dword/qword add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
226
227
/** Opcode 0x12 - adc Gb,Eb: byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
234
235
/** Opcode 0x13 - adc Gv,Ev: word/dword/qword add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
242
243
/** Opcode 0x14 - adc al,Ib: AL += imm8 + CF. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
250
251
/** Opcode 0x15 - adc rAX,Iz: accumulator += operand-size immediate + CF. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
258
259
/** Opcode 0x16 - push ss. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
266
267
/** Opcode 0x17 - pop ss. Not available in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to the C implementation of segment-register pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
276
277
/** Opcode 0x18 - sbb Eb,Gb: byte subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
284
285
/** Opcode 0x19 - sbb Ev,Gv: word/dword/qword subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
292
293
/** Opcode 0x1a - sbb Gb,Eb: byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
300
301
/** Opcode 0x1b - sbb Gv,Ev: word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
308
309
/** Opcode 0x1c - sbb al,Ib: AL -= imm8 + CF. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
316
317
/** Opcode 0x1d - sbb rAX,Iz: accumulator -= operand-size immediate + CF. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
324
325
/** Opcode 0x1e - push ds. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
332
333
/** Opcode 0x1f - pop ds. Not available in 64-bit mode. */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    /* Deferred to the C implementation of segment-register pop. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
342
343
/** Opcode 0x20 - and Eb,Gb: byte AND, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
351
352
/** Opcode 0x21 - and Ev,Gv: word/dword/qword AND, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
360
361
/** Opcode 0x22 - and Gb,Eb: byte AND, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
369
370
/** Opcode 0x23 - and Gv,Ev: word/dword/qword AND, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
378
379
/** Opcode 0x24 - and al,Ib: AL &= immediate byte; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
387
388
/** Opcode 0x25 - and rAX,Iz: accumulator &= operand-size immediate; AF is left undefined. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
396
397
/** Opcode 0x26 - ES segment-override prefix: record it and decode the real opcode. */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;   /* remember the prefix */
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;            /* ES becomes the effective segment */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);      /* continue decoding the prefixed opcode */
}
408
409
/** Opcode 0x27 - daa. Decimal-adjust AL after addition; not available in 64-bit mode. */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);   /* OF is left undefined by this instruction */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
419
420
/** Opcode 0x28 - sub Eb,Gb: byte subtract, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
427
428
/** Opcode 0x29 - sub Ev,Gv: word/dword/qword subtract, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
435
436
/** Opcode 0x2a - sub Gb,Eb: byte subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
443
444
/** Opcode 0x2b - sub Gv,Ev: word/dword/qword subtract, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
451
452
/** Opcode 0x2c - sub al,Ib: AL -= immediate byte. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
459
460
/** Opcode 0x2d - sub rAX,Iz: accumulator -= operand-size immediate. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
467
468
/** Opcode 0x2e - CS segment-override prefix: record it and decode the real opcode. */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;   /* remember the prefix */
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;            /* CS becomes the effective segment */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);      /* continue decoding the prefixed opcode */
}
479
480
/** Opcode 0x2f - das. Decimal-adjust AL after subtraction; not available in 64-bit mode. */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);   /* OF is left undefined by this instruction */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
490
491
/** Opcode 0x30 - xor Eb,Gb: byte XOR, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
499
500
/** Opcode 0x31 - xor Ev,Gv: word/dword/qword XOR, r/m destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
508
509
/** Opcode 0x32 - xor Gb,Eb: byte XOR, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
517
518
/** Opcode 0x33 - xor Gv,Ev: word/dword/qword XOR, register destination; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
526
527
/** Opcode 0x34 - xor al,Ib: AL ^= immediate byte; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
535
536
/** Opcode 0x35 - xor rAX,Iz: accumulator ^= operand-size immediate; AF is left undefined. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
544
545
/** Opcode 0x36 - SS segment-override prefix: record it and decode the real opcode. */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;   /* remember the prefix */
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;            /* SS becomes the effective segment */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);      /* continue decoding the prefixed opcode */
}
556
557
/** Opcode 0x37 - aaa. Not implemented yet (FNIEMOP_STUB placeholder). */
FNIEMOP_STUB(iemOp_aaa);
560
561
/** Opcode 0x38 - cmp Eb,Gb: byte compare (subtract discarding result), r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
568
569
/** Opcode 0x39 - cmp Ev,Gv: word/dword/qword compare, r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
576
577
/** Opcode 0x3a - cmp Gb,Eb: byte compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
584
585
/** Opcode 0x3b - cmp Gv,Ev: word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
592
593
/** Opcode 0x3c - cmp al,Ib: compare AL with immediate byte. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
600
601
/** Opcode 0x3d - cmp rAX,Iz: compare accumulator with operand-size immediate. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
608
609
/** Opcode 0x3e - DS segment-override prefix: record it and decode the real opcode. */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;   /* remember the prefix */
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;            /* DS becomes the effective segment */

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);      /* continue decoding the prefixed opcode */
}
620
621
/** Opcode 0x3f - aas. Not implemented yet (FNIEMOP_STUB placeholder). */
FNIEMOP_STUB(iemOp_aas);
624
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the current effective operand size and applies the given
 * unary worker to the specified general register in place, with EFLAGS
 * passed by reference for the worker to update.  The 32-bit case clears
 * the high half of the 64-bit register afterwards.
 *
 * @param   pImpl   Unary operation worker table (pfnNormalU16/U32/U64 used).
 * @param   iReg    General register index (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);   /* 32-bit writes zero bits 63:32 */
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached - all IEMMODE values are handled above; keeps the compiler quiet. */
    return VINF_SUCCESS;
}
669
670
/** Opcode 0x40 - inc eAX, or the plain REX prefix in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);   /* decode the real opcode */
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
689
690
/** Opcode 0x41 - inc eCX, or REX.B in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3;   /* REX.B extends the r/m / base / opcode register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
710
711
/** Opcode 0x42 - inc eDX, or REX.X in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3;   /* REX.X extends the SIB index register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
731
732
733
/** Opcode 0x43 - inc eBX, or REX.BX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
754
755
/** Opcode 0x44 - inc eSP, or REX.R in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3;   /* REX.R extends the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
775
776
/** Opcode 0x45 - inc eBP, or REX.RB in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
797
798
/** Opcode 0x46 - inc eSI, or REX.RX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
819
820
/** Opcode 0x47 - inc eDI, or REX.RBX in 64-bit mode. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
842
843
/** Opcode 0x48 - dec eAX, or REX.W in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu);   /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
863
864
/** Opcode 0x49 - dec eCX, or REX.BW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
885
886
/** Opcode 0x4a - dec eDX, or REX.XW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
907
908
/** Opcode 0x4b - dec eBX, or REX.BXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
930
931
/** Opcode 0x4c - dec eSP, or REX.RW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
952
953
/** Opcode 0x4d - dec eBP, or REX.RBW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
975
976
/** Opcode 0x4e - dec eSI, or REX.RXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
998
999
/** Opcode 0x4f - dec eDI, or REX.RBXW in 64-bit mode. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1022
1023
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended by REX.B, the default
 * operand size becomes 64-bit, and the effective size is forced to 64-bit
 * or 16-bit (no 32-bit pushes in long mode).  Then the register value is
 * fetched and pushed at the effective operand size.
 *
 * @param   iReg    General register index (X86_GREG_XXX) before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1069
1070
/** Opcode 0x50 - push rAX (REX.B may select r8 in the common helper). */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1077
1078
/** Opcode 0x51 - push rCX. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1085
1086
/** Opcode 0x52 - push rDX. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1093
1094
/** Opcode 0x53 - push rBX. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1101
1102
/** Opcode 0x54 - push rSP. On the 8086 target this pushes SP-2 (the
 *  post-decrement value) instead of the original SP. */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        /* 8086 quirk: the pushed value is SP after it has been decremented.
           NOTE(review): this path relies on IEM_MC_END() returning from the
           function so the common push below is not also executed - confirm
           against the macro definition. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1119
1120
/** Opcode 0x55 - push rBP. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1127
1128
/** Opcode 0x56 - push rSI. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1135
1136
/** Opcode 0x57 - push rDI. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1143
1144
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended by REX.B, the default
 * operand size becomes 64-bit, and the effective size is forced to 64-bit
 * or 16-bit (no 32-bit pops in long mode).  Pops directly into the target
 * register via a reference; the 32-bit case clears the high half.
 *
 * @param   iReg    General register index (X86_GREG_XXX) before REX.B extension.
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1191
1192
/** Opcode 0x58 - pop rAX. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1199
1200
/** Opcode 0x59 - pop rCX. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1207
1208
1209/** Opcode 0x5a. */
1210FNIEMOP_DEF(iemOp_pop_eDX)
1211{
1212 IEMOP_MNEMONIC(pop_rDX, "pop rDX");
1213 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
1214}
1215
1216
1217/** Opcode 0x5b. */
1218FNIEMOP_DEF(iemOp_pop_eBX)
1219{
1220 IEMOP_MNEMONIC(pop_rBX, "pop rBX");
1221 return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
1222}
1223
1224
/** Opcode 0x5c - pop rSP.
 * Special-cased because the destination is the stack pointer itself: the
 * popped value is read into a local first and then stored, so it overwrites
 * whatever SP adjustment the pop performed. */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is really 'pop r12', so the common helper applies. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1272
1273
/** Opcode 0x5d - pop rBP (via the common pop-register helper). */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}


/** Opcode 0x5e - pop rSI. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}


/** Opcode 0x5f - pop rDI. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1296
1297
/** Opcode 0x60 - pusha/pushad. 186+ only; invalid in 64-bit mode.
 * Deferred to a C implementation since it touches eight registers. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    /* Outside 64-bit mode the only other effective operand size is 32-bit. */
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1309
1310
/** Opcode 0x61 - popa/popad outside 64-bit mode; in 64-bit mode the byte is
 * the (unsupported) MVEX prefix and raises \#UD. */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1328
1329
/** Opcode 0x62 - bound Gv,Ma outside 64-bit mode / EVEX prefix byte in
 * 64-bit mode (per the function name); not implemented yet. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma__evex);
// IEMOP_HLP_MIN_186(); /* reminder for when the stub gets implemented */
1333
1334
/** Opcode 0x63 - non-64-bit modes: arpl Ew,Gw.
 * Adjusts the RPL field of the destination selector; 286+, protected mode
 * only (the helper below rejects real and V86 mode). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory: read-modify-write on the destination word. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1384
1385
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source into the
         * 64-bit destination register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1427
1428
/** Opcode 0x64 - FS segment override prefix (386+).
 * Records the prefix and effective segment, then decodes the next byte. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1441
1442
/** Opcode 0x65 - GS segment override prefix (386+).
 * Records the prefix and effective segment, then decodes the next byte. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Continue decoding with the following opcode byte. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1455
1456
/** Opcode 0x66 - operand-size override prefix (386+).
 * Flags the prefix, recalculates the effective operand size, and continues
 * decoding with the next byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1474
1475
/** Opcode 0x67 - address-size override prefix (386+).
 * Toggles the effective address mode relative to the default one, then
 * continues decoding with the next byte. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        /* In long mode 0x67 selects 32-bit addressing (16-bit isn't available). */
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1494
1495
/** Opcode 0x68 - push Iz (word/dword immediate; 186+).
 * In 64-bit mode the immediate is a sign-extended dword and the push is
 * 64-bit wide (default-64 operand size). */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Sign-extend the 32-bit immediate to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1540
1541
/** Opcode 0x69 - imul Gv,Ev,Iz (three-operand signed multiply; 186+).
 * SF/ZF/AF/PF are undefined after this instruction, hence the verification
 * mask below. For the memory forms the effective address is calculated
 * before the immediate is fetched. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* NOTE(review): the last argument appears to be the number of
                   immediate bytes still to come (2 here) - confirm against the
                   IEM_MC_CALC_RM_EFF_ADDR definition. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; the immediate is a sign-extended dword */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
1701
1702
/** Opcode 0x6a - push Ib (186+).
 * The signed byte immediate is sign-extended to the effective operand size
 * (implicitly, via the int8_t argument to the push macros). */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1729
1730
/** Opcode 0x6b - imul Gv,Ev,Ib (three-operand signed multiply with byte
 * immediate; 186+). The byte immediate is sign-extended to the operand size.
 * SF/ZF/AF/PF are undefined after this instruction. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
1884
1885
/** Opcode 0x6c - ins Yb,DX (186+).
 * Dispatches to the plain or REP-prefixed C implementation, selected per
 * effective address mode; all variants are deferred to C code. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1914
1915
/** Opcode 0x6d - ins Yv,DX (186+).
 * Dispatches on operand size x address mode to the matching deferred C
 * implementation. Note that the 64-bit operand size falls through to the
 * 32-bit case (there is no op64 ins variant in the dispatch below). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - same as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - same as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1976
1977
/** Opcode 0x6e - outs DX,Yb (186+).
 * Dispatches to the plain or REP-prefixed C implementation per effective
 * address mode; the effective segment is passed along since outs reads
 * from a (possibly overridden) source segment. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2006
2007
/** Opcode 0x6f - outs DX,Yv (186+).
 * Dispatches on operand size x address mode to the matching deferred C
 * implementation; 64-bit operand size shares the 32-bit implementation. */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - same as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* fall thru - same as 32-bit. */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2068
2069
/** Opcode 0x70 - jo Jb: jump short if OF is set. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x71 - jno Jb: jump short if OF is clear (branches inverted
 * relative to 0x70). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2106
/** Opcode 0x72 - jc/jb/jnae Jb: jump short if CF is set. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x73 - jnc/jnb/jae Jb: jump short if CF is clear. */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2143
2144
/** Opcode 0x74 - je/jz Jb: jump short if ZF is set. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}


/** Opcode 0x75 - jne/jnz Jb: jump short if ZF is clear. */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2181
2182
/** Opcode 0x76. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Jump taken when CF or ZF is set. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2200
2201
/** Opcode 0x77. */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted test: jump is taken when both CF and ZF are clear, so the
       taken path lives in the ELSE arm. */
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2219
2220
/** Opcode 0x78. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Jump taken when SF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2238
2239
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted test: jump is taken when SF is clear, so the taken path
       lives in the ELSE arm. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2257
2258
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Jump taken when PF is set. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2276
2277
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted test: jump is taken when PF is clear, so the taken path
       lives in the ELSE arm. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2295
2296
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Jump taken when SF != OF. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2314
2315
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted test: jump is taken when SF == OF, so the taken path
       lives in the ELSE arm. */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2333
2334
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Jump taken when ZF is set or SF != OF. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2352
2353
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    /* Fetch the signed 8-bit displacement, finish decoding (LOCK not allowed)
       and apply the default 64-bit operand size handling. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted test: jump is taken when ZF is clear and SF == OF, so the
       taken path lives in the ELSE arm. */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2371
2372
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1 byte-sized immediate ops: the ModR/M reg field selects the
     * operation (add/or/adc/sbb/and/sub/xor/cmp Eb,Ib) and indexes the
     * shared implementation table g_apIemImplGrp1.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant (pfnLockedU8 is NULL) and only reads the
           destination; everything else maps it read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address is calculated first (cbImm=1 accounts for
           the trailing immediate byte), then the immediate is fetched. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2442
2443
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1 with full-sized immediate: reg field selects the operation
     * (add/or/adc/sbb/and/sub/xor/cmp Ev,Iz).  The immediate is imm16 or
     * imm32 depending on operand size; in 64-bit mode an imm32 is fetched
     * and sign-extended to 64 bits.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked variant and only reads the destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: the imm16 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked variant and only reads the destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the imm32 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* imm32 sign-extended to 64 bits - there is no imm64 form. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                /* CMP has no locked variant and only reads the destination. */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: the immediate is still only 32 bits wide here. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
2631
2632
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of the 0x80 group 1 Eb,Ib encoding that is only
       valid outside 64-bit mode; reject it there, then forward. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
2639
2640
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1 with sign-extended imm8: reg field selects the operation
     * (add/or/adc/sbb/and/sub/xor/cmp Ev,Ib); the imm8 is sign-extended
     * to the effective operand size.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        /* NOTE(review): decoding is flagged as done *before* the imm8 is
           fetched here, the reverse of the 0x80/0x81 register paths -
           confirm this ordering is intentional. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The (int8_t) cast sign-extends the imm8 to 16 bits. */
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                /* The (int8_t) cast sign-extends the imm8 to 32 bits. */
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                /* The (int8_t) cast sign-extends the imm8 to 64 bits. */
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP has no locked variant (pfnLockedU16 is NULL for it) and only
           reads the destination; everything else maps it read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: the trailing imm8 follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
2823
2824
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is declared undefined for verification purposes. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Decoding and emulation are shared with the other rm,r8 binary ops. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
2832
2833
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is declared undefined for verification purposes. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Decoding and emulation are shared with the other rm,rv binary ops. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
2841
2842
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register: fetch both, then store cross-wise. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        /* Map the memory operand read-write, take the register by reference,
           and let the assembly helper swap the two in place. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2890
2891
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register-register: fetch both, then store cross-wise, per operand size. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        /* Map the memory operand read-write, take the register by reference,
           and let the assembly helper swap the two in place. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
3013
3014
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register byte move. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3054
3055
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register move, per operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3145
3146
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register-to-register byte move (direction reversed vs 0x88). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3184
3185
/** Opcode 0x8b - mov Gv,Ev: load a word/dword/qword general register from r/m,
 *  switching on the effective operand size. */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3275
3276
/** Opcode 0x63 - mode dependent dispatcher:
 *  Outside 64-bit mode this is ARPL Ew,Gw; in 64-bit mode it is MOVSXD
 *  Gv,Ev (falling back to a plain mov Gv,Ev when the effective operand
 *  size isn't 64-bit, since there is then nothing to sign extend into). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
3286
3287
/** Opcode 0x8c - mov Ev,Sw: store a segment register to r/m. */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg); /* zero extended - see operand size note above */
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory.  The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3361
3362
3363
3364
/** Opcode 0x8d - lea Gv,M: store the effective address of the memory operand
 *  in a general register, truncating it to the effective operand size.
 *  The register form of ModR/M is invalid (\#UD). */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7); /* unreachable - all operand sizes handled above */
}
3411
3412
/** Opcode 0x8e - mov Sw,Ev: load a segment register from r/m16.  Loading CS
 *  this way is invalid (\#UD); the actual load goes through iemCImpl_load_SReg
 *  which performs the descriptor checks. */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction.  The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory.  The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t,      u16Value,          1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3467
3468
/** Opcode 0x8f /0 - pop Ev.
 *
 * Pops the top of stack into a register or memory operand.  The memory form
 * is implemented directly (not via IEM_MC) because Intel specifies that rSP
 * is incremented before the effective address is calculated, which does not
 * fit the usual decode-then-commit flow. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX        pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The last argument is the offset already popped off rSP (operand size in
       bytes), so the effective address sees the post-increment stack pointer. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        /* Commit rSP only after both the pop and the store succeeded. */
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3563
3564
3565/** Opcode 0x8f. */
3566FNIEMOP_DEF(iemOp_Grp1A__xop)
3567{
3568 /*
3569 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
3570 * three byte VEX prefix, except that the mmmmm field cannot have the values
3571 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
3572 */
3573 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3574 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
3575 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
3576
3577 IEMOP_MNEMONIC(xop, "xop");
3578 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
3579 {
3580 /** @todo Test when exctly the XOP conformance checks kick in during
3581 * instruction decoding and fetching (using \#PF). */
3582 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
3583 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
3584 if ( ( pVCpu->iem.s.fPrefixes
3585 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
3586 == 0)
3587 {
3588 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
3589 if (bXop2 & 0x80 /* XOP.W */)
3590 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
3591 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
3592 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
3593 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
3594 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
3595 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
3596 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
3597
3598 /** @todo XOP: Just use new tables and decoders. */
3599 switch (bRm & 0x1f)
3600 {
3601 case 8: /* xop opcode map 8. */
3602 IEMOP_BITCH_ABOUT_STUB();
3603 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3604
3605 case 9: /* xop opcode map 9. */
3606 IEMOP_BITCH_ABOUT_STUB();
3607 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3608
3609 case 10: /* xop opcode map 10. */
3610 IEMOP_BITCH_ABOUT_STUB();
3611 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3612
3613 default:
3614 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
3615 return IEMOP_RAISE_INVALID_OPCODE();
3616 }
3617 }
3618 else
3619 Log(("XOP: Invalid prefix mix!\n"));
3620 }
3621 else
3622 Log(("XOP: XOP support disabled!\n"));
3623 return IEMOP_RAISE_INVALID_OPCODE();
3624}
3625
3626
3627/**
3628 * Common 'xchg reg,rAX' helper.
3629 */
3630FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
3631{
3632 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3633
3634 iReg |= pVCpu->iem.s.uRexB;
3635 switch (pVCpu->iem.s.enmEffOpSize)
3636 {
3637 case IEMMODE_16BIT:
3638 IEM_MC_BEGIN(0, 2);
3639 IEM_MC_LOCAL(uint16_t, u16Tmp1);
3640 IEM_MC_LOCAL(uint16_t, u16Tmp2);
3641 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
3642 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
3643 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
3644 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
3645 IEM_MC_ADVANCE_RIP();
3646 IEM_MC_END();
3647 return VINF_SUCCESS;
3648
3649 case IEMMODE_32BIT:
3650 IEM_MC_BEGIN(0, 2);
3651 IEM_MC_LOCAL(uint32_t, u32Tmp1);
3652 IEM_MC_LOCAL(uint32_t, u32Tmp2);
3653 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
3654 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
3655 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
3656 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
3657 IEM_MC_ADVANCE_RIP();
3658 IEM_MC_END();
3659 return VINF_SUCCESS;
3660
3661 case IEMMODE_64BIT:
3662 IEM_MC_BEGIN(0, 2);
3663 IEM_MC_LOCAL(uint64_t, u64Tmp1);
3664 IEM_MC_LOCAL(uint64_t, u64Tmp2);
3665 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
3666 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
3667 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
3668 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
3669 IEM_MC_ADVANCE_RIP();
3670 IEM_MC_END();
3671 return VINF_SUCCESS;
3672
3673 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3674 }
3675}
3676
3677
3678/** Opcode 0x90. */
3679FNIEMOP_DEF(iemOp_nop)
3680{
3681 /* R8/R8D and RAX/EAX can be exchanged. */
3682 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
3683 {
3684 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
3685 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
3686 }
3687
3688 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3689 IEMOP_MNEMONIC(pause, "pause");
3690 else
3691 IEMOP_MNEMONIC(nop, "nop");
3692 IEM_MC_BEGIN(0, 0);
3693 IEM_MC_ADVANCE_RIP();
3694 IEM_MC_END();
3695 return VINF_SUCCESS;
3696}
3697
3698
/** Opcode 0x91 - xchg rCX,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
3705
3706
/** Opcode 0x92 - xchg rDX,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
3713
3714
/** Opcode 0x93 - xchg rBX,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
3721
3722
3723/** Opcode 0x94. */
3724FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
3725{
3726 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
3727 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
3728}
3729
3730
/** Opcode 0x95 - xchg rBP,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
3737
3738
/** Opcode 0x96 - xchg rSI,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
3745
3746
/** Opcode 0x97 - xchg rDI,rAX (operand size selects 16/32/64-bit). */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
3753
3754
/** Opcode 0x98 - cbw/cwde/cdqe: sign extend AL->AX, AX->EAX or EAX->RAX,
 *  implemented by testing the source's sign bit and ORing/ANDing the
 *  extension bits into rAX. */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {  /* sign bit of AL */
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* sign bit of AX */
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* sign bit of EAX */
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3800
3801
/** Opcode 0x99 - cwd/cdq/cqo: sign extend rAX into rDX by filling rDX with
 *  all-ones or all-zeros depending on rAX's sign bit. */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {  /* sign bit of AX */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {  /* sign bit of EAX */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {  /* sign bit of RAX */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3847
3848
/** Opcode 0x9a - call Ap: direct far call with a ptr16:16 / ptr16:32
 *  immediate operand.  Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel); /* segment selector follows the offset */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
3865
3866
/** Opcode 0x9b (aka fwait) - checks for pending x87 exceptions / device
 *  not available, otherwise a no-op. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE(); /* #NM if CR0.MP && CR0.TS */
    IEM_MC_MAYBE_RAISE_FPU_XCPT();                  /* #MF if unmasked pending x87 exception */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3880
3881
/** Opcode 0x9c - pushf Fv: push the flags register; deferred to the C
 *  implementation which handles VM/IOPL checks. */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
3889
3890
/** Opcode 0x9d - popf Fv: pop into the flags register; deferred to the C
 *  implementation which handles the privilege-sensitive flag updates. */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* stack ops default to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
3898
3899
/** Opcode 0x9e - sahf: store AH into the low byte of EFLAGS (SF, ZF, AF,
 *  PF, CF; bit 1 forced to one).  Invalid in 64-bit mode unless the CPU
 *  reports the LAHF/SAHF feature. */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/); /* AH is encoded as SPL's slot w/o REX */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00)); /* keep the upper flag bits unchanged */
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);           /* bit 1 always reads as one */
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3922
3923
/** Opcode 0x9f - lahf: load AH from the low byte of EFLAGS.  Invalid in
 *  64-bit mode unless the CPU reports the LAHF/SAHF feature. */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags); /* AH is encoded as SPL's slot w/o REX */
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3940
3941
3942/**
3943 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
3944 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
3945 * prefixes. Will return on failures.
3946 * @param a_GCPtrMemOff The variable to store the offset in.
3947 */
3948#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
3949 do \
3950 { \
3951 switch (pVCpu->iem.s.enmEffAddrMode) \
3952 { \
3953 case IEMMODE_16BIT: \
3954 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
3955 break; \
3956 case IEMMODE_32BIT: \
3957 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
3958 break; \
3959 case IEMMODE_64BIT: \
3960 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
3961 break; \
3962 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3963 } \
3964 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3965 } while (0)
3966
3967/** Opcode 0xa0. */
3968FNIEMOP_DEF(iemOp_mov_Al_Ob)
3969{
3970 /*
3971 * Get the offset and fend of lock prefixes.
3972 */
3973 RTGCPTR GCPtrMemOff;
3974 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3975
3976 /*
3977 * Fetch AL.
3978 */
3979 IEM_MC_BEGIN(0,1);
3980 IEM_MC_LOCAL(uint8_t, u8Tmp);
3981 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
3982 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
3983 IEM_MC_ADVANCE_RIP();
3984 IEM_MC_END();
3985 return VINF_SUCCESS;
3986}
3987
3988
/** Opcode 0xa1 - mov rAX,Ov: load rAX from an absolute moffs address,
 *  switching on the effective operand size. */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4034
4035
4036/** Opcode 0xa2. */
4037FNIEMOP_DEF(iemOp_mov_Ob_AL)
4038{
4039 /*
4040 * Get the offset and fend of lock prefixes.
4041 */
4042 RTGCPTR GCPtrMemOff;
4043 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4044
4045 /*
4046 * Store AL.
4047 */
4048 IEM_MC_BEGIN(0,1);
4049 IEM_MC_LOCAL(uint8_t, u8Tmp);
4050 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4051 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4052 IEM_MC_ADVANCE_RIP();
4053 IEM_MC_END();
4054 return VINF_SUCCESS;
4055}
4056
4057
4058/** Opcode 0xa3. */
4059FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4060{
4061 /*
4062 * Get the offset and fend of lock prefixes.
4063 */
4064 RTGCPTR GCPtrMemOff;
4065 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4066
4067 /*
4068 * Store rAX.
4069 */
4070 switch (pVCpu->iem.s.enmEffOpSize)
4071 {
4072 case IEMMODE_16BIT:
4073 IEM_MC_BEGIN(0,1);
4074 IEM_MC_LOCAL(uint16_t, u16Tmp);
4075 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4076 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4077 IEM_MC_ADVANCE_RIP();
4078 IEM_MC_END();
4079 return VINF_SUCCESS;
4080
4081 case IEMMODE_32BIT:
4082 IEM_MC_BEGIN(0,1);
4083 IEM_MC_LOCAL(uint32_t, u32Tmp);
4084 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4085 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4086 IEM_MC_ADVANCE_RIP();
4087 IEM_MC_END();
4088 return VINF_SUCCESS;
4089
4090 case IEMMODE_64BIT:
4091 IEM_MC_BEGIN(0,1);
4092 IEM_MC_LOCAL(uint64_t, u64Tmp);
4093 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4094 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4095 IEM_MC_ADVANCE_RIP();
4096 IEM_MC_END();
4097 return VINF_SUCCESS;
4098
4099 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4100 }
4101}
4102
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Implements one non-repeated MOVS iteration: copies ValBits bits from
 * [iEffSeg:rSI] to [ES:rDI], then advances (or, with EFLAGS.DF set,
 * retreats) both index registers by the operand size in bytes.  Addresses
 * are zero extended from the AddrBits-wide index registers. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4121
/** Opcode 0xa4 - movsb Xb,Yb: byte string move.  REP/REPNE forms are
 *  deferred to address-size specific C implementations; the plain form
 *  does a single iteration via IEM_MOVS_CASE. */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4155
4156
/** Opcode 0xa5 - movsw/movsd/movsq Xv,Yv: word/dword/qword string move.
 *  REP/REPNE forms are deferred to operand+address size specific C
 *  implementations; the plain form does a single iteration via
 *  IEM_MOVS_CASE. */
FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                /* no break needed - every case above returns */
            case IEMMODE_64BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /* 16-bit addressing cannot be encoded w/ 64-bit ops */
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");

    /*
     * Annoying double switch here.
     * Using ugly macro for implementing the cases, sharing it with movsb.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_32BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;

        case IEMMODE_64BIT:
            switch (pVCpu->iem.s.enmEffAddrMode)
            {
                case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
                case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
                case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
                IEM_NOT_REACHED_DEFAULT_CASE_RET();
            }
            break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4239
4240#undef IEM_MOVS_CASE
4241
/**
 * Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv for the
 * non-repeating CMPS forms.
 *
 * Fetches *iEffSeg:rSI (honours segment overrides) and *ES:rDI, invokes the
 * CMP assembly worker (first operand passed by reference, hence the uValue1
 * local + IEM_MC_REF_LOCAL), then steps rSI and rDI by ValBits/8 in the
 * direction selected by EFLAGS.DF.
 */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 3); \
    IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
    IEM_MC_REF_LOCAL(puValue1, uValue1); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4268
/**
 * Opcode 0xa6 - cmpsb Xb,Yb.
 *
 * Compares the byte at iEffSeg:rSI with the byte at ES:rDI and advances or
 * retreats both index registers per EFLAGS.DF.  REPE/REPNE forms are
 * deferred to C implementations.
 */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REPZ is checked first; for CMPS the Z-variants have distinct
     * termination conditions, unlike MOVS/STOS/LODS.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4314
4315
4316/** Opcode 0xa7. */
4317FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4318{
4319 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4320
4321 /*
4322 * Use the C implementation if a repeat prefix is encountered.
4323 */
4324 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4325 {
4326 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4327 switch (pVCpu->iem.s.enmEffOpSize)
4328 {
4329 case IEMMODE_16BIT:
4330 switch (pVCpu->iem.s.enmEffAddrMode)
4331 {
4332 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4333 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4334 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4335 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4336 }
4337 break;
4338 case IEMMODE_32BIT:
4339 switch (pVCpu->iem.s.enmEffAddrMode)
4340 {
4341 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4342 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4343 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4344 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4345 }
4346 case IEMMODE_64BIT:
4347 switch (pVCpu->iem.s.enmEffAddrMode)
4348 {
4349 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4350 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4351 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4352 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4353 }
4354 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4355 }
4356 }
4357
4358 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4359 {
4360 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4361 switch (pVCpu->iem.s.enmEffOpSize)
4362 {
4363 case IEMMODE_16BIT:
4364 switch (pVCpu->iem.s.enmEffAddrMode)
4365 {
4366 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4367 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4368 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4369 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4370 }
4371 break;
4372 case IEMMODE_32BIT:
4373 switch (pVCpu->iem.s.enmEffAddrMode)
4374 {
4375 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4376 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4377 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4378 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4379 }
4380 case IEMMODE_64BIT:
4381 switch (pVCpu->iem.s.enmEffAddrMode)
4382 {
4383 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4384 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4385 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4386 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4387 }
4388 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4389 }
4390 }
4391
4392 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4393
4394 /*
4395 * Annoying double switch here.
4396 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4397 */
4398 switch (pVCpu->iem.s.enmEffOpSize)
4399 {
4400 case IEMMODE_16BIT:
4401 switch (pVCpu->iem.s.enmEffAddrMode)
4402 {
4403 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4404 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4405 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4406 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4407 }
4408 break;
4409
4410 case IEMMODE_32BIT:
4411 switch (pVCpu->iem.s.enmEffAddrMode)
4412 {
4413 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4414 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4415 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4416 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4417 }
4418 break;
4419
4420 case IEMMODE_64BIT:
4421 switch (pVCpu->iem.s.enmEffAddrMode)
4422 {
4423 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4424 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4425 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4426 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4427 }
4428 break;
4429 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4430 }
4431 return VINF_SUCCESS;
4432
4433}
4434
4435#undef IEM_CMPS_CASE
4436
/** Opcode 0xa8 - test AL,Ib.  AF is architecturally undefined after TEST,
 *  hence the verification exemption.  Decoding/execution is shared via the
 *  common AL,Ib binary-operator helper with the TEST worker table. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
4444
4445
/** Opcode 0xa9 - test rAX,Iz (AX/EAX/RAX by effective operand size).  AF is
 *  architecturally undefined after TEST.  Shared via the common rAX,Iz
 *  binary-operator helper with the TEST worker table. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
4453
4454
/**
 * Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX for the
 * non-repeating STOS forms.
 *
 * Stores the low ValBits of rAX at ES:rDI (no segment override possible on
 * the destination) and steps rDI by ValBits/8 per EFLAGS.DF.
 */
#define IEM_STOS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END(); \
4470
/**
 * Opcode 0xaa - stosb Yb,AL.
 *
 * Stores AL at ES:rDI and steps rDI per EFLAGS.DF.  With a REP/REPNZ prefix
 * the whole operation is deferred to a C implementation (REPNZ acts like
 * REP for STOS).
 */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4504
4505
4506/** Opcode 0xab. */
4507FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
4508{
4509 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4510
4511 /*
4512 * Use the C implementation if a repeat prefix is encountered.
4513 */
4514 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4515 {
4516 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
4517 switch (pVCpu->iem.s.enmEffOpSize)
4518 {
4519 case IEMMODE_16BIT:
4520 switch (pVCpu->iem.s.enmEffAddrMode)
4521 {
4522 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
4523 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
4524 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
4525 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4526 }
4527 break;
4528 case IEMMODE_32BIT:
4529 switch (pVCpu->iem.s.enmEffAddrMode)
4530 {
4531 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
4532 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
4533 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
4534 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4535 }
4536 case IEMMODE_64BIT:
4537 switch (pVCpu->iem.s.enmEffAddrMode)
4538 {
4539 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
4540 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
4541 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
4542 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4543 }
4544 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4545 }
4546 }
4547 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
4548
4549 /*
4550 * Annoying double switch here.
4551 * Using ugly macro for implementing the cases, sharing it with stosb.
4552 */
4553 switch (pVCpu->iem.s.enmEffOpSize)
4554 {
4555 case IEMMODE_16BIT:
4556 switch (pVCpu->iem.s.enmEffAddrMode)
4557 {
4558 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
4559 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
4560 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
4561 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4562 }
4563 break;
4564
4565 case IEMMODE_32BIT:
4566 switch (pVCpu->iem.s.enmEffAddrMode)
4567 {
4568 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
4569 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
4570 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
4571 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4572 }
4573 break;
4574
4575 case IEMMODE_64BIT:
4576 switch (pVCpu->iem.s.enmEffAddrMode)
4577 {
4578 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4579 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
4580 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
4581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4582 }
4583 break;
4584 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4585 }
4586 return VINF_SUCCESS;
4587}
4588
4589#undef IEM_STOS_CASE
4590
/**
 * Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv for the
 * non-repeating LODS forms.
 *
 * Loads the value at iEffSeg:rSI (honours segment overrides) into the low
 * ValBits of rAX and steps rSI by ValBits/8 per EFLAGS.DF.
 */
#define IEM_LODS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(0, 2); \
    IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
    IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4606
/**
 * Opcode 0xac - lodsb AL,Xb.
 *
 * Loads the byte at iEffSeg:rSI into AL and steps rSI per EFLAGS.DF.  With
 * a REP/REPNZ prefix the whole operation is deferred to a C implementation
 * (REPNZ acts like REP for LODS).
 */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4640
4641
4642/** Opcode 0xad. */
4643FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
4644{
4645 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4646
4647 /*
4648 * Use the C implementation if a repeat prefix is encountered.
4649 */
4650 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4651 {
4652 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
4653 switch (pVCpu->iem.s.enmEffOpSize)
4654 {
4655 case IEMMODE_16BIT:
4656 switch (pVCpu->iem.s.enmEffAddrMode)
4657 {
4658 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
4659 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
4660 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
4661 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4662 }
4663 break;
4664 case IEMMODE_32BIT:
4665 switch (pVCpu->iem.s.enmEffAddrMode)
4666 {
4667 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
4668 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
4669 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
4670 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4671 }
4672 case IEMMODE_64BIT:
4673 switch (pVCpu->iem.s.enmEffAddrMode)
4674 {
4675 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
4676 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
4677 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
4678 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4679 }
4680 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4681 }
4682 }
4683 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
4684
4685 /*
4686 * Annoying double switch here.
4687 * Using ugly macro for implementing the cases, sharing it with lodsb.
4688 */
4689 switch (pVCpu->iem.s.enmEffOpSize)
4690 {
4691 case IEMMODE_16BIT:
4692 switch (pVCpu->iem.s.enmEffAddrMode)
4693 {
4694 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
4695 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
4696 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
4697 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4698 }
4699 break;
4700
4701 case IEMMODE_32BIT:
4702 switch (pVCpu->iem.s.enmEffAddrMode)
4703 {
4704 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
4705 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
4706 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
4707 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4708 }
4709 break;
4710
4711 case IEMMODE_64BIT:
4712 switch (pVCpu->iem.s.enmEffAddrMode)
4713 {
4714 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4715 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
4716 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
4717 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4718 }
4719 break;
4720 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4721 }
4722 return VINF_SUCCESS;
4723}
4724
4725#undef IEM_LODS_CASE
4726
/**
 * Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv for the
 * non-repeating SCAS forms.
 *
 * Compares rAX (low ValBits, by reference since the CMP worker takes a
 * pointer destination - CMP only updates EFLAGS) against *ES:rDI, then
 * steps rDI by ValBits/8 per EFLAGS.DF.  No segment override possible.
 */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
    IEM_MC_BEGIN(3, 2); \
    IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
    IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
    IEM_MC_ARG(uint32_t *, pEFlags, 2); \
    IEM_MC_LOCAL(RTGCPTR, uAddr); \
    \
    IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
    IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
    IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
    IEM_MC_REF_EFLAGS(pEFlags); \
    IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
    \
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
        IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ELSE() { \
        IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
    } IEM_MC_ENDIF(); \
    IEM_MC_ADVANCE_RIP(); \
    IEM_MC_END();
4748
4749/** Opcode 0xae. */
4750FNIEMOP_DEF(iemOp_scasb_AL_Xb)
4751{
4752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4753
4754 /*
4755 * Use the C implementation if a repeat prefix is encountered.
4756 */
4757 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4758 {
4759 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
4760 switch (pVCpu->iem.s.enmEffAddrMode)
4761 {
4762 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
4763 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
4764 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
4765 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4766 }
4767 }
4768 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4769 {
4770 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
4771 switch (pVCpu->iem.s.enmEffAddrMode)
4772 {
4773 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
4774 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
4775 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
4776 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4777 }
4778 }
4779 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
4780
4781 /*
4782 * Sharing case implementation with stos[wdq] below.
4783 */
4784 switch (pVCpu->iem.s.enmEffAddrMode)
4785 {
4786 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
4787 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
4788 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
4789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4790 }
4791 return VINF_SUCCESS;
4792}
4793
4794
4795/** Opcode 0xaf. */
4796FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
4797{
4798 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4799
4800 /*
4801 * Use the C implementation if a repeat prefix is encountered.
4802 */
4803 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4804 {
4805 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
4806 switch (pVCpu->iem.s.enmEffOpSize)
4807 {
4808 case IEMMODE_16BIT:
4809 switch (pVCpu->iem.s.enmEffAddrMode)
4810 {
4811 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
4812 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
4813 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
4814 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4815 }
4816 break;
4817 case IEMMODE_32BIT:
4818 switch (pVCpu->iem.s.enmEffAddrMode)
4819 {
4820 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
4821 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
4822 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
4823 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4824 }
4825 case IEMMODE_64BIT:
4826 switch (pVCpu->iem.s.enmEffAddrMode)
4827 {
4828 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
4829 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
4830 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
4831 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4832 }
4833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4834 }
4835 }
4836 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4837 {
4838 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
4839 switch (pVCpu->iem.s.enmEffOpSize)
4840 {
4841 case IEMMODE_16BIT:
4842 switch (pVCpu->iem.s.enmEffAddrMode)
4843 {
4844 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
4845 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
4846 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
4847 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4848 }
4849 break;
4850 case IEMMODE_32BIT:
4851 switch (pVCpu->iem.s.enmEffAddrMode)
4852 {
4853 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
4854 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
4855 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
4856 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4857 }
4858 case IEMMODE_64BIT:
4859 switch (pVCpu->iem.s.enmEffAddrMode)
4860 {
4861 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
4862 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
4863 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
4864 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4865 }
4866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4867 }
4868 }
4869 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
4870
4871 /*
4872 * Annoying double switch here.
4873 * Using ugly macro for implementing the cases, sharing it with scasb.
4874 */
4875 switch (pVCpu->iem.s.enmEffOpSize)
4876 {
4877 case IEMMODE_16BIT:
4878 switch (pVCpu->iem.s.enmEffAddrMode)
4879 {
4880 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
4881 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
4882 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
4883 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4884 }
4885 break;
4886
4887 case IEMMODE_32BIT:
4888 switch (pVCpu->iem.s.enmEffAddrMode)
4889 {
4890 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
4891 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
4892 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
4893 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4894 }
4895 break;
4896
4897 case IEMMODE_64BIT:
4898 switch (pVCpu->iem.s.enmEffAddrMode)
4899 {
4900 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4901 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
4902 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
4903 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4904 }
4905 break;
4906 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4907 }
4908 return VINF_SUCCESS;
4909}
4910
4911#undef IEM_SCAS_CASE
4912
4913/**
4914 * Common 'mov r8, imm8' helper.
4915 */
4916FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
4917{
4918 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
4919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4920
4921 IEM_MC_BEGIN(0, 1);
4922 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
4923 IEM_MC_STORE_GREG_U8(iReg, u8Value);
4924 IEM_MC_ADVANCE_RIP();
4925 IEM_MC_END();
4926
4927 return VINF_SUCCESS;
4928}
4929
4930
/** Opcode 0xb0 - mov AL,Ib (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
4937
4938
/** Opcode 0xb1 - mov CL,Ib (REX.B extends the register index).
 * @note Function name lacks the mov_ prefix unlike its 0xb0 sibling; it is
 *       referenced from the one-byte opcode map, so renaming must be done
 *       together with that table. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
4945
4946
/** Opcode 0xb2 - mov DL,Ib (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
4953
4954
/** Opcode 0xb3 - mov BL,Ib (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
4961
4962
/** Opcode 0xb4 - mov AH,Ib.  Register index 4 is passed; presumably the
 *  8-bit GREG accessor maps index 4 to AH without REX and to SPL with a
 *  REX prefix (hence X86_GREG_xSP | uRexB) - confirm in the MC backend. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
4969
4970
/** Opcode 0xb5 - mov CH,Ib (index 5: CH without REX, BPL with REX). */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
4977
4978
/** Opcode 0xb6 - mov DH,Ib (index 6: DH without REX, SIL with REX). */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
4985
4986
/** Opcode 0xb7 - mov BH,Ib (index 7: BH without REX, DIL with REX). */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
4993
4994
4995/**
4996 * Common 'mov regX,immX' helper.
4997 */
4998FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
4999{
5000 switch (pVCpu->iem.s.enmEffOpSize)
5001 {
5002 case IEMMODE_16BIT:
5003 {
5004 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5006
5007 IEM_MC_BEGIN(0, 1);
5008 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5009 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5010 IEM_MC_ADVANCE_RIP();
5011 IEM_MC_END();
5012 break;
5013 }
5014
5015 case IEMMODE_32BIT:
5016 {
5017 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5019
5020 IEM_MC_BEGIN(0, 1);
5021 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5022 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5023 IEM_MC_ADVANCE_RIP();
5024 IEM_MC_END();
5025 break;
5026 }
5027 case IEMMODE_64BIT:
5028 {
5029 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5030 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5031
5032 IEM_MC_BEGIN(0, 1);
5033 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5034 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5035 IEM_MC_ADVANCE_RIP();
5036 IEM_MC_END();
5037 break;
5038 }
5039 }
5040
5041 return VINF_SUCCESS;
5042}
5043
5044
/** Opcode 0xb8 - mov rAX,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5051
5052
/** Opcode 0xb9 - mov rCX,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5059
5060
/** Opcode 0xba - mov rDX,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5067
5068
/** Opcode 0xbb - mov rBX,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5075
5076
/** Opcode 0xbc - mov rSP,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5083
5084
/** Opcode 0xbd - mov rBP,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5091
5092
/** Opcode 0xbe - mov rSI,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5099
5100
/** Opcode 0xbf - mov rDI,Iv (REX.B extends the register index). */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5107
5108
/**
 * Opcode 0xc0 - Group 2 Eb,Ib: rol/ror/rcl/rcr/shl/shr/sar on an 8-bit
 * operand with an immediate shift count.
 *
 * The /6 encoding is undefined and raises \#UD.  186+ only.  OF and AF are
 * architecturally undefined for some counts, hence the verification
 * exemption.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The trailing 1 accounts for the imm8 that still follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5168
5169
/** Opcode 0xc1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    /*
     * Group 2 word/dword/qword shift/rotate with an imm8 count:
     * rol/ror/rcl/rcr/shl/shr/sar Ev,Ib.  The /6 encoding raises \#UD here.
     * The 0xc1 opcode was introduced with the 80186.
     */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may differ from real hardware for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* The imm8 count follows the ModR/M + displacement bytes,
                   hence the '1' (immediate size) and the late cShift fetch. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5309
5310
/** Opcode 0xc2. */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    /* Near return, popping Iw additional bytes off the stack after the
       return address.  Operand size defaults to 64-bit in long mode. */
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5320
5321
/** Opcode 0xc3. */
FNIEMOP_DEF(iemOp_retn)
{
    /* Plain near return (same worker as 0xc2, with zero extra pop bytes).
       Operand size defaults to 64-bit in long mode. */
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5330
5331
5332/** Opcode 0xc4. */
5333FNIEMOP_DEF(iemOp_les_Gv_Mp__vex2)
5334{
5335 /* The LES instruction is invalid 64-bit mode. In legacy and
5336 compatability mode it is invalid with MOD=3.
5337 The use as a VEX prefix is made possible by assigning the inverted
5338 REX.R to the top MOD bit, and the top bit in the inverted register
5339 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5340 to accessing registers 0..7 in this VEX form. */
5341 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5342 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5343 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5344 {
5345 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5346 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5347 {
5348 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5349 if ( ( pVCpu->iem.s.fPrefixes
5350 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5351 == 0)
5352 {
5353 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5354 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5355 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5356 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5357 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5358
5359 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5360 }
5361
5362 Log(("VEX2: Invalid prefix mix!\n"));
5363 }
5364 else
5365 Log(("VEX2: AVX support disabled!\n"));
5366
5367 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5368 return IEMOP_RAISE_INVALID_OPCODE();
5369 }
5370 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5371 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5372}
5373
5374
5375/** Opcode 0xc5. */
5376FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex3)
5377{
5378 /* The LDS instruction is invalid 64-bit mode. In legacy and
5379 compatability mode it is invalid with MOD=3.
5380 The use as a VEX prefix is made possible by assigning the inverted
5381 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5382 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5383 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5384 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5385 {
5386 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5387 {
5388 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5389 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5390 }
5391 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5392 }
5393
5394 IEMOP_MNEMONIC(vex3_prefix, "vex3");
5395 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5396 {
5397 /** @todo Test when exctly the VEX conformance checks kick in during
5398 * instruction decoding and fetching (using \#PF). */
5399 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5400 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5401 if ( ( pVCpu->iem.s.fPrefixes
5402 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5403 == 0)
5404 {
5405 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5406 if (bVex2 & 0x80 /* VEX.W */)
5407 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5408 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5409 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
5410 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
5411 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
5412 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
5413 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
5414
5415 switch (bRm & 0x1f)
5416 {
5417 case 1: /* 0x0f lead opcode byte. */
5418 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5419
5420 case 2: /* 0x0f 0x38 lead opcode bytes. */
5421 /** @todo VEX: Just use new tables and decoders. */
5422 IEMOP_BITCH_ABOUT_STUB();
5423 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5424
5425 case 3: /* 0x0f 0x3a lead opcode bytes. */
5426 /** @todo VEX: Just use new tables and decoders. */
5427 IEMOP_BITCH_ABOUT_STUB();
5428 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5429
5430 default:
5431 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5432 return IEMOP_RAISE_INVALID_OPCODE();
5433 }
5434 }
5435 else
5436 Log(("VEX3: Invalid prefix mix!\n"));
5437 }
5438 else
5439 Log(("VEX3: AVX support disabled!\n"));
5440 return IEMOP_RAISE_INVALID_OPCODE();
5441}
5442
5443
/** Opcode 0xc6. */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    /* Group 11: only the /0 encoding (mov Eb,Ib) is defined; everything
       else raises \#UD. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* The imm8 follows the ModR/M + displacement bytes, hence the '1'
           (immediate byte count) and the late u8Imm fetch. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5476
5477
/** Opcode 0xc7. */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    /* Group 11: only the /0 encoding (mov Ev,Iz) is defined; everything
       else raises \#UD.  In 64-bit mode the immediate is a sign-extended
       32-bit value (no imm64 form here). */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* The immediate follows the ModR/M + displacement bytes, so
                   the effective address calc is told how many immediate
                   bytes (2/4/4) remain, and the immediate is fetched after. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5563
5564
5565
5566
/** Opcode 0xc8. */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    /* enter Iw,Ib: create a stack frame of cbFrame bytes at nesting level
       u8NestingLevel.  80186+ instruction; 64-bit default operand size in
       long mode. */
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
5578
5579
/** Opcode 0xc9. */
FNIEMOP_DEF(iemOp_leave)
{
    /* leave: tear down the stack frame set up by enter.  80186+
       instruction; 64-bit default operand size in long mode. */
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
5589
5590
/** Opcode 0xca. */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    /* Far return, popping Iw additional bytes off the stack after
       CS:IP/EIP/RIP. */
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5600
5601
/** Opcode 0xcb. */
FNIEMOP_DEF(iemOp_retf)
{
    /* Plain far return (same worker as 0xca, zero extra pop bytes). */
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
5610
5611
/** Opcode 0xcc. */
FNIEMOP_DEF(iemOp_int_3)
{
    /* int3 breakpoint instruction - raises \#BP, flagged as the dedicated
       one-byte breakpoint encoding (fIsBpInstr=true). */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
}
5618
5619
/** Opcode 0xcd. */
FNIEMOP_DEF(iemOp_int_Ib)
{
    /* int Ib: software interrupt with an arbitrary vector.  Note that
       'int 3' via 0xcd is NOT treated as the breakpoint instruction
       (fIsBpInstr=false). */
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
5627
5628
/** Opcode 0xce. */
FNIEMOP_DEF(iemOp_into)
{
    /* into: raise \#OF if the overflow flag is set.  Invalid in 64-bit
       mode.  The conditional check on EFLAGS.OF is done by the CIMPL
       worker (it only raises when OF=1).
       NOTE(review): unlike 0xcc/0xcd there is no
       IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() here - verify whether a
       LOCK prefix check is intentionally omitted. */
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false,       1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5642
5643
/** Opcode 0xcf. */
FNIEMOP_DEF(iemOp_iret)
{
    /* iret/iretd/iretq: interrupt return; all the heavy lifting (mode
       dependent stack frame handling) is in the CIMPL worker. */
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
5651
5652
/** Opcode 0xd0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    /*
     * Group 2 byte-sized shift/rotate by an implicit count of 1:
     * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The /6 encoding raises \#UD here.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF may differ from real hardware for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,            0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,  1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5708
5709
5710
/** Opcode 0xd1. */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    /*
     * Group 2 word/dword/qword shift/rotate by an implicit count of 1:
     * rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The /6 encoding raises \#UD here.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* OF and AF may differ from real hardware for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags,       2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5842
5843
/** Opcode 0xd2. */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    /*
     * Group 2 byte-sized shift/rotate with the count taken from CL:
     * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  The /6 encoding raises \#UD here.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* OF and AF may differ from real hardware for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5901
5902
/** Opcode 0xd3. */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    /*
     * Group 2 word/dword/qword shift/rotate with the count taken from CL:
     * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  The /6 encoding raises \#UD here.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* OF and AF may differ from real hardware for these instructions. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6040
/** Opcode 0xd4.
 * ASCII adjust AX after multiply; the immediate is the divisor (0x0a for the
 * plain AAM encoding). Invalid in 64-bit mode. */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm) /* A zero divisor raises \#DE; checked only after decoding completes. */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6052
6053
/** Opcode 0xd5.
 * ASCII adjust AX before divide; the immediate is the multiplier (0x0a for
 * the plain AAD encoding). Invalid in 64-bit mode; no \#DE case (no division). */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6063
6064
6065/** Opcode 0xd6. */
6066FNIEMOP_DEF(iemOp_salc)
6067{
6068 IEMOP_MNEMONIC(salc, "salc");
6069 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6070 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6071 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6072 IEMOP_HLP_NO_64BIT();
6073
6074 IEM_MC_BEGIN(0, 0);
6075 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6076 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6077 } IEM_MC_ELSE() {
6078 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6079 } IEM_MC_ENDIF();
6080 IEM_MC_ADVANCE_RIP();
6081 IEM_MC_END();
6082 return VINF_SUCCESS;
6083}
6084
6085
/** Opcode 0xd7.
 * XLAT/XLATB: AL = [rBX + zero-extended AL] using the effective segment
 * (DS unless overridden). One IEM_MC body per effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            /* Table index = BX + zero-extended AL, truncated to 16 bits. */
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6132
6133
6134/**
6135 * Common worker for FPU instructions working on ST0 and STn, and storing the
6136 * result in ST0.
6137 *
6138 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6139 */
6140FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6141{
6142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6143
6144 IEM_MC_BEGIN(3, 1);
6145 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6146 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6147 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6148 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6149
6150 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6151 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6152 IEM_MC_PREPARE_FPU_USAGE();
6153 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6154 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
6155 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6156 IEM_MC_ELSE()
6157 IEM_MC_FPU_STACK_UNDERFLOW(0);
6158 IEM_MC_ENDIF();
6159 IEM_MC_ADVANCE_RIP();
6160
6161 IEM_MC_END();
6162 return VINF_SUCCESS;
6163}
6164
6165
6166/**
6167 * Common worker for FPU instructions working on ST0 and STn, and only affecting
6168 * flags.
6169 *
6170 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6171 */
6172FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6173{
6174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6175
6176 IEM_MC_BEGIN(3, 1);
6177 IEM_MC_LOCAL(uint16_t, u16Fsw);
6178 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6179 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6180 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6181
6182 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6183 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6184 IEM_MC_PREPARE_FPU_USAGE();
6185 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6186 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6187 IEM_MC_UPDATE_FSW(u16Fsw);
6188 IEM_MC_ELSE()
6189 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6190 IEM_MC_ENDIF();
6191 IEM_MC_ADVANCE_RIP();
6192
6193 IEM_MC_END();
6194 return VINF_SUCCESS;
6195}
6196
6197
6198/**
6199 * Common worker for FPU instructions working on ST0 and STn, only affecting
6200 * flags, and popping when done.
6201 *
6202 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6203 */
6204FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
6205{
6206 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6207
6208 IEM_MC_BEGIN(3, 1);
6209 IEM_MC_LOCAL(uint16_t, u16Fsw);
6210 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6211 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6212 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6213
6214 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6215 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6216 IEM_MC_PREPARE_FPU_USAGE();
6217 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
6218 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
6219 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
6220 IEM_MC_ELSE()
6221 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
6222 IEM_MC_ENDIF();
6223 IEM_MC_ADVANCE_RIP();
6224
6225 IEM_MC_END();
6226 return VINF_SUCCESS;
6227}
6228
6229
/** Opcode 0xd8 11/0.
 * ST0 = ST0 + STn; defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6236
6237
/** Opcode 0xd8 11/1.
 * ST0 = ST0 * STn; defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6244
6245
/** Opcode 0xd8 11/2.
 * Compare ST0 with STn, updating FSW condition codes only. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6252
6253
/** Opcode 0xd8 11/3.
 * Compare ST0 with STn, then pop; same assembly helper as FCOM. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6260
6261
/** Opcode 0xd8 11/4.
 * ST0 = ST0 - STn; defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6268
6269
/** Opcode 0xd8 11/5.
 * ST0 = STn - ST0 (reversed subtract); defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6276
6277
/** Opcode 0xd8 11/6.
 * ST0 = ST0 / STn; defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6284
6285
/** Opcode 0xd8 11/7.
 * ST0 = STn / ST0 (reversed divide); defers to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6292
6293
6294/**
6295 * Common worker for FPU instructions working on ST0 and an m32r, and storing
6296 * the result in ST0.
6297 *
6298 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6299 */
6300FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
6301{
6302 IEM_MC_BEGIN(3, 3);
6303 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6304 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6305 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
6306 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6307 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6308 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
6309
6310 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6311 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6312
6313 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6314 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6315 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6316
6317 IEM_MC_PREPARE_FPU_USAGE();
6318 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
6319 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
6320 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6321 IEM_MC_ELSE()
6322 IEM_MC_FPU_STACK_UNDERFLOW(0);
6323 IEM_MC_ENDIF();
6324 IEM_MC_ADVANCE_RIP();
6325
6326 IEM_MC_END();
6327 return VINF_SUCCESS;
6328}
6329
6330
/** Opcode 0xd8 !11/0.
 * ST0 = ST0 + m32real; defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6337
6338
/** Opcode 0xd8 !11/1.
 * ST0 = ST0 * m32real; defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6345
6346
/** Opcode 0xd8 !11/2.
 * Compare ST0 with m32real; FSW-only result, so it cannot use the storing
 * ST0/m32r worker and carries its own IEM_MC body. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        /* FSW update records the memory operand (FDP/FDS) as well. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6379
6380
/** Opcode 0xd8 !11/3.
 * Compare ST0 with m32real, then pop; identical to iemOp_fcom_m32r except
 * for the _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6413
6414
/** Opcode 0xd8 !11/4.
 * ST0 = ST0 - m32real; defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
6421
6422
/** Opcode 0xd8 !11/5.
 * ST0 = m32real - ST0 (reversed); defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
6429
6430
/** Opcode 0xd8 !11/6.
 * ST0 = ST0 / m32real; defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
6437
6438
/** Opcode 0xd8 !11/7.
 * ST0 = m32real / ST0 (reversed); defers to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6445
6446
/** Opcode 0xd8.
 * First x87 escape byte: dispatches on the ModR/M reg field, with the
 * register forms (mod == 3) taking ST0,STn operands and the memory forms
 * taking an m32real operand. Also records the FPU opcode word (FOP). */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* FOP = low 3 bits of the escape opcode combined with the ModR/M byte. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6484
6485
/** Opcode 0xd9 /0 mem32real
 * Push m32real (converted to 80-bit) onto the FPU stack; the push target is
 * register 7 relative to the current top, hence the IS_EMPTY(7) check.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* Pushing onto a non-empty slot: stack overflow. */
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6518
6519
/** Opcode 0xd9 !11/2 mem32real
 * Store ST0 to m32real. The destination is mapped for writing before the
 * value is known; on an empty ST0 a QNaN is stored only if the invalid
 * operation exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW the helper produced (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6554
6555
/** Opcode 0xd9 !11/3
 * Store ST0 to m32real and pop; identical to iemOp_fst_m32r except for the
 * _THEN_POP FSW/underflow variants. */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            /* Masked invalid-op on empty ST0: store negative QNaN instead. */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6590
6591
/** Opcode 0xd9 !11/4
 * FLDENV - load the x87 environment (14 or 28 bytes depending on operand
 * size); the heavy lifting is deferred to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6609
6610
6611/** Opcode 0xd9 !11/5 */
6612FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
6613{
6614 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
6615 IEM_MC_BEGIN(1, 1);
6616 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6617 IEM_MC_ARG(uint16_t, u16Fsw, 0);
6618 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6619 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6620 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6621 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6622 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6623 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
6624 IEM_MC_END();
6625 return VINF_SUCCESS;
6626}
6627
6628
/** Opcode 0xd9 !11/6
 * FNSTENV - store the x87 environment (14 or 28 bytes depending on operand
 * size) without checking for pending exceptions; deferred to the C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    /* NOTE(review): the mnemonic/stats tag says "fstenv" while the handler is
       the no-wait FNSTENV form - presumably intentional for stats grouping;
       confirm before renaming. */
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6646
6647
/** Opcode 0xd9 !11/7
 * FNSTCW - store the FPU control word to a 16-bit memory operand; simple
 * enough to do inline without a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6665
6666
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - does nothing except update the FPU opcode/instruction pointer. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6684
6685
/** Opcode 0xd9 11/0 stN
 * FLD STn - duplicate STn onto the top of the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        /* Wrap the source value in a result with a zero FSW and push it. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6713
6714
/** Opcode 0xd9 11/3 stN
 * FXCH STn - exchange ST0 and STn; sets FSW.C1 on success. The underflow
 * case (either register empty) is handled by a C implementation. */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* STn's old value goes to ST0 (via FpuRes), ST0's old value to STn. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6745
6746
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP STn - copy ST0 to STn, then pop. The STn == ST0 case is specialised
 * since it degenerates into a pop ('ffreep st0' idiom). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            /* Nothing to store; a zero FSW update plus pop does the job. */
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6793
6794
6795/**
6796 * Common worker for FPU instructions working on ST0 and replaces it with the
6797 * result, i.e. unary operators.
6798 *
6799 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6800 */
6801FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
6802{
6803 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6804
6805 IEM_MC_BEGIN(2, 1);
6806 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6807 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6808 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
6809
6810 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6811 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6812 IEM_MC_PREPARE_FPU_USAGE();
6813 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
6814 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
6815 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
6816 IEM_MC_ELSE()
6817 IEM_MC_FPU_STACK_UNDERFLOW(0);
6818 IEM_MC_ENDIF();
6819 IEM_MC_ADVANCE_RIP();
6820
6821 IEM_MC_END();
6822 return VINF_SUCCESS;
6823}
6824
6825
/** Opcode 0xd9 0xe0.
 * FCHS - negate ST0; defers to the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
6832
6833
/** Opcode 0xd9 0xe1.
 * FABS - absolute value of ST0; defers to the unary ST0 worker. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
6840
6841
6842/**
6843 * Common worker for FPU instructions working on ST0 and only returns FSW.
6844 *
6845 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6846 */
6847FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
6848{
6849 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6850
6851 IEM_MC_BEGIN(2, 1);
6852 IEM_MC_LOCAL(uint16_t, u16Fsw);
6853 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
6854 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
6855
6856 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6857 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6858 IEM_MC_PREPARE_FPU_USAGE();
6859 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
6860 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
6861 IEM_MC_UPDATE_FSW(u16Fsw);
6862 IEM_MC_ELSE()
6863 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
6864 IEM_MC_ENDIF();
6865 IEM_MC_ADVANCE_RIP();
6866
6867 IEM_MC_END();
6868 return VINF_SUCCESS;
6869}
6870
6871
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 with +0.0, FSW condition codes only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
6878
6879
/** Opcode 0xd9 0xe5.
 * FXAM - examine/classify ST0, FSW condition codes only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
6886
6887
6888/**
6889 * Common worker for FPU instructions pushing a constant onto the FPU stack.
6890 *
6891 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6892 */
6893FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
6894{
6895 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6896
6897 IEM_MC_BEGIN(1, 1);
6898 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6899 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6900
6901 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6902 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6903 IEM_MC_PREPARE_FPU_USAGE();
6904 IEM_MC_IF_FPUREG_IS_EMPTY(7)
6905 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
6906 IEM_MC_PUSH_FPU_RESULT(FpuRes);
6907 IEM_MC_ELSE()
6908 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
6909 IEM_MC_ENDIF();
6910 IEM_MC_ADVANCE_RIP();
6911
6912 IEM_MC_END();
6913 return VINF_SUCCESS;
6914}
6915
6916
/** Opcode 0xd9 0xe8.
 * FLD1 - push +1.0; defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
6923
6924
/** Opcode 0xd9 0xe9.
 * FLDL2T - push log2(10); defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
6931
6932
/** Opcode 0xd9 0xea.
 * FLDL2E - push log2(e); defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
6939
/** Opcode 0xd9 0xeb.
 * FLDPI - push pi; defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
6946
6947
/** Opcode 0xd9 0xec.
 * FLDLG2 - push log10(2); defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
6954
/** Opcode 0xd9 0xed.
 * FLDLN2 - push ln(2); defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
6961
6962
/** Opcode 0xd9 0xee.
 * FLDZ - push +0.0; defers to the constant-push worker. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
6969
6970
/** Opcode 0xd9 0xf0.
 * F2XM1 - ST0 = 2^ST0 - 1; defers to the unary ST0 worker. */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
6977
6978
6979/**
6980 * Common worker for FPU instructions working on STn and ST0, storing the result
6981 * in STn, and popping the stack unless IE, DE or ZE was raised.
6982 *
6983 * @param pfnAImpl Pointer to the instruction implementation (assembly).
6984 */
6985FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
6986{
6987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6988
6989 IEM_MC_BEGIN(3, 1);
6990 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
6991 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
6992 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
6993 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
6994
6995 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6996 IEM_MC_MAYBE_RAISE_FPU_XCPT();
6997
6998 IEM_MC_PREPARE_FPU_USAGE();
6999 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7000 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7001 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7002 IEM_MC_ELSE()
7003 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7004 IEM_MC_ENDIF();
7005 IEM_MC_ADVANCE_RIP();
7006
7007 IEM_MC_END();
7008 return VINF_SUCCESS;
7009}
7010
7011
/** Opcode 0xd9 0xf1.
 * FYL2X - ST1 = ST1 * log2(ST0), then pop; passes STn index 1 directly. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7018
7019
7020/**
7021 * Common worker for FPU instructions working on ST0 and having two outputs, one
7022 * replacing ST0 and one pushed onto the stack.
7023 *
7024 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7025 */
7026FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7027{
7028 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7029
7030 IEM_MC_BEGIN(2, 1);
7031 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7032 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7033 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7034
7035 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7036 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7037 IEM_MC_PREPARE_FPU_USAGE();
7038 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7039 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7040 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7041 IEM_MC_ELSE()
7042 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7043 IEM_MC_ENDIF();
7044 IEM_MC_ADVANCE_RIP();
7045
7046 IEM_MC_END();
7047 return VINF_SUCCESS;
7048}
7049
7050
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* FPTAN: replaces ST0 with its tangent and pushes 1.0. */
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7057
7058
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* FPATAN: ST1 = arctan(ST1/ST0), then pop. */
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7065
7066
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* FXTRACT: replaces ST0 with its exponent and pushes the significand. */
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7073
7074
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* FPREM1: ST0 = IEEE partial remainder of ST0 / ST1. */
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7081
7082
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* FDECSTP: rotate the FPU register stack by decrementing FSW.TOP;
       no register contents or tags are modified. */
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7105
7106
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* FINCSTP: rotate the FPU register stack by incrementing FSW.TOP;
       no register contents or tags are modified. */
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7129
7130
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* FPREM: ST0 = (truncating) partial remainder of ST0 / ST1. */
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7137
7138
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* FYL2XP1: ST1 = ST1 * log2(ST0 + 1), then pop. */
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7145
7146
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* FSQRT: ST0 = sqrt(ST0). */
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7153
7154
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* FSINCOS: replaces ST0 with sin(ST0) and pushes cos(ST0). */
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7161
7162
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    /* FRNDINT: round ST0 to an integer (per current rounding control). */
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7169
7170
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    /* FSCALE: ST0 = ST0 * 2^trunc(ST1). */
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7177
7178
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    /* FSIN: ST0 = sin(ST0). */
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7185
7186
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    /* FCOS: ST0 = cos(ST0). */
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7193
7194
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 instructions with a register-form ModR/M byte in
 * the 0xe0..0xff range; indexed by (bRm - 0xe0). */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7231
7232
/** Opcode 0xd9. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode (low 3 bits of the escape byte + ModR/M)
       for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: dispatch on the reg field, with /4../7 going via
           the 0xe0..0xff table. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7275
7276
/** Opcode 0xda 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    /* FCMOVB: copy STn to ST0 if CF is set; underflow if either is empty. */
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7303
7304
/** Opcode 0xda 11/1. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    /* FCMOVE: copy STn to ST0 if ZF is set; underflow if either is empty. */
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7331
7332
/** Opcode 0xda 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    /* FCMOVBE: copy STn to ST0 if CF or ZF is set; underflow if either
       register is empty. */
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7359
7360
/** Opcode 0xda 11/3. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    /* FCMOVU: copy STn to ST0 if PF is set (unordered); underflow if either
       register is empty. */
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7387
7388
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Note: the register pair is hardwired to ST0 and ST1 here (used by
 * FUCOMPP-style instructions).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7420
7421
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare of ST0 with ST1, then pop both. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7428
7429
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form), used to compute the
 *                      effective address of the 32-bit integer operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching FPU state so memory faults
       are raised first. */
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7465
7466
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32i: ST0 = ST0 + m32i. */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
7473
7474
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32i: ST0 = ST0 * m32i. */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
7481
7482
/** Opcode 0xda !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    /* FICOM m32i: compare ST0 with the 32-bit integer, updating FSW only. */
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7515
7516
/** Opcode 0xda !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    /* FICOMP m32i: like FICOM m32i but pops ST0 afterwards.  Shares the
       ficom assembly worker; only the FSW-update/pop macros differ. */
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int32_t, i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7549
7550
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32i: ST0 = ST0 - m32i. */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
7557
7558
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32i: ST0 = m32i - ST0 (reversed operands). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
7565
7566
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32i: ST0 = ST0 / m32i. */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
7573
7574
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32i: ST0 = m32i / ST0 (reversed operands). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
7581
7582
/** Opcode 0xda. */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit x87 opcode for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: FCMOVcc plus the single FUCOMPP encoding (0xe9). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 32-bit integer arithmetic/compare operands. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7622
7623
/** Opcode 0xdb !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    /* FILD m32i: convert the 32-bit integer and push it onto the stack. */
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int32_t, i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP decrement; if it is occupied we
       have a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7655
7656
/** Opcode 0xdb !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    /* FISTTP m32i (SSE3): store ST0 as a truncated 32-bit integer and pop.
       If ST0 is empty and IM is masked, the integer indefinite (INT32_MIN)
       is written instead. */
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7691
7692
/** Opcode 0xdb !11/2. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    /* FIST m32i: store ST0 as a rounded 32-bit integer (no pop).
       If ST0 is empty and IM is masked, the integer indefinite (INT32_MIN)
       is written instead. */
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7727
7728
/** Opcode 0xdb !11/3. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    /* FISTP m32i: store ST0 as a rounded 32-bit integer and pop.
       If ST0 is empty and IM is masked, the integer indefinite (INT32_MIN)
       is written instead. */
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *, pi32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7763
7764
/** Opcode 0xdb !11/5. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    /* FLD m80r: load an 80-bit extended real from memory and push it. */
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U, r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U, pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* The push target is ST7 before TOP decrement; occupied means overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7796
7797
/** Opcode 0xdb !11/7. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    /* FSTP m80r: store ST0 as an 80-bit extended real and pop.
       If ST0 is empty and IM is masked, the negative QNaN (real indefinite)
       is written instead. */
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U, pr80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7832
7833
/** Opcode 0xdb 11/0. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    /* FCMOVNB: copy STn to ST0 if CF is clear; underflow if either is empty. */
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7860
7861
/** Opcode 0xdb 11/1. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    /* FCMOVNE: copy STn to ST0 if ZF is clear; underflow if either is empty. */
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7888
7889
/** Opcode 0xdb 11/2. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    /* FCMOVNBE: copy STn to ST0 if both CF and ZF are clear; underflow if
       either register is empty. */
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7916
7917
7918/** Opcode 0xdb 11/3. */
7919FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
7920{
7921 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
7922 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7923
7924 IEM_MC_BEGIN(0, 1);
7925 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
7926
7927 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7928 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7929
7930 IEM_MC_PREPARE_FPU_USAGE();
7931 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
7932 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
7933 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
7934 IEM_MC_ENDIF();
7935 IEM_MC_UPDATE_FPU_OPCODE_IP();
7936 IEM_MC_ELSE()
7937 IEM_MC_FPU_STACK_UNDERFLOW(0);
7938 IEM_MC_ENDIF();
7939 IEM_MC_ADVANCE_RIP();
7940
7941 IEM_MC_END();
7942 return VINF_SUCCESS;
7943}
7944
7945
/** Opcode 0xdb 0xe0. */
FNIEMOP_DEF(iemOp_fneni)
{
    /* FNENI: 8087-only interrupt enable; treated as a no-op here. */
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7957
7958
/** Opcode 0xdb 0xe1. */
FNIEMOP_DEF(iemOp_fndisi)
{
    /* FNDISI: 8087-only interrupt disable; treated as a no-op here. */
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7970
7971
/** Opcode 0xdb 0xe2. */
FNIEMOP_DEF(iemOp_fnclex)
{
    /* FNCLEX: clear the FSW exception flags without checking for pending
       exceptions first (hence no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7986
7987
/** Opcode 0xdb 0xe3. */
FNIEMOP_DEF(iemOp_fninit)
{
    /* FNINIT: reinitialize the FPU without checking pending exceptions. */
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
7995
7996
/** Opcode 0xdb 0xe4. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    /* FNSETPM: 80287-only; treated as a no-op here. */
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8008
8009
/** Opcode 0xdb 0xe5. */
FNIEMOP_DEF(iemOp_frstpm)
{
    /* FRSTPM: 80287XL-only; raises \#UD here (matching newer CPUs).
       The no-op variant is kept around, disabled, for reference. */
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8025
8026
/** Opcode 0xdb 11/5. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    /* FUCOMI: unordered compare of ST0 with STn, setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8033
8034
/** Opcode 0xdb 11/6. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    /* FCOMI: ordered compare of ST0 with STn, setting EFLAGS; no pop. */
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8041
8042
/** Opcode 0xdb. Escape opcode dispatcher: register forms (mod == 3) decode on
 * the reg field (and on the whole byte for /4), memory forms on the reg field. */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* /4 encodes several no-operand instructions; decode on the full byte. */
                switch (bRm)
                {
                    case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8092
8093
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param bRm The ModR/M byte; the R/M field selects ST(i).
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(i) = ST(i) <op> ST0; stack underflow if either register is empty. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8125
8126
/** Opcode 0xdc 11/0. FADD ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8133
8134
/** Opcode 0xdc 11/1. FMUL ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8141
8142
/** Opcode 0xdc 11/4. FSUBR ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8149
8150
/** Opcode 0xdc 11/5. FSUB ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8157
8158
/** Opcode 0xdc 11/6. FDIVR ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8165
8166
/** Opcode 0xdc 11/7. FDIV ST(i),ST0 - result stored in ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8173
8174
8175/**
8176 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
8177 * memory operand, and storing the result in ST0.
8178 *
8179 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8180 */
8181FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
8182{
8183 IEM_MC_BEGIN(3, 3);
8184 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8185 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8186 IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
8187 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8188 IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
8189 IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);
8190
8191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8193 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8194 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8195
8196 IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8197 IEM_MC_PREPARE_FPU_USAGE();
8198 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
8199 IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
8200 IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8201 IEM_MC_ELSE()
8202 IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8203 IEM_MC_ENDIF();
8204 IEM_MC_ADVANCE_RIP();
8205
8206 IEM_MC_END();
8207 return VINF_SUCCESS;
8208}
8209
8210
/** Opcode 0xdc !11/0. FADD ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8217
8218
/** Opcode 0xdc !11/1. FMUL ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8225
8226
/** Opcode 0xdc !11/2. FCOM ST0,m64r - compare ST0 with a 64-bit float in
 * memory; only FSW is updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8259
8260
/** Opcode 0xdc !11/3. FCOMP ST0,m64r - same as FCOM m64r, but pops the
 * stack afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8293
8294
/** Opcode 0xdc !11/4. FSUB ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8301
8302
/** Opcode 0xdc !11/5. FSUBR ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8309
8310
/** Opcode 0xdc !11/6. FDIV ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8317
8318
/** Opcode 0xdc !11/7. FDIVR ST0,m64r - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8325
8326
/** Opcode 0xdc. Escape opcode dispatcher: register forms operate ST(i),ST0;
 * memory forms operate ST0 with an m64r operand. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8363
8364
/** Opcode 0xdd !11/0. FLD m64r - push a 64-bit float from memory onto the
 * FPU stack (converted to 80-bit); overflow if ST7 is occupied.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8396
8397
/** Opcode 0xdd !11/1. FISTTP m64i - store ST0 as a truncated 64-bit integer
 * and pop.  (Previous comment said !11/0; the EscF5 dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: if the invalid-op exception is masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8432
8433
/** Opcode 0xdd !11/2. FST m64r - store ST0 to memory as a 64-bit float,
 * no pop.  (Previous comment said !11/0; the EscF5 dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: if the invalid-op exception is masked, store negative QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8468
8469
8470
8471
/** Opcode 0xdd !11/3. FSTP m64r - store ST0 to memory as a 64-bit float and
 * pop.  (Previous comment said !11/0; the EscF5 dispatcher routes /3 here.) */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow: if the invalid-op exception is masked, store negative QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8506
8507
/** Opcode 0xdd !11/4. FRSTOR - restore the whole FPU state from memory;
 * deferred to the C implementation.  (Previous comment said !11/0; the EscF5
 * dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8525
8526
/** Opcode 0xdd !11/6. FNSAVE - save the whole FPU state to memory; deferred
 * to the C implementation.  (Previous comment said !11/0; the EscF5
 * dispatcher routes /6 here.) */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
8545
/** Opcode 0xdd !11/7. FNSTSW m16 - store the FPU status word to memory.
 * (Previous comment said !11/0; the EscF5 dispatcher routes /7 here.) */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
8570
8571
/** Opcode 0xdd 11/0. FFREE ST(i) - mark the register as empty. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8593
8594
/** Opcode 0xdd 11/2. FST ST(i) - copy ST0 into ST(i), no pop.
 * (Previous comment said 11/1; the EscF5 dispatcher routes /2 here.) */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value)
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8619
8620
/** Opcode 0xdd 11/4. FUCOM ST0,ST(i) - unordered compare, no store, no pop.
 * (Previous comment said 11/3; the EscF5 dispatcher routes /4 here.) */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
8627
8628
/** Opcode 0xdd 11/5. FUCOMP ST0,ST(i) - unordered compare, then pop.
 * (Previous comment said 11/4; the EscF5 dispatcher routes /5 here.) */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
8635
8636
/** Opcode 0xdd. Escape opcode dispatcher: register forms FFREE/FST/FSTP/
 * FUCOM(P); memory forms FLD/FISTTP/FST(P) m64, FRSTOR, FNSAVE, FNSTSW. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of FXCH ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8673
8674
/** Opcode 0xde 11/0. FADDP ST(i),ST0 - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
8681
8682
/** Opcode 0xde 11/1. FMULP ST(i),ST0 - result in ST(i), then pop.
 * (Previous comment said 11/0; the EscF6 dispatcher routes /1 here.) */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
8689
8690
/** Opcode 0xde 0xd9. FCOMPP - compare ST0 with ST1, then pop twice. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
8697
8698
/** Opcode 0xde 11/4. FSUBRP ST(i),ST0 - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
8705
8706
/** Opcode 0xde 11/5. FSUBP ST(i),ST0 - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
8713
8714
/** Opcode 0xde 11/6. FDIVRP ST(i),ST0 - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
8721
8722
/** Opcode 0xde 11/7. FDIVP ST(i),ST0 - result in ST(i), then pop. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
8729
8730
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param bRm The ModR/M byte (memory form); used for effective address
 * calculation of the m16i operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 = ST0 <op> m16i; stack underflow if ST0 is empty. */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8766
8767
/** Opcode 0xde !11/0. FIADD ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
8774
8775
/** Opcode 0xde !11/1. FIMUL ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
8782
8783
/** Opcode 0xde !11/2. FICOM ST0,m16i - compare ST0 with a 16-bit integer in
 * memory; only FSW is updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8816
8817
/** Opcode 0xde !11/3. FICOMP ST0,m16i - same as FICOM m16i, but pops the
 * stack afterwards (the _THEN_POP FSW/underflow variants). */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8850
8851
/** Opcode 0xde !11/4. FISUB ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
8858
8859
/** Opcode 0xde !11/5. FISUBR ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
8866
8867
/** Opcode 0xde !11/6. FIDIV ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
8874
8875
/** Opcode 0xde !11/7. FIDIVR ST0,m16i - result stored in ST0. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
8882
8883
/** Opcode 0xde. Escape opcode dispatcher: register forms are the popping
 * arithmetic instructions (plus FCOMPP at 0xd9); memory forms take m16i. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 opcode bits + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8922
8923
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8945
8946
/** Opcode 0xdf 0xe0. FNSTSW AX - store the FPU status word in AX. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8963
8964
8965/** Opcode 0xdf 11/5. */
8966FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
8967{
8968 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
8969 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
8970}
8971
8972
/** Opcode 0xdf 11/6. FCOMIP ST0,ST(i) - ordered compare into EFLAGS,
 * then pop; uses the FCOMI assembly worker. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8979
8980
8981/** Opcode 0xdf !11/0 - fild m16i: convert a signed 16-bit integer in memory
 * to 80-bit real and push it onto the FPU stack. */
8982FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
8983{
8984    IEMOP_MNEMONIC(fild_m16i, "fild m16i");
8985
8986    IEM_MC_BEGIN(2, 3);
8987    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8988    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8989    IEM_MC_LOCAL(int16_t, i16Val);
8990    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8991    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);
8992
8993    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8994    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8995
8996    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8997    IEM_MC_MAYBE_RAISE_FPU_XCPT();
8998    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8999
9000    IEM_MC_PREPARE_FPU_USAGE();
    /* Only push when ST(7) - the register that becomes the new top - is
       free; otherwise this is an FPU stack overflow. */
9001    IEM_MC_IF_FPUREG_IS_EMPTY(7)
9002        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
9003        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9004    IEM_MC_ELSE()
9005        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9006    IEM_MC_ENDIF();
9007    IEM_MC_ADVANCE_RIP();
9008
9009    IEM_MC_END();
9010    return VINF_SUCCESS;
9011}
9012
9013
9014/** Opcode 0xdf !11/1 - fisttp m16i: store ST0 as a 16-bit integer using
 * truncation (chop) regardless of FCW.RC, then pop. */
9015FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
9016{
9017    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
9018    IEM_MC_BEGIN(3, 2);
9019    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9020    IEM_MC_LOCAL(uint16_t, u16Fsw);
9021    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9022    IEM_MC_ARG(int16_t *, pi16Dst, 1);
9023    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9024
9025    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9026    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9027    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9028    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9029
9030    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9031    IEM_MC_PREPARE_FPU_USAGE();
9032    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9033        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9034        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9035        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9036    IEM_MC_ELSE()
        /* ST0 empty (stack underflow): with #IA masked, store the 16-bit
           integer indefinite value; either way raise underflow and pop. */
9037        IEM_MC_IF_FCW_IM()
9038            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9039            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9040        IEM_MC_ENDIF();
9041        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9042    IEM_MC_ENDIF();
9043    IEM_MC_ADVANCE_RIP();
9044
9045    IEM_MC_END();
9046    return VINF_SUCCESS;
9047}
9048
9049
9050/** Opcode 0xdf !11/2 - fist m16i: store ST0 as a 16-bit integer (rounded
 * per FCW.RC), without popping. */
9051FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
9052{
9053    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
9054    IEM_MC_BEGIN(3, 2);
9055    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9056    IEM_MC_LOCAL(uint16_t, u16Fsw);
9057    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9058    IEM_MC_ARG(int16_t *, pi16Dst, 1);
9059    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9060
9061    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9062    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9063    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9064    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9065
9066    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9067    IEM_MC_PREPARE_FPU_USAGE();
9068    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9069        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9070        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        /* Unlike fistp/fisttp: FSW update without popping the stack. */
9071        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9072    IEM_MC_ELSE()
9073        IEM_MC_IF_FCW_IM()
9074            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9075            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9076        IEM_MC_ENDIF();
9077        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9078    IEM_MC_ENDIF();
9079    IEM_MC_ADVANCE_RIP();
9080
9081    IEM_MC_END();
9082    return VINF_SUCCESS;
9083}
9084
9085
9086/** Opcode 0xdf !11/3 - fistp m16i: store ST0 as a 16-bit integer (rounded
 * per FCW.RC), then pop. */
9087FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
9088{
9089    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
9090    IEM_MC_BEGIN(3, 2);
9091    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9092    IEM_MC_LOCAL(uint16_t, u16Fsw);
9093    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9094    IEM_MC_ARG(int16_t *, pi16Dst, 1);
9095    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9096
9097    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9098    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9099    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9100    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9101
9102    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9103    IEM_MC_PREPARE_FPU_USAGE();
9104    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9105        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
9106        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
9107        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9108    IEM_MC_ELSE()
        /* Empty ST0: store the integer-indefinite value if #IA is masked,
           then signal stack underflow and pop. */
9109        IEM_MC_IF_FCW_IM()
9110            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
9111            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
9112        IEM_MC_ENDIF();
9113        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9114    IEM_MC_ENDIF();
9115    IEM_MC_ADVANCE_RIP();
9116
9117    IEM_MC_END();
9118    return VINF_SUCCESS;
9119}
9120
9121
9122/** Opcode 0xdf !11/4 - fbld m80bcd (packed BCD load): not yet implemented. */
9123FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9124
9125
9126/** Opcode 0xdf !11/5 - fild m64i: convert a signed 64-bit integer in memory
 * to 80-bit real and push it onto the FPU stack. */
9127FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
9128{
9129    IEMOP_MNEMONIC(fild_m64i, "fild m64i");
9130
9131    IEM_MC_BEGIN(2, 3);
9132    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
9133    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
9134    IEM_MC_LOCAL(int64_t, i64Val);
9135    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
9136    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);
9137
9138    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
9139    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9140
9141    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9142    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9143    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9144
9145    IEM_MC_PREPARE_FPU_USAGE();
    /* Push only if ST(7), the register about to become the new top, is free;
       otherwise raise FPU stack overflow. */
9146    IEM_MC_IF_FPUREG_IS_EMPTY(7)
9147        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
9148        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9149    IEM_MC_ELSE()
9150        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
9151    IEM_MC_ENDIF();
9152    IEM_MC_ADVANCE_RIP();
9153
9154    IEM_MC_END();
9155    return VINF_SUCCESS;
9156}
9157
9158
9159/** Opcode 0xdf !11/6 - fbstp m80bcd (packed BCD store + pop): not yet implemented. */
9160FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9161
9162
9163/** Opcode 0xdf !11/7 - fistp m64i: store ST0 as a 64-bit integer (rounded
 * per FCW.RC), then pop. */
9164FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
9165{
9166    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
9167    IEM_MC_BEGIN(3, 2);
9168    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9169    IEM_MC_LOCAL(uint16_t, u16Fsw);
9170    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
9171    IEM_MC_ARG(int64_t *, pi64Dst, 1);
9172    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);
9173
9174    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9175    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9176    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
9177    IEM_MC_MAYBE_RAISE_FPU_XCPT();
9178
9179    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
9180    IEM_MC_PREPARE_FPU_USAGE();
9181    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
9182        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
9183        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
9184        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9185    IEM_MC_ELSE()
        /* Empty ST0: store the 64-bit integer-indefinite value if #IA is
           masked, then signal stack underflow and pop. */
9186        IEM_MC_IF_FCW_IM()
9187            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
9188            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
9189        IEM_MC_ENDIF();
9190        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
9191    IEM_MC_ENDIF();
9192    IEM_MC_ADVANCE_RIP();
9193
9194    IEM_MC_END();
9195    return VINF_SUCCESS;
9196}
9197
9198
9199/** Opcode 0xdf - x87 escape byte 7: dispatch on the ModR/M reg field,
 * with separate tables for register (mod==3) and memory forms. */
9200FNIEMOP_DEF(iemOp_EscF7)
9201{
9202    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9203    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9204    {
        /* Register forms (mod == 3). */
9205        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9206        {
9207            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
9208            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
9209            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
9210            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            /* /4 is valid only for the exact encoding 0xdf 0xe0 (fnstsw ax). */
9211            case 4: if (bRm == 0xe0)
9212                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
9213                    return IEMOP_RAISE_INVALID_OPCODE();
9214            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
9215            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
9216            case 7: return IEMOP_RAISE_INVALID_OPCODE();
9217            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9218        }
9219    }
9220    else
9221    {
        /* Memory forms (mod != 3). */
9222        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9223        {
9224            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
9225            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
9226            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
9227            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
9228            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
9229            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
9230            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
9231            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
9232            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9233        }
9234    }
9235}
9236
9237
9238/** Opcode 0xe0 - loopne Jb: decrement the counter register (CX/ECX/RCX per
 * effective address size) and take the short jump while the counter is
 * non-zero AND ZF is clear. */
9239FNIEMOP_DEF(iemOp_loopne_Jb)
9240{
9241    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
9242    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9243    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9244    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9245
    /* The address-size prefix selects which counter register is used. */
9246    switch (pVCpu->iem.s.enmEffAddrMode)
9247    {
9248        case IEMMODE_16BIT:
9249            IEM_MC_BEGIN(0,0);
9250            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9251            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9252                IEM_MC_REL_JMP_S8(i8Imm);
9253            } IEM_MC_ELSE() {
9254                IEM_MC_ADVANCE_RIP();
9255            } IEM_MC_ENDIF();
9256            IEM_MC_END();
9257            return VINF_SUCCESS;
9258
9259        case IEMMODE_32BIT:
9260            IEM_MC_BEGIN(0,0);
9261            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9262            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9263                IEM_MC_REL_JMP_S8(i8Imm);
9264            } IEM_MC_ELSE() {
9265                IEM_MC_ADVANCE_RIP();
9266            } IEM_MC_ENDIF();
9267            IEM_MC_END();
9268            return VINF_SUCCESS;
9269
9270        case IEMMODE_64BIT:
9271            IEM_MC_BEGIN(0,0);
9272            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9273            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
9274                IEM_MC_REL_JMP_S8(i8Imm);
9275            } IEM_MC_ELSE() {
9276                IEM_MC_ADVANCE_RIP();
9277            } IEM_MC_ENDIF();
9278            IEM_MC_END();
9279            return VINF_SUCCESS;
9280
9281        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9282    }
9283}
9284
9285
9286/** Opcode 0xe1 - loope Jb: decrement the counter register (CX/ECX/RCX per
 * effective address size) and take the short jump while the counter is
 * non-zero AND ZF is set. */
9287FNIEMOP_DEF(iemOp_loope_Jb)
9288{
9289    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
9290    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9291    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9292    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9293
    /* The address-size prefix selects which counter register is used. */
9294    switch (pVCpu->iem.s.enmEffAddrMode)
9295    {
9296        case IEMMODE_16BIT:
9297            IEM_MC_BEGIN(0,0);
9298            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9299            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9300                IEM_MC_REL_JMP_S8(i8Imm);
9301            } IEM_MC_ELSE() {
9302                IEM_MC_ADVANCE_RIP();
9303            } IEM_MC_ENDIF();
9304            IEM_MC_END();
9305            return VINF_SUCCESS;
9306
9307        case IEMMODE_32BIT:
9308            IEM_MC_BEGIN(0,0);
9309            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9310            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9311                IEM_MC_REL_JMP_S8(i8Imm);
9312            } IEM_MC_ELSE() {
9313                IEM_MC_ADVANCE_RIP();
9314            } IEM_MC_ENDIF();
9315            IEM_MC_END();
9316            return VINF_SUCCESS;
9317
9318        case IEMMODE_64BIT:
9319            IEM_MC_BEGIN(0,0);
9320            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9321            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
9322                IEM_MC_REL_JMP_S8(i8Imm);
9323            } IEM_MC_ELSE() {
9324                IEM_MC_ADVANCE_RIP();
9325            } IEM_MC_ENDIF();
9326            IEM_MC_END();
9327            return VINF_SUCCESS;
9328
9329        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9330    }
9331}
9332
9333
9334/** Opcode 0xe2 - loop Jb: decrement the counter register (CX/ECX/RCX per
 * effective address size) and take the short jump while it is non-zero.
 * A loop that targets itself is short-circuited: the counter is simply
 * zeroed and execution continues (see the IEM_GET_INSTR_LEN checks). */
9335FNIEMOP_DEF(iemOp_loop_Jb)
9336{
9337    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
9338    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9339    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9340    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9341
9342    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
9343     * using the 32-bit operand size override.  How can that be restarted?  See
9344     * weird pseudo code in intel manual. */
9345    switch (pVCpu->iem.s.enmEffAddrMode)
9346    {
9347        case IEMMODE_16BIT:
9348            IEM_MC_BEGIN(0,0);
            /* i8Imm == -instruction-length means "loop to self": skip the
               pointless iterating and just clear the counter. */
9349            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9350            {
9351                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
9352                IEM_MC_IF_CX_IS_NZ() {
9353                    IEM_MC_REL_JMP_S8(i8Imm);
9354                } IEM_MC_ELSE() {
9355                    IEM_MC_ADVANCE_RIP();
9356                } IEM_MC_ENDIF();
9357            }
9358            else
9359            {
9360                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
9361                IEM_MC_ADVANCE_RIP();
9362            }
9363            IEM_MC_END();
9364            return VINF_SUCCESS;
9365
9366        case IEMMODE_32BIT:
9367            IEM_MC_BEGIN(0,0);
9368            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9369            {
9370                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
9371                IEM_MC_IF_ECX_IS_NZ() {
9372                    IEM_MC_REL_JMP_S8(i8Imm);
9373                } IEM_MC_ELSE() {
9374                    IEM_MC_ADVANCE_RIP();
9375                } IEM_MC_ENDIF();
9376            }
9377            else
9378            {
9379                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
9380                IEM_MC_ADVANCE_RIP();
9381            }
9382            IEM_MC_END();
9383            return VINF_SUCCESS;
9384
9385        case IEMMODE_64BIT:
9386            IEM_MC_BEGIN(0,0);
9387            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm)
9388            {
9389                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
9390                IEM_MC_IF_RCX_IS_NZ() {
9391                    IEM_MC_REL_JMP_S8(i8Imm);
9392                } IEM_MC_ELSE() {
9393                    IEM_MC_ADVANCE_RIP();
9394                } IEM_MC_ENDIF();
9395            }
9396            else
9397            {
9398                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
9399                IEM_MC_ADVANCE_RIP();
9400            }
9401            IEM_MC_END();
9402            return VINF_SUCCESS;
9403
9404        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9405    }
9406}
9407
9408
9409/** Opcode 0xe3 - jcxz/jecxz/jrcxz Jb: short jump if the counter register
 * (selected by the effective address size) is zero.  Does not modify it. */
9410FNIEMOP_DEF(iemOp_jecxz_Jb)
9411{
9412    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
9413    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9414    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9415    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9416
9417    switch (pVCpu->iem.s.enmEffAddrMode)
9418    {
9419        case IEMMODE_16BIT:
9420            IEM_MC_BEGIN(0,0);
            /* Inverted branch sense: fall through when non-zero, jump when zero. */
9421            IEM_MC_IF_CX_IS_NZ() {
9422                IEM_MC_ADVANCE_RIP();
9423            } IEM_MC_ELSE() {
9424                IEM_MC_REL_JMP_S8(i8Imm);
9425            } IEM_MC_ENDIF();
9426            IEM_MC_END();
9427            return VINF_SUCCESS;
9428
9429        case IEMMODE_32BIT:
9430            IEM_MC_BEGIN(0,0);
9431            IEM_MC_IF_ECX_IS_NZ() {
9432                IEM_MC_ADVANCE_RIP();
9433            } IEM_MC_ELSE() {
9434                IEM_MC_REL_JMP_S8(i8Imm);
9435            } IEM_MC_ENDIF();
9436            IEM_MC_END();
9437            return VINF_SUCCESS;
9438
9439        case IEMMODE_64BIT:
9440            IEM_MC_BEGIN(0,0);
9441            IEM_MC_IF_RCX_IS_NZ() {
9442                IEM_MC_ADVANCE_RIP();
9443            } IEM_MC_ELSE() {
9444                IEM_MC_REL_JMP_S8(i8Imm);
9445            } IEM_MC_ENDIF();
9446            IEM_MC_END();
9447            return VINF_SUCCESS;
9448
9449        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9450    }
9451}
9452
9453
9454/** Opcode 0xe4 - in AL,Ib: read one byte from the immediate I/O port into AL. */
9455FNIEMOP_DEF(iemOp_in_AL_Ib)
9456{
9457    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
9458    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9459    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; last argument is the access size in bytes. */
9460    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
9461}
9462
9463
9464/** Opcode 0xe5 - in eAX,Ib: read a word or dword (per operand size) from the
 * immediate I/O port into eAX. */
9465FNIEMOP_DEF(iemOp_in_eAX_Ib)
9466{
9467    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
9468    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9469    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
9470    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9471}
9472
9473
9474/** Opcode 0xe6 - out Ib,AL: write AL to the immediate I/O port. */
9475FNIEMOP_DEF(iemOp_out_Ib_AL)
9476{
9477    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
9478    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9479    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Deferred to the C implementation; last argument is the access size in bytes. */
9480    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
9481}
9482
9483
9484/** Opcode 0xe7 - out Ib,eAX: write AX or EAX (per operand size) to the
 * immediate I/O port. */
9485FNIEMOP_DEF(iemOp_out_Ib_eAX)
9486{
9487    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
9488    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9489    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Access size: 2 bytes with a 16-bit operand size, otherwise 4. */
9490    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9491}
9492
9493
9494/** Opcode 0xe8 - call Jv: near relative call, immediate size selected by the
 * effective operand size (64-bit mode uses a sign-extended 32-bit immediate). */
9495FNIEMOP_DEF(iemOp_call_Jv)
9496{
9497    IEMOP_MNEMONIC(call_Jv, "call Jv");
9498    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9499    switch (pVCpu->iem.s.enmEffOpSize)
9500    {
9501        case IEMMODE_16BIT:
9502        {
9503            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            /* Cast to signed: the displacement is relative to the next instruction. */
9504            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
9505        }
9506
9507        case IEMMODE_32BIT:
9508        {
9509            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9510            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
9511        }
9512
9513        case IEMMODE_64BIT:
9514        {
9515            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9516            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
9517        }
9518
9519        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9520    }
9521}
9522
9523
9524/** Opcode 0xe9 - jmp Jv: near relative jump; 16-bit displacement with a
 * 16-bit operand size, otherwise a 32-bit displacement (also in 64-bit mode). */
9525FNIEMOP_DEF(iemOp_jmp_Jv)
9526{
9527    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
9528    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9529    switch (pVCpu->iem.s.enmEffOpSize)
9530    {
9531        case IEMMODE_16BIT:
9532        {
9533            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
9534            IEM_MC_BEGIN(0, 0);
9535            IEM_MC_REL_JMP_S16(i16Imm);
9536            IEM_MC_END();
9537            return VINF_SUCCESS;
9538        }
9539
        /* 64-bit mode shares the 32-bit immediate form (sign-extended displacement). */
9540        case IEMMODE_64BIT:
9541        case IEMMODE_32BIT:
9542        {
9543            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
9544            IEM_MC_BEGIN(0, 0);
9545            IEM_MC_REL_JMP_S32(i32Imm);
9546            IEM_MC_END();
9547            return VINF_SUCCESS;
9548        }
9549
9550        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9551    }
9552}
9553
9554
9555/** Opcode 0xea - jmp Ap: direct far jump (ptr16:16 / ptr16:32).  Invalid in
 * 64-bit mode (IEMOP_HLP_NO_64BIT). */
9556FNIEMOP_DEF(iemOp_jmp_Ap)
9557{
9558    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
9559    IEMOP_HLP_NO_64BIT();
9560
9561    /* Decode the far pointer address and pass it on to the far call C implementation. */
    /* Offset first, then the 16-bit selector, matching the encoding order. */
9562    uint32_t offSeg;
9563    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
9564        IEM_OPCODE_GET_NEXT_U32(&offSeg);
9565    else
9566        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
9567    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
9568    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9569    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
9570}
9571
9572
9573/** Opcode 0xeb - jmp Jb: short relative jump with an 8-bit displacement. */
9574FNIEMOP_DEF(iemOp_jmp_Jb)
9575{
9576    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
9577    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
9578    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9579    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
9580
9581    IEM_MC_BEGIN(0, 0);
9582    IEM_MC_REL_JMP_S8(i8Imm);
9583    IEM_MC_END();
9584    return VINF_SUCCESS;
9585}
9586
9587
9588/** Opcode 0xec - in AL,DX: read one byte from the I/O port in DX into AL. */
9589FNIEMOP_DEF(iemOp_in_AL_DX)
9590{
9591    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
9592    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9593    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
9594}
9595
9596
9597/** Opcode 0xed - in eAX,DX: read a word or dword (per operand size) from the
 * I/O port in DX into eAX.
 * NOTE(review): named iemOp_eAX_DX while siblings use the in_/out_ prefix
 * (cf. iemOp_in_AL_DX); renaming would touch the opcode table, left as-is. */
9598FNIEMOP_DEF(iemOp_eAX_DX)
9599{
9600    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
9601    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9602    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9603}
9604
9605
9606/** Opcode 0xee - out DX,AL: write AL to the I/O port in DX. */
9607FNIEMOP_DEF(iemOp_out_DX_AL)
9608{
9609    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
9610    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9611    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
9612}
9613
9614
9615/** Opcode 0xef - out DX,eAX: write AX or EAX (per operand size) to the I/O
 * port in DX. */
9616FNIEMOP_DEF(iemOp_out_DX_eAX)
9617{
9618    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
9619    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9620    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
9621}
9622
9623
9624/** Opcode 0xf0 - lock prefix: record the prefix and recurse into the one-byte
 * opcode map for the instruction that follows. */
9625FNIEMOP_DEF(iemOp_lock)
9626{
9627    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
9628    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
9629
9630    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9631    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9632}
9633
9634
9635/** Opcode 0xf1 - int1/icebp: raises \#DB; dispatched as a software interrupt
 * with fIsBpInstr=false so it is not treated like INT3. */
9636FNIEMOP_DEF(iemOp_int_1)
9637{
9638    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
9639    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
9640    /** @todo testcase! */
9641    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
9642}
9643
9644
9645/** Opcode 0xf2 - repne/repnz prefix: record the prefix and recurse into the
 * one-byte opcode map for the instruction that follows. */
9646FNIEMOP_DEF(iemOp_repne)
9647{
9648    /* This overrides any previous REPE prefix. */
9649    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
9650    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
9651    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
9652
9653    /* For the 4 entry opcode tables, REPNZ overrides any previous
9654       REPZ and operand size prefixes. */
9655    pVCpu->iem.s.idxPrefix = 3;
9656
9657    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9658    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9659}
9660
9661
9662/** Opcode 0xf3 - repe/repz prefix: record the prefix and recurse into the
 * one-byte opcode map for the instruction that follows. */
9663FNIEMOP_DEF(iemOp_repe)
9664{
9665    /* This overrides any previous REPNE prefix. */
9666    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
9667    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
9668    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
9669
9670    /* For the 4 entry opcode tables, REPZ overrides any previous
9671       REPNZ and operand size prefixes. */
9672    pVCpu->iem.s.idxPrefix = 2;
9673
9674    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
9675    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
9676}
9677
9678
9679/** Opcode 0xf4 - hlt: deferred entirely to the C implementation. */
9680FNIEMOP_DEF(iemOp_hlt)
9681{
9682    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9683    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
9684}
9685
9686
9687/** Opcode 0xf5 - cmc: complement the carry flag. */
9688FNIEMOP_DEF(iemOp_cmc)
9689{
9690    IEMOP_MNEMONIC(cmc, "cmc");
9691    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9692    IEM_MC_BEGIN(0, 0);
9693    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
9694    IEM_MC_ADVANCE_RIP();
9695    IEM_MC_END();
9696    return VINF_SUCCESS;
9697}
9698
9699
9700/**
9701 * Common implementation of 'inc/dec/not/neg Eb'.
9702 *
9703 * @returns Strict VBox status code.
9704 * @param   bRm     The RM byte.
9705 * @param   pImpl   The instruction implementation (supplies the normal and
 *                  lock-prefixed byte workers).
 */
9706FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9707{
9708    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9709    {
9710        /* register access */
9711        IEM_MC_BEGIN(2, 0);
9712        IEM_MC_ARG(uint8_t *,       pu8Dst, 0);
9713        IEM_MC_ARG(uint32_t *,      pEFlags, 1);
9714        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9715        IEM_MC_REF_EFLAGS(pEFlags);
9716        IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9717        IEM_MC_ADVANCE_RIP();
9718        IEM_MC_END();
9719    }
9720    else
9721    {
9722        /* memory access. */
9723        IEM_MC_BEGIN(2, 2);
9724        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
9725        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
9726        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9727
9728        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9729        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9730        IEM_MC_FETCH_EFLAGS(EFlags);
        /* With a LOCK prefix the atomic (interlocked) worker is used instead. */
9731        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9732            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9733        else
9734            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
9735
9736        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9737        IEM_MC_COMMIT_EFLAGS(EFlags);
9738        IEM_MC_ADVANCE_RIP();
9739        IEM_MC_END();
9740    }
9741    return VINF_SUCCESS;
9742}
9743
9744
9745/**
9746 * Common implementation of 'inc/dec/not/neg Ev'.
9747 *
9748 * @returns Strict VBox status code.
9749 * @param   bRm     The RM byte.
9750 * @param   pImpl   The instruction implementation (supplies the normal and
 *                  lock-prefixed workers for each operand size).
 */
9751FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9752{
9753    /* Registers are handled by a common worker. */
9754    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9755        return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9756
9757    /* Memory we do here. */
9758    switch (pVCpu->iem.s.enmEffOpSize)
9759    {
9760        case IEMMODE_16BIT:
9761            IEM_MC_BEGIN(2, 2);
9762            IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
9763            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
9764            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9765
9766            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9767            IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9768            IEM_MC_FETCH_EFLAGS(EFlags);
            /* With a LOCK prefix the atomic (interlocked) worker is used instead. */
9769            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9770                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9771            else
9772                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
9773
9774            IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9775            IEM_MC_COMMIT_EFLAGS(EFlags);
9776            IEM_MC_ADVANCE_RIP();
9777            IEM_MC_END();
9778            return VINF_SUCCESS;
9779
9780        case IEMMODE_32BIT:
9781            IEM_MC_BEGIN(2, 2);
9782            IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
9783            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
9784            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9785
9786            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9787            IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9788            IEM_MC_FETCH_EFLAGS(EFlags);
9789            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9790                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
9791            else
9792                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
9793
9794            IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9795            IEM_MC_COMMIT_EFLAGS(EFlags);
9796            IEM_MC_ADVANCE_RIP();
9797            IEM_MC_END();
9798            return VINF_SUCCESS;
9799
9800        case IEMMODE_64BIT:
9801            IEM_MC_BEGIN(2, 2);
9802            IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
9803            IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 1);
9804            IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9805
9806            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9807            IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9808            IEM_MC_FETCH_EFLAGS(EFlags);
9809            if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9810                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9811            else
9812                IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
9813
9814            IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9815            IEM_MC_COMMIT_EFLAGS(EFlags);
9816            IEM_MC_ADVANCE_RIP();
9817            IEM_MC_END();
9818            return VINF_SUCCESS;
9819
9820        IEM_NOT_REACHED_DEFAULT_CASE_RET();
9821    }
9822}
9823
9824
9825/** Opcode 0xf6 /0 - test Eb,Ib: AND without writeback, only EFLAGS updated. */
9826FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
9827{
9828    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
9829    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
9830
9831    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9832    {
9833        /* register access */
9834        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9835        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9836
9837        IEM_MC_BEGIN(3, 0);
9838        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
9839        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
9840        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
9841        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9842        IEM_MC_REF_EFLAGS(pEFlags);
9843        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
9844        IEM_MC_ADVANCE_RIP();
9845        IEM_MC_END();
9846    }
9847    else
9848    {
9849        /* memory access. */
9850        IEM_MC_BEGIN(3, 2);
9851        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
9852        IEM_MC_ARG(uint8_t,         u8Src,           1);
9853        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
9854        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9855
        /* cbImm=1: an immediate byte follows the ModR/M bytes. */
9856        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
9857        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
9858        IEM_MC_ASSIGN(u8Src, u8Imm);
9859        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Mapped read-only: test never writes its destination operand. */
9860        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9861        IEM_MC_FETCH_EFLAGS(EFlags);
9862        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
9863
9864        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
9865        IEM_MC_COMMIT_EFLAGS(EFlags);
9866        IEM_MC_ADVANCE_RIP();
9867        IEM_MC_END();
9868    }
9869    return VINF_SUCCESS;
9870}
9871
9872
9873/** Opcode 0xf7 /0 - test Ev,Iv: AND without writeback, only EFLAGS updated. */
9874FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
9875{
9876    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
9877    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
9878
9879    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9880    {
9881        /* register access */
9882        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9883        switch (pVCpu->iem.s.enmEffOpSize)
9884        {
9885            case IEMMODE_16BIT:
9886            {
9887                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9888                IEM_MC_BEGIN(3, 0);
9889                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
9890                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
9891                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9892                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9893                IEM_MC_REF_EFLAGS(pEFlags);
9894                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
9895                IEM_MC_ADVANCE_RIP();
9896                IEM_MC_END();
9897                return VINF_SUCCESS;
9898            }
9899
9900            case IEMMODE_32BIT:
9901            {
9902                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9903                IEM_MC_BEGIN(3, 0);
9904                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
9905                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
9906                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9907                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9908                IEM_MC_REF_EFLAGS(pEFlags);
9909                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
9910                /* No clearing the high dword here - test doesn't write back the result. */
9911                IEM_MC_ADVANCE_RIP();
9912                IEM_MC_END();
9913                return VINF_SUCCESS;
9914            }
9915
9916            case IEMMODE_64BIT:
9917            {
                /* The 64-bit form uses a sign-extended 32-bit immediate. */
9918                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9919                IEM_MC_BEGIN(3, 0);
9920                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
9921                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
9922                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
9923                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9924                IEM_MC_REF_EFLAGS(pEFlags);
9925                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
9926                IEM_MC_ADVANCE_RIP();
9927                IEM_MC_END();
9928                return VINF_SUCCESS;
9929            }
9930
9931            IEM_NOT_REACHED_DEFAULT_CASE_RET();
9932        }
9933    }
9934    else
9935    {
9936        /* memory access. */
9937        switch (pVCpu->iem.s.enmEffOpSize)
9938        {
9939            case IEMMODE_16BIT:
9940            {
9941                IEM_MC_BEGIN(3, 2);
9942                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
9943                IEM_MC_ARG(uint16_t,        u16Src,          1);
9944                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
9945                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9946
                /* cbImm=2: a 16-bit immediate follows the ModR/M bytes. */
9947                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
9948                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
9949                IEM_MC_ASSIGN(u16Src, u16Imm);
9950                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* Mapped read-only: test never writes its destination operand. */
9951                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9952                IEM_MC_FETCH_EFLAGS(EFlags);
9953                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
9954
9955                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
9956                IEM_MC_COMMIT_EFLAGS(EFlags);
9957                IEM_MC_ADVANCE_RIP();
9958                IEM_MC_END();
9959                return VINF_SUCCESS;
9960            }
9961
9962            case IEMMODE_32BIT:
9963            {
9964                IEM_MC_BEGIN(3, 2);
9965                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
9966                IEM_MC_ARG(uint32_t,        u32Src,          1);
9967                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
9968                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9969
                /* cbImm=4: a 32-bit immediate follows the ModR/M bytes. */
9970                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9971                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
9972                IEM_MC_ASSIGN(u32Src, u32Imm);
9973                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9974                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9975                IEM_MC_FETCH_EFLAGS(EFlags);
9976                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
9977
9978                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
9979                IEM_MC_COMMIT_EFLAGS(EFlags);
9980                IEM_MC_ADVANCE_RIP();
9981                IEM_MC_END();
9982                return VINF_SUCCESS;
9983            }
9984
9985            case IEMMODE_64BIT:
9986            {
9987                IEM_MC_BEGIN(3, 2);
9988                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
9989                IEM_MC_ARG(uint64_t,        u64Src,          1);
9990                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
9991                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);
9992
                /* cbImm=4: the 64-bit form still only has a 32-bit immediate. */
9993                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
9994                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
9995                IEM_MC_ASSIGN(u64Src, u64Imm);
9996                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
9997                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9998                IEM_MC_FETCH_EFLAGS(EFlags);
9999                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
10000
10001                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
10002                IEM_MC_COMMIT_EFLAGS(EFlags);
10003                IEM_MC_ADVANCE_RIP();
10004                IEM_MC_END();
10005                return VINF_SUCCESS;
10006            }
10007
10008            IEM_NOT_REACHED_DEFAULT_CASE_RET();
10009        }
10010    }
10011}
10012
10013
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized MUL, IMUL, DIV and IDIV encodings.  The
 * arithmetic is performed by the assembly helper @a pfnU8, which takes a
 * reference to AX (the byte variants implicitly use the accumulator), the
 * byte operand and the EFLAGS, and returns a non-zero status on divide error.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero helper status signals a divide error (\#DE); only advance RIP on success. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Same divide-error handling as the register form above. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10065
10066
10067/** Opcode 0xf7 /4, /5, /6 and /7. */
10068FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10069{
10070 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10071
10072 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10073 {
10074 /* register access */
10075 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10076 switch (pVCpu->iem.s.enmEffOpSize)
10077 {
10078 case IEMMODE_16BIT:
10079 {
10080 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10081 IEM_MC_BEGIN(4, 1);
10082 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10083 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10084 IEM_MC_ARG(uint16_t, u16Value, 2);
10085 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10086 IEM_MC_LOCAL(int32_t, rc);
10087
10088 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10089 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10090 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10091 IEM_MC_REF_EFLAGS(pEFlags);
10092 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10093 IEM_MC_IF_LOCAL_IS_Z(rc) {
10094 IEM_MC_ADVANCE_RIP();
10095 } IEM_MC_ELSE() {
10096 IEM_MC_RAISE_DIVIDE_ERROR();
10097 } IEM_MC_ENDIF();
10098
10099 IEM_MC_END();
10100 return VINF_SUCCESS;
10101 }
10102
10103 case IEMMODE_32BIT:
10104 {
10105 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10106 IEM_MC_BEGIN(4, 1);
10107 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10108 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10109 IEM_MC_ARG(uint32_t, u32Value, 2);
10110 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10111 IEM_MC_LOCAL(int32_t, rc);
10112
10113 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10114 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10115 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10116 IEM_MC_REF_EFLAGS(pEFlags);
10117 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10118 IEM_MC_IF_LOCAL_IS_Z(rc) {
10119 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10120 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10121 IEM_MC_ADVANCE_RIP();
10122 } IEM_MC_ELSE() {
10123 IEM_MC_RAISE_DIVIDE_ERROR();
10124 } IEM_MC_ENDIF();
10125
10126 IEM_MC_END();
10127 return VINF_SUCCESS;
10128 }
10129
10130 case IEMMODE_64BIT:
10131 {
10132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10133 IEM_MC_BEGIN(4, 1);
10134 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10135 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10136 IEM_MC_ARG(uint64_t, u64Value, 2);
10137 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10138 IEM_MC_LOCAL(int32_t, rc);
10139
10140 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10141 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10142 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10143 IEM_MC_REF_EFLAGS(pEFlags);
10144 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10145 IEM_MC_IF_LOCAL_IS_Z(rc) {
10146 IEM_MC_ADVANCE_RIP();
10147 } IEM_MC_ELSE() {
10148 IEM_MC_RAISE_DIVIDE_ERROR();
10149 } IEM_MC_ENDIF();
10150
10151 IEM_MC_END();
10152 return VINF_SUCCESS;
10153 }
10154
10155 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10156 }
10157 }
10158 else
10159 {
10160 /* memory access. */
10161 switch (pVCpu->iem.s.enmEffOpSize)
10162 {
10163 case IEMMODE_16BIT:
10164 {
10165 IEM_MC_BEGIN(4, 2);
10166 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10167 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10168 IEM_MC_ARG(uint16_t, u16Value, 2);
10169 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10171 IEM_MC_LOCAL(int32_t, rc);
10172
10173 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10175 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10176 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10177 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10178 IEM_MC_REF_EFLAGS(pEFlags);
10179 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10180 IEM_MC_IF_LOCAL_IS_Z(rc) {
10181 IEM_MC_ADVANCE_RIP();
10182 } IEM_MC_ELSE() {
10183 IEM_MC_RAISE_DIVIDE_ERROR();
10184 } IEM_MC_ENDIF();
10185
10186 IEM_MC_END();
10187 return VINF_SUCCESS;
10188 }
10189
10190 case IEMMODE_32BIT:
10191 {
10192 IEM_MC_BEGIN(4, 2);
10193 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10194 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10195 IEM_MC_ARG(uint32_t, u32Value, 2);
10196 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10197 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10198 IEM_MC_LOCAL(int32_t, rc);
10199
10200 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10201 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10202 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10203 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10204 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10205 IEM_MC_REF_EFLAGS(pEFlags);
10206 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10207 IEM_MC_IF_LOCAL_IS_Z(rc) {
10208 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10209 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10210 IEM_MC_ADVANCE_RIP();
10211 } IEM_MC_ELSE() {
10212 IEM_MC_RAISE_DIVIDE_ERROR();
10213 } IEM_MC_ENDIF();
10214
10215 IEM_MC_END();
10216 return VINF_SUCCESS;
10217 }
10218
10219 case IEMMODE_64BIT:
10220 {
10221 IEM_MC_BEGIN(4, 2);
10222 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10223 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10224 IEM_MC_ARG(uint64_t, u64Value, 2);
10225 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10226 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10227 IEM_MC_LOCAL(int32_t, rc);
10228
10229 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10230 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10231 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10232 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10233 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10234 IEM_MC_REF_EFLAGS(pEFlags);
10235 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10236 IEM_MC_IF_LOCAL_IS_Z(rc) {
10237 IEM_MC_ADVANCE_RIP();
10238 } IEM_MC_ELSE() {
10239 IEM_MC_RAISE_DIVIDE_ERROR();
10240 } IEM_MC_ENDIF();
10241
10242 IEM_MC_END();
10243 return VINF_SUCCESS;
10244 }
10245
10246 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10247 }
10248 }
10249}
10250
/** Opcode 0xf6 - Group 3 with a byte operand (Eb).
 *  Dispatches on the ModR/M /reg field: /0 test, /1 \#UD, /2 not, /3 neg,
 *  /4 mul, /5 imul, /6 div, /7 idiv. */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10287
10288
/** Opcode 0xf7 - Group 3 with a word/dword/qword operand (Ev).
 *  Same /reg dispatch as 0xf6: /0 test, /1 \#UD, /2 not, /3 neg,
 *  /4 mul, /5 imul, /6 div, /7 idiv. */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10325
10326
/** Opcode 0xf8 - clc: clear the carry flag. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10338
10339
/** Opcode 0xf9 - stc: set the carry flag. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10351
10352
/** Opcode 0xfa - cli: clear the interrupt flag.
 *  Deferred to a C implementation (iemCImpl_cli) since IOPL/CPL and VME
 *  handling are involved. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
10360
10361
/** Opcode 0xfb - sti: set the interrupt flag.
 *  Deferred to a C implementation (iemCImpl_sti), same rationale as cli. */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10368
10369
/** Opcode 0xfc - cld: clear the direction flag. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10381
10382
/** Opcode 0xfd - std: set the direction flag. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10394
10395
/** Opcode 0xfe - Group 4: /0 inc Eb, /1 dec Eb; /2../7 are undefined (\#UD). */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
10413
10414
10415/**
10416 * Opcode 0xff /2.
10417 * @param bRm The RM byte.
10418 */
10419FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
10420{
10421 IEMOP_MNEMONIC(calln_Ev, "calln Ev");
10422 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10423
10424 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10425 {
10426 /* The new RIP is taken from a register. */
10427 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10428 switch (pVCpu->iem.s.enmEffOpSize)
10429 {
10430 case IEMMODE_16BIT:
10431 IEM_MC_BEGIN(1, 0);
10432 IEM_MC_ARG(uint16_t, u16Target, 0);
10433 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10434 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
10435 IEM_MC_END()
10436 return VINF_SUCCESS;
10437
10438 case IEMMODE_32BIT:
10439 IEM_MC_BEGIN(1, 0);
10440 IEM_MC_ARG(uint32_t, u32Target, 0);
10441 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10442 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
10443 IEM_MC_END()
10444 return VINF_SUCCESS;
10445
10446 case IEMMODE_64BIT:
10447 IEM_MC_BEGIN(1, 0);
10448 IEM_MC_ARG(uint64_t, u64Target, 0);
10449 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10450 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
10451 IEM_MC_END()
10452 return VINF_SUCCESS;
10453
10454 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10455 }
10456 }
10457 else
10458 {
10459 /* The new RIP is taken from a register. */
10460 switch (pVCpu->iem.s.enmEffOpSize)
10461 {
10462 case IEMMODE_16BIT:
10463 IEM_MC_BEGIN(1, 1);
10464 IEM_MC_ARG(uint16_t, u16Target, 0);
10465 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10466 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10467 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10468 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10469 IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
10470 IEM_MC_END()
10471 return VINF_SUCCESS;
10472
10473 case IEMMODE_32BIT:
10474 IEM_MC_BEGIN(1, 1);
10475 IEM_MC_ARG(uint32_t, u32Target, 0);
10476 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10477 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10479 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10480 IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
10481 IEM_MC_END()
10482 return VINF_SUCCESS;
10483
10484 case IEMMODE_64BIT:
10485 IEM_MC_BEGIN(1, 1);
10486 IEM_MC_ARG(uint64_t, u64Target, 0);
10487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10491 IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
10492 IEM_MC_END()
10493 return VINF_SUCCESS;
10494
10495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10496 }
10497 }
10498}
10499
/** C-implementation worker signature for far branches (callf/jmpf):
 *  takes the selector, the segment offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);

/**
 * Common worker for 0xff /3 (callf Ep) and 0xff /5 (jmpf Ep).
 *
 * Loads a far pointer (offset followed by a 16-bit selector) from memory and
 * defers the actual branch to @a pfnCImpl.  A register operand is invalid and
 * raises \#UD.
 */
FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
{
    /* Registers? How?? */
    if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
    { /* likely */ }
    else
        return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */

    /* Far pointer loaded from memory. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint16_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 16:16 pointer: 16-bit offset first, then the selector at +2. */
            IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
             * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
             * and call far qword [rsp] encodings. */
            if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Sel, 0);
                IEM_MC_ARG(uint64_t, offSeg, 1);
                /* NOTE(review): enmEffOpSize is pinned to IEMMODE_16BIT here even though
                   a 64-bit offset is fetched - looks inconsistent with the other cases;
                   TODO confirm the CIMPL worker ignores it for 64-bit offsets. */
                IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                /* 16:64 pointer: 64-bit offset first, then the selector at +8. */
                IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
                IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
                IEM_MC_END();
                return VINF_SUCCESS;
            }
            /* AMD falls thru. */
            /* fall thru */

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(3, 1);
            IEM_MC_ARG(uint16_t, u16Sel, 0);
            IEM_MC_ARG(uint32_t, offSeg, 1);
            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            /* 16:32 pointer: 32-bit offset first, then the selector at +4. */
            IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
            IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10566
10567
10568/**
10569 * Opcode 0xff /3.
10570 * @param bRm The RM byte.
10571 */
10572FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
10573{
10574 IEMOP_MNEMONIC(callf_Ep, "callf Ep");
10575 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
10576}
10577
10578
10579/**
10580 * Opcode 0xff /4.
10581 * @param bRm The RM byte.
10582 */
10583FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
10584{
10585 IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
10586 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10587
10588 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10589 {
10590 /* The new RIP is taken from a register. */
10591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10592 switch (pVCpu->iem.s.enmEffOpSize)
10593 {
10594 case IEMMODE_16BIT:
10595 IEM_MC_BEGIN(0, 1);
10596 IEM_MC_LOCAL(uint16_t, u16Target);
10597 IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10598 IEM_MC_SET_RIP_U16(u16Target);
10599 IEM_MC_END()
10600 return VINF_SUCCESS;
10601
10602 case IEMMODE_32BIT:
10603 IEM_MC_BEGIN(0, 1);
10604 IEM_MC_LOCAL(uint32_t, u32Target);
10605 IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10606 IEM_MC_SET_RIP_U32(u32Target);
10607 IEM_MC_END()
10608 return VINF_SUCCESS;
10609
10610 case IEMMODE_64BIT:
10611 IEM_MC_BEGIN(0, 1);
10612 IEM_MC_LOCAL(uint64_t, u64Target);
10613 IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10614 IEM_MC_SET_RIP_U64(u64Target);
10615 IEM_MC_END()
10616 return VINF_SUCCESS;
10617
10618 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10619 }
10620 }
10621 else
10622 {
10623 /* The new RIP is taken from a memory location. */
10624 switch (pVCpu->iem.s.enmEffOpSize)
10625 {
10626 case IEMMODE_16BIT:
10627 IEM_MC_BEGIN(0, 2);
10628 IEM_MC_LOCAL(uint16_t, u16Target);
10629 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10630 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10631 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10632 IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10633 IEM_MC_SET_RIP_U16(u16Target);
10634 IEM_MC_END()
10635 return VINF_SUCCESS;
10636
10637 case IEMMODE_32BIT:
10638 IEM_MC_BEGIN(0, 2);
10639 IEM_MC_LOCAL(uint32_t, u32Target);
10640 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10641 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10642 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10643 IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10644 IEM_MC_SET_RIP_U32(u32Target);
10645 IEM_MC_END()
10646 return VINF_SUCCESS;
10647
10648 case IEMMODE_64BIT:
10649 IEM_MC_BEGIN(0, 2);
10650 IEM_MC_LOCAL(uint64_t, u64Target);
10651 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10652 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10653 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10654 IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10655 IEM_MC_SET_RIP_U64(u64Target);
10656 IEM_MC_END()
10657 return VINF_SUCCESS;
10658
10659 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10660 }
10661 }
10662}
10663
10664
10665/**
10666 * Opcode 0xff /5.
10667 * @param bRm The RM byte.
10668 */
10669FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
10670{
10671 IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
10672 return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
10673}
10674
10675
10676/**
10677 * Opcode 0xff /6.
10678 * @param bRm The RM byte.
10679 */
10680FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
10681{
10682 IEMOP_MNEMONIC(push_Ev, "push Ev");
10683
10684 /* Registers are handled by a common worker. */
10685 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10686 return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10687
10688 /* Memory we do here. */
10689 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10690 switch (pVCpu->iem.s.enmEffOpSize)
10691 {
10692 case IEMMODE_16BIT:
10693 IEM_MC_BEGIN(0, 2);
10694 IEM_MC_LOCAL(uint16_t, u16Src);
10695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10696 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10697 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10698 IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10699 IEM_MC_PUSH_U16(u16Src);
10700 IEM_MC_ADVANCE_RIP();
10701 IEM_MC_END();
10702 return VINF_SUCCESS;
10703
10704 case IEMMODE_32BIT:
10705 IEM_MC_BEGIN(0, 2);
10706 IEM_MC_LOCAL(uint32_t, u32Src);
10707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10708 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10709 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10710 IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10711 IEM_MC_PUSH_U32(u32Src);
10712 IEM_MC_ADVANCE_RIP();
10713 IEM_MC_END();
10714 return VINF_SUCCESS;
10715
10716 case IEMMODE_64BIT:
10717 IEM_MC_BEGIN(0, 2);
10718 IEM_MC_LOCAL(uint64_t, u64Src);
10719 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10720 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10722 IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10723 IEM_MC_PUSH_U64(u64Src);
10724 IEM_MC_ADVANCE_RIP();
10725 IEM_MC_END();
10726 return VINF_SUCCESS;
10727
10728 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10729 }
10730}
10731
10732
/** Opcode 0xff - Group 5 dispatch on the ModR/M /reg field:
 *  /0 inc Ev, /1 dec Ev, /2 calln Ev, /3 callf Ep, /4 jmpn Ev,
 *  /5 jmpf Ep, /6 push Ev, /7 \#UD. */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All 3-bit /reg values are covered above; this is unreachable. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
10761
10762
10763
/**
 * The one-byte opcode dispatch table: one decoder function per opcode byte
 * 0x00..0xff.  Indexed directly by the opcode byte; prefix bytes (segment
 * overrides, operand/address size, lock, rep) and the 0x0f two-byte escape
 * are themselves entries that continue decoding.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex2, iemOp_lds_Gv_Mp__vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
10831
10832
10833/** @} */
10834
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette