VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 65767

Last change on this file since 65767 was 65766, checked in by vboxsync, 8 years ago

IEM: VEX decoding updates.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 365.4 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 65766 2017-02-13 13:49:15Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2016 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
/*******************************************************************************
* Global Variables *
*******************************************************************************/
/** The one-byte opcode dispatch table (256 entries); defined later in this
 *  file but declared extern here so the prefix handlers above the definition
 *  can re-dispatch through it. */
extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24
25
26/** @name One byte opcodes.
27 *
28 * @{
29 */
30
/** Opcode 0x00 - 'add Eb,Gb': byte ADD, ModRM r/m destination, register source;
 *  decode and operand handling delegated to the common rm_r8 helper. */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    IEMOP_MNEMONIC(add_Eb_Gb, "add Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
37
38
/** Opcode 0x01 - 'add Ev,Gv': word/dword/qword ADD, r/m destination, register source. */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    IEMOP_MNEMONIC(add_Ev_Gv, "add Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
45
46
/** Opcode 0x02 - 'add Gb,Eb': byte ADD, register destination, r/m source. */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    IEMOP_MNEMONIC(add_Gb_Eb, "add Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
53
54
/** Opcode 0x03 - 'add Gv,Ev': word/dword/qword ADD, register destination, r/m source. */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    IEMOP_MNEMONIC(add_Gv_Ev, "add Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
61
62
/** Opcode 0x04 - 'add al,Ib': byte ADD of an immediate into AL. */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    IEMOP_MNEMONIC(add_al_Ib, "add al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
69
70
/** Opcode 0x05 - 'add rAX,Iz': ADD of a word/dword immediate (sign-extended
 *  for 64-bit operand size) into rAX. */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    IEMOP_MNEMONIC(add_rAX_Iz, "add rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
77
78
/** Opcode 0x06 - 'push es': push the ES selector via the common sreg-push helper. */
FNIEMOP_DEF(iemOp_push_ES)
{
    IEMOP_MNEMONIC(push_es, "push es");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
85
86
87/** Opcode 0x07. */
88FNIEMOP_DEF(iemOp_pop_ES)
89{
90 IEMOP_MNEMONIC(pop_es, "pop es");
91 IEMOP_HLP_NO_64BIT();
92 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
93 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
94}
95
96
/** Opcode 0x08 - 'or Eb,Gb': byte OR into r/m; AF is architecturally undefined
 *  (hinted to the verification framework). */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    IEMOP_MNEMONIC(or_Eb_Gb, "or Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
104
105
/** Opcode 0x09 - 'or Ev,Gv': word/dword/qword OR into r/m; AF undefined. */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    IEMOP_MNEMONIC(or_Ev_Gv, "or Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
113
114
/** Opcode 0x0a - 'or Gb,Eb': byte OR into register; AF undefined. */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    IEMOP_MNEMONIC(or_Gb_Eb, "or Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
122
123
/** Opcode 0x0b - 'or Gv,Ev': word/dword/qword OR into register; AF undefined. */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    IEMOP_MNEMONIC(or_Gv_Ev, "or Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
131
132
/** Opcode 0x0c - 'or al,Ib': byte OR of an immediate into AL; AF undefined. */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    IEMOP_MNEMONIC(or_al_Ib, "or al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
140
141
/** Opcode 0x0d - 'or rAX,Iz': OR of a word/dword immediate into rAX; AF undefined. */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    IEMOP_MNEMONIC(or_rAX_Iz, "or rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
149
150
/** Opcode 0x0e - 'push cs' (8086/80186 only for the raw opcode; on later CPUs
 *  0x0f is the two-byte escape, see iemOp_2byteEscape). */
FNIEMOP_DEF(iemOp_push_CS)
{
    IEMOP_MNEMONIC(push_cs, "push cs");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
157
158
/**
 * Opcode 0x0f - escape byte into the two-byte opcode map.
 *
 * The two-byte table has 4 entries per opcode, indexed by the mandatory
 * prefix (none/66h/F3h/F2h) recorded in idxPrefix.  Under VBOX_STRICT a
 * once-only self-test probes the 0x0fbc row (bsf/tzcnt) to assert the
 * table is laid out that way.
 */
FNIEMOP_DEF(iemOp_2byteEscape)
{
#ifdef VBOX_STRICT
    static bool s_fTested = false;
    if (RT_LIKELY(s_fTested)) { /* likely */ }
    else
    {
        s_fTested = true;
        Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
        Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
    }
#endif

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);

    /** @todo PUSH CS on 8086, undefined on 80186. */
    IEMOP_HLP_MIN_286();
    return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
}
181
/** Opcode 0x10 - 'adc Eb,Gb': byte add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    IEMOP_MNEMONIC(adc_Eb_Gb, "adc Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
188
189
/** Opcode 0x11 - 'adc Ev,Gv': word/dword/qword add-with-carry, r/m destination. */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    IEMOP_MNEMONIC(adc_Ev_Gv, "adc Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
196
197
/** Opcode 0x12 - 'adc Gb,Eb': byte add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    IEMOP_MNEMONIC(adc_Gb_Eb, "adc Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
204
205
/** Opcode 0x13 - 'adc Gv,Ev': word/dword/qword add-with-carry, register destination. */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    IEMOP_MNEMONIC(adc_Gv_Ev, "adc Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
212
213
/** Opcode 0x14 - 'adc al,Ib': byte add-with-carry of an immediate into AL. */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    IEMOP_MNEMONIC(adc_al_Ib, "adc al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
220
221
/** Opcode 0x15 - 'adc rAX,Iz': add-with-carry of a word/dword immediate into rAX. */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    IEMOP_MNEMONIC(adc_rAX_Iz, "adc rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
228
229
/** Opcode 0x16 - 'push ss': push the SS selector via the common sreg-push helper. */
FNIEMOP_DEF(iemOp_push_SS)
{
    IEMOP_MNEMONIC(push_ss, "push ss");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
236
237
/**
 * Opcode 0x17 - 'pop ss'.
 *
 * Invalid in 64-bit mode (\#UD); segment load deferred to iemCImpl_pop_Sreg.
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    IEMOP_MNEMONIC(pop_ss, "pop ss"); /** @todo implies instruction fusing? */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
246
247
/** Opcode 0x18 - 'sbb Eb,Gb': byte subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    IEMOP_MNEMONIC(sbb_Eb_Gb, "sbb Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
254
255
/** Opcode 0x19 - 'sbb Ev,Gv': word/dword/qword subtract-with-borrow, r/m destination. */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    IEMOP_MNEMONIC(sbb_Ev_Gv, "sbb Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
262
263
/** Opcode 0x1a - 'sbb Gb,Eb': byte subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    IEMOP_MNEMONIC(sbb_Gb_Eb, "sbb Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
270
271
/** Opcode 0x1b - 'sbb Gv,Ev': word/dword/qword subtract-with-borrow, register destination. */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    IEMOP_MNEMONIC(sbb_Gv_Ev, "sbb Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
278
279
/** Opcode 0x1c - 'sbb al,Ib': byte subtract-with-borrow of an immediate from AL. */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    IEMOP_MNEMONIC(sbb_al_Ib, "sbb al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
286
287
/** Opcode 0x1d - 'sbb rAX,Iz': subtract-with-borrow of a word/dword immediate from rAX. */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    IEMOP_MNEMONIC(sbb_rAX_Iz, "sbb rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
294
295
/** Opcode 0x1e - 'push ds': push the DS selector via the common sreg-push helper. */
FNIEMOP_DEF(iemOp_push_DS)
{
    IEMOP_MNEMONIC(push_ds, "push ds");
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
302
303
/**
 * Opcode 0x1f - 'pop ds'.
 *
 * Invalid in 64-bit mode (\#UD); segment load deferred to iemCImpl_pop_Sreg.
 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    IEMOP_MNEMONIC(pop_ds, "pop ds");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
312
313
/** Opcode 0x20 - 'and Eb,Gb': byte AND into r/m; AF undefined. */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    IEMOP_MNEMONIC(and_Eb_Gb, "and Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
321
322
/** Opcode 0x21 - 'and Ev,Gv': word/dword/qword AND into r/m; AF undefined. */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    IEMOP_MNEMONIC(and_Ev_Gv, "and Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
330
331
/** Opcode 0x22 - 'and Gb,Eb': byte AND into register; AF undefined. */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    IEMOP_MNEMONIC(and_Gb_Eb, "and Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
339
340
/** Opcode 0x23 - 'and Gv,Ev': word/dword/qword AND into register; AF undefined. */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    IEMOP_MNEMONIC(and_Gv_Ev, "and Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
348
349
/** Opcode 0x24 - 'and al,Ib': byte AND of an immediate into AL; AF undefined. */
FNIEMOP_DEF(iemOp_and_Al_Ib)
{
    IEMOP_MNEMONIC(and_al_Ib, "and al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
}
357
358
/** Opcode 0x25 - 'and rAX,Iz': AND of a word/dword immediate into rAX; AF undefined. */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    IEMOP_MNEMONIC(and_rAX_Iz, "and rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
366
367
/**
 * Opcode 0x26 - ES segment override prefix.
 * Records the prefix flag and effective segment, then decodes and dispatches
 * the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
378
379
/**
 * Opcode 0x27 - 'daa' (decimal adjust AL after addition).
 *
 * Invalid in 64-bit mode (\#UD); OF is architecturally undefined.
 * Implementation deferred to iemCImpl_daa.
 */
FNIEMOP_DEF(iemOp_daa)
{
    IEMOP_MNEMONIC(daa_AL, "daa AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
389
390
/** Opcode 0x28 - 'sub Eb,Gb': byte SUB, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    IEMOP_MNEMONIC(sub_Eb_Gb, "sub Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
397
398
/** Opcode 0x29 - 'sub Ev,Gv': word/dword/qword SUB, r/m destination. */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    IEMOP_MNEMONIC(sub_Ev_Gv, "sub Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
405
406
/** Opcode 0x2a - 'sub Gb,Eb': byte SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    IEMOP_MNEMONIC(sub_Gb_Eb, "sub Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
413
414
/** Opcode 0x2b - 'sub Gv,Ev': word/dword/qword SUB, register destination. */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    IEMOP_MNEMONIC(sub_Gv_Ev, "sub Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
421
422
/** Opcode 0x2c - 'sub al,Ib': byte SUB of an immediate from AL. */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    IEMOP_MNEMONIC(sub_al_Ib, "sub al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
429
430
/** Opcode 0x2d - 'sub rAX,Iz': SUB of a word/dword immediate from rAX. */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    IEMOP_MNEMONIC(sub_rAX_Iz, "sub rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
437
438
/**
 * Opcode 0x2e - CS segment override prefix.
 * Records the prefix flag and effective segment, then decodes and dispatches
 * the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
449
450
/**
 * Opcode 0x2f - 'das' (decimal adjust AL after subtraction).
 *
 * Invalid in 64-bit mode (\#UD); OF is architecturally undefined.
 * Implementation deferred to iemCImpl_das.
 */
FNIEMOP_DEF(iemOp_das)
{
    IEMOP_MNEMONIC(das_AL, "das AL");
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
460
461
/** Opcode 0x30 - 'xor Eb,Gb': byte XOR into r/m; AF undefined. */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    IEMOP_MNEMONIC(xor_Eb_Gb, "xor Eb,Gb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
469
470
/** Opcode 0x31 - 'xor Ev,Gv': word/dword/qword XOR into r/m; AF undefined. */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    IEMOP_MNEMONIC(xor_Ev_Gv, "xor Ev,Gv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
478
479
/** Opcode 0x32 - 'xor Gb,Eb': byte XOR into register; AF undefined. */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    IEMOP_MNEMONIC(xor_Gb_Eb, "xor Gb,Eb");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
487
488
/** Opcode 0x33 - 'xor Gv,Ev': word/dword/qword XOR into register; AF undefined. */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    IEMOP_MNEMONIC(xor_Gv_Ev, "xor Gv,Ev");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
496
497
/** Opcode 0x34 - 'xor al,Ib': byte XOR of an immediate into AL; AF undefined. */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    IEMOP_MNEMONIC(xor_al_Ib, "xor al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
505
506
/** Opcode 0x35 - 'xor rAX,Iz': XOR of a word/dword immediate into rAX; AF undefined. */
FNIEMOP_DEF(iemOp_xor_eAX_Iz)
{
    IEMOP_MNEMONIC(xor_rAX_Iz, "xor rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
}
514
515
/**
 * Opcode 0x36 - SS segment override prefix.
 * Records the prefix flag and effective segment, then decodes and dispatches
 * the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
526
527
/** Opcode 0x37 - 'aaa' (ASCII adjust AL after addition); not implemented yet. */
FNIEMOP_STUB(iemOp_aaa);
530
531
/** Opcode 0x38 - 'cmp Eb,Gb': byte compare (flags-only SUB), r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
538
539
/** Opcode 0x39 - 'cmp Ev,Gv': word/dword/qword compare, r/m first operand. */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
546
547
/** Opcode 0x3a - 'cmp Gb,Eb': byte compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
554
555
/** Opcode 0x3b - 'cmp Gv,Ev': word/dword/qword compare, register first operand. */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
562
563
/** Opcode 0x3c - 'cmp al,Ib': byte compare of AL against an immediate. */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
570
571
/** Opcode 0x3d - 'cmp rAX,Iz': compare of rAX against a word/dword immediate. */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
578
579
/**
 * Opcode 0x3e - DS segment override prefix.
 * Records the prefix flag and effective segment, then decodes and dispatches
 * the next opcode byte through the one-byte map.
 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds"); /* a REX prefix must come last; drop any earlier one */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
590
591
/** Opcode 0x3f - 'aas' (ASCII adjust AL after subtraction); not implemented yet. */
FNIEMOP_STUB(iemOp_aas);
594
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Emits one microcode block per effective operand size; the worker from the
 * function table does the arithmetic and updates EFLAGS in place.
 *
 * @param   pImpl   Function table holding the 16/32/64-bit workers for the
 *                  unary operation.
 * @param   iReg    The general register index (including any REX extension).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes zero the high half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    /* Not reached: all IEMMODE values are handled above. */
    return VINF_SUCCESS;
}
639
640
/** Opcode 0x40 - REX prefix in 64-bit mode; otherwise 'inc eAX'. */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
659
660
/** Opcode 0x41 - REX.B prefix in 64-bit mode; otherwise 'inc eCX'. */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* extends the r/m (base) register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
680
681
/** Opcode 0x42 - REX.X prefix in 64-bit mode; otherwise 'inc eDX'. */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* extends the SIB index register field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
701
702
703
/** Opcode 0x43 - REX.BX prefix in 64-bit mode; otherwise 'inc eBX'. */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
724
725
/** Opcode 0x44 - REX.R prefix in 64-bit mode; otherwise 'inc eSP'. */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* extends the ModRM reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
745
746
/** Opcode 0x45 - REX.RB prefix in 64-bit mode; otherwise 'inc eBP'. */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
767
768
/** Opcode 0x46 - REX.RX prefix in 64-bit mode; otherwise 'inc eSI'. */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
789
790
/** Opcode 0x47 - REX.RBX prefix in 64-bit mode; otherwise 'inc eDI'. */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
812
813
/** Opcode 0x48 - REX.W prefix in 64-bit mode; otherwise 'dec eAX'. */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
833
834
/** Opcode 0x49 - REX.BW prefix in 64-bit mode; otherwise 'dec eCX'. */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
855
856
/** Opcode 0x4a - REX.XW prefix in 64-bit mode; otherwise 'dec eDX'. */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
877
878
/** Opcode 0x4b - REX.BXW prefix in 64-bit mode; otherwise 'dec eBX'. */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
900
901
/** Opcode 0x4c - REX.RW prefix in 64-bit mode; otherwise 'dec eSP'. */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
922
923
/** Opcode 0x4d - REX.RBW prefix in 64-bit mode; otherwise 'dec eBP'. */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
945
946
/** Opcode 0x4e - REX.RXW prefix in 64-bit mode; otherwise 'dec eSI'. */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
968
969
/** Opcode 0x4f - REX.RBXW prefix in 64-bit mode; otherwise 'dec eDI'. */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu);

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
992
993
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is REX.B extended and the default
 * operand size is forced to 64-bit (66h selects 16-bit; 32-bit pushes are
 * not encodable there).
 *
 * @param   iReg    The general register index (before REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1039
1040
/** Opcode 0x50 - 'push rAX' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1047
1048
/** Opcode 0x51 - 'push rCX' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1055
1056
/** Opcode 0x52 - 'push rDX' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1063
1064
/** Opcode 0x53 - 'push rBX' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1071
1072
1073/** Opcode 0x54. */
1074FNIEMOP_DEF(iemOp_push_eSP)
1075{
1076 IEMOP_MNEMONIC(push_rSP, "push rSP");
1077 if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
1078 {
1079 IEM_MC_BEGIN(0, 1);
1080 IEM_MC_LOCAL(uint16_t, u16Value);
1081 IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
1082 IEM_MC_SUB_LOCAL_U16(u16Value, 2);
1083 IEM_MC_PUSH_U16(u16Value);
1084 IEM_MC_ADVANCE_RIP();
1085 IEM_MC_END();
1086 }
1087 return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
1088}
1089
1090
/** Opcode 0x55 - 'push rBP' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1097
1098
/** Opcode 0x56 - 'push rSI' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1105
1106
/** Opcode 0x57 - 'push rDI' via the common push-register helper. */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1113
1114
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is REX.B extended and the default
 * operand size is forced to 64-bit (66h selects 16-bit).
 *
 * @param   iReg    The general register index (before REX.B extension).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1161
1162
/** Opcode 0x58 - 'pop rAX' via the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1169
1170
/** Opcode 0x59 - 'pop rCX' via the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1177
1178
/** Opcode 0x5a - 'pop rDX' via the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1185
1186
/** Opcode 0x5b - 'pop rBX' via the common pop-register helper. */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1193
1194
/**
 * Opcode 0x5c - 'pop rSP'.
 *
 * Special-cased because popping into SP must not apply the usual
 * post-pop stack-pointer increment on top of the loaded value; the value
 * read from the stack becomes the new stack pointer.  With REX.B set the
 * destination is R12 instead and the common helper applies.
 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
    DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1242
1243
/** Opcode 0x5d - pop into the rBP register via the common worker. */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1250
1251
/** Opcode 0x5e - pop into the rSI register via the common worker. */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1258
1259
/** Opcode 0x5f - pop into the rDI register via the common worker. */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1266
1267
/** Opcode 0x60 - PUSHA/PUSHAD.
 * Pushes all eight general registers; 186+ only, and excluded in 64-bit
 * mode (IEMOP_HLP_NO_64BIT).  The actual work is deferred to the 16-bit or
 * 32-bit C implementation depending on the effective operand size. */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_NO_64BIT();
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1279
1280
/** Opcode 0x61 - POPA/POPAD in legacy/compatibility modes.
 * Defers to the 16-bit or 32-bit C implementation per effective operand
 * size.  In 64-bit mode the opcode is invalid and \#UD is raised.
 * NOTE(review): the log string mentions "mvex"; the MVEX/EVEX prefix
 * actually occupies opcode 0x62, so the message wording looks off for
 * 0x61 - verify before relying on it (runtime string left unchanged). */
FNIEMOP_DEF(iemOp_popa)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    Log(("mvex is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1297
1298
/** Opcode 0x62 - unimplemented stub.
 * Per the identifier this should cover BOUND Gv,Ma (186+, legacy modes) and
 * the EVEX prefix which reuses this opcode byte - NOTE(review): inferred
 * from the name; confirm when implementing. */
FNIEMOP_STUB(iemOp_bound_Gv_Ma_evex);
// IEMOP_HLP_MIN_186();
1302
1303
/** Opcode 0x63 - non-64-bit modes only: ARPL Ew,Gw.
 * Adjusts the RPL field of the destination selector word and sets ZF via
 * the iemAImpl_arpl worker.  286+ and not decodable in real or V8086 mode.
 * No REX register extension is applied here since ARPL only exists outside
 * 64-bit mode (opcode 0x63 is MOVSXD there). */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: operate directly on the 16-bit GPR. */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory destination: map the word read/write and commit with flags. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst, 0);
        IEM_MC_ARG(uint16_t, u16Src, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
1353
1354
/** Opcode 0x63.
 * @note This is a weird one. It works like a regular move instruction if
 *       REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already. */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register: sign-extend the 32-bit source GPR to 64 bits.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory: fetch a dword and sign-extend.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1396
1397
/** Opcode 0x64 - FS segment-override prefix (386+).
 * Records the prefix, sets the effective segment to FS, then decodes and
 * dispatches the next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1410
1411
/** Opcode 0x65 - GS segment-override prefix (386+).
 * Records the prefix, sets the effective segment to GS, then decodes and
 * dispatches the next opcode byte through the one-byte map. */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1424
1425
/** Opcode 0x66 - operand-size override prefix (386+).
 * Records the prefix, recalculates the effective operand size, and then
 * dispatches the next opcode byte. */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present (idxPrefix stays at their value). */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1443
1444
/** Opcode 0x67 - address-size override prefix (386+).
 * Toggles the effective address mode away from the default: 16 -> 32,
 * 32 -> 16, and in 64-bit mode 64 -> 32.  Then dispatches the next opcode. */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1463
1464
/** Opcode 0x68 - PUSH Iz (186+).
 * Pushes a word/dword immediate per the effective operand size; in 64-bit
 * mode (default 64-bit op size) a 32-bit immediate is fetched and
 * sign-extended to 64 bits before the push. */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Imm32 sign-extended to 64 bits, matching hardware behaviour. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1509
1510
/** Opcode 0x69 - three-operand IMUL: Gv = Ev * Iz (186+).
 * The product is computed into a local temporary which is then stored to
 * the destination register, truncated to the operand size; EFLAGS are
 * updated by the iemAImpl_imul_two_uNN worker.  SF/ZF/AF/PF are declared
 * undefined for the output verifier.  For memory operands the immediate
 * size (2 or 4 bytes) is passed to the effective-address calculation -
 * presumably so trailing-immediate length is accounted for (e.g.
 * RIP-relative addressing); confirm against IEM_MC_CALC_RM_EFF_ADDR. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand; imm32 sign-extended to 64 bits */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
1670
1671
/** Opcode 0x6a - PUSH Ib (186+).
 * The signed byte immediate is sign-extended to the effective operand size
 * (implicitly, via the signed i8Imm argument) and pushed. */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1698
1699
/** Opcode 0x6b - three-operand IMUL with byte immediate: Gv = Ev * Ib (186+).
 * Same structure as opcode 0x69, but the immediate is a single byte that is
 * sign-extended to the operand size; hence the effective-address calculation
 * for memory operands is told 1 trailing immediate byte. */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u32, pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_imul_two_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
1853
1854
/** Opcode 0x6c - INS Yb,DX (186+).
 * Byte string input from port DX; with a REP/REPNE prefix the repeating
 * C implementation is used, otherwise the single-shot one.  The variant
 * is picked by the effective address mode. */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1883
1884
/** Opcode 0x6d - INS Yv,DX (186+).
 * Word/dword string input from port DX, dispatched by operand and address
 * size to the matching C implementation.  Note that the 64-bit operand
 * size deliberately falls through to the 32-bit op-size workers (there is
 * no 64-bit port I/O). */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* uses the 32-bit op-size workers, see note above */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* uses the 32-bit op-size workers, see note above */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1945
1946
/** Opcode 0x6e - OUTS DX,Yb (186+).
 * Byte string output to port DX; the effective segment (overridable) is
 * passed to the C implementation along with the address-mode variant. */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
1975
1976
/** Opcode 0x6f - OUTS DX,Yv (186+).
 * Word/dword string output to port DX, dispatched by operand and address
 * size.  64-bit operand size deliberately falls through to the 32-bit
 * op-size workers (no 64-bit port I/O). */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* uses the 32-bit op-size workers, see note above */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT: /* uses the 32-bit op-size workers, see note above */
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2037
2038
/** Opcode 0x70 - jo Jb: short jump (signed 8-bit displacement) if OF=1. */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2056
2057
/** Opcode 0x71 - jno Jb: short jump if OF=0 (note the inverted branch bodies). */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2075
/** Opcode 0x72 - jc/jb/jnae Jb: short jump if CF=1. */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2093
2094
/** Opcode 0x73 - jnc/jnb/jae Jb: short jump if CF=0 (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2112
2113
/** Opcode 0x74 - je/jz Jb: short jump if ZF=1. */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2131
2132
/** Opcode 0x75 - jne/jnz Jb: short jump if ZF=0 (inverted branch bodies). */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2150
2151
/** Opcode 0x76 - jbe/jna Jb: short jump if CF=1 or ZF=1. */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2169
2170
/** Opcode 0x77 - ja/jnbe Jb: short jump if CF=0 and ZF=0 (inverted bodies). */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2188
2189
/** Opcode 0x78 - js Jb: short jump if SF=1. */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2207
2208
/** Opcode 0x79. */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted form of 0x78: taken when SF=0. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2226
2227
/** Opcode 0x7a. */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when PF=1 (even parity). */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2245
2246
/** Opcode 0x7b. */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted form of 0x7a: taken when PF=0. */
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2264
2265
/** Opcode 0x7c. */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when SF != OF (signed less-than). */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2283
2284
/** Opcode 0x7d. */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    /* Note: the mnemonic macro uses the 'jge' spelling of this condition. */
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted form of 0x7c: taken when SF == OF (signed greater-or-equal). */
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2302
2303
/** Opcode 0x7e. */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Taken when ZF=1 or SF != OF (signed less-or-equal). */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2321
2322
/** Opcode 0x7f. */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    /* Note: the mnemonic macro uses the 'jg' spelling of this condition. */
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    /* Inverted form of 0x7e: taken when ZF=0 and SF == OF (signed greater). */
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2340
2341
/** Opcode 0x80. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    /*
     * Group 1, byte operand with byte immediate.  The ModR/M reg field
     * selects one of eight operations; pImpl supplies the worker functions
     * (pfnLockedU8 is NULL for CMP, which never writes its destination).
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP only reads memory; the RMW ops map the destination read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address (and its displacement) precede the immediate
           in the instruction stream, so decode it first; the trailing '1' is
           the number of immediate bytes still to come. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* A LOCK prefix is permitted only for the ops with a locked worker. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2411
2412
/** Opcode 0x81. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
{
    /*
     * Group 1, word/dword/qword operand with full-sized immediate (Iz).
     * The ModR/M reg field selects the operation; pImpl supplies per-size
     * workers (pfnLockedUxx is NULL for CMP, which never writes back).
     * In 64-bit mode the immediate is a sign-extended 32-bit value.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU16)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; the '2' is the number of immediate
                   bytes that still follow the displacement. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                /* LOCK prefix is only valid for the ops with a locked worker. */
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU32)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP, TEST */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register target */
                /* Iz in 64-bit mode: 32-bit immediate sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory target */
                uint32_t fAccess;
                if (pImpl->pfnLockedU64)
                    fAccess = IEM_ACCESS_DATA_RW;
                else /* CMP */
                    fAccess = IEM_ACCESS_DATA_R;
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            break;
        }
    }
    return VINF_SUCCESS;
}
2600
2601
/** Opcode 0x82. */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
{
    /* 0x82 is an alias of 0x80 that is invalid (#UD) in 64-bit mode;
       outside 64-bit mode it simply forwards to the 0x80 handler. */
    IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
    return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
}
2608
2609
/** Opcode 0x83. */
FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
{
    /*
     * Group 1, word/dword/qword operand with a sign-extended byte immediate.
     * The (int8_t) casts below perform the sign extension to the effective
     * operand size before the worker is invoked.
     */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
    }
    /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
       to the 386 even if absent in the intel reference manuals and some
       3rd party opcode listings. */
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register target
         */
        /* NOTE(review): the lock-prefix check runs before the final immediate
           byte is fetched here, unlike the 0x80/0x81 register paths which
           fetch the immediate first — verify this ordering is intentional. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                /* 32-bit register writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    else
    {
        /*
         * Memory target.
         */
        /* CMP only reads memory; the RMW ops map the destination read-write. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU16)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* Effective address first; one immediate byte still follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
                if (pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING();
                else
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
            }
        }
    }
    return VINF_SUCCESS;
}
2792
2793
/** Opcode 0x84. */
FNIEMOP_DEF(iemOp_test_Eb_Gb)
{
    IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
    /* AF is left undefined by TEST, so the verifier must not compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    /* Same decode shape as the 0x00-0x3f ALU row; the worker does the AND-without-store. */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
}
2801
2802
/** Opcode 0x85. */
FNIEMOP_DEF(iemOp_test_Ev_Gv)
{
    IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
    /* AF is left undefined by TEST, so the verifier must not compare it. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
}
2810
2811
/** Opcode 0x86. */
FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register/register: swap via two temporaries. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, uTmp1);
        IEM_MC_LOCAL(uint8_t, uTmp2);

        IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
/** @todo the register must be committed separately! */
        /* The memory operand is mapped RW and the worker swaps it with a
           direct reference to the register. */
        IEM_MC_BEGIN(2, 2);
        IEM_MC_ARG(uint8_t *, pu8Mem, 0);
        IEM_MC_ARG(uint8_t *, pu8Reg, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2859
2860
/** Opcode 0x87. */
FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
{
    IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        /* Register/register swap via two temporaries, per operand size.
           NOTE(review): only the memory form below clears the high dword
           explicitly (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF); the 32-bit
           register form presumably relies on IEM_MC_STORE_GREG_U32 doing
           the zero-extension — confirm against the macro definition. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, uTmp1);
                IEM_MC_LOCAL(uint16_t, uTmp2);

                IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, uTmp1);
                IEM_MC_LOCAL(uint32_t, uTmp2);

                IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, uTmp1);
                IEM_MC_LOCAL(uint64_t, uTmp2);

                IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
/** @todo the register must be committed separately! */
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint16_t *, pu16Mem, 0);
                IEM_MC_ARG(uint16_t *, pu16Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint32_t *, pu32Mem, 0);
                IEM_MC_ARG(uint32_t *, pu32Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

                /* The register was written through a reference, so the high
                   dword must be cleared explicitly here. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(2, 2);
                IEM_MC_ARG(uint64_t *, pu64Mem, 0);
                IEM_MC_ARG(uint64_t *, pu64Reg, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2982
2983
/** Opcode 0x88. */
FNIEMOP_DEF(iemOp_mov_Eb_Gb)
{
    IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");

    uint8_t bRm;
    IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: copy reg field into r/m field. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
3023
3024
/** Opcode 0x89. */
FNIEMOP_DEF(iemOp_mov_Ev_Gv)
{
    IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register, per effective operand size. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're writing a register to memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3114
3115
/** Opcode 0x8a. */
FNIEMOP_DEF(iemOp_mov_Gb_Eb)
{
    IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register to register: copy r/m field into reg field (reverse of 0x88). */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint8_t, u8Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3153
3154
/**
 * Opcode 0x8b - mov Gv,Ev.
 *
 * Loads the general register selected by ModRM.reg from the r/m operand
 * (register or memory), using the current effective operand size
 * (16/32/64-bit; one case per IEMMODE below).
 */
FNIEMOP_DEF(iemOp_mov_Gv_Ev)
{
    IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
3244
3245
/**
 * Opcode 0x63.
 *
 * Mode-dependent dispatcher: outside 64-bit mode this byte decodes as
 * 'arpl Ew,Gw'; in 64-bit mode it is 'movsxd Gv,Ev' when the effective
 * operand size is 64-bit, otherwise it behaves like a plain 'mov Gv,Ev'.
 */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
{
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
        return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
    return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
}
3255
3256
/**
 * Opcode 0x8c - mov Ev,Sw (store segment register).
 *
 * Register destinations honour the effective operand size (upper bits are
 * zero-extended for 32/64-bit); memory destinations are always word sized.
 */
FNIEMOP_DEF(iemOp_mov_Ev_Sw)
{
    IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * Check that the destination register exists. The REX.R prefix is ignored.
     */
    /* The 3-bit reg field can encode 0..7; only ES..GS (0..5) are valid
       segment registers, 6 and 7 raise #UD. */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     * In that case, the operand size is respected and the upper bits are
     * cleared (starting with some pentium).
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Value);
                IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Value);
                IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Value);
                IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're saving the register to memory. The access is word sized
         * regardless of operand size prefixes.
         */
#if 0 /* not necessary */
        pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
        IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3330
3331
3332
3333
/**
 * Opcode 0x8d - lea Gv,M.
 *
 * Stores the effective address of the memory operand in the register
 * selected by ModRM.reg; no memory access is performed.  The register form
 * (mod == 3) is invalid and raises \#UD.  For 16/32-bit operand sizes the
 * address is truncated to the operand width before storing.
 */
FNIEMOP_DEF(iemOp_lea_Gv_M)
{
    IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint16_t, u16Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc); /* truncate address to 16 bits */
            IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_LOCAL(uint32_t, u32Cast);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc); /* truncate address to 32 bits */
            IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_7);
}
3380
3381
/**
 * Opcode 0x8e - mov Sw,Ev (load segment register).
 *
 * The access is always word sized regardless of operand-size prefixes.
 * CS cannot be a destination, and reg values above GS raise \#UD.  The
 * actual load is deferred to iemCImpl_load_SReg.
 */
FNIEMOP_DEF(iemOp_mov_Sw_Ev)
{
    IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");

    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * The practical operand size is 16-bit.
     */
#if 0 /* not necessary */
    pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
#endif

    /*
     * Check that the destination register exists and can be used with this
     * instruction. The REX.R prefix is ignored.
     */
    uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
    if (   iSegReg == X86_SREG_CS
        || iSegReg > X86_SREG_GS)
        return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(2, 0);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading the register from memory. The access is word sized
         * regardless of operand size prefixes.
         */
        IEM_MC_BEGIN(2, 1);
        IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
        IEM_MC_ARG(uint16_t, u16Value, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3436
3437
/**
 * Opcode 0x8f /0 - pop Ev.
 *
 * Pops a value off the stack and stores it to the r/m operand.  The memory
 * form is implemented interpreter-style below (not as an MC block) because
 * RSP must be incremented before the effective address is calculated.
 */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignorning it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    PCPUMCTX pCtx = IEM_GET_CTX(pVCpu);
    RTGCPTR GCPtrEff;
    VBOXSTRICTRC rcStrict;
    /* The last argument (2/4/8) is the stack-pointer bias applied during the
       effective address calculation, i.e. the size popped. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl. */
    /* Pop into a temporary RSP copy first; the real RSP and RIP are only
       committed once both the pop and the memory store have succeeded. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pCtx->rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pCtx->rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
3532
3533
3534/** Opcode 0x8f. */
3535FNIEMOP_DEF(iemOp_Grp1A_xop)
3536{
3537 /*
3538 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
3539 * three byte VEX prefix, except that the mmmmm field cannot have the values
3540 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
3541 */
3542 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3543 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
3544 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
3545
3546 IEMOP_MNEMONIC(xop, "xop");
3547 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
3548 {
3549 /** @todo Test when exctly the XOP conformance checks kick in during
3550 * instruction decoding and fetching (using \#PF). */
3551 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
3552 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
3553 if ( ( pVCpu->iem.s.fPrefixes
3554 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
3555 == 0)
3556 {
3557 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
3558 if (bXop2 & 0x80 /* VEX.W */)
3559 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
3560 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
3561 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
3562 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
3563 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
3564 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
3565 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
3566
3567 /** @todo XOP: Just use new tables and decoders. */
3568 switch (bRm & 0x1f)
3569 {
3570 case 8: /* xop opcode map 8. */
3571 IEMOP_BITCH_ABOUT_STUB();
3572 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3573
3574 case 9: /* xop opcode map 9. */
3575 IEMOP_BITCH_ABOUT_STUB();
3576 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3577
3578 case 10: /* xop opcode map 10. */
3579 IEMOP_BITCH_ABOUT_STUB();
3580 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
3581
3582 default:
3583 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
3584 return IEMOP_RAISE_INVALID_OPCODE();
3585 }
3586 }
3587 else
3588 Log(("XOP: Invalid prefix mix!\n"));
3589 }
3590 else
3591 Log(("XOP: XOP support disabled!\n"));
3592 return IEMOP_RAISE_INVALID_OPCODE();
3593}
3594
3595
3596/**
3597 * Common 'xchg reg,rAX' helper.
3598 */
3599FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
3600{
3601 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3602
3603 iReg |= pVCpu->iem.s.uRexB;
3604 switch (pVCpu->iem.s.enmEffOpSize)
3605 {
3606 case IEMMODE_16BIT:
3607 IEM_MC_BEGIN(0, 2);
3608 IEM_MC_LOCAL(uint16_t, u16Tmp1);
3609 IEM_MC_LOCAL(uint16_t, u16Tmp2);
3610 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
3611 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
3612 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
3613 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
3614 IEM_MC_ADVANCE_RIP();
3615 IEM_MC_END();
3616 return VINF_SUCCESS;
3617
3618 case IEMMODE_32BIT:
3619 IEM_MC_BEGIN(0, 2);
3620 IEM_MC_LOCAL(uint32_t, u32Tmp1);
3621 IEM_MC_LOCAL(uint32_t, u32Tmp2);
3622 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
3623 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
3624 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
3625 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
3626 IEM_MC_ADVANCE_RIP();
3627 IEM_MC_END();
3628 return VINF_SUCCESS;
3629
3630 case IEMMODE_64BIT:
3631 IEM_MC_BEGIN(0, 2);
3632 IEM_MC_LOCAL(uint64_t, u64Tmp1);
3633 IEM_MC_LOCAL(uint64_t, u64Tmp2);
3634 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
3635 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
3636 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
3637 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
3638 IEM_MC_ADVANCE_RIP();
3639 IEM_MC_END();
3640 return VINF_SUCCESS;
3641
3642 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3643 }
3644}
3645
3646
3647/** Opcode 0x90. */
3648FNIEMOP_DEF(iemOp_nop)
3649{
3650 /* R8/R8D and RAX/EAX can be exchanged. */
3651 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
3652 {
3653 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
3654 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
3655 }
3656
3657 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
3658 IEMOP_MNEMONIC(pause, "pause");
3659 else
3660 IEMOP_MNEMONIC(nop, "nop");
3661 IEM_MC_BEGIN(0, 0);
3662 IEM_MC_ADVANCE_RIP();
3663 IEM_MC_END();
3664 return VINF_SUCCESS;
3665}
3666
3667
/**
 * Opcode 0x91 - xchg rCX,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
{
    IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
}
3674
3675
/**
 * Opcode 0x92 - xchg rDX,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
{
    IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
}
3682
3683
/**
 * Opcode 0x93 - xchg rBX,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
{
    IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
}
3690
3691
3692/** Opcode 0x94. */
3693FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
3694{
3695 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
3696 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
3697}
3698
3699
/**
 * Opcode 0x95 - xchg rBP,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
{
    IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
}
3706
3707
/**
 * Opcode 0x96 - xchg rSI,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
{
    IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
}
3714
3715
/**
 * Opcode 0x97 - xchg rDI,rAX.
 * Forwards to the common helper, which folds in REX.B.
 */
FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
{
    IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
    return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
}
3722
3723
/**
 * Opcode 0x98 - cbw / cwde / cdqe.
 *
 * Sign-extends the lower half of rAX into the full operand-size width:
 * AL->AX (cbw), AX->EAX (cwde) or EAX->RAX (cdqe), implemented by testing
 * the source's sign bit and OR'ing/AND'ing the upper half accordingly.
 */
FNIEMOP_DEF(iemOp_cbw)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cbw, "cbw");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
                IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cwde, "cwde");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cdqe, "cdqe");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
            } IEM_MC_ELSE() {
                IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3769
3770
/**
 * Opcode 0x99 - cwd / cdq / cqo.
 *
 * Sign-extends rAX into rDX: DX gets 0xffff or 0 depending on AX's sign bit
 * (cwd), similarly EDX from EAX (cdq) and RDX from RAX (cqo).
 */
FNIEMOP_DEF(iemOp_cwd)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEMOP_MNEMONIC(cwd, "cwd");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEMOP_MNEMONIC(cdq, "cdq");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEMOP_MNEMONIC(cqo, "cqo");
            IEM_MC_BEGIN(0, 1);
            IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
            } IEM_MC_ELSE() {
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
            } IEM_MC_ENDIF();
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
3816
3817
/**
 * Opcode 0x9a - call Ap (far call with absolute pointer).
 *
 * Invalid in 64-bit mode.  Decodes the 16- or 32-bit offset (per operand
 * size) followed by the 16-bit selector and defers to iemCImpl_callf.
 */
FNIEMOP_DEF(iemOp_call_Ap)
{
    IEMOP_MNEMONIC(call_Ap, "call Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
3834
3835
/**
 * Opcode 0x9b - wait (aka fwait).
 *
 * Only checks for pending FPU exceptions / device-not-available; no other
 * state is changed.
 */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3849
3850
/**
 * Opcode 0x9c - pushf Fv.
 * Defers to iemCImpl_pushf; operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_pushf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
}
3858
3859
/**
 * Opcode 0x9d - popf Fv.
 * Defers to iemCImpl_popf; operand size defaults to 64-bit in long mode.
 */
FNIEMOP_DEF(iemOp_popf_Fv)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
}
3867
3868
/**
 * Opcode 0x9e - sahf.
 *
 * Loads SF, ZF, AF, PF and CF from AH into EFLAGS, leaving all other flag
 * bits untouched.  In 64-bit mode this requires the LahfSahf CPUID feature,
 * otherwise it raises \#UD.
 */
FNIEMOP_DEF(iemOp_sahf)
{
    IEMOP_MNEMONIC(sahf, "sahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint32_t, u32Flags);
    IEM_MC_LOCAL(uint32_t, EFlags);
    IEM_MC_FETCH_EFLAGS(EFlags);
    IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
    /* Keep only the five arithmetic flags from AH, clear the low byte of
       EFLAGS, force the reserved always-one bit, and merge. */
    IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
    IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
    IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
    IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
    IEM_MC_COMMIT_EFLAGS(EFlags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3891
3892
/**
 * Opcode 0x9f - lahf.
 *
 * Stores the low byte of EFLAGS in AH.  In 64-bit mode this requires the
 * LahfSahf CPUID feature, otherwise it raises \#UD.
 */
FNIEMOP_DEF(iemOp_lahf)
{
    IEMOP_MNEMONIC(lahf, "lahf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (   pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
        return IEMOP_RAISE_INVALID_OPCODE();
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint8_t, u8Flags);
    IEM_MC_FETCH_EFLAGS_U8(u8Flags);
    IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3909
3910
3911/**
3912 * Macro used by iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
3913 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend of lock
3914 * prefixes. Will return on failures.
3915 * @param a_GCPtrMemOff The variable to store the offset in.
3916 */
3917#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
3918 do \
3919 { \
3920 switch (pVCpu->iem.s.enmEffAddrMode) \
3921 { \
3922 case IEMMODE_16BIT: \
3923 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
3924 break; \
3925 case IEMMODE_32BIT: \
3926 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
3927 break; \
3928 case IEMMODE_64BIT: \
3929 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
3930 break; \
3931 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
3932 } \
3933 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
3934 } while (0)
3935
3936/** Opcode 0xa0. */
3937FNIEMOP_DEF(iemOp_mov_Al_Ob)
3938{
3939 /*
3940 * Get the offset and fend of lock prefixes.
3941 */
3942 RTGCPTR GCPtrMemOff;
3943 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
3944
3945 /*
3946 * Fetch AL.
3947 */
3948 IEM_MC_BEGIN(0,1);
3949 IEM_MC_LOCAL(uint8_t, u8Tmp);
3950 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
3951 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
3952 IEM_MC_ADVANCE_RIP();
3953 IEM_MC_END();
3954 return VINF_SUCCESS;
3955}
3956
3957
/**
 * Opcode 0xa1 - mov rAX,Ov.
 * Loads AX/EAX/RAX (per effective operand size) from the direct memory
 * offset (moffs) encoded in the opcode.
 */
FNIEMOP_DEF(iemOp_mov_rAX_Ov)
{
    /*
     * Get the offset and fend of lock prefixes.
     */
    IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
    RTGCPTR GCPtrMemOff;
    IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);

    /*
     * Fetch rAX.
     */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint16_t, u16Tmp);
            IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint32_t, u32Tmp);
            IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,1);
            IEM_MC_LOCAL(uint64_t, u64Tmp);
            IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
            IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
4003
4004
4005/** Opcode 0xa2. */
4006FNIEMOP_DEF(iemOp_mov_Ob_AL)
4007{
4008 /*
4009 * Get the offset and fend of lock prefixes.
4010 */
4011 RTGCPTR GCPtrMemOff;
4012 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4013
4014 /*
4015 * Store AL.
4016 */
4017 IEM_MC_BEGIN(0,1);
4018 IEM_MC_LOCAL(uint8_t, u8Tmp);
4019 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4020 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4021 IEM_MC_ADVANCE_RIP();
4022 IEM_MC_END();
4023 return VINF_SUCCESS;
4024}
4025
4026
4027/** Opcode 0xa3. */
4028FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4029{
4030 /*
4031 * Get the offset and fend of lock prefixes.
4032 */
4033 RTGCPTR GCPtrMemOff;
4034 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4035
4036 /*
4037 * Store rAX.
4038 */
4039 switch (pVCpu->iem.s.enmEffOpSize)
4040 {
4041 case IEMMODE_16BIT:
4042 IEM_MC_BEGIN(0,1);
4043 IEM_MC_LOCAL(uint16_t, u16Tmp);
4044 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4045 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4046 IEM_MC_ADVANCE_RIP();
4047 IEM_MC_END();
4048 return VINF_SUCCESS;
4049
4050 case IEMMODE_32BIT:
4051 IEM_MC_BEGIN(0,1);
4052 IEM_MC_LOCAL(uint32_t, u32Tmp);
4053 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4054 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4055 IEM_MC_ADVANCE_RIP();
4056 IEM_MC_END();
4057 return VINF_SUCCESS;
4058
4059 case IEMMODE_64BIT:
4060 IEM_MC_BEGIN(0,1);
4061 IEM_MC_LOCAL(uint64_t, u64Tmp);
4062 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4063 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4064 IEM_MC_ADVANCE_RIP();
4065 IEM_MC_END();
4066 return VINF_SUCCESS;
4067
4068 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4069 }
4070}
4071
/**
 * Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 *
 * Emits one non-rep movs MC block: loads from DS(or segment override):[xSI],
 * stores to ES:[xDI], then adds or subtracts ValBits/8 to both index
 * registers depending on EFLAGS.DF.
 * @param ValBits   Operand width in bits (8/16/32/64).
 * @param AddrBits  Address width in bits (16/32/64).
 */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4090
/**
 * Opcode 0xa4 - movsb Xb,Yb.
 *
 * Byte string move.  With a REP/REPNZ prefix the whole loop is deferred to
 * the C implementation (one variant per address size); otherwise a single
 * iteration is emitted via IEM_MOVS_CASE.
 */
FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");

    /*
     * Sharing case implementation with movs[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4124
4125
4126/** Opcode 0xa5. */
4127FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
4128{
4129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4130
4131 /*
4132 * Use the C implementation if a repeat prefix is encountered.
4133 */
4134 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4135 {
4136 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
4137 switch (pVCpu->iem.s.enmEffOpSize)
4138 {
4139 case IEMMODE_16BIT:
4140 switch (pVCpu->iem.s.enmEffAddrMode)
4141 {
4142 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
4143 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
4144 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
4145 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4146 }
4147 break;
4148 case IEMMODE_32BIT:
4149 switch (pVCpu->iem.s.enmEffAddrMode)
4150 {
4151 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
4152 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
4153 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
4154 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4155 }
4156 case IEMMODE_64BIT:
4157 switch (pVCpu->iem.s.enmEffAddrMode)
4158 {
4159 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
4160 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
4161 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
4162 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4163 }
4164 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4165 }
4166 }
4167 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
4168
4169 /*
4170 * Annoying double switch here.
4171 * Using ugly macro for implementing the cases, sharing it with movsb.
4172 */
4173 switch (pVCpu->iem.s.enmEffOpSize)
4174 {
4175 case IEMMODE_16BIT:
4176 switch (pVCpu->iem.s.enmEffAddrMode)
4177 {
4178 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
4179 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
4180 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
4181 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4182 }
4183 break;
4184
4185 case IEMMODE_32BIT:
4186 switch (pVCpu->iem.s.enmEffAddrMode)
4187 {
4188 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
4189 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
4190 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
4191 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4192 }
4193 break;
4194
4195 case IEMMODE_64BIT:
4196 switch (pVCpu->iem.s.enmEffAddrMode)
4197 {
4198 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4199 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
4200 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
4201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4202 }
4203 break;
4204 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4205 }
4206 return VINF_SUCCESS;
4207}
4208
4209#undef IEM_MOVS_CASE
4210
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 *
 * Emits the non-repeating CMPS body for one value-size / address-size
 * combination: fetches the first operand from [iEffSeg:xSI] and the second
 * from [ES:xDI], calls the cmp assembly helper (which only updates EFLAGS),
 * then steps xSI and xDI forward or backward by the operand size according
 * to EFLAGS.DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue2, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); /* the cmp helper takes a pointer to the 1st operand */ \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/** Opcode 0xa6. */
FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * REPE/REPZ (0xf3) and REPNE/REPNZ (0xf2) have different termination
     * conditions and thus separate C helpers, selected by the effective
     * address size.
     */
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
    {
        IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
    {
        IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");

    /*
     * Sharing case implementation with cmps[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;

}
4283
4284
4285/** Opcode 0xa7. */
4286FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
4287{
4288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4289
4290 /*
4291 * Use the C implementation if a repeat prefix is encountered.
4292 */
4293 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4294 {
4295 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
4296 switch (pVCpu->iem.s.enmEffOpSize)
4297 {
4298 case IEMMODE_16BIT:
4299 switch (pVCpu->iem.s.enmEffAddrMode)
4300 {
4301 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4302 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4303 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4304 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4305 }
4306 break;
4307 case IEMMODE_32BIT:
4308 switch (pVCpu->iem.s.enmEffAddrMode)
4309 {
4310 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4311 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4312 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4313 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4314 }
4315 case IEMMODE_64BIT:
4316 switch (pVCpu->iem.s.enmEffAddrMode)
4317 {
4318 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
4319 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4320 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4321 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4322 }
4323 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4324 }
4325 }
4326
4327 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4328 {
4329 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
4330 switch (pVCpu->iem.s.enmEffOpSize)
4331 {
4332 case IEMMODE_16BIT:
4333 switch (pVCpu->iem.s.enmEffAddrMode)
4334 {
4335 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
4336 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
4337 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
4338 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4339 }
4340 break;
4341 case IEMMODE_32BIT:
4342 switch (pVCpu->iem.s.enmEffAddrMode)
4343 {
4344 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
4345 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
4346 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
4347 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4348 }
4349 case IEMMODE_64BIT:
4350 switch (pVCpu->iem.s.enmEffAddrMode)
4351 {
4352 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
4353 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
4354 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
4355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4356 }
4357 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4358 }
4359 }
4360
4361 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
4362
4363 /*
4364 * Annoying double switch here.
4365 * Using ugly macro for implementing the cases, sharing it with cmpsb.
4366 */
4367 switch (pVCpu->iem.s.enmEffOpSize)
4368 {
4369 case IEMMODE_16BIT:
4370 switch (pVCpu->iem.s.enmEffAddrMode)
4371 {
4372 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
4373 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
4374 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
4375 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4376 }
4377 break;
4378
4379 case IEMMODE_32BIT:
4380 switch (pVCpu->iem.s.enmEffAddrMode)
4381 {
4382 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
4383 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
4384 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
4385 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4386 }
4387 break;
4388
4389 case IEMMODE_64BIT:
4390 switch (pVCpu->iem.s.enmEffAddrMode)
4391 {
4392 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4393 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
4394 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
4395 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4396 }
4397 break;
4398 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4399 }
4400 return VINF_SUCCESS;
4401
4402}
4403
4404#undef IEM_CMPS_CASE
4405
/** Opcode 0xa8. */
FNIEMOP_DEF(iemOp_test_AL_Ib)
{
    IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
}
4413
4414
/** Opcode 0xa9. */
FNIEMOP_DEF(iemOp_test_eAX_Iz)
{
    IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF); /* AF is architecturally undefined after TEST */
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
}
4422
4423
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX.
 *
 * Emits the non-repeating STOS body for one value-size / address-size
 * combination: stores AL/AX/EAX/RAX at [ES:xDI], then steps xDI forward or
 * backward by the operand size according to EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \

/** Opcode 0xaa. */
FNIEMOP_DEF(iemOp_stosb_Yb_AL)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * Note that F2 (REPNZ) is treated exactly like F3 (REP) here - STOS has
     * no ZF termination condition, so both bits select the same helper.
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");

    /*
     * Sharing case implementation with stos[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4473
4474
4475/** Opcode 0xab. */
4476FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
4477{
4478 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4479
4480 /*
4481 * Use the C implementation if a repeat prefix is encountered.
4482 */
4483 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4484 {
4485 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
4486 switch (pVCpu->iem.s.enmEffOpSize)
4487 {
4488 case IEMMODE_16BIT:
4489 switch (pVCpu->iem.s.enmEffAddrMode)
4490 {
4491 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
4492 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
4493 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
4494 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4495 }
4496 break;
4497 case IEMMODE_32BIT:
4498 switch (pVCpu->iem.s.enmEffAddrMode)
4499 {
4500 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
4501 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
4502 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
4503 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4504 }
4505 case IEMMODE_64BIT:
4506 switch (pVCpu->iem.s.enmEffAddrMode)
4507 {
4508 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
4509 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
4510 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
4511 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4512 }
4513 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4514 }
4515 }
4516 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
4517
4518 /*
4519 * Annoying double switch here.
4520 * Using ugly macro for implementing the cases, sharing it with stosb.
4521 */
4522 switch (pVCpu->iem.s.enmEffOpSize)
4523 {
4524 case IEMMODE_16BIT:
4525 switch (pVCpu->iem.s.enmEffAddrMode)
4526 {
4527 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
4528 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
4529 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
4530 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4531 }
4532 break;
4533
4534 case IEMMODE_32BIT:
4535 switch (pVCpu->iem.s.enmEffAddrMode)
4536 {
4537 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
4538 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
4539 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
4540 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4541 }
4542 break;
4543
4544 case IEMMODE_64BIT:
4545 switch (pVCpu->iem.s.enmEffAddrMode)
4546 {
4547 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4548 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
4549 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
4550 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4551 }
4552 break;
4553 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4554 }
4555 return VINF_SUCCESS;
4556}
4557
4558#undef IEM_STOS_CASE
4559
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv.
 *
 * Emits the non-repeating LODS body for one value-size / address-size
 * combination: loads from [iEffSeg:xSI] into AL/AX/EAX/RAX, then steps xSI
 * forward or backward by the operand size according to EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4575
/** Opcode 0xac. */
FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /*
     * Use the C implementation if a repeat prefix is encountered.
     * (F2 is treated like F3 here - LODS has no ZF termination condition.)
     */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");

    /*
     * Sharing case implementation with lods[wdq] below.
     */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
        case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
        case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    return VINF_SUCCESS;
}
4609
4610
4611/** Opcode 0xad. */
4612FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
4613{
4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4615
4616 /*
4617 * Use the C implementation if a repeat prefix is encountered.
4618 */
4619 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4620 {
4621 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
4622 switch (pVCpu->iem.s.enmEffOpSize)
4623 {
4624 case IEMMODE_16BIT:
4625 switch (pVCpu->iem.s.enmEffAddrMode)
4626 {
4627 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
4628 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
4629 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
4630 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4631 }
4632 break;
4633 case IEMMODE_32BIT:
4634 switch (pVCpu->iem.s.enmEffAddrMode)
4635 {
4636 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
4637 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
4638 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
4639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4640 }
4641 case IEMMODE_64BIT:
4642 switch (pVCpu->iem.s.enmEffAddrMode)
4643 {
4644 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
4645 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
4646 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
4647 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4648 }
4649 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4650 }
4651 }
4652 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
4653
4654 /*
4655 * Annoying double switch here.
4656 * Using ugly macro for implementing the cases, sharing it with lodsb.
4657 */
4658 switch (pVCpu->iem.s.enmEffOpSize)
4659 {
4660 case IEMMODE_16BIT:
4661 switch (pVCpu->iem.s.enmEffAddrMode)
4662 {
4663 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
4664 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
4665 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
4666 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4667 }
4668 break;
4669
4670 case IEMMODE_32BIT:
4671 switch (pVCpu->iem.s.enmEffAddrMode)
4672 {
4673 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
4674 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
4675 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
4676 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4677 }
4678 break;
4679
4680 case IEMMODE_64BIT:
4681 switch (pVCpu->iem.s.enmEffAddrMode)
4682 {
4683 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4684 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
4685 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
4686 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4687 }
4688 break;
4689 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4690 }
4691 return VINF_SUCCESS;
4692}
4693
4694#undef IEM_LODS_CASE
4695
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv.
 *
 * Emits the non-repeating SCAS body for one value-size / address-size
 * combination: compares AL/AX/EAX/RAX against [ES:xDI] via the cmp assembly
 * helper (EFLAGS update only, rAX is not modified), then steps xDI forward
 * or backward by the operand size according to EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax, 0); \
        IEM_MC_ARG(uint##ValBits##_t, uValue, 1); \
        IEM_MC_ARG(uint32_t *, pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR, uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4717
4718/** Opcode 0xae. */
4719FNIEMOP_DEF(iemOp_scasb_AL_Xb)
4720{
4721 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4722
4723 /*
4724 * Use the C implementation if a repeat prefix is encountered.
4725 */
4726 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4727 {
4728 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
4729 switch (pVCpu->iem.s.enmEffAddrMode)
4730 {
4731 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
4732 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
4733 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
4734 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4735 }
4736 }
4737 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4738 {
4739 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
4740 switch (pVCpu->iem.s.enmEffAddrMode)
4741 {
4742 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
4743 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
4744 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
4745 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4746 }
4747 }
4748 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
4749
4750 /*
4751 * Sharing case implementation with stos[wdq] below.
4752 */
4753 switch (pVCpu->iem.s.enmEffAddrMode)
4754 {
4755 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
4756 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
4757 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
4758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4759 }
4760 return VINF_SUCCESS;
4761}
4762
4763
4764/** Opcode 0xaf. */
4765FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
4766{
4767 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4768
4769 /*
4770 * Use the C implementation if a repeat prefix is encountered.
4771 */
4772 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
4773 {
4774 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
4775 switch (pVCpu->iem.s.enmEffOpSize)
4776 {
4777 case IEMMODE_16BIT:
4778 switch (pVCpu->iem.s.enmEffAddrMode)
4779 {
4780 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
4781 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
4782 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
4783 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4784 }
4785 break;
4786 case IEMMODE_32BIT:
4787 switch (pVCpu->iem.s.enmEffAddrMode)
4788 {
4789 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
4790 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
4791 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
4792 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4793 }
4794 case IEMMODE_64BIT:
4795 switch (pVCpu->iem.s.enmEffAddrMode)
4796 {
4797 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
4798 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
4799 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
4800 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4801 }
4802 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4803 }
4804 }
4805 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
4806 {
4807 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
4808 switch (pVCpu->iem.s.enmEffOpSize)
4809 {
4810 case IEMMODE_16BIT:
4811 switch (pVCpu->iem.s.enmEffAddrMode)
4812 {
4813 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
4814 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
4815 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
4816 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4817 }
4818 break;
4819 case IEMMODE_32BIT:
4820 switch (pVCpu->iem.s.enmEffAddrMode)
4821 {
4822 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
4823 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
4824 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
4825 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4826 }
4827 case IEMMODE_64BIT:
4828 switch (pVCpu->iem.s.enmEffAddrMode)
4829 {
4830 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
4831 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
4832 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
4833 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4834 }
4835 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4836 }
4837 }
4838 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
4839
4840 /*
4841 * Annoying double switch here.
4842 * Using ugly macro for implementing the cases, sharing it with scasb.
4843 */
4844 switch (pVCpu->iem.s.enmEffOpSize)
4845 {
4846 case IEMMODE_16BIT:
4847 switch (pVCpu->iem.s.enmEffAddrMode)
4848 {
4849 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
4850 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
4851 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
4852 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4853 }
4854 break;
4855
4856 case IEMMODE_32BIT:
4857 switch (pVCpu->iem.s.enmEffAddrMode)
4858 {
4859 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
4860 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
4861 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
4862 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4863 }
4864 break;
4865
4866 case IEMMODE_64BIT:
4867 switch (pVCpu->iem.s.enmEffAddrMode)
4868 {
4869 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
4870 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
4871 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
4872 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4873 }
4874 break;
4875 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4876 }
4877 return VINF_SUCCESS;
4878}
4879
4880#undef IEM_SCAS_CASE
4881
4882/**
4883 * Common 'mov r8, imm8' helper.
4884 */
4885FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
4886{
4887 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
4888 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4889
4890 IEM_MC_BEGIN(0, 1);
4891 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
4892 IEM_MC_STORE_GREG_U8(iReg, u8Value);
4893 IEM_MC_ADVANCE_RIP();
4894 IEM_MC_END();
4895
4896 return VINF_SUCCESS;
4897}
4898
4899
/** Opcode 0xb0. */
FNIEMOP_DEF(iemOp_mov_AL_Ib)
{
    IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
    /* With REX.B this selects R8L instead of AL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
4906
4907
/** Opcode 0xb1. */
FNIEMOP_DEF(iemOp_CL_Ib)
{
    IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
    /* With REX.B this selects R9L instead of CL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
4914
4915
/** Opcode 0xb2. */
FNIEMOP_DEF(iemOp_DL_Ib)
{
    IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
    /* With REX.B this selects R10L instead of DL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
4922
4923
/** Opcode 0xb3. */
FNIEMOP_DEF(iemOp_BL_Ib)
{
    IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
    /* With REX.B this selects R11L instead of BL. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
4930
4931
/** Opcode 0xb4. */
FNIEMOP_DEF(iemOp_mov_AH_Ib)
{
    IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
    /* Byte register index 4 (xSP) encodes AH without a REX prefix and SPL
       with one; presumably resolved inside the U8 greg accessors. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
4938
4939
/** Opcode 0xb5. */
FNIEMOP_DEF(iemOp_CH_Ib)
{
    IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
    /* Byte register index 5 (xBP) encodes CH without a REX prefix and BPL
       with one; presumably resolved inside the U8 greg accessors. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
4946
4947
/** Opcode 0xb6. */
FNIEMOP_DEF(iemOp_DH_Ib)
{
    IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
    /* Byte register index 6 (xSI) encodes DH without a REX prefix and SIL
       with one; presumably resolved inside the U8 greg accessors. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
4954
4955
/** Opcode 0xb7. */
FNIEMOP_DEF(iemOp_BH_Ib)
{
    IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
    /* Byte register index 7 (xDI) encodes BH without a REX prefix and DIL
       with one; presumably resolved inside the U8 greg accessors. */
    return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
4962
4963
4964/**
4965 * Common 'mov regX,immX' helper.
4966 */
4967FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
4968{
4969 switch (pVCpu->iem.s.enmEffOpSize)
4970 {
4971 case IEMMODE_16BIT:
4972 {
4973 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
4974 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4975
4976 IEM_MC_BEGIN(0, 1);
4977 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
4978 IEM_MC_STORE_GREG_U16(iReg, u16Value);
4979 IEM_MC_ADVANCE_RIP();
4980 IEM_MC_END();
4981 break;
4982 }
4983
4984 case IEMMODE_32BIT:
4985 {
4986 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
4987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4988
4989 IEM_MC_BEGIN(0, 1);
4990 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
4991 IEM_MC_STORE_GREG_U32(iReg, u32Value);
4992 IEM_MC_ADVANCE_RIP();
4993 IEM_MC_END();
4994 break;
4995 }
4996 case IEMMODE_64BIT:
4997 {
4998 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
4999 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5000
5001 IEM_MC_BEGIN(0, 1);
5002 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5003 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5004 IEM_MC_ADVANCE_RIP();
5005 IEM_MC_END();
5006 break;
5007 }
5008 }
5009
5010 return VINF_SUCCESS;
5011}
5012
5013
/** Opcode 0xb8. */
FNIEMOP_DEF(iemOp_eAX_Iv)
{
    IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
    /* With REX.B this selects r8 instead of rAX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
}
5020
5021
/** Opcode 0xb9. */
FNIEMOP_DEF(iemOp_eCX_Iv)
{
    IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
    /* With REX.B this selects r9 instead of rCX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
}
5028
5029
/** Opcode 0xba. */
FNIEMOP_DEF(iemOp_eDX_Iv)
{
    IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
    /* With REX.B this selects r10 instead of rDX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
}
5036
5037
/** Opcode 0xbb. */
FNIEMOP_DEF(iemOp_eBX_Iv)
{
    IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
    /* With REX.B this selects r11 instead of rBX. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
}
5044
5045
/** Opcode 0xbc. */
FNIEMOP_DEF(iemOp_eSP_Iv)
{
    IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
    /* With REX.B this selects r12 instead of rSP. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
}
5052
5053
/** Opcode 0xbd. */
FNIEMOP_DEF(iemOp_eBP_Iv)
{
    IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
    /* With REX.B this selects r13 instead of rBP. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
}
5060
5061
/** Opcode 0xbe. */
FNIEMOP_DEF(iemOp_eSI_Iv)
{
    IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
    /* With REX.B this selects r14 instead of rSI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
}
5068
5069
/** Opcode 0xbf. */
FNIEMOP_DEF(iemOp_eDI_Iv)
{
    IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
    /* With REX.B this selects r15 instead of rDI. */
    return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
}
5076
5077
/** Opcode 0xc0. */
FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
{
    IEMOP_HLP_MIN_186(); /* Group 2 with an imm8 shift count requires a 186 or later. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModRM reg field selects which shift/rotate operation to perform. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE(); /* /6 is unassigned in group 2 */
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* The effective address (ModRM + displacement) is decoded before the
           imm8 shift count, matching their order in the instruction stream;
           the trailing 1 is presumably the byte count of the immediate that
           still follows - confirm against IEM_MC_CALC_RM_EFF_ADDR. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEM_MC_ASSIGN(cShiftArg, cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5137
5138
/**
 * Opcode 0xc1 - Group 2: rotate/shift Ev by an immediate byte count.
 *
 * The ModR/M reg field selects the operation (rol/ror/rcl/rcr/shl/shr/sar);
 * /6 is unassigned and raises \#UD.  Dispatches to the 16/32/64-bit worker
 * selected by the effective operand size, for register and memory operands.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
{
    IEMOP_HLP_MIN_186(); /* the Ib-count forms were introduced with the 80186 */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verification logic that OF and AF may end up undefined here. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,           0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg, cShift, 1);
                IEM_MC_ARG(uint32_t *,      pEFlags,           2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,              pu16Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                /* 1 = one immediate byte follows the ModR/M bytes (for RIP-relative addressing). */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,              pu32Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,              pu64Dst,    0);
                IEM_MC_ARG(uint8_t,                 cShiftArg,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(            pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
                IEM_MC_ASSIGN(cShiftArg, cShift);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5278
5279
/**
 * Opcode 0xc2 - near return, popping Iw extra bytes off the stack.
 *
 * Defers to iemCImpl_retn with the effective operand size and the
 * immediate byte count.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Near returns default to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5289
5290
/**
 * Opcode 0xc3 - plain near return.
 *
 * Same worker as 0xc2 but with a zero byte-pop count.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    /* Near returns default to 64-bit operand size in long mode. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
5299
5300
5301/** Opcode 0xc4. */
5302FNIEMOP_DEF(iemOp_les_Gv_Mp_vex2)
5303{
5304 /* The LES instruction is invalid 64-bit mode. In legacy and
5305 compatability mode it is invalid with MOD=3.
5306 The use as a VEX prefix is made possible by assigning the inverted
5307 REX.R to the top MOD bit, and the top bit in the inverted register
5308 specifier to the bottom MOD bit, thereby effectively limiting 32-bit
5309 to accessing registers 0..7 in this VEX form. */
5310 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5311 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
5312 || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
5313 {
5314 IEMOP_MNEMONIC(vex2_prefix, "vex2");
5315 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5316 {
5317 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5318 if ( ( pVCpu->iem.s.fPrefixes
5319 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5320 == 0)
5321 {
5322 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5323 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5324 pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
5325 pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
5326 pVCpu->iem.s.idxPrefix = bRm & 0x3;
5327
5328 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5329 }
5330
5331 Log(("VEX2: Invalid prefix mix!\n"));
5332 }
5333 else
5334 Log(("VEX2: AVX support disabled!\n"));
5335
5336 /* @todo does intel completely decode the sequence with SIB/disp before \#UD? */
5337 return IEMOP_RAISE_INVALID_OPCODE();
5338 }
5339 IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
5340 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
5341}
5342
5343
5344/** Opcode 0xc5. */
5345FNIEMOP_DEF(iemOp_lds_Gv_Mp_vex3)
5346{
5347 /* The LDS instruction is invalid 64-bit mode. In legacy and
5348 compatability mode it is invalid with MOD=3.
5349 The use as a VEX prefix is made possible by assigning the inverted
5350 REX.R and REX.X to the two MOD bits, since the REX bits are ignored
5351 outside of 64-bit mode. VEX is not available in real or v86 mode. */
5352 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
5353 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
5354 {
5355 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
5356 {
5357 IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
5358 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
5359 }
5360 IEMOP_HLP_NO_REAL_OR_V86_MODE();
5361 }
5362
5363 IEMOP_MNEMONIC(vex3_prefix, "vex3");
5364 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
5365 {
5366 /** @todo Test when exctly the VEX conformance checks kick in during
5367 * instruction decoding and fetching (using \#PF). */
5368 uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
5369 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
5370 if ( ( pVCpu->iem.s.fPrefixes
5371 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
5372 == 0)
5373 {
5374 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
5375 if (bVex2 & 0x80 /* VEX.W */)
5376 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
5377 pVCpu->iem.s.uRexReg = ~bRm >> (7 - 3);
5378 pVCpu->iem.s.uRexIndex = ~bRm >> (6 - 3);
5379 pVCpu->iem.s.uRexB = ~bRm >> (5 - 3);
5380 pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
5381 pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
5382 pVCpu->iem.s.idxPrefix = bVex2 & 0x3;
5383
5384 switch (bRm & 0x1f)
5385 {
5386 case 1: /* 0x0f lead opcode byte. */
5387 return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
5388
5389 case 2: /* 0x0f 0x38 lead opcode bytes. */
5390 /** @todo VEX: Just use new tables and decoders. */
5391 IEMOP_BITCH_ABOUT_STUB();
5392 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5393
5394 case 3: /* 0x0f 0x3a lead opcode bytes. */
5395 /** @todo VEX: Just use new tables and decoders. */
5396 IEMOP_BITCH_ABOUT_STUB();
5397 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
5398
5399 default:
5400 Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
5401 return IEMOP_RAISE_INVALID_OPCODE();
5402 }
5403 }
5404 else
5405 Log(("VEX3: Invalid prefix mix!\n"));
5406 }
5407 else
5408 Log(("VEX3: AVX support disabled!\n"));
5409 return IEMOP_RAISE_INVALID_OPCODE();
5410}
5411
5412
/**
 * Opcode 0xc6 - Group 11: mov Eb,Ib (only /0 is defined, the rest raise \#UD).
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        /* 1 = one immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5445
5446
/**
 * Opcode 0xc7 - Group 11: mov Ev,Iz (only /0 is defined, the rest raise \#UD).
 *
 * The 64-bit form uses a sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* Iz is a 32-bit immediate sign-extended to 64 bits here. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* 2 = two immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                /* Still 4 immediate bytes: the 64-bit form sign-extends a dword. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5532
5533
5534
5535
/**
 * Opcode 0xc8 - ENTER Iw,Ib: create a stack frame of Iw bytes with Ib
 * nesting levels.  Defers to iemCImpl_enter.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame;        IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t  u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
5547
5548
/**
 * Opcode 0xc9 - LEAVE: tear down the current stack frame.
 * Defers to iemCImpl_leave.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
5558
5559
/**
 * Opcode 0xca - far return, popping Iw extra bytes off the stack.
 * Defers to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
5569
5570
/**
 * Opcode 0xcb - plain far return (zero byte-pop count).
 * Defers to iemCImpl_retf.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
5579
5580
5581/** Opcode 0xcc. */
5582FNIEMOP_DEF(iemOp_int_3)
5583{
5584 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5585 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, true /*fIsBpInstr*/);
5586}
5587
5588
/**
 * Opcode 0xcd - INT Ib software interrupt: raise the vector given by the
 * immediate byte.  Defers to iemCImpl_int.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Even "int 3" via 0xcd is not the dedicated breakpoint instruction. */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, false /*fIsBpInstr*/);
}
5596
5597
/**
 * Opcode 0xce - INTO: overflow trap (vector \#OF); invalid in 64-bit mode.
 *
 * NOTE(review): this unconditionally calls iemCImpl_int with X86_XCPT_OF;
 * the EFLAGS.OF condition appears to be handled inside the worker — confirm.
 * Also unlike the sibling int handlers there is no
 * IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() here — verify intentional.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t,   u8Int,      /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(bool,      fIsBpInstr, /*=*/ false, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, fIsBpInstr);
    IEM_MC_END();
    return VINF_SUCCESS;
}
5611
5612
/**
 * Opcode 0xcf - IRET interrupt return.
 * Defers to iemCImpl_iret with the effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
5620
5621
/**
 * Opcode 0xd0 - Group 2: rotate/shift Eb by a fixed count of 1.
 *
 * The ModR/M reg field selects the operation; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verification logic that OF and AF may end up undefined here. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=*/1,   1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* 0 = no immediate bytes follow the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5677
5678
5679
/**
 * Opcode 0xd1 - Group 2: rotate/shift Ev by a fixed count of 1.
 *
 * The ModR/M reg field selects the operation; /6 raises \#UD.  Dispatches
 * on the effective operand size for register and memory operands.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    /* Tell the verification logic that OF and AF may end up undefined here. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                /* 0 = no immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
                IEM_MC_ARG_CONST(uint8_t,   cShiftArg,/*=1*/1,  1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags,    2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
5811
5812
/**
 * Opcode 0xd2 - Group 2: rotate/shift Eb by the count in CL.
 *
 * The ModR/M reg field selects the operation; /6 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    /* Tell the verification logic that OF and AF may end up undefined here. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,   pu8Dst,     0);
        IEM_MC_ARG(uint8_t,     cShiftArg,  1);
        IEM_MC_ARG(uint32_t *,  pEFlags,    2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        /* The shift count comes from CL (low byte of rCX). */
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,   pu8Dst,          0);
        IEM_MC_ARG(uint8_t,     cShiftArg,       1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

        /* 0 = no immediate bytes follow the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
5870
5871
/**
 * Opcode 0xd3 - Group 2: rotate/shift Ev by the count in CL.
 *
 * The ModR/M reg field selects the operation; /6 raises \#UD.  Dispatches
 * on the effective operand size for register and memory operands.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = &g_iemAImpl_rol; IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = &g_iemAImpl_ror; IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = &g_iemAImpl_rcl; IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = &g_iemAImpl_rcr; IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = &g_iemAImpl_shl; IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = &g_iemAImpl_shr; IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = &g_iemAImpl_sar; IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    /* Tell the verification logic that OF and AF may end up undefined here. */
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,  pu16Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                /* The shift count comes from CL (low byte of rCX). */
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,  pu32Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                /* 32-bit writes zero the upper half of the 64-bit register. */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,  pu64Dst,    0);
                IEM_MC_ARG(uint8_t,     cShiftArg,  1);
                IEM_MC_ARG(uint32_t *,  pEFlags,    2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,  pu16Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                /* 0 = no immediate bytes follow the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,  pu32Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,  pu64Dst,         0);
                IEM_MC_ARG(uint8_t,     cShiftArg,       1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX);
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6009
/**
 * Opcode 0xd4 - AAM Ib: ASCII adjust AX after multiply, with an arbitrary
 * base in the immediate byte.  A zero immediate raises \#DE before the
 * worker is invoked; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)                          /* aam 0 divides by zero -> #DE */
        return IEMOP_RAISE_DIVIDE_ERROR();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6021
6022
/**
 * Opcode 0xd5 - AAD Ib: ASCII adjust AX before division, with an arbitrary
 * base in the immediate byte; invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
6032
6033
6034/** Opcode 0xd6. */
6035FNIEMOP_DEF(iemOp_salc)
6036{
6037 IEMOP_MNEMONIC(salc, "salc");
6038 IEMOP_HLP_MIN_286(); /* (undocument at the time) */
6039 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
6040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6041 IEMOP_HLP_NO_64BIT();
6042
6043 IEM_MC_BEGIN(0, 0);
6044 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
6045 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
6046 } IEM_MC_ELSE() {
6047 IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
6048 } IEM_MC_ENDIF();
6049 IEM_MC_ADVANCE_RIP();
6050 IEM_MC_END();
6051 return VINF_SUCCESS;
6052}
6053
6054
/** Opcode 0xd7.
 * XLAT - table lookup: AL = [seg:(e/r)BX + zero-extended AL], one variant per
 * effective address size. */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* AL zero-extended to the address width. */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX);
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
6101
6102
/**
 * Common worker for FPU instructions working on ST0 and STn, and storing the
 * result in ST0.
 *
 * On stack underflow (either register empty) the standard \#IS handling in
 * IEM_MC_FPU_STACK_UNDERFLOW(0) applies, targeting ST0.
 *
 * @param bRm The ModR/M byte; the r/m field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6133
6134
/**
 * Common worker for FPU instructions working on ST0 and STn, and only affecting
 * flags.
 *
 * No register is written; only FSW is updated from the assembly helper. The
 * UINT8_MAX underflow argument means "no destination register" for the
 * underflow handling.
 *
 * @param bRm The ModR/M byte; the r/m field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6165
6166
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping when done.
 *
 * Same as iemOpHlpFpuNoStore_st0_stN, but the stack is popped after updating
 * FSW (both on the success and on the underflow path).
 *
 * @param bRm The ModR/M byte; the r/m field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6197
6198
/** Opcode 0xd8 11/0.
 * FADD ST(0),ST(i) - register form: ST(0) = ST(0) + ST(i). */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}
6205
6206
/** Opcode 0xd8 11/1.
 * FMUL ST(0),ST(i) - register form: ST(0) = ST(0) * ST(i). */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}
6213
6214
/** Opcode 0xd8 11/2.
 * FCOM ST(0),ST(i) - compare, sets C0/C2/C3 only, no store. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}
6221
6222
/** Opcode 0xd8 11/3.
 * FCOMP ST(0),ST(i) - same comparison helper as FCOM but pops afterwards. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}
6229
6230
/** Opcode 0xd8 11/4.
 * FSUB ST(0),ST(i): ST(0) = ST(0) - ST(i). */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}
6237
6238
/** Opcode 0xd8 11/5.
 * FSUBR ST(0),ST(i) - reversed operands: ST(0) = ST(i) - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}
6245
6246
/** Opcode 0xd8 11/6.
 * FDIV ST(0),ST(i): ST(0) = ST(0) / ST(i). */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}
6253
6254
/** Opcode 0xd8 11/7.
 * FDIVR ST(0),ST(i) - reversed operands: ST(0) = ST(i) / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
6261
6262
/**
 * Common worker for FPU instructions working on ST0 and an m32r, and storing
 * the result in ST0.
 *
 * The 32-bit real operand is fetched into a local and passed by reference to
 * the assembly helper; conversion to the 80-bit format happens there.
 *
 * @param bRm The ModR/M byte; encodes the memory operand.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6298
6299
/** Opcode 0xd8 !11/0.
 * FADD ST(0),m32real: ST(0) = ST(0) + m32real. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}
6306
6307
/** Opcode 0xd8 !11/1.
 * FMUL ST(0),m32real: ST(0) = ST(0) * m32real. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
6314
6315
/** Opcode 0xd8 !11/2.
 * FCOM ST(0),m32real - compare only; updates FSW (C0/C2/C3), no store.
 * Open-coded rather than using a worker since only FCOM/FCOMP take this shape. */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6348
6349
/** Opcode 0xd8 !11/3.
 * FCOMP ST(0),m32real - like FCOM m32r above, but pops ST0 after the FSW
 * update (both on success and on underflow). */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6382
6383
/** Opcode 0xd8 !11/4.
 * FSUB ST(0),m32real: ST(0) = ST(0) - m32real. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}
6390
6391
/** Opcode 0xd8 !11/5.
 * FSUBR ST(0),m32real - reversed operands: ST(0) = m32real - ST(0). */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}
6398
6399
/** Opcode 0xd8 !11/6.
 * FDIV ST(0),m32real: ST(0) = ST(0) / m32real. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}
6406
6407
/** Opcode 0xd8 !11/7.
 * FDIVR ST(0),m32real - reversed operands: ST(0) = m32real / ST(0). */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
6414
6415
/** Opcode 0xd8.
 * First x87 escape byte. Dispatches on the ModR/M reg field; mod == 3 selects
 * the ST(0),ST(i) register forms, anything else the ST(0),m32real forms. */
FNIEMOP_DEF(iemOp_EscF0)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6453
6454
/** Opcode 0xd9 /0 mem32real
 * FLD m32real - convert the 32-bit real to 80-bit and push it onto the stack.
 * Register 7 (the new top after the push) must be empty or we overflow.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* ST7 becomes the new ST0 after the push. */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6487
6488
/** Opcode 0xd9 !11/2 mem32real
 * FST m32real - store ST0 to memory as 32-bit real (no pop). On stack
 * underflow a negative QNaN is written instead, if the invalid-operation
 * exception is masked (FCW.IM). */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        /* Commit is conditional on the FSW: an unmasked exception must not store. */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6523
6524
/** Opcode 0xd9 !11/3
 * FSTP m32real - identical to FST m32real above, but pops the stack after the
 * FSW update (on both the success and the underflow path). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6559
6560
/** Opcode 0xd9 !11/4
 * FLDENV m14/28byte - load the FPU environment; 14 bytes with a 16-bit
 * operand size, 28 with 32/64-bit, so the whole job is deferred to a C
 * implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6578
6579
6580/** Opcode 0xd9 !11/5 */
6581FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
6582{
6583 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
6584 IEM_MC_BEGIN(1, 1);
6585 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
6586 IEM_MC_ARG(uint16_t, u16Fsw, 0);
6587 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
6588 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6589 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
6590 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
6591 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
6592 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
6593 IEM_MC_END();
6594 return VINF_SUCCESS;
6595}
6596
6597
/** Opcode 0xd9 !11/6
 * FNSTENV m14/28byte - store the FPU environment (no-wait form); size depends
 * on the effective operand size, so it is deferred to a C implementation. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6615
6616
/** Opcode 0xd9 !11/7
 * FNSTCW m2byte - store the current FPU control word to memory (no-wait form). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6634
6635
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 * FNOP - x87 no-operation; still raises \#NM/\#MF like other x87 insns and
 * updates FOP/FPUIP. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     * intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
6653
6654
/** Opcode 0xd9 11/0 stN
 * FLD ST(i) - push a copy of ST(i) onto the stack. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW(); /* Source register empty -> push underflow handling. */
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6682
6683
/** Opcode 0xd9 11/3 stN
 * FXCH ST(i) - exchange ST0 with ST(i); register underflow is handled in a
 * C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not. AMD
     * indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        /* NOTE(review): the result FSW is seeded with X86_FSW_C1 here; Intel's
         * SDM documents FXCH as clearing C1 - confirm this is intentional. */
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
6714
6715
/** Opcode 0xd9 11/4, 0xdd 11/2.
 * FSTP ST(i) - copy ST0 to ST(i) and pop. The iDstReg == 0 special case
 * avoids copying a register onto itself. */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw); /* No data movement needed, just pop. */
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6762
6763
/**
 * Common worker for FPU instructions working on ST0 and replaces it with the
 * result, i.e. unary operators.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0); /* Result replaces ST0. */
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6793
6794
/** Opcode 0xd9 0xe0.
 * FCHS - flip the sign of ST0. */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}
6801
6802
/** Opcode 0xd9 0xe1.
 * FABS - clear the sign of ST0. */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
6809
6810
/**
 * Common worker for FPU instructions working on ST0 and only returns FSW.
 *
 * No register is modified; only C0/C1/C2/C3 flow back via the returned FSW.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
        IEM_MC_UPDATE_FSW(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX); /* UINT8_MAX = no destination register. */
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6839
6840
/** Opcode 0xd9 0xe4.
 * FTST - compare ST0 against +0.0, flags only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}
6847
6848
/** Opcode 0xd9 0xe5.
 * FXAM - classify ST0 into C0/C2/C3 (C1 = sign), flags only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
6855
6856
/**
 * Common worker for FPU instructions pushing a constant onto the FPU stack.
 *
 * ST7 (the new top after the push) must be empty, otherwise the standard
 * push-overflow handling is applied.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(1, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6884
6885
/** Opcode 0xd9 0xe8.
 * FLD1 - push +1.0. */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}
6892
6893
/** Opcode 0xd9 0xe9.
 * FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}
6900
6901
/** Opcode 0xd9 0xea.
 * FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}
6908
/** Opcode 0xd9 0xeb.
 * FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}
6915
6916
/** Opcode 0xd9 0xec.
 * FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}
6923
/** Opcode 0xd9 0xed.
 * FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}
6930
6931
/** Opcode 0xd9 0xee.
 * FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}
6938
6939
/** Opcode 0xd9 0xf0.
 * F2XM1 - ST0 = 2^ST0 - 1 (unary, result replaces ST0). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
6946
6947
/**
 * Common worker for FPU instructions working on STn and ST0, storing the result
 * in STn, and popping the stack unless IE, DE or ZE was raised.
 *
 * Note the operand order relative to iemOpHlpFpu_st0_stN: here STn is the
 * first (destination) operand and ST0 the second.
 *
 * @param bRm The ModR/M byte (or a literal ST index for fixed-register
 * forms such as FYL2X/FPATAN); the r/m field selects STn.
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
6979
6980
/** Opcode 0xd9 0xf1.
 * FYL2X: ST1 = ST1 * log2(ST0), then pop (the literal 1 selects ST1 as the
 * destination in the worker). */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
6987
6988
/**
 * Common worker for FPU instructions working on ST0 and having two outputs, one
 * replacing ST0 and one pushed onto the stack.
 *
 * Used by FPTAN and FXTRACT. Push overflow of the second result is handled by
 * IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO on the empty-ST0 path.
 *
 * @param pfnAImpl Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(2, 1);
    IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
        IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7018
7019
/** Opcode 0xd9 0xf2. */
FNIEMOP_DEF(iemOp_fptan)
{
    /* FPTAN: replaces ST0 with tan(ST0) and pushes 1.0. */
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}
7026
7027
/** Opcode 0xd9 0xf3. */
FNIEMOP_DEF(iemOp_fpatan)
{
    /* FPATAN: ST1 := arctan(ST1/ST0), then pop. */
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}
7034
7035
/** Opcode 0xd9 0xf4. */
FNIEMOP_DEF(iemOp_fxtract)
{
    /* FXTRACT: ST0 := exponent(ST0), then push the significand. */
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}
7042
7043
/** Opcode 0xd9 0xf5. */
FNIEMOP_DEF(iemOp_fprem1)
{
    /* FPREM1: IEEE-754 partial remainder, ST0 := ST0 rem ST1 (no pop). */
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
7050
7051
/** Opcode 0xd9 0xf6. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    /* FDECSTP: rotate the register stack by decrementing FSW.TOP; no data
       is moved and no tag bits change. */
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7074
7075
/** Opcode 0xd9 0xf7. */
FNIEMOP_DEF(iemOp_fincstp)
{
    /* FINCSTP: rotate the register stack by incrementing FSW.TOP; no data
       is moved and no tag bits change. */
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3, see note above */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7098
7099
/** Opcode 0xd9 0xf8. */
FNIEMOP_DEF(iemOp_fprem)
{
    /* FPREM: legacy (truncating) partial remainder, ST0 := ST0 rem ST1. */
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
7106
7107
/** Opcode 0xd9 0xf9. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    /* FYL2XP1: ST1 := ST1 * log2(ST0 + 1), then pop. */
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
7114
7115
/** Opcode 0xd9 0xfa. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    /* FSQRT: ST0 := sqrt(ST0). */
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
7122
7123
/** Opcode 0xd9 0xfb. */
FNIEMOP_DEF(iemOp_fsincos)
{
    /* FSINCOS: replaces ST0 with sin(ST0) and pushes cos(ST0). */
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
7130
7131
/** Opcode 0xd9 0xfc. */
FNIEMOP_DEF(iemOp_frndint)
{
    /* FRNDINT: round ST0 to integer per the FCW rounding mode. */
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
7138
7139
/** Opcode 0xd9 0xfd. */
FNIEMOP_DEF(iemOp_fscale)
{
    /* FSCALE: ST0 := ST0 * 2^trunc(ST1) (no pop). */
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
7146
7147
/** Opcode 0xd9 0xfe. */
FNIEMOP_DEF(iemOp_fsin)
{
    /* FSIN: ST0 := sin(ST0). */
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
7154
7155
/** Opcode 0xd9 0xff. */
FNIEMOP_DEF(iemOp_fcos)
{
    /* FCOS: ST0 := cos(ST0). */
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
7162
7163
/** Used by iemOp_EscF1.
 * Dispatch table for the register (mod=3) forms 0xd9 0xe0..0xff; indexed by
 * (modrm byte - 0xe0), see the /4../7 cases in iemOp_EscF1. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */ iemOp_fchs,
    /* 0xe1 */ iemOp_fabs,
    /* 0xe2 */ iemOp_Invalid,
    /* 0xe3 */ iemOp_Invalid,
    /* 0xe4 */ iemOp_ftst,
    /* 0xe5 */ iemOp_fxam,
    /* 0xe6 */ iemOp_Invalid,
    /* 0xe7 */ iemOp_Invalid,
    /* 0xe8 */ iemOp_fld1,
    /* 0xe9 */ iemOp_fldl2t,
    /* 0xea */ iemOp_fldl2e,
    /* 0xeb */ iemOp_fldpi,
    /* 0xec */ iemOp_fldlg2,
    /* 0xed */ iemOp_fldln2,
    /* 0xee */ iemOp_fldz,
    /* 0xef */ iemOp_Invalid,
    /* 0xf0 */ iemOp_f2xm1,
    /* 0xf1 */ iemOp_fyl2x,
    /* 0xf2 */ iemOp_fptan,
    /* 0xf3 */ iemOp_fpatan,
    /* 0xf4 */ iemOp_fxtract,
    /* 0xf5 */ iemOp_fprem1,
    /* 0xf6 */ iemOp_fdecstp,
    /* 0xf7 */ iemOp_fincstp,
    /* 0xf8 */ iemOp_fprem,
    /* 0xf9 */ iemOp_fyl2xp1,
    /* 0xfa */ iemOp_fsqrt,
    /* 0xfb */ iemOp_fsincos,
    /* 0xfc */ iemOp_frndint,
    /* 0xfd */ iemOp_fscale,
    /* 0xfe */ iemOp_fsin,
    /* 0xff */ iemOp_fcos
};
7200
7201
/** Opcode 0xd9.
 * First-level decoder for the 0xd9 FPU escape: dispatches on mod and
 * reg of the modrm byte to the register- or memory-form handlers. */
FNIEMOP_DEF(iemOp_EscF1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xd9 + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
            case 2:
                if (bRm == 0xd0)
                    return FNIEMOP_CALL(iemOp_fnop);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
            case 4:
            case 5:
            case 6:
            case 7:
                /* 0xe0..0xff all go through the table above. */
                Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
                return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r,  bRm);
            case 1: return IEMOP_RAISE_INVALID_OPCODE();
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fldenv,    bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fldcw,     bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fnstenv,   bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstcw,    bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7244
7245
/** Opcode 0xda 11/0.
 * FCMOVB: copy ST(i) to ST0 when CF is set; underflow if either reg empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Both ST(i) and ST0 must be occupied; ref ST(i) for the copy. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7272
7273
/** Opcode 0xda 11/1.
 * FCMOVE: copy ST(i) to ST0 when ZF is set; underflow if either reg empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7300
7301
/** Opcode 0xda 11/2.
 * FCMOVBE: copy ST(i) to ST0 when CF or ZF is set. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7328
7329
/** Opcode 0xda 11/3.
 * FCMOVU: copy ST(i) to ST0 when PF is set (PF = unordered after FCOMI). */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7356
7357
/**
 * Common worker for FPU instructions working on ST0 and STn, only affecting
 * flags, and popping twice when done.
 *
 * Used by FUCOMPP (and similar compare-and-double-pop forms); the assembly
 * worker only produces an FSW value, no register result is stored.
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Compares ST0 with ST1 (fixed), then pops both. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
        IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7389
7390
/** Opcode 0xda 0xe9. */
FNIEMOP_DEF(iemOp_fucompp)
{
    /* FUCOMPP: unordered compare ST0 with ST1, pop both. */
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
7397
7398
/**
 * Common worker for FPU instructions working on ST0 and an m32i, and storing
 * the result in ST0.
 *
 * Used by the 0xda memory forms (FIADD/FIMUL/FISUB/FISUBR/FIDIV/FIDIVR m32i).
 *
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    /* Effective address first, then finish decoding before touching state. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7434
7435
/** Opcode 0xda !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    /* FIADD m32int: ST0 := ST0 + (int32 from memory). */
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
7442
7443
/** Opcode 0xda !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    /* FIMUL m32int: ST0 := ST0 * (int32 from memory). */
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
7450
7451
/** Opcode 0xda !11/2.
 * FICOM m32int: compare ST0 with an int32 from memory, update FSW only. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7484
7485
/** Opcode 0xda !11/3.
 * FICOMP m32int: like FICOM m32int but pops ST0 afterwards.  Shares the
 * iemAImpl_ficom_r80_by_i32 worker with the non-popping form. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,     0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,             1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2,    2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7518
7519
/** Opcode 0xda !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    /* FISUB m32int: ST0 := ST0 - (int32 from memory). */
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
7526
7527
/** Opcode 0xda !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    /* FISUBR m32int: ST0 := (int32 from memory) - ST0 (reversed operands). */
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
7534
7535
/** Opcode 0xda !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    /* FIDIV m32int: ST0 := ST0 / (int32 from memory). */
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
7542
7543
/** Opcode 0xda !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    /* FIDIVR m32int: ST0 := (int32 from memory) / ST0 (reversed operands). */
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
7550
7551
/** Opcode 0xda.
 * First-level decoder for the 0xda FPU escape (FCMOVcc register forms and
 * the m32int arithmetic memory forms). */
FNIEMOP_DEF(iemOp_EscF2)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xda + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN,  bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5:
                /* Only 0xe9 (FUCOMPP) is defined in the /5 row. */
                if (bRm == 0xe9)
                    return FNIEMOP_CALL(iemOp_fucompp);
                return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms (all take an m32int operand). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
7591
7592
/** Opcode 0xdb !11/0.
 * FILD m32int: convert an int32 from memory to R80 and push it. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val,  i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) is the register that will become the new top; if it's occupied
       the push overflows. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7624
7625
/** Opcode 0xdb !11/1.
 * FISTTP m32int (SSE3): store ST0 to memory as int32 with truncation, pop. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing; committed only per the FSW result. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7660
7661
/** Opcode 0xdb !11/2.
 * FIST m32int: store ST0 to memory as int32 (rounded per FCW), no pop. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7696
7697
/** Opcode 0xdb !11/3.
 * FISTP m32int: like FIST m32int but pops ST0 afterwards. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7732
7733
/** Opcode 0xdb !11/5.
 * FLD m80real: load an 80-bit real from memory and push it. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes,    FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val,    r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* ST(7) becomes the new top; push overflows if it's occupied. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7765
7766
/** Opcode 0xdb !11/7.
 * FSTP m80real: store ST0 to memory as 80-bit real and pop. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,            1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7801
7802
/** Opcode 0xdb 11/0.
 * FCMOVNB: copy ST(i) to ST0 when CF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7829
7830
/** Opcode 0xdb 11/1.
 * FCMOVNE: copy ST(i) to ST0 when ZF is clear. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7857
7858
/** Opcode 0xdb 11/2.
 * FCMOVNBE: copy ST(i) to ST0 when both CF and ZF are clear. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7885
7886
7887/** Opcode 0xdb 11/3. */
7888FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
7889{
7890 IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
7891 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7892
7893 IEM_MC_BEGIN(0, 1);
7894 IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);
7895
7896 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7897 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7898
7899 IEM_MC_PREPARE_FPU_USAGE();
7900 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
7901 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
7902 IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
7903 IEM_MC_ENDIF();
7904 IEM_MC_UPDATE_FPU_OPCODE_IP();
7905 IEM_MC_ELSE()
7906 IEM_MC_FPU_STACK_UNDERFLOW(0);
7907 IEM_MC_ENDIF();
7908 IEM_MC_ADVANCE_RIP();
7909
7910 IEM_MC_END();
7911 return VINF_SUCCESS;
7912}
7913
7914
/** Opcode 0xdb 0xe0.
 * FNENI: 8087-only interrupt enable; a no-op (ignored) on later CPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7926
7927
/** Opcode 0xdb 0xe1.
 * FNDISI: 8087-only interrupt disable; a no-op (ignored) on later CPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7939
7940
/** Opcode 0xdb 0xe2.
 * FNCLEX: clear the FPU exception flags without checking for pending ones. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7955
7956
/** Opcode 0xdb 0xe3.
 * FNINIT: reinitialize the FPU without checking for pending exceptions
 * (hence fCheckXcpts=false); deferred to the C implementation. */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
7964
7965
/** Opcode 0xdb 0xe4.
 * FNSETPM: 80287-only; treated as a no-op here. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7977
7978
/** Opcode 0xdb 0xe5.
 * FRSTPM: 80287XL-only; raises \#UD on newer CPUs (the no-op variant is
 * kept in the disabled branch for reference). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
7994
7995
/** Opcode 0xdb 11/5.
 * FUCOMI: unordered compare ST0 with ST(i) setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8002
8003
/** Opcode 0xdb 11/6.
 * FCOMI: ordered compare ST0 with ST(i) setting EFLAGS; no pop. */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8010
8011
/** Opcode 0xdb.
 * First-level decoder for the 0xdb FPU escape (FCMOVNcc, FNCLEX/FNINIT and
 * friends, FUCOMI/FCOMI, plus the m32int and m80real memory forms). */
FNIEMOP_DEF(iemOp_EscF3)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the 11-bit FPU opcode (low 3 bits of 0xdb + modrm) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
            case 4:
                /* The /4 row encodes individual no-operand instructions. */
                switch (bRm)
                {
                    case 0xe0:  return FNIEMOP_CALL(iemOp_fneni);
                    case 0xe1:  return FNIEMOP_CALL(iemOp_fndisi);
                    case 0xe2:  return FNIEMOP_CALL(iemOp_fnclex);
                    case 0xe3:  return FNIEMOP_CALL(iemOp_fninit);
                    case 0xe4:  return FNIEMOP_CALL(iemOp_fnsetpm);
                    case 0xe5:  return FNIEMOP_CALL(iemOp_frstpm);
                    case 0xe6:  return IEMOP_RAISE_INVALID_OPCODE();
                    case 0xe7:  return IEMOP_RAISE_INVALID_OPCODE();
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break; /* not reached - every inner case returns */
            case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory forms. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
            case 4: return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r,   bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8061
8062
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModRM byte; low 3 bits select ST(i).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Both ST(i) (operand 1 / destination) and ST0 (operand 2) must be
       non-empty, otherwise it's a stack underflow on ST(i). */
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8094
8095
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* ST(i) += ST0, result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
8102
8103
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* ST(i) *= ST0, result stored in ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
8110
8111
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    /* Reversed subtract: ST(i) = ST0 - ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
8118
8119
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    /* ST(i) = ST(i) - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
8126
8127
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    /* Reversed divide: ST(i) = ST0 / ST(i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
8134
8135
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    /* ST(i) = ST(i) / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
8142
8143
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModRM byte; selects the memory operand.
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    /* Effective address calc consumes any SIB/displacement bytes and must
       precede the done-decoding check. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* ST0 empty -> stack underflow (recording the memory operand for FDP). */
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8178
8179
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* ST0 += m64 real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
8186
8187
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* ST0 *= m64 real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
8194
8195
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");
    /* Compare ST0 with an m64 real; only FSW is updated, no pop. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    /* Address decoding must complete before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no stack register is written on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8228
8229
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");
    /* Same as FCOM m64r but pops ST0 afterwards (the *_THEN_POP updaters). */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8262
8263
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* ST0 -= m64 real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
8270
8271
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Reversed: ST0 = m64 real - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
8278
8279
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* ST0 /= m64 real. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
8286
8287
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Reversed: ST0 = m64 real / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
8294
8295
/** Opcode 0xdc. */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdc + ModRM) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: arithmetic with ST(i) as destination. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: arithmetic/compare with an m64 real source. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8332
8333
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m32r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");
    /* Push an m64 real (converted to 80-bit) onto the FPU stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) must be free for the push, otherwise
       it's a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8365
8366
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    /* Store ST0 as a 64-bit integer with truncation, then pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for write before touching the FPU state. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked: store the integer-indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8401
8402
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    /* Store ST0 as an m64 real; no pop. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Underflow with IM masked: store the real-indefinite QNaN. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8437
8438
8439
8440
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    /* Like FST m64r but pops ST0 afterwards (the *_THEN_POP updaters). */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8475
8476
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    /* Restore the full FPU state from memory; deferred to a C implementation
       since the image layout depends on the effective operand size. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
8494
8495
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    /* Save the full FPU state to memory (no pending-exception check);
       deferred to a C implementation, layout depends on operand size. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
8514
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");
    /* Store the FPU status word to a 16-bit memory operand, without
       checking for pending FPU exceptions (the 'n' variant). */

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
8539
8540
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
       unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Tag ST(i) as empty and update FOP/FIP; the register contents and the
       stack top are left alone. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8562
8563
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Copy ST0 into ST(i). */

    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        /* Wrap the ST0 value in a result with a zero FSW so the regular
           result-store path handles the copy. */
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8588
8589
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    /* Unordered compare ST0 with ST(i); FSW only, no pop. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
8596
8597
/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    /* Unordered compare ST0 with ST(i), then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
8604
8605
/** Opcode 0xdd. */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xdd + ModRM) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN, bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: m64 real/integer ops plus FRSTOR/FNSAVE/FNSTSW. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor, bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8642
8643
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    /* ST(i) += ST0, then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
8650
8651
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    /* ST(i) *= ST0, then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
8658
8659
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    /* Compare ST0 with ST1, then pop both. */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
8666
8667
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    /* ST(i) = ST0 - ST(i), then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
8674
8675
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    /* ST(i) = ST(i) - ST0, then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
8682
8683
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    /* ST(i) = ST0 / ST(i), then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
8690
8691
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    /* ST(i) = ST(i) / ST0, then pop ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
8698
8699
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModRM byte; selects the memory operand.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    /* Address decoding must complete before raising any exceptions. */
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8735
8736
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* ST0 += m16 integer. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
8743
8744
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* ST0 *= m16 integer. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
8751
8752
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");
    /* Compare ST0 with an m16 integer; FSW only, no pop. */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        /* UINT8_MAX: no stack register is written on underflow. */
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8785
8786
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");
    /* Same as FICOM m16i but pops ST0 afterwards (the *_THEN_POP updaters). */

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8819
8820
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* ST0 -= m16 integer. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
8827
8828
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Reversed: ST0 = m16 integer - ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
8835
8836
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* ST0 /= m16 integer. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
8843
8844
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Reversed: ST0 = m16 integer / ST0. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
8851
8852
/** Opcode 0xde. */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode (low 3 bits of 0xde + ModRM) for FOP reporting. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register form: the pop variants; /3 is valid only as 0xd9 (FCOMPP). */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory form: 16-bit integer arithmetic against ST0. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
8891
8892
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like FFREE + FINCSTP. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Tag ST(i) as empty, then increment the stack top (the "pop"). */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8914
8915
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Copy the FPU status word into AX, without checking for pending FPU
       exceptions (the 'n' variant). */

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8932
8933
8934/** Opcode 0xdf 11/5. */
8935FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
8936{
8937 IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
8938 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
8939}
8940
8941
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Ordered compare ST0 with ST(i) setting EFLAGS, then pop ST0. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
8948
8949
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");
    /* Push an m16 integer (converted to 80-bit real) onto the FPU stack. */

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* Register 7 (relative to top) must be free for the push. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8981
8982
/** Opcode 0xdf !11/1 - fisttp m16i.
 * Stores ST(0) to memory as a 16-bit integer using truncation (regardless
 * of the rounding control), then pops the stack.  On an empty ST(0) the
 * stack-underflow path stores the integer-indefinite value if FCW.IM is
 * masked, then signals underflow and pops. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP are raised before the FPU op. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-operation: store integer indefinite */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9017
9018
/** Opcode 0xdf !11/2 - fist m16i.
 * Stores ST(0) to memory as a 16-bit integer using the current rounding
 * control; the stack is NOT popped (contrast with fistp below).  On an
 * empty ST(0) the integer-indefinite value is stored if FCW.IM is masked,
 * then stack underflow is signalled. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP are raised before the FPU op. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst); /* no pop */
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-operation: store integer indefinite */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9053
9054
/** Opcode 0xdf !11/3 - fistp m16i.
 * Stores ST(0) to memory as a 16-bit integer using the current rounding
 * control and pops the stack.  Identical to fist m16i above except for the
 * THEN_POP variants of the FSW/underflow updates. */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP are raised before the FPU op. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-operation: store integer indefinite */
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9089
9090
/** Opcode 0xdf !11/4 - fbld: load 80-bit packed BCD (stubbed, not yet
 * implemented).  NOTE(review): the operand is packed BCD despite the
 * "m80d" in the identifier. */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
9093
9094
/** Opcode 0xdf !11/5 - fild m64i.
 * Loads a 64-bit signed integer from memory, converts it to an 80-bit
 * float and pushes it onto the FPU stack; signals stack-push overflow if
 * the incoming top-of-stack slot is occupied.  64-bit twin of fild m16i. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* the register that becomes the new ST(0) after the push */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9126
9127
/** Opcode 0xdf !11/6 - fbstp: store 80-bit packed BCD and pop (stubbed,
 * not yet implemented).  NOTE(review): the operand is packed BCD despite
 * the "m80d" in the identifier. */
FNIEMOP_STUB_1(iemOp_fbstp_m80d, uint8_t, bRm);
9130
9131
/** Opcode 0xdf !11/7 - fistp m64i.
 * Stores ST(0) to memory as a 64-bit integer using the current rounding
 * control and pops the stack.  64-bit twin of fistp m16i; the underflow
 * path stores INT64_MIN (integer indefinite) when FCW.IM is masked. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination for writing up front so #PF/#GP are raised before the FPU op. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked invalid-operation: store integer indefinite */
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9166
9167
/** Opcode 0xdf - x87 escape group 7.
 * Decodes the ModR/M byte and dispatches: register forms (mod=3) in the
 * first table, memory forms in the second.  Several register encodings are
 * formally reserved but emulated per observed Intel/AMD behaviour (see the
 * per-case comments). */
FNIEMOP_DEF(iemOp_EscF7)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register operand forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
            case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
            case 4: if (bRm == 0xe0) /* only the 0xdf 0xe0 encoding (fnstsw ax) is valid in /4 */
                        return FNIEMOP_CALL(iemOp_fnstsw_ax);
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* Memory operand forms, selected by the reg field. */
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9205
9206
/** Opcode 0xe0 - loopne/loopnz Jb.
 * Decrements CX/ECX/RCX (width chosen by the effective address size) and
 * takes the relative jump if the counter is non-zero AND ZF is clear. */
FNIEMOP_DEF(iemOp_loopne_Jb)
{
    IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the ADDRESS size, not the operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9253
9254
/** Opcode 0xe1 - loope/loopz Jb.
 * Decrements CX/ECX/RCX (width chosen by the effective address size) and
 * takes the relative jump if the counter is non-zero AND ZF is set. */
FNIEMOP_DEF(iemOp_loope_Jb)
{
    IEMOP_MNEMONIC(loope_Jb, "loope Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /* The counter register width follows the ADDRESS size, not the operand size. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
            IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
            IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
            IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ELSE() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9301
9302
/** Opcode 0xe2 - loop Jb.
 * Decrements CX/ECX/RCX (width chosen by the effective address size) and
 * takes the relative jump while the counter is non-zero.  A "loop $"
 * (displacement equal to minus the instruction length, i.e. a jump back to
 * the instruction itself) is a pure delay/busy loop; it is short-circuited
 * by clearing the counter and falling through in a single step. */
FNIEMOP_DEF(iemOp_loop_Jb)
{
    IEMOP_MNEMONIC(loop_Jb, "loop Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
     * using the 32-bit operand size override.  How can that be restarted?  See
     * weird pseudo code in intel manual. */
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not a self-branch ("loop $")? */
            {
                IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
                IEM_MC_IF_CX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branch: collapse the whole busy loop into one iteration. */
                IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not a self-branch ("loop $")? */
            {
                IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
                IEM_MC_IF_ECX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branch: collapse the whole busy loop into one iteration. */
                IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0,0);
            if (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) != i8Imm) /* not a self-branch ("loop $")? */
            {
                IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
                IEM_MC_IF_RCX_IS_NZ() {
                    IEM_MC_REL_JMP_S8(i8Imm);
                } IEM_MC_ELSE() {
                    IEM_MC_ADVANCE_RIP();
                } IEM_MC_ENDIF();
            }
            else
            {
                /* Self-branch: collapse the whole busy loop into one iteration. */
                IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
                IEM_MC_ADVANCE_RIP();
            }
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9376
9377
/** Opcode 0xe3 - jcxz/jecxz/jrcxz Jb.
 * Jumps if the counter register is zero; which of CX/ECX/RCX is tested is
 * selected by the effective address size (the mnemonic macro always says
 * "jecxz", but all three widths are handled below). */
FNIEMOP_DEF(iemOp_jecxz_Jb)
{
    IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:  /* jcxz */
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_CX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:  /* jecxz */
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_ECX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:  /* jrcxz */
            IEM_MC_BEGIN(0,0);
            IEM_MC_IF_RCX_IS_NZ() {
                IEM_MC_ADVANCE_RIP();
            } IEM_MC_ELSE() {
                IEM_MC_REL_JMP_S8(i8Imm);
            } IEM_MC_ENDIF();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9421
9422
/** Opcode 0xe4 - in AL,Ib: read one byte from the immediate port into AL.
 * Deferred to the common 'in' C implementation (port, access size). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, 1);
}
9431
9432
/** Opcode 0xe5 - in eAX,Ib: read a word/dword (per operand size) from the
 * immediate port into AX/EAX. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_in, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9441
9442
/** Opcode 0xe6 - out Ib,AL: write AL to the immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, 1);
}
9451
9452
/** Opcode 0xe7 - out Ib,eAX: write AX/EAX (per operand size) to the
 * immediate port. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_out, u8Imm, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9461
9462
/** Opcode 0xe8 - call Jv: near relative call.
 * In 64-bit mode the operand size defaults to 64-bit and the displacement
 * is a sign-extended 32-bit immediate.  The push/branch work is deferred
 * to the per-width C implementations. */
FNIEMOP_DEF(iemOp_call_Jv)
{
    IEMOP_MNEMONIC(call_Jv, "call Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm); /* imm32 sign-extended to 64-bit */
            return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9491
9492
/** Opcode 0xe9 - jmp Jv: near relative jump.
 * 64-bit mode shares the 32-bit path since the displacement is a
 * sign-extended 32-bit immediate in both cases. */
FNIEMOP_DEF(iemOp_jmp_Jv)
{
    IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S16(i16Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:  /* same imm32 displacement as 32-bit */
        case IEMMODE_32BIT:
        {
            int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
            IEM_MC_BEGIN(0, 0);
            IEM_MC_REL_JMP_S32(i32Imm);
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
9522
9523
/** Opcode 0xea - jmp Ap: direct far jump (ptr16:16 / ptr16:32).
 * Invalid in 64-bit mode.  The selector:offset pair is decoded here and
 * the mode-dependent segment checks happen in iemCImpl_FarJmp. */
FNIEMOP_DEF(iemOp_jmp_Ap)
{
    IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
    IEMOP_HLP_NO_64BIT();

    /* Decode the far pointer address and pass it on to the far call C implementation. */
    uint32_t offSeg;
    if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
        IEM_OPCODE_GET_NEXT_U32(&offSeg);           /* ptr16:32 */
    else
        IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);    /* ptr16:16, zero extended */
    uint16_t uSel;  IEM_OPCODE_GET_NEXT_U16(&uSel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
}
9540
9541
/** Opcode 0xeb - jmp Jb: short relative jump (8-bit displacement). */
FNIEMOP_DEF(iemOp_jmp_Jb)
{
    IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_REL_JMP_S8(i8Imm);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9555
9556
/** Opcode 0xec - in AL,DX: read one byte from the port in DX into AL. */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
9564
9565
/** Opcode 0xed - in eAX,DX: read a word/dword (per operand size) from the
 * port in DX into AX/EAX.
 * NOTE(review): the function name lacks the "in_" prefix used by its
 * siblings (iemOp_in_AL_DX etc.); kept as-is since the opcode table
 * references this identifier. */
FNIEMOP_DEF(iemOp_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9573
9574
/** Opcode 0xee - out DX,AL: write AL to the port in DX. */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
9582
9583
/** Opcode 0xef - out DX,eAX: write AX/EAX (per operand size) to the port
 * in DX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
9591
9592
/** Opcode 0xf0 - lock prefix.
 * Records the LOCK prefix in fPrefixes and re-dispatches the next byte
 * through the one-byte opcode table; whether LOCK is legal is checked by
 * the decoded instruction itself. */
FNIEMOP_DEF(iemOp_lock)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");  /* REX must come immediately before the opcode */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9602
9603
/** Opcode 0xf1 - int1 / icebp.
 * Raises #DB via the common software-interrupt implementation with
 * fIsBpInstr=false (it is not the 0xcc breakpoint instruction). */
FNIEMOP_DEF(iemOp_int_1)
{
    IEMOP_MNEMONIC(int1, "int1"); /* icebp */
    IEMOP_HLP_MIN_386(); /** @todo does not generate #UD on 286, or so they say... */
    /** @todo testcase! */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, false /*fIsBpInstr*/);
}
9612
9613
/** Opcode 0xf2 - repne/repnz prefix.
 * Records the REPNZ prefix (clearing any earlier REPZ), selects prefix
 * table index 3 for the 4-entry opcode tables, and re-dispatches the next
 * byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repne)
{
    /* This overrides any previous REPE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;

    /* For the 4 entry opcode tables, REPNZ overrides any previous
       REPZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 3;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9629
9630
/** Opcode 0xf3 - repe/repz prefix.
 * Records the REPZ prefix (clearing any earlier REPNZ), selects prefix
 * table index 2 for the 4-entry opcode tables, and re-dispatches the next
 * byte through the one-byte opcode table. */
FNIEMOP_DEF(iemOp_repe)
{
    /* This overrides any previous REPNE prefix. */
    pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;

    /* For the 4 entry opcode tables, REPZ overrides any previous
       REPNZ and operand size prefixes. */
    pVCpu->iem.s.idxPrefix = 2;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
9646
9647
/** Opcode 0xf4 - hlt: halt the CPU; deferred to the C implementation
 * (privilege checking happens there). */
FNIEMOP_DEF(iemOp_hlt)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
}
9654
9655
/** Opcode 0xf5 - cmc: complement the carry flag; no other flags change. */
FNIEMOP_DEF(iemOp_cmc)
{
    IEMOP_MNEMONIC(cmc, "cmc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9667
9668
9669/**
9670 * Common implementation of 'inc/dec/not/neg Eb'.
9671 *
9672 * @param bRm The RM byte.
9673 * @param pImpl The instruction implementation.
9674 */
9675FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9676{
9677 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9678 {
9679 /* register access */
9680 IEM_MC_BEGIN(2, 0);
9681 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9682 IEM_MC_ARG(uint32_t *, pEFlags, 1);
9683 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9684 IEM_MC_REF_EFLAGS(pEFlags);
9685 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9686 IEM_MC_ADVANCE_RIP();
9687 IEM_MC_END();
9688 }
9689 else
9690 {
9691 /* memory access. */
9692 IEM_MC_BEGIN(2, 2);
9693 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
9694 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9695 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9696
9697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9698 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9699 IEM_MC_FETCH_EFLAGS(EFlags);
9700 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9701 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
9702 else
9703 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
9704
9705 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
9706 IEM_MC_COMMIT_EFLAGS(EFlags);
9707 IEM_MC_ADVANCE_RIP();
9708 IEM_MC_END();
9709 }
9710 return VINF_SUCCESS;
9711}
9712
9713
9714/**
9715 * Common implementation of 'inc/dec/not/neg Ev'.
9716 *
9717 * @param bRm The RM byte.
9718 * @param pImpl The instruction implementation.
9719 */
9720FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
9721{
9722 /* Registers are handled by a common worker. */
9723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9724 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
9725
9726 /* Memory we do here. */
9727 switch (pVCpu->iem.s.enmEffOpSize)
9728 {
9729 case IEMMODE_16BIT:
9730 IEM_MC_BEGIN(2, 2);
9731 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
9732 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9733 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9734
9735 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9736 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9737 IEM_MC_FETCH_EFLAGS(EFlags);
9738 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9739 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
9740 else
9741 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
9742
9743 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
9744 IEM_MC_COMMIT_EFLAGS(EFlags);
9745 IEM_MC_ADVANCE_RIP();
9746 IEM_MC_END();
9747 return VINF_SUCCESS;
9748
9749 case IEMMODE_32BIT:
9750 IEM_MC_BEGIN(2, 2);
9751 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
9752 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9753 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9754
9755 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9756 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9757 IEM_MC_FETCH_EFLAGS(EFlags);
9758 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9759 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
9760 else
9761 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
9762
9763 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
9764 IEM_MC_COMMIT_EFLAGS(EFlags);
9765 IEM_MC_ADVANCE_RIP();
9766 IEM_MC_END();
9767 return VINF_SUCCESS;
9768
9769 case IEMMODE_64BIT:
9770 IEM_MC_BEGIN(2, 2);
9771 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
9772 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
9773 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
9774
9775 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
9776 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
9777 IEM_MC_FETCH_EFLAGS(EFlags);
9778 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
9779 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
9780 else
9781 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
9782
9783 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
9784 IEM_MC_COMMIT_EFLAGS(EFlags);
9785 IEM_MC_ADVANCE_RIP();
9786 IEM_MC_END();
9787 return VINF_SUCCESS;
9788
9789 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9790 }
9791}
9792
9793
/** Opcode 0xf6 /0 - test Eb,Ib.
 * ANDs the byte operand with an immediate and sets flags; the destination
 * is never written back, so the memory operand is mapped read-only. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);  /* AF is undefined after TEST */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,             0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,   1);
        IEM_MC_ARG(uint32_t *,      pEFlags,            2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);  /* 1 = trailing imm8 byte after the ModR/M bytes */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);  /* read-only: test never writes */
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
9840
9841
/** Opcode 0xf7 /0 - test Ev,Iv.
 * ANDs the word/dword/qword operand with an immediate and sets flags; the
 * destination is never written back (memory mapped read-only, and in the
 * 32-bit register case the high dword is deliberately not cleared).  The
 * 64-bit immediate is the usual sign-extended imm32. */
FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);  /* AF is undefined after TEST */

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
                /* No clearing the high dword here - test doesn't write back the result. */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);  /* imm32 sign-extended to 64-bit */
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm,     1);
                IEM_MC_ARG(uint32_t *,      pEFlags,                2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *,      pu16Dst,         0);
                IEM_MC_ARG(uint16_t,        u16Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);  /* 2 = trailing imm16 bytes */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);  /* read-only: test never writes */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_32BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *,      pu32Dst,         0);
                IEM_MC_ARG(uint32_t,        u32Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);  /* 4 = trailing imm32 bytes */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);  /* read-only: test never writes */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            case IEMMODE_64BIT:
            {
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *,      pu64Dst,         0);
                IEM_MC_ARG(uint64_t,        u64Src,          1);
                IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);  /* 4 = trailing imm32 bytes (sign-extended below) */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);  /* read-only: test never writes */
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;
            }

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9981
9982
/**
 * Common worker for the byte-sized group 3 multiply/divide instructions:
 * opcode 0xf6 /4 (mul), /5 (imul), /6 (div) and /7 (idiv).
 *
 * All four implicitly use AX as destination (and AL as the other source),
 * so the assembly helper only needs a reference to AX, the r/m byte value
 * and a reference to EFLAGS.  The helper returns zero on success and
 * non-zero when a divide error (\#DE) must be raised.
 *
 * @param   bRm     The ModR/M byte (already fetched by the caller).
 * @param   pfnU8   The 8-bit assembly implementation to invoke.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the assembly helper means a divide error. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        /* Effective address must be calculated before decoding is declared done. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10034
10035
10036/** Opcode 0xf7 /4, /5, /6 and /7. */
10037FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
10038{
10039 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
10040
10041 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10042 {
10043 /* register access */
10044 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10045 switch (pVCpu->iem.s.enmEffOpSize)
10046 {
10047 case IEMMODE_16BIT:
10048 {
10049 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10050 IEM_MC_BEGIN(4, 1);
10051 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10052 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10053 IEM_MC_ARG(uint16_t, u16Value, 2);
10054 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10055 IEM_MC_LOCAL(int32_t, rc);
10056
10057 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10058 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10059 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10060 IEM_MC_REF_EFLAGS(pEFlags);
10061 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10062 IEM_MC_IF_LOCAL_IS_Z(rc) {
10063 IEM_MC_ADVANCE_RIP();
10064 } IEM_MC_ELSE() {
10065 IEM_MC_RAISE_DIVIDE_ERROR();
10066 } IEM_MC_ENDIF();
10067
10068 IEM_MC_END();
10069 return VINF_SUCCESS;
10070 }
10071
10072 case IEMMODE_32BIT:
10073 {
10074 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10075 IEM_MC_BEGIN(4, 1);
10076 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10077 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10078 IEM_MC_ARG(uint32_t, u32Value, 2);
10079 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10080 IEM_MC_LOCAL(int32_t, rc);
10081
10082 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10083 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10084 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10085 IEM_MC_REF_EFLAGS(pEFlags);
10086 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10087 IEM_MC_IF_LOCAL_IS_Z(rc) {
10088 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10089 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10090 IEM_MC_ADVANCE_RIP();
10091 } IEM_MC_ELSE() {
10092 IEM_MC_RAISE_DIVIDE_ERROR();
10093 } IEM_MC_ENDIF();
10094
10095 IEM_MC_END();
10096 return VINF_SUCCESS;
10097 }
10098
10099 case IEMMODE_64BIT:
10100 {
10101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10102 IEM_MC_BEGIN(4, 1);
10103 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10104 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10105 IEM_MC_ARG(uint64_t, u64Value, 2);
10106 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10107 IEM_MC_LOCAL(int32_t, rc);
10108
10109 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10110 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10111 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10112 IEM_MC_REF_EFLAGS(pEFlags);
10113 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10114 IEM_MC_IF_LOCAL_IS_Z(rc) {
10115 IEM_MC_ADVANCE_RIP();
10116 } IEM_MC_ELSE() {
10117 IEM_MC_RAISE_DIVIDE_ERROR();
10118 } IEM_MC_ENDIF();
10119
10120 IEM_MC_END();
10121 return VINF_SUCCESS;
10122 }
10123
10124 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10125 }
10126 }
10127 else
10128 {
10129 /* memory access. */
10130 switch (pVCpu->iem.s.enmEffOpSize)
10131 {
10132 case IEMMODE_16BIT:
10133 {
10134 IEM_MC_BEGIN(4, 2);
10135 IEM_MC_ARG(uint16_t *, pu16AX, 0);
10136 IEM_MC_ARG(uint16_t *, pu16DX, 1);
10137 IEM_MC_ARG(uint16_t, u16Value, 2);
10138 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10140 IEM_MC_LOCAL(int32_t, rc);
10141
10142 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10143 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10144 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10145 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
10146 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
10147 IEM_MC_REF_EFLAGS(pEFlags);
10148 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
10149 IEM_MC_IF_LOCAL_IS_Z(rc) {
10150 IEM_MC_ADVANCE_RIP();
10151 } IEM_MC_ELSE() {
10152 IEM_MC_RAISE_DIVIDE_ERROR();
10153 } IEM_MC_ENDIF();
10154
10155 IEM_MC_END();
10156 return VINF_SUCCESS;
10157 }
10158
10159 case IEMMODE_32BIT:
10160 {
10161 IEM_MC_BEGIN(4, 2);
10162 IEM_MC_ARG(uint32_t *, pu32AX, 0);
10163 IEM_MC_ARG(uint32_t *, pu32DX, 1);
10164 IEM_MC_ARG(uint32_t, u32Value, 2);
10165 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10166 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10167 IEM_MC_LOCAL(int32_t, rc);
10168
10169 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10170 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10171 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10172 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
10173 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
10174 IEM_MC_REF_EFLAGS(pEFlags);
10175 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
10176 IEM_MC_IF_LOCAL_IS_Z(rc) {
10177 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
10178 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
10179 IEM_MC_ADVANCE_RIP();
10180 } IEM_MC_ELSE() {
10181 IEM_MC_RAISE_DIVIDE_ERROR();
10182 } IEM_MC_ENDIF();
10183
10184 IEM_MC_END();
10185 return VINF_SUCCESS;
10186 }
10187
10188 case IEMMODE_64BIT:
10189 {
10190 IEM_MC_BEGIN(4, 2);
10191 IEM_MC_ARG(uint64_t *, pu64AX, 0);
10192 IEM_MC_ARG(uint64_t *, pu64DX, 1);
10193 IEM_MC_ARG(uint64_t, u64Value, 2);
10194 IEM_MC_ARG(uint32_t *, pEFlags, 3);
10195 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10196 IEM_MC_LOCAL(int32_t, rc);
10197
10198 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10199 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10200 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
10201 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
10202 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
10203 IEM_MC_REF_EFLAGS(pEFlags);
10204 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
10205 IEM_MC_IF_LOCAL_IS_Z(rc) {
10206 IEM_MC_ADVANCE_RIP();
10207 } IEM_MC_ELSE() {
10208 IEM_MC_RAISE_DIVIDE_ERROR();
10209 } IEM_MC_ENDIF();
10210
10211 IEM_MC_END();
10212 return VINF_SUCCESS;
10213 }
10214
10215 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10216 }
10217 }
10218}
10219
/**
 * Opcode 0xf6 - group 3 with byte operand (test/not/neg/mul/imul/div/idiv Eb).
 * Dispatches on the ModR/M reg field; /1 is an undocumented alias that is
 * rejected here (see the todo below).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            /* mul/imul leave SF, ZF, AF and PF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_mul_u8);
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_imul_u8);
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_div_u8);
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, iemAImpl_idiv_u8);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10256
10257
/**
 * Opcode 0xf7 - group 3 with word/dword/qword operand
 * (test/not/neg/mul/imul/div/idiv Ev).  Dispatches on the ModR/M reg field.
 *
 * NOTE(review): the IEMOP_VERIFICATION_UNDEFINED_EFLAGS calls for /4-/7
 * duplicate the one inside iemOpCommonGrp3MulDivEv; harmless
 * (verification-only) but redundant.
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_mul);
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_imul);
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_div);
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, &g_iemAImpl_idiv);
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10294
10295
/** Opcode 0xf8 - clc: clears the carry flag in EFLAGS. */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10307
10308
/** Opcode 0xf9 - stc: sets the carry flag in EFLAGS. */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10320
10321
/** Opcode 0xfa - cli: all the work (privilege checking etc.) is done in the
 *  C implementation worker iemCImpl_cli. */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
10329
10330
/** Opcode 0xfb - sti: deferred to the C implementation worker iemCImpl_sti
 *  (was missing the opcode doc comment its neighbours have). */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
10337
10338
/** Opcode 0xfc - cld: clears the direction flag in EFLAGS. */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10350
10351
/** Opcode 0xfd - std: sets the direction flag in EFLAGS. */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
10363
10364
/**
 * Opcode 0xfe - group 4: inc/dec Eb.  Only /0 and /1 are defined;
 * /2 through /7 raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
10382
10383
/**
 * Opcode 0xff /2 - near indirect call.
 *
 * The call target comes either from a general register or from memory,
 * with the width selected by the effective operand size (which defaults
 * to 64-bit in long mode).  The actual RIP/stack work is done by the
 * iemCImpl_call_16/32/64 workers.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. (The original comment
           said "register" here - copy/paste slip; this is the mem operand path.) */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10468
10469typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
10470
10471FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
10472{
10473 /* Registers? How?? */
10474 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
10475 { /* likely */ }
10476 else
10477 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
10478
10479 /* Far pointer loaded from memory. */
10480 switch (pVCpu->iem.s.enmEffOpSize)
10481 {
10482 case IEMMODE_16BIT:
10483 IEM_MC_BEGIN(3, 1);
10484 IEM_MC_ARG(uint16_t, u16Sel, 0);
10485 IEM_MC_ARG(uint16_t, offSeg, 1);
10486 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10487 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10488 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10489 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10490 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10491 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
10492 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10493 IEM_MC_END();
10494 return VINF_SUCCESS;
10495
10496 case IEMMODE_64BIT:
10497 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
10498 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
10499 * and call far qword [rsp] encodings. */
10500 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
10501 {
10502 IEM_MC_BEGIN(3, 1);
10503 IEM_MC_ARG(uint16_t, u16Sel, 0);
10504 IEM_MC_ARG(uint64_t, offSeg, 1);
10505 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
10506 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10507 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10508 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10509 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10510 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
10511 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10512 IEM_MC_END();
10513 return VINF_SUCCESS;
10514 }
10515 /* AMD falls thru. */
10516 /* fall thru */
10517
10518 case IEMMODE_32BIT:
10519 IEM_MC_BEGIN(3, 1);
10520 IEM_MC_ARG(uint16_t, u16Sel, 0);
10521 IEM_MC_ARG(uint32_t, offSeg, 1);
10522 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
10523 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
10524 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
10525 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10526 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
10527 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
10528 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
10529 IEM_MC_END();
10530 return VINF_SUCCESS;
10531
10532 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10533 }
10534}
10535
10536
/**
 * Opcode 0xff /3 - far indirect call via far pointer in memory.
 * Thin wrapper around the shared far-Ep worker with the callf C impl.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
10546
10547
/**
 * Opcode 0xff /4 - near indirect jump.
 *
 * The jump target comes either from a general register or from memory, with
 * the width selected by the effective operand size (defaults to 64-bit in
 * long mode).  No stack interaction - just sets RIP via IEM_MC_SET_RIP_*.
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
10632
10633
/**
 * Opcode 0xff /5 - far indirect jump via far pointer in memory.
 * Thin wrapper around the shared far-Ep worker with the FarJmp C impl.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
10643
10644
/**
 * Opcode 0xff /6 - push Ev.
 *
 * Register operands are handled by the common push-GReg worker; memory
 * operands are fetched and pushed here, sized per the effective operand size
 * (defaults to 64-bit in long mode).
 *
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
10700
10701
/**
 * Opcode 0xff - group 5: inc/dec/call/callf/jmp/jmpf/push Ev.
 * Dispatches on the ModR/M reg field; /7 raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* All eight reg values return above; this only quiets the compiler. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
10730
10731
10732
/**
 * The one byte opcode dispatch table, indexed directly by the opcode byte.
 * Forward declared as extern at the top of this file; entries are the
 * FNIEMOP decoder functions defined above (group entries fan out on the
 * ModR/M reg field, prefix entries re-enter the decoder).
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa, iemOp_bound_Gv_Ma_evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A_xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_Al_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp_vex2, iemOp_lds_Gv_Mp_vex3, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int_3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int_1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
10800
10801
10802/** @} */
10803
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette