VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h@ 94169

Last change on this file since 94169 was 94163, checked in by vboxsync, 3 years ago

VMM/IEM: Try deal with basic Intel/AMD EFLAGS difference for shifts (intel side tests). bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 398.6 KB
Line 
1/* $Id: IEMAllInstructionsOneByte.cpp.h 94163 2022-03-11 00:56:22Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*******************************************************************************
20* Global Variables *
21*******************************************************************************/
22extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
23
24/* Instruction group definitions: */
25
26/** @defgroup og_gen General
27 * @{ */
28 /** @defgroup og_gen_arith Arithmetic
29 * @{ */
30 /** @defgroup og_gen_arith_bin Binary numbers */
31 /** @defgroup og_gen_arith_dec Decimal numbers */
32 /** @} */
33/** @} */
34
35/** @defgroup og_stack Stack
36 * @{ */
37 /** @defgroup og_stack_sreg Segment registers */
38/** @} */
39
40/** @defgroup og_prefix Prefixes */
41/** @defgroup og_escapes Escape bytes */
42
43
44
45/** @name One byte opcodes.
46 * @{
47 */
48
49/* Instruction specification format - work in progress: */
50
51/**
52 * @opcode 0x00
53 * @opmnemonic add
54 * @op1 rm:Eb
55 * @op2 reg:Gb
56 * @opmaps one
57 * @openc ModR/M
58 * @opflmodify cf,pf,af,zf,sf,of
59 * @ophints harmless ignores_op_sizes
60 * @opstats add_Eb_Gb
61 * @opgroup og_gen_arith_bin
62 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
63 * @optest efl|=cf op1=1 op2=2 -> op1=3 efl&|=nc,po,na,nz,pl,nv
64 * @optest op1=254 op2=1 -> op1=255 efl&|=nc,po,na,nz,ng,nv
65 * @optest op1=128 op2=128 -> op1=0 efl&|=ov,pl,zf,na,po,cf
66 */
FNIEMOP_DEF(iemOp_add_Eb_Gb)
{
    /* ADD r/m8, r8: dispatch to the common ModR/M byte binary-operator decoder with the ADD worker table; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADD, add, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);
}
72
73
74/**
75 * @opcode 0x01
76 * @opgroup og_gen_arith_bin
77 * @opflmodify cf,pf,af,zf,sf,of
78 * @optest op1=1 op2=1 -> op1=2 efl&|=nc,pe,na,nz,pl,nv
79 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
80 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
81 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
82 */
FNIEMOP_DEF(iemOp_add_Ev_Gv)
{
    /* ADD r/m16/32/64, r16/32/64: common ModR/M word/dword/qword decoder with the ADD worker table; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADD, add, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_add);
}
88
89
90/**
91 * @opcode 0x02
92 * @opgroup og_gen_arith_bin
93 * @opflmodify cf,pf,af,zf,sf,of
94 * @opcopytests iemOp_add_Eb_Gb
95 */
FNIEMOP_DEF(iemOp_add_Gb_Eb)
{
    /* ADD r8, r/m8: register-destination byte form of ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_add);
}
101
102
103/**
104 * @opcode 0x03
105 * @opgroup og_gen_arith_bin
106 * @opflmodify cf,pf,af,zf,sf,of
107 * @opcopytests iemOp_add_Ev_Gv
108 */
FNIEMOP_DEF(iemOp_add_Gv_Ev)
{
    /* ADD r16/32/64, r/m16/32/64: register-destination word/dword/qword form of ADD. */
    IEMOP_MNEMONIC2(RM, ADD, add, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_add);
}
114
115
116/**
117 * @opcode 0x04
118 * @opgroup og_gen_arith_bin
119 * @opflmodify cf,pf,af,zf,sf,of
120 * @opcopytests iemOp_add_Eb_Gb
121 */
FNIEMOP_DEF(iemOp_add_Al_Ib)
{
    /* ADD AL, imm8: fixed-register immediate byte form of ADD. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_add);
}
127
128
129/**
130 * @opcode 0x05
131 * @opgroup og_gen_arith_bin
132 * @opflmodify cf,pf,af,zf,sf,of
133 * @optest op1=1 op2=1 -> op1=2 efl&|=nv,pl,nz,na,pe
134 * @optest efl|=cf op1=2 op2=2 -> op1=4 efl&|=nc,pe,na,nz,pl,nv
135 * @optest efl&~=cf op1=-1 op2=1 -> op1=0 efl&|=cf,po,af,zf,pl,nv
136 * @optest op1=-1 op2=-1 -> op1=-2 efl&|=cf,pe,af,nz,ng,nv
137 */
FNIEMOP_DEF(iemOp_add_eAX_Iz)
{
    /* ADD rAX, imm16/32: fixed-register, operand-size sensitive immediate form of ADD. */
    IEMOP_MNEMONIC2(FIXED, ADD, add, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_add);
}
143
144
145/**
146 * @opcode 0x06
147 * @opgroup og_stack_sreg
148 */
FNIEMOP_DEF(iemOp_push_ES)
{
    /* PUSH ES: invalid in 64-bit mode; uses the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_ES);
}
155
156
157/**
158 * @opcode 0x07
159 * @opgroup og_stack_sreg
160 */
FNIEMOP_DEF(iemOp_pop_ES)
{
    /* POP ES: invalid in 64-bit mode; defers to the generic segment-register pop C implementation with the current effective operand size. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, ES, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
}
168
169
170/**
171 * @opcode 0x08
172 * @opgroup og_gen_arith_bin
173 * @opflmodify cf,pf,af,zf,sf,of
174 * @opflundef af
175 * @opflclear of,cf
176 * @optest op1=7 op2=12 -> op1=15 efl&|=nc,po,na,nz,pl,nv
177 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
178 * @optest op1=0xee op2=0x11 -> op1=0xff efl&|=nc,po,na,nz,ng,nv
179 * @optest op1=0xff op2=0xff -> op1=0xff efl&|=nc,po,na,nz,ng,nv
180 */
FNIEMOP_DEF(iemOp_or_Eb_Gb)
{
    /* OR r/m8, r8: byte binary-op decode with the OR worker table; AF is architecturally undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_or);
}
187
188
/**
190 * @opcode 0x09
191 * @opgroup og_gen_arith_bin
192 * @opflmodify cf,pf,af,zf,sf,of
193 * @opflundef af
194 * @opflclear of,cf
195 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
196 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
197 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
198 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
199 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
200 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5a5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
201 */
FNIEMOP_DEF(iemOp_or_Ev_Gv)
{
    /* OR r/m16/32/64, r16/32/64: word/dword/qword binary-op decode with the OR worker table; AF undefined after OR. */
    IEMOP_MNEMONIC2(MR, OR, or, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_or);
}
208
209
210/**
211 * @opcode 0x0a
212 * @opgroup og_gen_arith_bin
213 * @opflmodify cf,pf,af,zf,sf,of
214 * @opflundef af
215 * @opflclear of,cf
216 * @opcopytests iemOp_or_Eb_Gb
217 */
FNIEMOP_DEF(iemOp_or_Gb_Eb)
{
    /* OR r8, r/m8: register-destination byte form of OR; AF undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_or);
}
224
225
226/**
227 * @opcode 0x0b
228 * @opgroup og_gen_arith_bin
229 * @opflmodify cf,pf,af,zf,sf,of
230 * @opflundef af
231 * @opflclear of,cf
232 * @opcopytests iemOp_or_Ev_Gv
233 */
FNIEMOP_DEF(iemOp_or_Gv_Ev)
{
    /* OR r16/32/64, r/m16/32/64: register-destination form of OR; AF undefined. */
    IEMOP_MNEMONIC2(RM, OR, or, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_or);
}
240
241
242/**
243 * @opcode 0x0c
244 * @opgroup og_gen_arith_bin
245 * @opflmodify cf,pf,af,zf,sf,of
246 * @opflundef af
247 * @opflclear of,cf
248 * @opcopytests iemOp_or_Eb_Gb
249 */
FNIEMOP_DEF(iemOp_or_Al_Ib)
{
    /* OR AL, imm8: fixed-register immediate byte form of OR; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, OR, or, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_or);
}
256
257
258/**
259 * @opcode 0x0d
260 * @opgroup og_gen_arith_bin
261 * @opflmodify cf,pf,af,zf,sf,of
262 * @opflundef af
263 * @opflclear of,cf
264 * @optest efl|=of,cf op1=12 op2=7 -> op1=15 efl&|=nc,po,na,nz,pl,nv
265 * @optest efl|=of,cf op1=0 op2=0 -> op1=0 efl&|=nc,po,na,zf,pl,nv
266 * @optest op1=-2 op2=1 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
267 * @optest o16 / op1=0x5a5a op2=0xa5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
268 * @optest o32 / op1=0x5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
269 * @optest o64 / op1=0x5a5a5a5a5a5a5a5a op2=0xa5a5a5a5 -> op1=-1 efl&|=nc,po,na,nz,ng,nv
270 * @optest o64 / op1=0x5a5a5a5aa5a5a5a5 op2=0x5a5a5a5a -> op1=0x5a5a5a5affffffff efl&|=nc,po,na,nz,pl,nv
271 */
FNIEMOP_DEF(iemOp_or_eAX_Iz)
{
    /* OR rAX, imm16/32: fixed-register, operand-size sensitive immediate form of OR; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, OR, or, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_or);
}
278
279
280/**
281 * @opcode 0x0e
282 * @opgroup og_stack_sreg
283 */
FNIEMOP_DEF(iemOp_push_CS)
{
    /* PUSH CS: invalid in 64-bit mode; flagged potentially dangerous for the disassembler. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_CS);
}
290
291
292/**
293 * @opcode 0x0f
294 * @opmnemonic EscTwo0f
295 * @openc two0f
296 * @opdisenum OP_2B_ESC
297 * @ophints harmless
298 * @opgroup og_escapes
299 */
300FNIEMOP_DEF(iemOp_2byteEscape)
301{
302#ifdef VBOX_STRICT
303 /* Sanity check the table the first time around. */
304 static bool s_fTested = false;
305 if (RT_LIKELY(s_fTested)) { /* likely */ }
306 else
307 {
308 s_fTested = true;
309 Assert(g_apfnTwoByteMap[0xbc * 4 + 0] == iemOp_bsf_Gv_Ev);
310 Assert(g_apfnTwoByteMap[0xbc * 4 + 1] == iemOp_bsf_Gv_Ev);
311 Assert(g_apfnTwoByteMap[0xbc * 4 + 2] == iemOp_tzcnt_Gv_Ev);
312 Assert(g_apfnTwoByteMap[0xbc * 4 + 3] == iemOp_bsf_Gv_Ev);
313 }
314#endif
315
316 if (RT_LIKELY(IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_286))
317 {
318 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
319 IEMOP_HLP_MIN_286();
320 return FNIEMOP_CALL(g_apfnTwoByteMap[(uintptr_t)b * 4 + pVCpu->iem.s.idxPrefix]);
321 }
322 /* @opdone */
323
324 /*
325 * On the 8086 this is a POP CS instruction.
326 * For the time being we don't specify this this.
327 */
328 IEMOP_MNEMONIC1(FIXED, POP, pop, CS, DISOPTYPE_HARMLESS | DISOPTYPE_POTENTIALLY_DANGEROUS | DISOPTYPE_INVALID_64, IEMOPHINT_SKIP_PYTHON);
329 IEMOP_HLP_NO_64BIT();
330 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
331 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_ES, pVCpu->iem.s.enmEffOpSize);
332}
333
334/**
335 * @opcode 0x10
336 * @opgroup og_gen_arith_bin
337 * @opfltest cf
338 * @opflmodify cf,pf,af,zf,sf,of
339 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
340 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
341 * @optest op1=0xff op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
342 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
343 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
344 */
FNIEMOP_DEF(iemOp_adc_Eb_Gb)
{
    /* ADC r/m8, r8: add-with-carry, byte form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_adc);
}
350
351
352/**
353 * @opcode 0x11
354 * @opgroup og_gen_arith_bin
355 * @opfltest cf
356 * @opflmodify cf,pf,af,zf,sf,of
357 * @optest op1=1 op2=1 efl&~=cf -> op1=2 efl&|=nc,pe,na,nz,pl,nv
358 * @optest op1=1 op2=1 efl|=cf -> op1=3 efl&|=nc,po,na,nz,pl,nv
359 * @optest op1=-1 op2=0 efl|=cf -> op1=0 efl&|=cf,po,af,zf,pl,nv
360 * @optest op1=0 op2=0 efl|=cf -> op1=1 efl&|=nc,pe,na,nz,pl,nv
361 * @optest op1=0 op2=0 efl&~=cf -> op1=0 efl&|=nc,po,na,zf,pl,nv
362 */
FNIEMOP_DEF(iemOp_adc_Ev_Gv)
{
    /* ADC r/m16/32/64, r16/32/64: add-with-carry, word/dword/qword form. */
    IEMOP_MNEMONIC2(MR, ADC, adc, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_adc);
}
368
369
370/**
371 * @opcode 0x12
372 * @opgroup og_gen_arith_bin
373 * @opfltest cf
374 * @opflmodify cf,pf,af,zf,sf,of
375 * @opcopytests iemOp_adc_Eb_Gb
376 */
FNIEMOP_DEF(iemOp_adc_Gb_Eb)
{
    /* ADC r8, r/m8: register-destination byte form of ADC. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_adc);
}
382
383
384/**
385 * @opcode 0x13
386 * @opgroup og_gen_arith_bin
387 * @opfltest cf
388 * @opflmodify cf,pf,af,zf,sf,of
389 * @opcopytests iemOp_adc_Ev_Gv
390 */
FNIEMOP_DEF(iemOp_adc_Gv_Ev)
{
    /* ADC r16/32/64, r/m16/32/64: register-destination form of ADC. */
    IEMOP_MNEMONIC2(RM, ADC, adc, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_adc);
}
396
397
398/**
399 * @opcode 0x14
400 * @opgroup og_gen_arith_bin
401 * @opfltest cf
402 * @opflmodify cf,pf,af,zf,sf,of
403 * @opcopytests iemOp_adc_Eb_Gb
404 */
FNIEMOP_DEF(iemOp_adc_Al_Ib)
{
    /* ADC AL, imm8: fixed-register immediate byte form of ADC. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_adc);
}
410
411
412/**
413 * @opcode 0x15
414 * @opgroup og_gen_arith_bin
415 * @opfltest cf
416 * @opflmodify cf,pf,af,zf,sf,of
417 * @opcopytests iemOp_adc_Ev_Gv
418 */
FNIEMOP_DEF(iemOp_adc_eAX_Iz)
{
    /* ADC rAX, imm16/32: fixed-register, operand-size sensitive immediate form of ADC. */
    IEMOP_MNEMONIC2(FIXED, ADC, adc, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_adc);
}
424
425
426/**
427 * @opcode 0x16
428 */
FNIEMOP_DEF(iemOp_push_SS)
{
    /* PUSH SS: invalid in 64-bit mode; flagged dangerous for remote/ring-switch disassembly. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_SS);
}
435
436
/**
 * @opcode 0x17
 * @opgroup og_stack_sreg
 */
FNIEMOP_DEF(iemOp_pop_SS)
{
    /* POP SS: inhibits interrupts for the following instruction (DISOPTYPE_INHIBIT_IRQS); invalid in 64-bit mode. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, SS, DISOPTYPE_HARMLESS | DISOPTYPE_INHIBIT_IRQS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS , 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_SS, pVCpu->iem.s.enmEffOpSize);
}
450
451
452/**
453 * @opcode 0x18
454 * @opgroup og_gen_arith_bin
455 * @opfltest cf
456 * @opflmodify cf,pf,af,zf,sf,of
457 */
FNIEMOP_DEF(iemOp_sbb_Eb_Gb)
{
    /* SBB r/m8, r8: subtract-with-borrow, byte form; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sbb);
}
463
464
465/**
466 * @opcode 0x19
467 * @opgroup og_gen_arith_bin
468 * @opfltest cf
469 * @opflmodify cf,pf,af,zf,sf,of
470 */
FNIEMOP_DEF(iemOp_sbb_Ev_Gv)
{
    /* SBB r/m16/32/64, r16/32/64: subtract-with-borrow, word/dword/qword form. */
    IEMOP_MNEMONIC2(MR, SBB, sbb, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sbb);
}
476
477
478/**
479 * @opcode 0x1a
480 * @opgroup og_gen_arith_bin
481 * @opfltest cf
482 * @opflmodify cf,pf,af,zf,sf,of
483 */
FNIEMOP_DEF(iemOp_sbb_Gb_Eb)
{
    /* SBB r8, r/m8: register-destination byte form of SBB. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sbb);
}
489
490
491/**
492 * @opcode 0x1b
493 * @opgroup og_gen_arith_bin
494 * @opfltest cf
495 * @opflmodify cf,pf,af,zf,sf,of
496 */
FNIEMOP_DEF(iemOp_sbb_Gv_Ev)
{
    /* SBB r16/32/64, r/m16/32/64: register-destination form of SBB. */
    IEMOP_MNEMONIC2(RM, SBB, sbb, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sbb);
}
502
503
504/**
505 * @opcode 0x1c
506 * @opgroup og_gen_arith_bin
507 * @opfltest cf
508 * @opflmodify cf,pf,af,zf,sf,of
509 */
FNIEMOP_DEF(iemOp_sbb_Al_Ib)
{
    /* SBB AL, imm8: fixed-register immediate byte form of SBB. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sbb);
}
515
516
517/**
518 * @opcode 0x1d
519 * @opgroup og_gen_arith_bin
520 * @opfltest cf
521 * @opflmodify cf,pf,af,zf,sf,of
522 */
FNIEMOP_DEF(iemOp_sbb_eAX_Iz)
{
    /* SBB rAX, imm16/32: fixed-register, operand-size sensitive immediate form of SBB. */
    IEMOP_MNEMONIC2(FIXED, SBB, sbb, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sbb);
}
528
529
530/**
531 * @opcode 0x1e
532 * @opgroup og_stack_sreg
533 */
FNIEMOP_DEF(iemOp_push_DS)
{
    /* PUSH DS: invalid in 64-bit mode; uses the common segment-register push helper. */
    IEMOP_MNEMONIC1(FIXED, PUSH, push, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0);
    IEMOP_HLP_NO_64BIT();
    return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_DS);
}
540
541
542/**
543 * @opcode 0x1f
544 * @opgroup og_stack_sreg
545 */
FNIEMOP_DEF(iemOp_pop_DS)
{
    /* POP DS: invalid in 64-bit mode; defers to the generic segment-register pop C implementation. */
    IEMOP_MNEMONIC1(FIXED, POP, pop, DS, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64 | DISOPTYPE_RRM_DANGEROUS, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_DS, pVCpu->iem.s.enmEffOpSize);
}
553
554
555/**
556 * @opcode 0x20
557 * @opgroup og_gen_arith_bin
558 * @opflmodify cf,pf,af,zf,sf,of
559 * @opflundef af
560 * @opflclear of,cf
561 */
FNIEMOP_DEF(iemOp_and_Eb_Gb)
{
    /* AND r/m8, r8: byte binary-op decode with the AND worker table; AF undefined after AND. */
    IEMOP_MNEMONIC2(MR, AND, and, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_and);
}
568
569
570/**
571 * @opcode 0x21
572 * @opgroup og_gen_arith_bin
573 * @opflmodify cf,pf,af,zf,sf,of
574 * @opflundef af
575 * @opflclear of,cf
576 */
FNIEMOP_DEF(iemOp_and_Ev_Gv)
{
    /* AND r/m16/32/64, r16/32/64: word/dword/qword form of AND; AF undefined. */
    IEMOP_MNEMONIC2(MR, AND, and, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_and);
}
583
584
585/**
586 * @opcode 0x22
587 * @opgroup og_gen_arith_bin
588 * @opflmodify cf,pf,af,zf,sf,of
589 * @opflundef af
590 * @opflclear of,cf
591 */
FNIEMOP_DEF(iemOp_and_Gb_Eb)
{
    /* AND r8, r/m8: register-destination byte form of AND; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_and);
}
598
599
600/**
601 * @opcode 0x23
602 * @opgroup og_gen_arith_bin
603 * @opflmodify cf,pf,af,zf,sf,of
604 * @opflundef af
605 * @opflclear of,cf
606 */
FNIEMOP_DEF(iemOp_and_Gv_Ev)
{
    /* AND r16/32/64, r/m16/32/64: register-destination form of AND; AF undefined. */
    IEMOP_MNEMONIC2(RM, AND, and, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_and);
}
613
614
615/**
616 * @opcode 0x24
617 * @opgroup og_gen_arith_bin
618 * @opflmodify cf,pf,af,zf,sf,of
619 * @opflundef af
620 * @opflclear of,cf
621 */
622FNIEMOP_DEF(iemOp_and_Al_Ib)
623{
624 IEMOP_MNEMONIC2(FIXED, AND, and, AL, Ib, DISOPTYPE_HARMLESS, 0);
625 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
626 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_and);
627}
628
629
630/**
631 * @opcode 0x25
632 * @opgroup og_gen_arith_bin
633 * @opflmodify cf,pf,af,zf,sf,of
634 * @opflundef af
635 * @opflclear of,cf
636 */
FNIEMOP_DEF(iemOp_and_eAX_Iz)
{
    /* AND rAX, imm16/32: fixed-register, operand-size sensitive immediate form of AND; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, AND, and, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_and);
}
643
644
645/**
646 * @opcode 0x26
647 * @opmnemonic SEG
648 * @op1 ES
649 * @opgroup og_prefix
650 * @openc prefix
651 * @opdisenum OP_SEG
652 * @ophints harmless
653 */
FNIEMOP_DEF(iemOp_seg_ES)
{
    /* ES segment-override prefix: record the prefix flag and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg es");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_ES;
    pVCpu->iem.s.iEffSeg = X86_SREG_ES;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
663
664
665/**
666 * @opcode 0x27
667 * @opfltest af,cf
668 * @opflmodify cf,pf,af,zf,sf,of
669 * @opflundef of
670 */
FNIEMOP_DEF(iemOp_daa)
{
    /* DAA: decimal-adjust AL after addition; invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAA, daa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAA (see @opflundef above) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_daa);
}
679
680
681/**
682 * @opcode 0x28
683 * @opgroup og_gen_arith_bin
684 * @opflmodify cf,pf,af,zf,sf,of
685 */
FNIEMOP_DEF(iemOp_sub_Eb_Gb)
{
    /* SUB r/m8, r8: byte binary-op decode with the SUB worker table; LOCK allowed on the memory form. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_sub);
}
691
692
693/**
694 * @opcode 0x29
695 * @opgroup og_gen_arith_bin
696 * @opflmodify cf,pf,af,zf,sf,of
697 */
FNIEMOP_DEF(iemOp_sub_Ev_Gv)
{
    /* SUB r/m16/32/64, r16/32/64: word/dword/qword form of SUB. */
    IEMOP_MNEMONIC2(MR, SUB, sub, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_sub);
}
703
704
705/**
706 * @opcode 0x2a
707 * @opgroup og_gen_arith_bin
708 * @opflmodify cf,pf,af,zf,sf,of
709 */
FNIEMOP_DEF(iemOp_sub_Gb_Eb)
{
    /* SUB r8, r/m8: register-destination byte form of SUB. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_sub);
}
715
716
717/**
718 * @opcode 0x2b
719 * @opgroup og_gen_arith_bin
720 * @opflmodify cf,pf,af,zf,sf,of
721 */
FNIEMOP_DEF(iemOp_sub_Gv_Ev)
{
    /* SUB r16/32/64, r/m16/32/64: register-destination form of SUB. */
    IEMOP_MNEMONIC2(RM, SUB, sub, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_sub);
}
727
728
729/**
730 * @opcode 0x2c
731 * @opgroup og_gen_arith_bin
732 * @opflmodify cf,pf,af,zf,sf,of
733 */
FNIEMOP_DEF(iemOp_sub_Al_Ib)
{
    /* SUB AL, imm8: fixed-register immediate byte form of SUB. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_sub);
}
739
740
741/**
742 * @opcode 0x2d
743 * @opgroup og_gen_arith_bin
744 * @opflmodify cf,pf,af,zf,sf,of
745 */
FNIEMOP_DEF(iemOp_sub_eAX_Iz)
{
    /* SUB rAX, imm16/32: fixed-register, operand-size sensitive immediate form of SUB. */
    IEMOP_MNEMONIC2(FIXED, SUB, sub, rAX, Iz, DISOPTYPE_HARMLESS, 0);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_sub);
}
751
752
753/**
754 * @opcode 0x2e
755 * @opmnemonic SEG
756 * @op1 CS
757 * @opgroup og_prefix
758 * @openc prefix
759 * @opdisenum OP_SEG
760 * @ophints harmless
761 */
FNIEMOP_DEF(iemOp_seg_CS)
{
    /* CS segment-override prefix: record the prefix flag and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg cs");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_CS;
    pVCpu->iem.s.iEffSeg = X86_SREG_CS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
771
772
773/**
774 * @opcode 0x2f
775 * @opfltest af,cf
776 * @opflmodify cf,pf,af,zf,sf,of
777 * @opflundef of
778 */
FNIEMOP_DEF(iemOp_das)
{
    /* DAS: decimal-adjust AL after subtraction; invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, DAS, das, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* OF is undefined after DAS (see @opflundef above) */
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_das);
}
787
788
789/**
790 * @opcode 0x30
791 * @opgroup og_gen_arith_bin
792 * @opflmodify cf,pf,af,zf,sf,of
793 * @opflundef af
794 * @opflclear of,cf
795 */
FNIEMOP_DEF(iemOp_xor_Eb_Gb)
{
    /* XOR r/m8, r8: byte binary-op decode with the XOR worker table; AF undefined after XOR. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Eb, Gb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES | IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_xor);
}
802
803
804/**
805 * @opcode 0x31
806 * @opgroup og_gen_arith_bin
807 * @opflmodify cf,pf,af,zf,sf,of
808 * @opflundef af
809 * @opflclear of,cf
810 */
FNIEMOP_DEF(iemOp_xor_Ev_Gv)
{
    /* XOR r/m16/32/64, r16/32/64: word/dword/qword form of XOR; AF undefined. */
    IEMOP_MNEMONIC2(MR, XOR, xor, Ev, Gv, DISOPTYPE_HARMLESS, IEMOPHINT_LOCK_ALLOWED);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_xor);
}
817
818
819/**
820 * @opcode 0x32
821 * @opgroup og_gen_arith_bin
822 * @opflmodify cf,pf,af,zf,sf,of
823 * @opflundef af
824 * @opflclear of,cf
825 */
FNIEMOP_DEF(iemOp_xor_Gb_Eb)
{
    /* XOR r8, r/m8: register-destination byte form of XOR; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gb, Eb, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_xor);
}
832
833
834/**
835 * @opcode 0x33
836 * @opgroup og_gen_arith_bin
837 * @opflmodify cf,pf,af,zf,sf,of
838 * @opflundef af
839 * @opflclear of,cf
840 */
FNIEMOP_DEF(iemOp_xor_Gv_Ev)
{
    /* XOR r16/32/64, r/m16/32/64: register-destination form of XOR; AF undefined. */
    IEMOP_MNEMONIC2(RM, XOR, xor, Gv, Ev, DISOPTYPE_HARMLESS, 0);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_xor);
}
847
848
849/**
850 * @opcode 0x34
851 * @opgroup og_gen_arith_bin
852 * @opflmodify cf,pf,af,zf,sf,of
853 * @opflundef af
854 * @opflclear of,cf
855 */
FNIEMOP_DEF(iemOp_xor_Al_Ib)
{
    /* XOR AL, imm8: fixed-register immediate byte form of XOR; AF undefined. */
    IEMOP_MNEMONIC2(FIXED, XOR, xor, AL, Ib, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_xor);
}
862
863
864/**
865 * @opcode 0x35
866 * @opgroup og_gen_arith_bin
867 * @opflmodify cf,pf,af,zf,sf,of
868 * @opflundef af
869 * @opflclear of,cf
870 */
871FNIEMOP_DEF(iemOp_xor_eAX_Iz)
872{
873 IEMOP_MNEMONIC2(FIXED, XOR, xor, rAX, Iz, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
874 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
875 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_xor);
876}
877
878
879/**
880 * @opcode 0x36
881 * @opmnemonic SEG
882 * @op1 SS
883 * @opgroup og_prefix
884 * @openc prefix
885 * @opdisenum OP_SEG
886 * @ophints harmless
887 */
FNIEMOP_DEF(iemOp_seg_SS)
{
    /* SS segment-override prefix: record the prefix flag and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ss");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_SS;
    pVCpu->iem.s.iEffSeg = X86_SREG_SS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
897
898
899/**
900 * @opcode 0x37
901 * @opfltest af,cf
902 * @opflmodify cf,pf,af,zf,sf,of
903 * @opflundef pf,zf,sf,of
904 * @opgroup og_gen_arith_dec
905 * @optest efl&~=af ax=9 -> efl&|=nc,po,na,nz,pl,nv
906 * @optest efl&~=af ax=0 -> efl&|=nc,po,na,zf,pl,nv
907 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
908 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
909 * @optest efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
910 * @optest efl|=af ax=0 -> ax=0x0106 efl&|=cf,po,af,nz,pl,nv
911 * @optest efl|=af ax=0x0100 -> ax=0x0206 efl&|=cf,po,af,nz,pl,nv
912 * @optest intel / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,po,af,zf,pl,nv
913 * @optest amd / efl|=af ax=0x000a -> ax=0x0100 efl&|=cf,pe,af,nz,pl,nv
914 * @optest intel / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,po,af,zf,pl,nv
915 * @optest amd / efl|=af ax=0x010a -> ax=0x0200 efl&|=cf,pe,af,nz,pl,nv
916 * @optest intel / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,po,af,zf,pl,nv
917 * @optest amd / efl|=af ax=0x0f0a -> ax=0x1000 efl&|=cf,pe,af,nz,pl,nv
918 * @optest intel / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,po,af,zf,pl,nv
919 * @optest amd / efl|=af ax=0x7f0a -> ax=0x8000 efl&|=cf,pe,af,nz,ng,ov
920 * @optest intel / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
921 * @optest amd / efl|=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
922 * @optest intel / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,po,af,zf,pl,nv
923 * @optest amd / efl&~=af ax=0xff0a -> ax=0x0000 efl&|=cf,pe,af,nz,pl,nv
924 * @optest intel / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,pe,af,nz,pl,nv
925 * @optest amd / efl&~=af ax=0x000b -> ax=0x0101 efl&|=cf,po,af,nz,pl,nv
926 * @optest intel / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,pe,af,nz,pl,nv
927 * @optest amd / efl&~=af ax=0x000c -> ax=0x0102 efl&|=cf,po,af,nz,pl,nv
928 * @optest intel / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,po,af,nz,pl,nv
929 * @optest amd / efl&~=af ax=0x000d -> ax=0x0103 efl&|=cf,pe,af,nz,pl,nv
930 * @optest intel / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
931 * @optest amd / efl&~=af ax=0x000e -> ax=0x0104 efl&|=cf,po,af,nz,pl,nv
932 * @optest intel / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,po,af,nz,pl,nv
933 * @optest amd / efl&~=af ax=0x000f -> ax=0x0105 efl&|=cf,pe,af,nz,pl,nv
934 * @optest intel / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,po,af,nz,pl,nv
935 * @optest amd / efl&~=af ax=0x020f -> ax=0x0305 efl&|=cf,pe,af,nz,pl,nv
936 */
FNIEMOP_DEF(iemOp_aaa)
{
    /* AAA: ASCII-adjust AL after addition; invalid in 64-bit mode; deferred to the C implementation. */
    IEMOP_MNEMONIC0(FIXED, AAA, aaa, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
    IEMOP_HLP_NO_64BIT();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF); /* see @opflundef above */

    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aaa);
}
946
947
948/**
949 * @opcode 0x38
950 */
FNIEMOP_DEF(iemOp_cmp_Eb_Gb)
{
    /* CMP r/m8, r8: same byte decoder as the other binary ops, using the CMP worker table. */
    IEMOP_MNEMONIC(cmp_Eb_Gb, "cmp Eb,Gb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_cmp);
}
956
957
958/**
959 * @opcode 0x39
960 */
FNIEMOP_DEF(iemOp_cmp_Ev_Gv)
{
    /* CMP r/m16/32/64, r16/32/64: word/dword/qword decode with the CMP worker table. */
    IEMOP_MNEMONIC(cmp_Ev_Gv, "cmp Ev,Gv");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_cmp);
}
966
967
968/**
969 * @opcode 0x3a
970 */
FNIEMOP_DEF(iemOp_cmp_Gb_Eb)
{
    /* CMP r8, r/m8: register-first byte decode with the CMP worker table. */
    IEMOP_MNEMONIC(cmp_Gb_Eb, "cmp Gb,Eb");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_r8_rm, &g_iemAImpl_cmp);
}
976
977
978/**
979 * @opcode 0x3b
980 */
FNIEMOP_DEF(iemOp_cmp_Gv_Ev)
{
    /* CMP r16/32/64, r/m16/32/64: register-first decode with the CMP worker table. */
    IEMOP_MNEMONIC(cmp_Gv_Ev, "cmp Gv,Ev");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_cmp);
}
986
987
988/**
989 * @opcode 0x3c
990 */
FNIEMOP_DEF(iemOp_cmp_Al_Ib)
{
    /* CMP AL, imm8: fixed-register immediate byte decode with the CMP worker table. */
    IEMOP_MNEMONIC(cmp_al_Ib, "cmp al,Ib");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_cmp);
}
996
997
998/**
999 * @opcode 0x3d
1000 */
FNIEMOP_DEF(iemOp_cmp_eAX_Iz)
{
    /* CMP rAX, imm16/32: fixed-register, operand-size sensitive immediate decode with the CMP worker table. */
    IEMOP_MNEMONIC(cmp_rAX_Iz, "cmp rAX,Iz");
    return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_cmp);
}
1006
1007
1008/**
1009 * @opcode 0x3e
1010 */
FNIEMOP_DEF(iemOp_seg_DS)
{
    /* DS segment-override prefix: record the prefix flag and effective segment,
       then fetch and dispatch the next opcode byte. */
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg ds");
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_DS;
    pVCpu->iem.s.iEffSeg = X86_SREG_DS;

    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
1020
1021
1022/**
1023 * @opcode 0x3f
1024 * @opfltest af,cf
1025 * @opflmodify cf,pf,af,zf,sf,of
1026 * @opflundef pf,zf,sf,of
1027 * @opgroup og_gen_arith_dec
1028 * @optest / efl&~=af ax=0x0009 -> efl&|=nc,po,na,nz,pl,nv
1029 * @optest / efl&~=af ax=0x0000 -> efl&|=nc,po,na,zf,pl,nv
1030 * @optest intel / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,zf,pl,nv
1031 * @optest amd / efl&~=af ax=0x00f0 -> ax=0x0000 efl&|=nc,po,na,nz,pl,nv
1032 * @optest / efl&~=af ax=0x00f9 -> ax=0x0009 efl&|=nc,po,na,nz,pl,nv
1033 * @optest intel / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,pl,nv
1034 * @optest amd / efl|=af ax=0x0000 -> ax=0xfe0a efl&|=cf,po,af,nz,ng,nv
1035 * @optest intel / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,pl,nv
1036 * @optest8 amd / efl|=af ax=0x0100 -> ax=0xff0a efl&|=cf,po,af,nz,ng,nv
1037 * @optest intel / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1038 * @optest10 amd / efl|=af ax=0x000a -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1039 * @optest / efl|=af ax=0x010a -> ax=0x0004 efl&|=cf,pe,af,nz,pl,nv
1040 * @optest / efl|=af ax=0x020a -> ax=0x0104 efl&|=cf,pe,af,nz,pl,nv
1041 * @optest / efl|=af ax=0x0f0a -> ax=0x0e04 efl&|=cf,pe,af,nz,pl,nv
1042 * @optest / efl|=af ax=0x7f0a -> ax=0x7e04 efl&|=cf,pe,af,nz,pl,nv
1043 * @optest intel / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1044 * @optest amd / efl|=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1045 * @optest intel / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1046 * @optest amd / efl&~=af ax=0xff0a -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1047 * @optest intel / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,pl,nv
1048 * @optest amd / efl&~=af ax=0xff09 -> ax=0xff09 efl&|=nc,po,na,nz,ng,nv
1049 * @optest intel / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,pl,nv
1050 * @optest22 amd / efl&~=af ax=0x000b -> ax=0xff05 efl&|=cf,po,af,nz,ng,nv
1051 * @optest intel / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,pl,nv
1052 * @optest24 amd / efl&~=af ax=0x000c -> ax=0xff06 efl&|=cf,po,af,nz,ng,nv
1053 * @optest intel / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,pl,nv
1054 * @optest26 amd / efl&~=af ax=0x000d -> ax=0xff07 efl&|=cf,pe,af,nz,ng,nv
1055 * @optest intel / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,pl,nv
1056 * @optest28 amd / efl&~=af ax=0x000e -> ax=0xff08 efl&|=cf,pe,af,nz,ng,nv
1057 * @optest intel / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,pl,nv
1058 * @optest30 amd / efl&~=af ax=0x000f -> ax=0xff09 efl&|=cf,po,af,nz,ng,nv
1059 * @optest31 intel / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,pl,nv
1060 * @optest32 amd / efl&~=af ax=0x00fa -> ax=0xff04 efl&|=cf,pe,af,nz,ng,nv
1061 * @optest33 intel / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,pl,nv
1062 * @optest34 amd / efl&~=af ax=0xfffa -> ax=0xfe04 efl&|=cf,pe,af,nz,ng,nv
1063 */
1064FNIEMOP_DEF(iemOp_aas)
1065{
1066 IEMOP_MNEMONIC0(FIXED, AAS, aas, DISOPTYPE_HARMLESS | DISOPTYPE_INVALID_64, 0); /* express implicit AL/AX register use */
1067 IEMOP_HLP_NO_64BIT();
1068 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1069 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_OF);
1070
1071 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_aas);
1072}
1073
1074
/**
 * Common 'inc/dec/not/neg register' helper.
 *
 * Dispatches on the effective operand size and calls the matching worker from
 * @a pImpl on the given general purpose register, by reference, together with
 * a reference to EFLAGS.
 *
 * @param   pImpl   Size-indexed unary worker table (e.g. g_iemAImpl_inc).
 * @param   iReg    The general purpose register to operate on (X86_GREG_XXX).
 */
FNIEMOP_DEF_2(iemOpCommonUnaryGReg, PCIEMOPUNARYSIZES, pImpl, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint16_t *, pu16Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint32_t *, pu32Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
            /* 32-bit writes clear the upper half of the 64-bit register. */
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_ARG(uint64_t *, pu64Dst, 0);
            IEM_MC_ARG(uint32_t *, pEFlags, 1);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
    }
    return VINF_SUCCESS;
}
1119
1120
1121/**
1122 * @opcode 0x40
1123 */
FNIEMOP_DEF(iemOp_inc_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX;

        /* Continue decoding the instruction following the prefix. */
        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC AX/EAX. */
    IEMOP_MNEMONIC(inc_eAX, "inc eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xAX);
}
1141
1142
1143/**
1144 * @opcode 0x41
1145 */
FNIEMOP_DEF(iemOp_inc_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.b");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexB = 1 << 3; /* adds 8 to the register index REX.B extends */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC CX/ECX. */
    IEMOP_MNEMONIC(inc_eCX, "inc eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xCX);
}
1164
1165
1166/**
1167 * @opcode 0x42
1168 */
FNIEMOP_DEF(iemOp_inc_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.x");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexIndex = 1 << 3; /* adds 8 to the SIB index register */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC DX/EDX. */
    IEMOP_MNEMONIC(inc_eDX, "inc eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDX);
}
1187
1188
1189
1190/**
1191 * @opcode 0x43
1192 */
FNIEMOP_DEF(iemOp_inc_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC BX/EBX. */
    IEMOP_MNEMONIC(inc_eBX, "inc eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBX);
}
1212
1213
1214/**
1215 * @opcode 0x44
1216 */
FNIEMOP_DEF(iemOp_inc_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.r");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R;
        pVCpu->iem.s.uRexReg = 1 << 3; /* adds 8 to the ModR/M reg field */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC SP/ESP. */
    IEMOP_MNEMONIC(inc_eSP, "inc eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSP);
}
1235
1236
1237/**
1238 * @opcode 0x45
1239 */
FNIEMOP_DEF(iemOp_inc_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rb");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC BP/EBP. */
    IEMOP_MNEMONIC(inc_eBP, "inc eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xBP);
}
1259
1260
1261/**
1262 * @opcode 0x46
1263 */
FNIEMOP_DEF(iemOp_inc_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC SI/ESI. */
    IEMOP_MNEMONIC(inc_eSI, "inc eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xSI);
}
1283
1284
1285/**
1286 * @opcode 0x47
1287 */
FNIEMOP_DEF(iemOp_inc_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbx");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: INC DI/EDI. */
    IEMOP_MNEMONIC(inc_eDI, "inc eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_inc, X86_GREG_xDI);
}
1308
1309
1310/**
1311 * @opcode 0x48
1312 */
FNIEMOP_DEF(iemOp_dec_eAX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.w");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_SIZE_REX_W;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC AX/EAX. */
    IEMOP_MNEMONIC(dec_eAX, "dec eAX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xAX);
}
1331
1332
1333/**
1334 * @opcode 0x49
1335 */
FNIEMOP_DEF(iemOp_dec_eCX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC CX/ECX. */
    IEMOP_MNEMONIC(dec_eCX, "dec eCX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xCX);
}
1355
1356
1357/**
1358 * @opcode 0x4a
1359 */
FNIEMOP_DEF(iemOp_dec_eDX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.xw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC DX/EDX. */
    IEMOP_MNEMONIC(dec_eDX, "dec eDX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDX);
}
1379
1380
1381/**
1382 * @opcode 0x4b
1383 */
FNIEMOP_DEF(iemOp_dec_eBX)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.bxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC BX/EBX. */
    IEMOP_MNEMONIC(dec_eBX, "dec eBX");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBX);
}
1404
1405
1406/**
1407 * @opcode 0x4c
1408 */
FNIEMOP_DEF(iemOp_dec_eSP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC SP/ESP. */
    IEMOP_MNEMONIC(dec_eSP, "dec eSP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSP);
}
1428
1429
1430/**
1431 * @opcode 0x4d
1432 */
FNIEMOP_DEF(iemOp_dec_eBP)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg = 1 << 3;
        pVCpu->iem.s.uRexB   = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC BP/EBP. */
    IEMOP_MNEMONIC(dec_eBP, "dec eBP");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xBP);
}
1453
1454
1455/**
1456 * @opcode 0x4e
1457 */
FNIEMOP_DEF(iemOp_dec_eSI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC SI/ESI. */
    IEMOP_MNEMONIC(dec_eSI, "dec eSI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xSI);
}
1478
1479
1480/**
1481 * @opcode 0x4f
1482 */
FNIEMOP_DEF(iemOp_dec_eDI)
{
    /*
     * This is a REX prefix in 64-bit mode.
     */
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("rex.rbxw");
        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REX | IEM_OP_PRF_REX_R | IEM_OP_PRF_REX_B | IEM_OP_PRF_REX_X | IEM_OP_PRF_SIZE_REX_W;
        pVCpu->iem.s.uRexReg   = 1 << 3;
        pVCpu->iem.s.uRexB     = 1 << 3;
        pVCpu->iem.s.uRexIndex = 1 << 3;
        iemRecalEffOpSize(pVCpu); /* REX.W changes the effective operand size */

        uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
        return FNIEMOP_CALL(g_apfnOneByteMap[b]);
    }

    /* 16/32-bit mode: DEC DI/EDI. */
    IEMOP_MNEMONIC(dec_eDI, "dec eDI");
    return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, &g_iemAImpl_dec, X86_GREG_xDI);
}
1504
1505
/**
 * Common 'push register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit; an operand-size prefix (66h) selects
 * 16-bit instead (there is no 32-bit push in 64-bit mode).
 *
 * @param   iReg    The register to push (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPushGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Value);
            IEM_MC_FETCH_GREG_U16(u16Value, iReg);
            IEM_MC_PUSH_U16(u16Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Value);
            IEM_MC_FETCH_GREG_U32(u32Value, iReg);
            IEM_MC_PUSH_U32(u32Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Value);
            IEM_MC_FETCH_GREG_U64(u64Value, iReg);
            IEM_MC_PUSH_U64(u64Value);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1551
1552
1553/**
1554 * @opcode 0x50
1555 */
FNIEMOP_DEF(iemOp_push_eAX)
{
    IEMOP_MNEMONIC(push_rAX, "push rAX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xAX);
}
1561
1562
1563/**
1564 * @opcode 0x51
1565 */
FNIEMOP_DEF(iemOp_push_eCX)
{
    IEMOP_MNEMONIC(push_rCX, "push rCX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xCX);
}
1571
1572
1573/**
1574 * @opcode 0x52
1575 */
FNIEMOP_DEF(iemOp_push_eDX)
{
    IEMOP_MNEMONIC(push_rDX, "push rDX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDX);
}
1581
1582
1583/**
1584 * @opcode 0x53
1585 */
FNIEMOP_DEF(iemOp_push_eBX)
{
    IEMOP_MNEMONIC(push_rBX, "push rBX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBX);
}
1591
1592
1593/**
1594 * @opcode 0x54
1595 */
FNIEMOP_DEF(iemOp_push_eSP)
{
    IEMOP_MNEMONIC(push_rSP, "push rSP");
    /* The 8086 pushes SP as it is *after* the decrement; later CPUs push the
       value SP had before the instruction (handled by the common worker). */
    if (IEM_GET_TARGET_CPU(pVCpu) == IEMTARGETCPU_8086)
    {
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint16_t, u16Value);
        IEM_MC_FETCH_GREG_U16(u16Value, X86_GREG_xSP);
        IEM_MC_SUB_LOCAL_U16(u16Value, 2);
        IEM_MC_PUSH_U16(u16Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSP);
}
1611
1612
1613/**
1614 * @opcode 0x55
1615 */
FNIEMOP_DEF(iemOp_push_eBP)
{
    IEMOP_MNEMONIC(push_rBP, "push rBP");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xBP);
}
1621
1622
1623/**
1624 * @opcode 0x56
1625 */
FNIEMOP_DEF(iemOp_push_eSI)
{
    IEMOP_MNEMONIC(push_rSI, "push rSI");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xSI);
}
1631
1632
1633/**
1634 * @opcode 0x57
1635 */
FNIEMOP_DEF(iemOp_push_eDI)
{
    IEMOP_MNEMONIC(push_rDI, "push rDI");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPushGReg, X86_GREG_xDI);
}
1641
1642
/**
 * Common 'pop register' helper.
 *
 * In 64-bit mode the register index is extended with REX.B and the default
 * operand size is forced to 64-bit; an operand-size prefix (66h) selects
 * 16-bit instead (there is no 32-bit pop in 64-bit mode).
 *
 * Note: POP rSP must not use this helper (see iemOp_pop_eSP).
 *
 * @param   iReg    The register to pop into (X86_GREG_XXX).
 */
FNIEMOP_DEF_1(iemOpCommonPopGReg, uint8_t, iReg)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        iReg |= pVCpu->iem.s.uRexB;
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t *, pu16Dst);
            IEM_MC_REF_GREG_U16(pu16Dst, iReg);
            IEM_MC_POP_U16(pu16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t *, pu32Dst);
            IEM_MC_REF_GREG_U32(pu32Dst, iReg);
            IEM_MC_POP_U32(pu32Dst);
            IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /** @todo testcase*/
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t *, pu64Dst);
            IEM_MC_REF_GREG_U64(pu64Dst, iReg);
            IEM_MC_POP_U64(pu64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1689
1690
1691/**
1692 * @opcode 0x58
1693 */
FNIEMOP_DEF(iemOp_pop_eAX)
{
    IEMOP_MNEMONIC(pop_rAX, "pop rAX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xAX);
}
1699
1700
1701/**
1702 * @opcode 0x59
1703 */
FNIEMOP_DEF(iemOp_pop_eCX)
{
    IEMOP_MNEMONIC(pop_rCX, "pop rCX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xCX);
}
1709
1710
1711/**
1712 * @opcode 0x5a
1713 */
FNIEMOP_DEF(iemOp_pop_eDX)
{
    IEMOP_MNEMONIC(pop_rDX, "pop rDX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDX);
}
1719
1720
1721/**
1722 * @opcode 0x5b
1723 */
FNIEMOP_DEF(iemOp_pop_eBX)
{
    IEMOP_MNEMONIC(pop_rBX, "pop rBX");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBX);
}
1729
1730
1731/**
1732 * @opcode 0x5c
1733 */
FNIEMOP_DEF(iemOp_pop_eSP)
{
    IEMOP_MNEMONIC(pop_rSP, "pop rSP");
    if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
    {
        /* With REX.B this is pop r8, which the common worker handles fine. */
        if (pVCpu->iem.s.uRexB)
            return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSP);
        pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT;
        pVCpu->iem.s.enmEffOpSize = !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_OP) ? IEMMODE_64BIT : IEMMODE_16BIT;
    }

    IEMOP_HLP_DECODED_NL_1(OP_POP, IEMOPFORM_FIXED, OP_PARM_REG_ESP,
                           DISOPTYPE_HARMLESS | DISOPTYPE_DEFAULT_64_OP_SIZE | DISOPTYPE_REXB_EXTENDS_OPREG);
    /** @todo add testcase for this instruction. */
    /* Special-cased: pop into a local first and store it into xSP afterwards,
       so the popped value (rather than the incremented stack pointer) ends up
       in the register. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint16_t, u16Dst);
            IEM_MC_POP_U16(&u16Dst); /** @todo not correct MC, fix later. */
            IEM_MC_STORE_GREG_U16(X86_GREG_xSP, u16Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint32_t, u32Dst);
            IEM_MC_POP_U32(&u32Dst);
            IEM_MC_STORE_GREG_U32(X86_GREG_xSP, u32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 1);
            IEM_MC_LOCAL(uint64_t, u64Dst);
            IEM_MC_POP_U64(&u64Dst);
            IEM_MC_STORE_GREG_U64(X86_GREG_xSP, u64Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            break;
    }

    return VINF_SUCCESS;
}
1780
1781
1782/**
1783 * @opcode 0x5d
1784 */
FNIEMOP_DEF(iemOp_pop_eBP)
{
    IEMOP_MNEMONIC(pop_rBP, "pop rBP");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xBP);
}
1790
1791
1792/**
1793 * @opcode 0x5e
1794 */
FNIEMOP_DEF(iemOp_pop_eSI)
{
    IEMOP_MNEMONIC(pop_rSI, "pop rSI");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xSI);
}
1800
1801
1802/**
1803 * @opcode 0x5f
1804 */
FNIEMOP_DEF(iemOp_pop_eDI)
{
    IEMOP_MNEMONIC(pop_rDI, "pop rDI");
    /* Operand size and REX.B handling is done by the common worker. */
    return FNIEMOP_CALL_1(iemOpCommonPopGReg, X86_GREG_xDI);
}
1810
1811
1812/**
1813 * @opcode 0x60
1814 */
FNIEMOP_DEF(iemOp_pusha)
{
    IEMOP_MNEMONIC(pusha, "pusha");
    IEMOP_HLP_MIN_186();  /* PUSHA first appeared on the 80186. */
    IEMOP_HLP_NO_64BIT(); /* Invalid in 64-bit mode (0x60 is reused). */
    /* Defer to the C implementation for the chosen operand size. */
    if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_16);
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_pusha_32);
}
1825
1826
1827/**
1828 * @opcode 0x61
1829 */
FNIEMOP_DEF(iemOp_popa__mvex)
{
    /* Outside 64-bit mode 0x61 is POPA; in 64-bit mode it is the (KNC) MVEX
       prefix, which is not supported here. */
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC(popa, "popa");
        IEMOP_HLP_MIN_186();
        IEMOP_HLP_NO_64BIT();
        /* Defer to the C implementation for the chosen operand size. */
        if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_16);
        Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_32BIT);
        return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_popa_32);
    }
    IEMOP_MNEMONIC(mvex, "mvex");
    Log(("mvex prefix is not supported!\n"));
    return IEMOP_RAISE_INVALID_OPCODE();
}
1846
1847
1848/**
1849 * @opcode 0x62
1850 * @opmnemonic bound
1851 * @op1 Gv_RO
1852 * @op2 Ma
1853 * @opmincpu 80186
1854 * @ophints harmless invalid_64
1855 * @optest op1=0 op2=0 ->
1856 * @optest op1=1 op2=0 -> value.xcpt=5
1857 * @optest o16 / op1=0xffff op2=0x0000fffe ->
1858 * @optest o16 / op1=0xfffe op2=0x0000fffe ->
1859 * @optest o16 / op1=0x7fff op2=0x0000fffe -> value.xcpt=5
1860 * @optest o16 / op1=0x7fff op2=0x7ffffffe ->
1861 * @optest o16 / op1=0x7fff op2=0xfffe8000 -> value.xcpt=5
1862 * @optest o16 / op1=0x8000 op2=0xfffe8000 ->
1863 * @optest o16 / op1=0xffff op2=0xfffe8000 -> value.xcpt=5
1864 * @optest o16 / op1=0xfffe op2=0xfffe8000 ->
1865 * @optest o16 / op1=0xfffe op2=0x8000fffe -> value.xcpt=5
1866 * @optest o16 / op1=0x8000 op2=0x8000fffe -> value.xcpt=5
1867 * @optest o16 / op1=0x0000 op2=0x8000fffe -> value.xcpt=5
1868 * @optest o16 / op1=0x0001 op2=0x8000fffe -> value.xcpt=5
1869 * @optest o16 / op1=0xffff op2=0x0001000f -> value.xcpt=5
1870 * @optest o16 / op1=0x0000 op2=0x0001000f -> value.xcpt=5
1871 * @optest o16 / op1=0x0001 op2=0x0001000f -> value.xcpt=5
1872 * @optest o16 / op1=0x0002 op2=0x0001000f -> value.xcpt=5
1873 * @optest o16 / op1=0x0003 op2=0x0001000f -> value.xcpt=5
1874 * @optest o16 / op1=0x0004 op2=0x0001000f -> value.xcpt=5
1875 * @optest o16 / op1=0x000e op2=0x0001000f -> value.xcpt=5
1876 * @optest o16 / op1=0x000f op2=0x0001000f -> value.xcpt=5
1877 * @optest o16 / op1=0x0010 op2=0x0001000f -> value.xcpt=5
1878 * @optest o16 / op1=0x0011 op2=0x0001000f -> value.xcpt=5
1879 * @optest o32 / op1=0xffffffff op2=0x00000000fffffffe ->
1880 * @optest o32 / op1=0xfffffffe op2=0x00000000fffffffe ->
1881 * @optest o32 / op1=0x7fffffff op2=0x00000000fffffffe -> value.xcpt=5
1882 * @optest o32 / op1=0x7fffffff op2=0x7ffffffffffffffe ->
1883 * @optest o32 / op1=0x7fffffff op2=0xfffffffe80000000 -> value.xcpt=5
1884 * @optest o32 / op1=0x80000000 op2=0xfffffffe80000000 ->
1885 * @optest o32 / op1=0xffffffff op2=0xfffffffe80000000 -> value.xcpt=5
1886 * @optest o32 / op1=0xfffffffe op2=0xfffffffe80000000 ->
1887 * @optest o32 / op1=0xfffffffe op2=0x80000000fffffffe -> value.xcpt=5
1888 * @optest o32 / op1=0x80000000 op2=0x80000000fffffffe -> value.xcpt=5
1889 * @optest o32 / op1=0x00000000 op2=0x80000000fffffffe -> value.xcpt=5
1890 * @optest o32 / op1=0x00000002 op2=0x80000000fffffffe -> value.xcpt=5
1891 * @optest o32 / op1=0x00000001 op2=0x0000000100000003 -> value.xcpt=5
1892 * @optest o32 / op1=0x00000002 op2=0x0000000100000003 -> value.xcpt=5
1893 * @optest o32 / op1=0x00000003 op2=0x0000000100000003 -> value.xcpt=5
1894 * @optest o32 / op1=0x00000004 op2=0x0000000100000003 -> value.xcpt=5
1895 * @optest o32 / op1=0x00000005 op2=0x0000000100000003 -> value.xcpt=5
1896 * @optest o32 / op1=0x0000000e op2=0x0000000100000003 -> value.xcpt=5
1897 * @optest o32 / op1=0x0000000f op2=0x0000000100000003 -> value.xcpt=5
1898 * @optest o32 / op1=0x00000010 op2=0x0000000100000003 -> value.xcpt=5
1899 */
FNIEMOP_DEF(iemOp_bound_Gv_Ma__evex)
{
    /* The BOUND instruction is invalid 64-bit mode. In legacy and
       compatability mode it is invalid with MOD=3.

       In 32-bit mode, the EVEX prefix works by having the top two bits (MOD)
       both be set. In the Intel EVEX documentation (sdm vol 2) these are simply
       given as R and X without an exact description, so we assume it builds on
       the VEX one and means they are inverted wrt REX.R and REX.X. Thus, just
       like with the 3-byte VEX, 32-bit code is restrict wrt addressable registers. */
    uint8_t bRm;
    if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
    {
        IEMOP_MNEMONIC2(RM_MEM, BOUND, bound, Gv_RO, Ma, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZES);
        IEMOP_HLP_MIN_186();
        IEM_OPCODE_GET_NEXT_U8(&bRm);
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            /* MOD != 3: this really is BOUND, with a memory operand holding
               the lower and upper bounds back to back. */
            /** @todo testcase: check that there are two memory accesses involved. Check
             *        whether they're both read before the \#BR triggers. */
            if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT)
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t, u16Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint16_t, u16LowerBounds, 1);
                IEM_MC_ARG(uint16_t, u16UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U16(u16Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U16(u16LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U16_DISP(u16UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_16, u16Index, u16LowerBounds, u16UpperBounds); /* returns */
                IEM_MC_END();
            }
            else /* 32-bit operands */
            {
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t, u32Index, 0); /* Note! All operands are actually signed. Lazy unsigned bird. */
                IEM_MC_ARG(uint32_t, u32LowerBounds, 1);
                IEM_MC_ARG(uint32_t, u32UpperBounds, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_FETCH_GREG_U32(u32Index, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
                IEM_MC_FETCH_MEM_U32(u32LowerBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_FETCH_MEM_U32_DISP(u32UpperBounds, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);

                IEM_MC_CALL_CIMPL_3(iemCImpl_bound_32, u32Index, u32LowerBounds, u32UpperBounds); /* returns */
                IEM_MC_END();
            }
        }

        /*
         * @opdone
         */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            /* Note that there is no need for the CPU to fetch further bytes
               here because MODRM.MOD == 3. */
            Log(("evex not supported by the guest CPU!\n"));
            return IEMOP_RAISE_INVALID_OPCODE();
        }
    }
    else
    {
        /* 64-bit mode: always the EVEX prefix (if AVX-512 is exposed). */
        /** @todo check how this is decoded in 64-bit mode w/o EVEX. Intel probably
         *        does modr/m read, whereas AMD probably doesn't... */
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx512Foundation)
        {
            Log(("evex not supported by the guest CPU!\n"));
            return FNIEMOP_CALL(iemOp_InvalidAllNeedRM);
        }
        IEM_OPCODE_GET_NEXT_U8(&bRm);
    }

    /* EVEX decoding: consume the remaining two payload bytes, then give up. */
    IEMOP_MNEMONIC(evex, "evex");
    uint8_t bP2; IEM_OPCODE_GET_NEXT_U8(&bP2);
    uint8_t bP3; IEM_OPCODE_GET_NEXT_U8(&bP3);
    Log(("evex prefix is not implemented!\n"));
    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
}
1987
1988
1989/** Opcode 0x63 - non-64-bit modes. */
FNIEMOP_DEF(iemOp_arpl_Ew_Gw)
{
    IEMOP_MNEMONIC(arpl_Ew_Gw, "arpl Ew,Gw");
    IEMOP_HLP_MIN_286();
    IEMOP_HLP_NO_REAL_OR_V86_MODE(); /* ARPL is protected-mode only */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register */
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint16_t *,      pu16Dst,    0);
        IEM_MC_ARG(uint16_t,        u16Src,     1);
        IEM_MC_ARG(uint32_t *,      pEFlags,    2);

        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK));
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* Memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16Dst,          0);
        IEM_MC_ARG(uint16_t,   u16Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,  GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DECODED_NL_2(OP_ARPL, IEMOPFORM_MR_REG, OP_PARM_Ew, OP_PARM_Gw, DISOPTYPE_HARMLESS);
        /* Destination is mapped read-write; worker may adjust the RPL bits. */
        IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U16(u16Src, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_arpl, pu16Dst, u16Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;

}
2038
2039
2040/**
2041 * @opcode 0x63
2042 *
2043 * @note This is a weird one. It works like a regular move instruction if
2044 * REX.W isn't set, at least according to AMD docs (rev 3.15, 2009-11).
2045 * @todo This definitely needs a testcase to verify the odd cases. */
FNIEMOP_DEF(iemOp_movsxd_Gv_Ev)
{
    Assert(pVCpu->iem.s.enmEffOpSize == IEMMODE_64BIT); /* Caller branched already . */

    IEMOP_MNEMONIC(movsxd_Gv_Ev, "movsxd Gv,Ev");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /*
         * Register to register.
         */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(uint64_t, u64Value);
        /* Fetch the 32-bit source sign-extended to 64 bits. */
        IEM_MC_FETCH_GREG_U32_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're loading a register from memory.
         */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(uint64_t, u64Value);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Fetch the 32-bit memory operand sign-extended to 64 bits. */
        IEM_MC_FETCH_MEM_U32_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
2083
2084
2085/**
2086 * @opcode 0x64
2087 * @opmnemonic segfs
2088 * @opmincpu 80386
2089 * @opgroup og_prefixes
2090 */
FNIEMOP_DEF(iemOp_seg_FS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg fs");
    IEMOP_HLP_MIN_386(); /* FS was introduced with the 80386. */

    /* Record the segment override and make FS the effective segment. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_FS;
    pVCpu->iem.s.iEffSeg = X86_SREG_FS;

    /* Continue decoding: fetch and dispatch the byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2102
2103
/**
 * @opcode 0x65
 * @opmnemonic seggs
 * @opmincpu 80386
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_seg_GS)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("seg gs");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and make GS the effective segment for the instruction. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SEG_GS;
    pVCpu->iem.s.iEffSeg = X86_SREG_GS;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2121
2122
/**
 * @opcode 0x66
 * @opmnemonic opsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_op_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("op size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and recalculate the effective operand size. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_OP;
    iemRecalEffOpSize(pVCpu);

    /* For the 4 entry opcode tables, the operand prefix doesn't count
       when REPZ or REPNZ are present. */
    if (pVCpu->iem.s.idxPrefix == 0)
        pVCpu->iem.s.idxPrefix = 1;

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2147
2148
/**
 * @opcode 0x67
 * @opmnemonic addrsize
 * @openc prefix
 * @opmincpu 80386
 * @ophints harmless
 * @opgroup og_prefixes
 */
FNIEMOP_DEF(iemOp_addr_size)
{
    IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("addr size");
    IEMOP_HLP_MIN_386();

    /* Record the prefix and toggle the effective address size:
       16 <-> 32 in legacy/compat modes, 64 -> 32 in long mode. */
    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_ADDR;
    switch (pVCpu->iem.s.enmDefAddrMode)
    {
        case IEMMODE_16BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        case IEMMODE_32BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_16BIT; break;
        case IEMMODE_64BIT: pVCpu->iem.s.enmEffAddrMode = IEMMODE_32BIT; break;
        default: AssertFailed();
    }

    /* Fetch and dispatch the opcode byte following the prefix. */
    uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
    return FNIEMOP_CALL(g_apfnOneByteMap[b]);
}
2174
2175
/**
 * @opcode 0x68
 *
 * PUSH Iz - push a word/dword immediate; in 64-bit mode the dword
 * immediate is sign-extended to 64 bits (there is no 64-bit immediate form).
 */
FNIEMOP_DEF(iemOp_push_Iz)
{
    IEMOP_MNEMONIC(push_Iz, "push Iz");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U16(u16Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U32(u32Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* The immediate is a dword, sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_BEGIN(0,0);
            IEM_MC_PUSH_U64(u64Imm);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
2222
2223
/**
 * @opcode 0x69
 *
 * IMUL Gv,Ev,Iz - three operand signed multiply: Gv = Ev * Iz.
 * SF, ZF, AF and PF are architecturally undefined after the operation
 * (see the IEMOP_VERIFICATION_UNDEFINED_EFLAGS call below); the selected
 * worker takes care of the Intel/AMD behavior differences.
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Iz)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Iz, "imul Gv,Ev,Iz"); /* Gv = Ev * Iz; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ u16Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into the local first, then store it to Gv; the
                   source (Ev) and destination (Gv) registers can differ. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=2: a word immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ u32Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                /* The immediate is a dword, sign-extended to 64 bits. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ u64Imm,1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=4: a dword immediate (sign-extended below) follows. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
        }
    }
    AssertFailedReturn(VERR_IEM_IPE_9);
}
2391
2392
/**
 * @opcode 0x6a
 *
 * PUSH Ib - push a sign-extended byte immediate.
 */
FNIEMOP_DEF(iemOp_push_Ib)
{
    IEMOP_MNEMONIC(push_Ib, "push Ib");
    IEMOP_HLP_MIN_186();
    /* Signed fetch, so the value sign-extends to the push width below. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0,0);
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_PUSH_U16(i8Imm);
            break;
        case IEMMODE_32BIT:
            IEM_MC_PUSH_U32(i8Imm);
            break;
        case IEMMODE_64BIT:
            IEM_MC_PUSH_U64(i8Imm);
            break;
    }
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2421
2422
/**
 * @opcode 0x6b
 *
 * IMUL Gv,Ev,Ib - three operand signed multiply with a sign-extended byte
 * immediate: Gv = Ev * Ib.  SF, ZF, AF and PF are architecturally undefined
 * afterwards (see IEMOP_VERIFICATION_UNDEFINED_EFLAGS below).
 */
FNIEMOP_DEF(iemOp_imul_Gv_Ev_Ib)
{
    IEMOP_MNEMONIC(imul_Gv_Ev_Ib, "imul Gv,Ev,Ib"); /* Gv = Ev * Ib; */
    IEMOP_HLP_MIN_186();
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);

    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                /* The byte immediate is sign-extended to the operand size. */
                IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);

                /* Multiply into the local first, then store it to Gv; the
                   source (Ev) and destination (Gv) registers can differ. */
                IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t, u16Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint16_t, u16Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: a byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_S8_SX_U16(&u16Imm);
                IEM_MC_ASSIGN(u16Src, u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu16Dst, u16Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u16_eflags),
                                         pu16Dst, u16Src, pEFlags);
                IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);

                IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t, u32Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint32_t, u32Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: a byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_S8_SX_U32(&u32Imm);
                IEM_MC_ASSIGN(u32Src, u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu32Dst, u32Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u32_eflags),
                                         pu32Dst, u32Src, pEFlags);
                IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
            {
                /* register operand */
                uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/ (int8_t)u8Imm, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);

                IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            else
            {
                /* memory operand */
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t, u64Src, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(uint64_t, u64Tmp);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                /* cbImm=1: a byte immediate follows the ModR/M bytes. */
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S8_SX_U64(&u64Imm);
                IEM_MC_ASSIGN(u64Src, u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_LOCAL(pu64Dst, u64Tmp);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_u64_eflags),
                                         pu64Dst, u64Src, pEFlags);
                IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
            }
            return VINF_SUCCESS;
    }
    AssertFailedReturn(VERR_IEM_IPE_8);
}
2584
2585
/**
 * @opcode 0x6c
 *
 * INS Yb,DX - input byte(s) from port DX; deferred to the C implementation
 * selected by REP prefix and effective address size.  The bool argument is
 * fIoChecked=false (I/O permission checks not performed yet).
 */
FNIEMOP_DEF(iemOp_insb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REPZ) and F2 (REPNZ) act as plain REP for string I/O. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_insb_Yb_DX, "rep ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yb_DX, "ins Yb,DX");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr16, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr32, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op8_addr64, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2616
2617
/**
 * @opcode 0x6d
 *
 * INS Yv,DX - input word/dword(s) from port DX; deferred to the C
 * implementation selected by REP prefix, operand size and address size.
 * The 64-bit operand size is handled as 32-bit (no 64-bit port I/O).
 * The bool argument is fIoChecked=false.
 */
FNIEMOP_DEF(iemOp_inswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REPZ) and F2 (REPNZ) act as plain REP for string I/O. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_ins_Yv_DX, "rep ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(ins_Yv_DX, "ins Yv,DX");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op16_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr16, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr32, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_ins_op32_addr64, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2680
2681
/**
 * @opcode 0x6e
 *
 * OUTS DX,Yb - output byte(s) to port DX; deferred to the C implementation.
 * The source segment (iEffSeg) is passed along since it can be overridden
 * by a segment prefix.  The bool argument is fIoChecked=false.
 */
FNIEMOP_DEF(iemOp_outsb_Yb_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REPZ) and F2 (REPNZ) act as plain REP for string I/O. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
    {
        IEMOP_MNEMONIC(rep_outsb_DX_Yb, "rep outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yb, "outs DX,Yb");
        switch (pVCpu->iem.s.enmEffAddrMode)
        {
            case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr16, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr32, pVCpu->iem.s.iEffSeg, false);
            case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op8_addr64, pVCpu->iem.s.iEffSeg, false);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2712
2713
/**
 * @opcode 0x6f
 *
 * OUTS DX,Yv - output word/dword(s) to port DX; deferred to the C
 * implementation selected by REP prefix, operand size and address size.
 * The 64-bit operand size is handled as 32-bit (no 64-bit port I/O), the
 * source segment is overridable, and the bool argument is fIoChecked=false.
 */
FNIEMOP_DEF(iemOp_outswd_Yv_DX)
{
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Both F3 (REPZ) and F2 (REPNZ) act as plain REP for string I/O. */
    if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ))
    {
        IEMOP_MNEMONIC(rep_outs_DX_Yv, "rep outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_rep_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        IEMOP_MNEMONIC(outs_DX_Yv, "outs DX,Yv");
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op16_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            case IEMMODE_64BIT:
            case IEMMODE_32BIT:
                switch (pVCpu->iem.s.enmEffAddrMode)
                {
                    case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr16, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr32, pVCpu->iem.s.iEffSeg, false);
                    case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_outs_op32_addr64, pVCpu->iem.s.iEffSeg, false);
                    IEM_NOT_REACHED_DEFAULT_CASE_RET();
                }
                break;
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
2776
2777
/**
 * @opcode 0x70
 *
 * JO Jb - jump short if overflow (OF=1).
 */
FNIEMOP_DEF(iemOp_jo_Jb)
{
    IEMOP_MNEMONIC(jo_Jb, "jo Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2797
2798
/**
 * @opcode 0x71
 *
 * JNO Jb - jump short if not overflow (OF=0).  The test is written for the
 * set case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jno_Jb)
{
    IEMOP_MNEMONIC(jno_Jb, "jno Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2818
/**
 * @opcode 0x72
 *
 * JC/JB/JNAE Jb - jump short if carry (CF=1).
 */
FNIEMOP_DEF(iemOp_jc_Jb)
{
    IEMOP_MNEMONIC(jc_Jb, "jc/jnae Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2838
2839
/**
 * @opcode 0x73
 *
 * JNC/JNB/JAE Jb - jump short if not carry (CF=0).  The test is written for
 * the set case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jnc_Jb)
{
    IEMOP_MNEMONIC(jnc_Jb, "jnc/jnb Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2859
2860
/**
 * @opcode 0x74
 *
 * JE/JZ Jb - jump short if equal/zero (ZF=1).
 */
FNIEMOP_DEF(iemOp_je_Jb)
{
    IEMOP_MNEMONIC(je_Jb, "je/jz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2880
2881
/**
 * @opcode 0x75
 *
 * JNE/JNZ Jb - jump short if not equal/not zero (ZF=0).  The test is written
 * for the set case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jne_Jb)
{
    IEMOP_MNEMONIC(jne_Jb, "jne/jnz Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2901
2902
/**
 * @opcode 0x76
 *
 * JBE/JNA Jb - jump short if below or equal (CF=1 or ZF=1).
 */
FNIEMOP_DEF(iemOp_jbe_Jb)
{
    IEMOP_MNEMONIC(jbe_Jb, "jbe/jna Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2922
2923
/**
 * @opcode 0x77
 *
 * JA/JNBE Jb - jump short if above (CF=0 and ZF=0).  The test is written for
 * the any-set case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jnbe_Jb)
{
    IEMOP_MNEMONIC(ja_Jb, "ja/jnbe Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2943
2944
/**
 * @opcode 0x78
 *
 * JS Jb - jump short if sign (SF=1).
 */
FNIEMOP_DEF(iemOp_js_Jb)
{
    IEMOP_MNEMONIC(js_Jb, "js Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2964
2965
/**
 * @opcode 0x79
 *
 * JNS Jb - jump short if not sign (SF=0).  The test is written for the set
 * case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jns_Jb)
{
    IEMOP_MNEMONIC(jns_Jb, "jns Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
2985
2986
/**
 * @opcode 0x7a
 *
 * JP/JPE Jb - jump short if parity even (PF=1).
 */
FNIEMOP_DEF(iemOp_jp_Jb)
{
    IEMOP_MNEMONIC(jp_Jb, "jp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3006
3007
/**
 * @opcode 0x7b
 *
 * JNP/JPO Jb - jump short if parity odd (PF=0).  The test is written for the
 * set case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jnp_Jb)
{
    IEMOP_MNEMONIC(jnp_Jb, "jnp Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3027
3028
/**
 * @opcode 0x7c
 *
 * JL/JNGE Jb - jump short if less (SF != OF).
 */
FNIEMOP_DEF(iemOp_jl_Jb)
{
    IEMOP_MNEMONIC(jl_Jb, "jl/jnge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3048
3049
/**
 * @opcode 0x7d
 *
 * JNL/JGE Jb - jump short if not less (SF == OF).  The test is written for
 * the not-equal case with the taken/not-taken branches swapped.
 */
FNIEMOP_DEF(iemOp_jnl_Jb)
{
    IEMOP_MNEMONIC(jge_Jb, "jnl/jge Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3069
3070
/**
 * @opcode 0x7e
 *
 * JLE/JNG Jb - jump short if less or equal (ZF=1 or SF != OF).
 */
FNIEMOP_DEF(iemOp_jle_Jb)
{
    IEMOP_MNEMONIC(jle_Jb, "jle/jng Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ELSE() {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3090
3091
/**
 * @opcode 0x7f
 *
 * JG/JNLE Jb - jump short if greater (ZF=0 and SF == OF).  The test is
 * written for the less-or-equal case with the taken/not-taken branches
 * swapped.
 */
FNIEMOP_DEF(iemOp_jnle_Jb)
{
    IEMOP_MNEMONIC(jg_Jb, "jnle/jg Jb");
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) {
        IEM_MC_ADVANCE_RIP();
    } IEM_MC_ELSE() {
        IEM_MC_REL_JMP_S8(i8Imm);
    } IEM_MC_ENDIF();
    IEM_MC_END();
    return VINF_SUCCESS;
}
3111
3112
/**
 * @opcode 0x80
 *
 * Group 1: ADD/OR/ADC/SBB/AND/SUB/XOR/CMP Eb,Ib - the operation is selected
 * by the ModR/M reg field and dispatched via the g_apIemImplGrp1 table.
 */
FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_80)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Pick the mnemonic matching the /r operation selector. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: IEMOP_MNEMONIC(add_Eb_Ib, "add Eb,Ib"); break;
        case 1: IEMOP_MNEMONIC(or_Eb_Ib, "or Eb,Ib"); break;
        case 2: IEMOP_MNEMONIC(adc_Eb_Ib, "adc Eb,Ib"); break;
        case 3: IEMOP_MNEMONIC(sbb_Eb_Ib, "sbb Eb,Ib"); break;
        case 4: IEMOP_MNEMONIC(and_Eb_Ib, "and Eb,Ib"); break;
        case 5: IEMOP_MNEMONIC(sub_Eb_Ib, "sub Eb,Ib"); break;
        case 6: IEMOP_MNEMONIC(xor_Eb_Ib, "xor Eb,Ib"); break;
        case 7: IEMOP_MNEMONIC(cmp_Eb_Ib, "cmp Eb,Ib"); break;
    }
    PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register target */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory target */
        /* CMP has no locked variant and only reads the operand, so map the
           memory read-only for it and read-write for everything else. */
        uint32_t fAccess;
        if (pImpl->pfnLockedU8)
            fAccess = IEM_ACCESS_DATA_RW;
        else /* CMP */
            fAccess = IEM_ACCESS_DATA_R;
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        /* cbImm=1: a byte immediate follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ARG_CONST(uint8_t, u8Src, /*=*/ u8Imm, 1);
        /* The LOCK prefix is only valid for operations with a locked worker. */
        if (pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING();
        else
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
3184
3185
3186/**
 * Group 1 operations with operand-sized immediate: add/or/adc/sbb/and/sub/xor/cmp Ev,Iz.
 *
 * The ModR/M reg field selects the operation.  The immediate follows the
 * effective operand size: 16-bit and 32-bit immediates for those operand
 * sizes, and a sign-extended 32-bit immediate in 64-bit mode.  Handles
 * register and memory destinations, including LOCK-prefixed memory forms.
 *
3187 * @opcode 0x81
3188 */
3189FNIEMOP_DEF(iemOp_Grp1_Ev_Iz)
3190{
3191 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3192 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3193 {
3194 case 0: IEMOP_MNEMONIC(add_Ev_Iz, "add Ev,Iz"); break;
3195 case 1: IEMOP_MNEMONIC(or_Ev_Iz, "or Ev,Iz"); break;
3196 case 2: IEMOP_MNEMONIC(adc_Ev_Iz, "adc Ev,Iz"); break;
3197 case 3: IEMOP_MNEMONIC(sbb_Ev_Iz, "sbb Ev,Iz"); break;
3198 case 4: IEMOP_MNEMONIC(and_Ev_Iz, "and Ev,Iz"); break;
3199 case 5: IEMOP_MNEMONIC(sub_Ev_Iz, "sub Ev,Iz"); break;
3200 case 6: IEMOP_MNEMONIC(xor_Ev_Iz, "xor Ev,Iz"); break;
3201 case 7: IEMOP_MNEMONIC(cmp_Ev_Iz, "cmp Ev,Iz"); break;
3202 }
3203 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3204
3205 switch (pVCpu->iem.s.enmEffOpSize)
3206 {
3207 case IEMMODE_16BIT:
3208 {
3209 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3210 {
3211 /* register target */
3212 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3213 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3214 IEM_MC_BEGIN(3, 0);
3215 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3216 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u16Imm, 1);
3217 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3218
3219 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3220 IEM_MC_REF_EFLAGS(pEFlags);
3221 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3222
3223 IEM_MC_ADVANCE_RIP();
3224 IEM_MC_END();
3225 }
3226 else
3227 {
3228 /* memory target */
 /* No locked worker means CMP/TEST: mapping can be read-only. */
3229 uint32_t fAccess;
3230 if (pImpl->pfnLockedU16)
3231 fAccess = IEM_ACCESS_DATA_RW;
3232 else /* CMP, TEST */
3233 fAccess = IEM_ACCESS_DATA_R;
3234 IEM_MC_BEGIN(3, 2);
3235 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3236 IEM_MC_ARG(uint16_t, u16Src, 1);
3237 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3238 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3239
 /* EA before immediate: the displacement precedes the immediate (2 bytes here). */
3240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
3241 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
3242 IEM_MC_ASSIGN(u16Src, u16Imm);
3243 if (pImpl->pfnLockedU16)
3244 IEMOP_HLP_DONE_DECODING();
3245 else
3246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3247 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3248 IEM_MC_FETCH_EFLAGS(EFlags);
3249 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3250 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3251 else
3252 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3253
3254 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3255 IEM_MC_COMMIT_EFLAGS(EFlags);
3256 IEM_MC_ADVANCE_RIP();
3257 IEM_MC_END();
3258 }
3259 break;
3260 }
3261
3262 case IEMMODE_32BIT:
3263 {
3264 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3265 {
3266 /* register target */
3267 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3268 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3269 IEM_MC_BEGIN(3, 0);
3270 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3271 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u32Imm, 1);
3272 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3273
3274 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3275 IEM_MC_REF_EFLAGS(pEFlags);
3276 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
 /* 32-bit register writes zero the upper half of the 64-bit register. */
3277 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3278
3279 IEM_MC_ADVANCE_RIP();
3280 IEM_MC_END();
3281 }
3282 else
3283 {
3284 /* memory target */
3285 uint32_t fAccess;
3286 if (pImpl->pfnLockedU32)
3287 fAccess = IEM_ACCESS_DATA_RW;
3288 else /* CMP, TEST */
3289 fAccess = IEM_ACCESS_DATA_R;
3290 IEM_MC_BEGIN(3, 2);
3291 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3292 IEM_MC_ARG(uint32_t, u32Src, 1);
3293 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3294 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3295
3296 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3297 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
3298 IEM_MC_ASSIGN(u32Src, u32Imm);
3299 if (pImpl->pfnLockedU32)
3300 IEMOP_HLP_DONE_DECODING();
3301 else
3302 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3303 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3304 IEM_MC_FETCH_EFLAGS(EFlags);
3305 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3306 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3307 else
3308 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3309
3310 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3311 IEM_MC_COMMIT_EFLAGS(EFlags);
3312 IEM_MC_ADVANCE_RIP();
3313 IEM_MC_END();
3314 }
3315 break;
3316 }
3317
3318 case IEMMODE_64BIT:
3319 {
3320 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3321 {
3322 /* register target */
 /* 64-bit form takes a 32-bit immediate sign-extended to 64 bits. */
3323 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3324 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3325 IEM_MC_BEGIN(3, 0);
3326 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3327 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u64Imm, 1);
3328 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3329
3330 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3331 IEM_MC_REF_EFLAGS(pEFlags);
3332 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3333
3334 IEM_MC_ADVANCE_RIP();
3335 IEM_MC_END();
3336 }
3337 else
3338 {
3339 /* memory target */
3340 uint32_t fAccess;
3341 if (pImpl->pfnLockedU64)
3342 fAccess = IEM_ACCESS_DATA_RW;
3343 else /* CMP */
3344 fAccess = IEM_ACCESS_DATA_R;
3345 IEM_MC_BEGIN(3, 2);
3346 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3347 IEM_MC_ARG(uint64_t, u64Src, 1);
3348 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3349 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3350
3351 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
3352 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
3353 if (pImpl->pfnLockedU64)
3354 IEMOP_HLP_DONE_DECODING();
3355 else
3356 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3357 IEM_MC_ASSIGN(u64Src, u64Imm);
3358 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3359 IEM_MC_FETCH_EFLAGS(EFlags);
3360 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3361 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3362 else
3363 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3364
3365 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3366 IEM_MC_COMMIT_EFLAGS(EFlags);
3367 IEM_MC_ADVANCE_RIP();
3368 IEM_MC_END();
3369 }
3370 break;
3371 }
3372 }
3373 return VINF_SUCCESS;
3374}
3375
3376
3377/**
 * Alias of opcode 0x80 (group 1 Eb,Ib) that is only valid outside 64-bit
 * mode; in 64-bit mode it raises \#UD, otherwise it forwards to the 0x80
 * handler.
 *
3378 * @opcode 0x82
3379 * @opmnemonic grp1_82
3380 * @opgroup og_groups
3381 */
3382FNIEMOP_DEF(iemOp_Grp1_Eb_Ib_82)
3383{
3384 IEMOP_HLP_NO_64BIT(); /** @todo do we need to decode the whole instruction or is this ok? */
3385 return FNIEMOP_CALL(iemOp_Grp1_Eb_Ib_80);
3386}
3387
3388
3389/**
 * Group 1 operations with sign-extended byte immediate:
 * add/or/adc/sbb/and/sub/xor/cmp Ev,Ib.
 *
 * The 8-bit immediate is sign-extended to the effective operand size
 * before the operation.  The ModR/M reg field selects the operation.
 *
3390 * @opcode 0x83
3391 */
3392FNIEMOP_DEF(iemOp_Grp1_Ev_Ib)
3393{
3394 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3395 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
3396 {
3397 case 0: IEMOP_MNEMONIC(add_Ev_Ib, "add Ev,Ib"); break;
3398 case 1: IEMOP_MNEMONIC(or_Ev_Ib, "or Ev,Ib"); break;
3399 case 2: IEMOP_MNEMONIC(adc_Ev_Ib, "adc Ev,Ib"); break;
3400 case 3: IEMOP_MNEMONIC(sbb_Ev_Ib, "sbb Ev,Ib"); break;
3401 case 4: IEMOP_MNEMONIC(and_Ev_Ib, "and Ev,Ib"); break;
3402 case 5: IEMOP_MNEMONIC(sub_Ev_Ib, "sub Ev,Ib"); break;
3403 case 6: IEMOP_MNEMONIC(xor_Ev_Ib, "xor Ev,Ib"); break;
3404 case 7: IEMOP_MNEMONIC(cmp_Ev_Ib, "cmp Ev,Ib"); break;
3405 }
3406 /* Note! Seems the OR, AND, and XOR instructions are present on CPUs prior
3407 to the 386 even if absent in the intel reference manuals and some
3408 3rd party opcode listings. */
3409 PCIEMOPBINSIZES pImpl = g_apIemImplGrp1[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK];
3410
3411 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3412 {
3413 /*
3414 * Register target
3415 */
3416 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3417 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3418 switch (pVCpu->iem.s.enmEffOpSize)
3419 {
3420 case IEMMODE_16BIT:
3421 {
3422 IEM_MC_BEGIN(3, 0);
3423 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
 /* The (int8_t) cast performs the sign extension to operand size. */
3424 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ (int8_t)u8Imm,1);
3425 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3426
3427 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3428 IEM_MC_REF_EFLAGS(pEFlags);
3429 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3430
3431 IEM_MC_ADVANCE_RIP();
3432 IEM_MC_END();
3433 break;
3434 }
3435
3436 case IEMMODE_32BIT:
3437 {
3438 IEM_MC_BEGIN(3, 0);
3439 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3440 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ (int8_t)u8Imm,1);
3441 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3442
3443 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3444 IEM_MC_REF_EFLAGS(pEFlags);
3445 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
 /* 32-bit register writes zero the upper half of the 64-bit register. */
3446 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
3447
3448 IEM_MC_ADVANCE_RIP();
3449 IEM_MC_END();
3450 break;
3451 }
3452
3453 case IEMMODE_64BIT:
3454 {
3455 IEM_MC_BEGIN(3, 0);
3456 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3457 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ (int8_t)u8Imm,1);
3458 IEM_MC_ARG(uint32_t *, pEFlags, 2);
3459
3460 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3461 IEM_MC_REF_EFLAGS(pEFlags);
3462 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3463
3464 IEM_MC_ADVANCE_RIP();
3465 IEM_MC_END();
3466 break;
3467 }
3468 }
3469 }
3470 else
3471 {
3472 /*
3473 * Memory target.
3474 */
 /* Locked workers exist for all sizes or none, so checking U16 suffices. */
3475 uint32_t fAccess;
3476 if (pImpl->pfnLockedU16)
3477 fAccess = IEM_ACCESS_DATA_RW;
3478 else /* CMP */
3479 fAccess = IEM_ACCESS_DATA_R;
3480
3481 switch (pVCpu->iem.s.enmEffOpSize)
3482 {
3483 case IEMMODE_16BIT:
3484 {
3485 IEM_MC_BEGIN(3, 2);
3486 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
3487 IEM_MC_ARG(uint16_t, u16Src, 1);
3488 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3490
3491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3492 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3493 IEM_MC_ASSIGN(u16Src, (int8_t)u8Imm);
3494 if (pImpl->pfnLockedU16)
3495 IEMOP_HLP_DONE_DECODING();
3496 else
3497 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3498 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3499 IEM_MC_FETCH_EFLAGS(EFlags);
3500 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3501 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
3502 else
3503 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);
3504
3505 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
3506 IEM_MC_COMMIT_EFLAGS(EFlags);
3507 IEM_MC_ADVANCE_RIP();
3508 IEM_MC_END();
3509 break;
3510 }
3511
3512 case IEMMODE_32BIT:
3513 {
3514 IEM_MC_BEGIN(3, 2);
3515 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
3516 IEM_MC_ARG(uint32_t, u32Src, 1);
3517 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3518 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3519
3520 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3521 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3522 IEM_MC_ASSIGN(u32Src, (int8_t)u8Imm);
3523 if (pImpl->pfnLockedU32)
3524 IEMOP_HLP_DONE_DECODING();
3525 else
3526 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3527 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3528 IEM_MC_FETCH_EFLAGS(EFlags);
3529 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3530 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
3531 else
3532 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);
3533
3534 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
3535 IEM_MC_COMMIT_EFLAGS(EFlags);
3536 IEM_MC_ADVANCE_RIP();
3537 IEM_MC_END();
3538 break;
3539 }
3540
3541 case IEMMODE_64BIT:
3542 {
3543 IEM_MC_BEGIN(3, 2);
3544 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
3545 IEM_MC_ARG(uint64_t, u64Src, 1);
3546 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
3547 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3548
3549 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
3550 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
3551 IEM_MC_ASSIGN(u64Src, (int8_t)u8Imm);
3552 if (pImpl->pfnLockedU64)
3553 IEMOP_HLP_DONE_DECODING();
3554 else
3555 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3556 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3557 IEM_MC_FETCH_EFLAGS(EFlags);
3558 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
3559 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
3560 else
3561 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);
3562
3563 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
3564 IEM_MC_COMMIT_EFLAGS(EFlags);
3565 IEM_MC_ADVANCE_RIP();
3566 IEM_MC_END();
3567 break;
3568 }
3569 }
3570 }
3571 return VINF_SUCCESS;
3572}
3573
3574
3575/**
 * TEST Eb,Gb - byte AND without storing the result, only flags are updated.
 * Forwards to the generic r/m,reg binary-operator helper with the TEST
 * worker table; AF is left undefined per architecture.
 *
3576 * @opcode 0x84
3577 */
3578FNIEMOP_DEF(iemOp_test_Eb_Gb)
3579{
3580 IEMOP_MNEMONIC(test_Eb_Gb, "test Eb,Gb");
3581 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3582 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_test);
3583}
3584
3585
3586/**
 * TEST Ev,Gv - operand-sized AND without storing the result, only flags
 * are updated.  Forwards to the generic r/m,reg binary-operator helper
 * with the TEST worker table; AF is left undefined per architecture.
 *
3587 * @opcode 0x85
3588 */
3589FNIEMOP_DEF(iemOp_test_Ev_Gv)
3590{
3591 IEMOP_MNEMONIC(test_Ev_Gv, "test Ev,Gv");
3592 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
3593 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_rv, &g_iemAImpl_test);
3594}
3595
3596
3597/**
 * XCHG Eb,Gb - exchange a byte register with r/m.
 *
 * Register-register form swaps via two temporaries.  The memory form is
 * implicitly locked on real hardware regardless of the LOCK prefix; the
 * locked worker is used unless the VM is configured to disregard locking
 * (fDisregardLock).
 *
3598 * @opcode 0x86
3599 */
3600FNIEMOP_DEF(iemOp_xchg_Eb_Gb)
3601{
3602 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3603 IEMOP_MNEMONIC(xchg_Eb_Gb, "xchg Eb,Gb");
3604
3605 /*
3606 * If rm is denoting a register, no more instruction bytes.
3607 */
3608 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3609 {
3610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3611
3612 IEM_MC_BEGIN(0, 2);
3613 IEM_MC_LOCAL(uint8_t, uTmp1);
3614 IEM_MC_LOCAL(uint8_t, uTmp2);
3615
3616 IEM_MC_FETCH_GREG_U8(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3617 IEM_MC_FETCH_GREG_U8(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3618 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3619 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3620
3621 IEM_MC_ADVANCE_RIP();
3622 IEM_MC_END();
3623 }
3624 else
3625 {
3626 /*
3627 * We're accessing memory.
3628 */
3629/** @todo the register must be committed separately! */
3630 IEM_MC_BEGIN(2, 2);
3631 IEM_MC_ARG(uint8_t *, pu8Mem, 0);
3632 IEM_MC_ARG(uint8_t *, pu8Reg, 1);
3633 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3634
3635 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3636 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3637 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* XCHG with memory is implicitly atomic; only skip locking when configured to. */
3638 if (!pVCpu->iem.s.fDisregardLock)
3639 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
3640 else
3641 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
3642 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);
3643
3644 IEM_MC_ADVANCE_RIP();
3645 IEM_MC_END();
3646 }
3647 return VINF_SUCCESS;
3648}
3649
3650
3651/**
 * XCHG Ev,Gv - exchange an operand-sized register with r/m.
 *
 * Register-register form swaps via temporaries per operand size.  The
 * memory form is implicitly locked on real hardware regardless of the
 * LOCK prefix; the locked worker is used unless fDisregardLock is set.
 * The 32-bit memory form clears the upper register half after commit.
 *
3652 * @opcode 0x87
3653 */
3654FNIEMOP_DEF(iemOp_xchg_Ev_Gv)
3655{
3656 IEMOP_MNEMONIC(xchg_Ev_Gv, "xchg Ev,Gv");
3657 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3658
3659 /*
3660 * If rm is denoting a register, no more instruction bytes.
3661 */
3662 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3663 {
3664 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3665
3666 switch (pVCpu->iem.s.enmEffOpSize)
3667 {
3668 case IEMMODE_16BIT:
3669 IEM_MC_BEGIN(0, 2);
3670 IEM_MC_LOCAL(uint16_t, uTmp1);
3671 IEM_MC_LOCAL(uint16_t, uTmp2);
3672
3673 IEM_MC_FETCH_GREG_U16(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3674 IEM_MC_FETCH_GREG_U16(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3675 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3676 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3677
3678 IEM_MC_ADVANCE_RIP();
3679 IEM_MC_END();
3680 return VINF_SUCCESS;
3681
3682 case IEMMODE_32BIT:
3683 IEM_MC_BEGIN(0, 2);
3684 IEM_MC_LOCAL(uint32_t, uTmp1);
3685 IEM_MC_LOCAL(uint32_t, uTmp2);
3686
 /* The U32 register stores implicitly zero the upper 32 bits. */
3687 IEM_MC_FETCH_GREG_U32(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3688 IEM_MC_FETCH_GREG_U32(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3689 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3690 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3691
3692 IEM_MC_ADVANCE_RIP();
3693 IEM_MC_END();
3694 return VINF_SUCCESS;
3695
3696 case IEMMODE_64BIT:
3697 IEM_MC_BEGIN(0, 2);
3698 IEM_MC_LOCAL(uint64_t, uTmp1);
3699 IEM_MC_LOCAL(uint64_t, uTmp2);
3700
3701 IEM_MC_FETCH_GREG_U64(uTmp1, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3702 IEM_MC_FETCH_GREG_U64(uTmp2, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3703 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uTmp1);
3704 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uTmp2);
3705
3706 IEM_MC_ADVANCE_RIP();
3707 IEM_MC_END();
3708 return VINF_SUCCESS;
3709
3710 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3711 }
3712 }
3713 else
3714 {
3715 /*
3716 * We're accessing memory.
3717 */
3718 switch (pVCpu->iem.s.enmEffOpSize)
3719 {
3720/** @todo the register must be committed separately! */
3721 case IEMMODE_16BIT:
3722 IEM_MC_BEGIN(2, 2);
3723 IEM_MC_ARG(uint16_t *, pu16Mem, 0);
3724 IEM_MC_ARG(uint16_t *, pu16Reg, 1);
3725 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3726
3727 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3728 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3729 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
 /* XCHG with memory is implicitly atomic; only skip locking when configured to. */
3730 if (!pVCpu->iem.s.fDisregardLock)
3731 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_locked, pu16Mem, pu16Reg);
3732 else
3733 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
3734 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);
3735
3736 IEM_MC_ADVANCE_RIP();
3737 IEM_MC_END();
3738 return VINF_SUCCESS;
3739
3740 case IEMMODE_32BIT:
3741 IEM_MC_BEGIN(2, 2);
3742 IEM_MC_ARG(uint32_t *, pu32Mem, 0);
3743 IEM_MC_ARG(uint32_t *, pu32Reg, 1);
3744 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3745
3746 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3747 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3748 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3749 if (!pVCpu->iem.s.fDisregardLock)
3750 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
3751 else
3752 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
3753 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);
3754
 /* Writing the 32-bit register through a reference does not clear the
 upper half automatically, so do it explicitly. */
3755 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg);
3756 IEM_MC_ADVANCE_RIP();
3757 IEM_MC_END();
3758 return VINF_SUCCESS;
3759
3760 case IEMMODE_64BIT:
3761 IEM_MC_BEGIN(2, 2);
3762 IEM_MC_ARG(uint64_t *, pu64Mem, 0);
3763 IEM_MC_ARG(uint64_t *, pu64Reg, 1);
3764 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3765
3766 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3767 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
3768 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3769 if (!pVCpu->iem.s.fDisregardLock)
3770 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
3771 else
3772 IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
3773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);
3774
3775 IEM_MC_ADVANCE_RIP();
3776 IEM_MC_END();
3777 return VINF_SUCCESS;
3778
3779 IEM_NOT_REACHED_DEFAULT_CASE_RET();
3780 }
3781 }
3782}
3783
3784
3785/**
 * MOV Eb,Gb - store a byte register into r/m (register or memory).
 *
3786 * @opcode 0x88
3787 */
3788FNIEMOP_DEF(iemOp_mov_Eb_Gb)
3789{
3790 IEMOP_MNEMONIC(mov_Eb_Gb, "mov Eb,Gb");
3791
3792 uint8_t bRm;
3793 IEM_OPCODE_GET_NEXT_U8(&bRm);
3794
3795 /*
3796 * If rm is denoting a register, no more instruction bytes.
3797 */
3798 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3799 {
3800 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3801 IEM_MC_BEGIN(0, 1);
3802 IEM_MC_LOCAL(uint8_t, u8Value);
3803 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3804 IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Value);
3805 IEM_MC_ADVANCE_RIP();
3806 IEM_MC_END();
3807 }
3808 else
3809 {
3810 /*
3811 * We're writing a register to memory.
3812 */
3813 IEM_MC_BEGIN(0, 2);
3814 IEM_MC_LOCAL(uint8_t, u8Value);
3815 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
 /* EA calculation may consume more opcode bytes, so it precedes the
 done-decoding check. */
3816 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3817 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3818 IEM_MC_FETCH_GREG_U8(u8Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3819 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Value);
3820 IEM_MC_ADVANCE_RIP();
3821 IEM_MC_END();
3822 }
3823 return VINF_SUCCESS;
3824
3825}
3826
3827
3828/**
 * MOV Ev,Gv - store an operand-sized register into r/m (register or
 * memory), handling 16/32/64-bit operand sizes.
 *
3829 * @opcode 0x89
3830 */
3831FNIEMOP_DEF(iemOp_mov_Ev_Gv)
3832{
3833 IEMOP_MNEMONIC(mov_Ev_Gv, "mov Ev,Gv");
3834
3835 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3836
3837 /*
3838 * If rm is denoting a register, no more instruction bytes.
3839 */
3840 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3841 {
3842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3843 switch (pVCpu->iem.s.enmEffOpSize)
3844 {
3845 case IEMMODE_16BIT:
3846 IEM_MC_BEGIN(0, 1);
3847 IEM_MC_LOCAL(uint16_t, u16Value);
3848 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3849 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
3850 IEM_MC_ADVANCE_RIP();
3851 IEM_MC_END();
3852 break;
3853
3854 case IEMMODE_32BIT:
3855 IEM_MC_BEGIN(0, 1);
3856 IEM_MC_LOCAL(uint32_t, u32Value);
3857 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3858 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
3859 IEM_MC_ADVANCE_RIP();
3860 IEM_MC_END();
3861 break;
3862
3863 case IEMMODE_64BIT:
3864 IEM_MC_BEGIN(0, 1);
3865 IEM_MC_LOCAL(uint64_t, u64Value);
3866 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3867 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
3868 IEM_MC_ADVANCE_RIP();
3869 IEM_MC_END();
3870 break;
3871 }
3872 }
3873 else
3874 {
3875 /*
3876 * We're writing a register to memory.
3877 */
3878 switch (pVCpu->iem.s.enmEffOpSize)
3879 {
3880 case IEMMODE_16BIT:
3881 IEM_MC_BEGIN(0, 2);
3882 IEM_MC_LOCAL(uint16_t, u16Value);
3883 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3884 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3885 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3886 IEM_MC_FETCH_GREG_U16(u16Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3887 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
3888 IEM_MC_ADVANCE_RIP();
3889 IEM_MC_END();
3890 break;
3891
3892 case IEMMODE_32BIT:
3893 IEM_MC_BEGIN(0, 2);
3894 IEM_MC_LOCAL(uint32_t, u32Value);
3895 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3896 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3897 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3898 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3899 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value);
3900 IEM_MC_ADVANCE_RIP();
3901 IEM_MC_END();
3902 break;
3903
3904 case IEMMODE_64BIT:
3905 IEM_MC_BEGIN(0, 2);
3906 IEM_MC_LOCAL(uint64_t, u64Value);
3907 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3908 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3910 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
3911 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value);
3912 IEM_MC_ADVANCE_RIP();
3913 IEM_MC_END();
3914 break;
3915 }
3916 }
3917 return VINF_SUCCESS;
3918}
3919
3920
3921/**
 * MOV Gb,Eb - load a byte register from r/m (register or memory).
 *
3922 * @opcode 0x8a
3923 */
3924FNIEMOP_DEF(iemOp_mov_Gb_Eb)
3925{
3926 IEMOP_MNEMONIC(mov_Gb_Eb, "mov Gb,Eb");
3927
3928 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3929
3930 /*
3931 * If rm is denoting a register, no more instruction bytes.
3932 */
3933 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3934 {
3935 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3936 IEM_MC_BEGIN(0, 1);
3937 IEM_MC_LOCAL(uint8_t, u8Value);
3938 IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3939 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3940 IEM_MC_ADVANCE_RIP();
3941 IEM_MC_END();
3942 }
3943 else
3944 {
3945 /*
3946 * We're loading a register from memory.
3947 */
3948 IEM_MC_BEGIN(0, 2);
3949 IEM_MC_LOCAL(uint8_t, u8Value);
3950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
3951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
3952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3953 IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
3954 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8Value);
3955 IEM_MC_ADVANCE_RIP();
3956 IEM_MC_END();
3957 }
3958 return VINF_SUCCESS;
3959}
3960
3961
3962/**
 * MOV Gv,Ev - load an operand-sized register from r/m (register or
 * memory), handling 16/32/64-bit operand sizes.
 *
3963 * @opcode 0x8b
3964 */
3965FNIEMOP_DEF(iemOp_mov_Gv_Ev)
3966{
3967 IEMOP_MNEMONIC(mov_Gv_Ev, "mov Gv,Ev");
3968
3969 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
3970
3971 /*
3972 * If rm is denoting a register, no more instruction bytes.
3973 */
3974 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
3975 {
3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
3977 switch (pVCpu->iem.s.enmEffOpSize)
3978 {
3979 case IEMMODE_16BIT:
3980 IEM_MC_BEGIN(0, 1);
3981 IEM_MC_LOCAL(uint16_t, u16Value);
3982 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3983 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
3984 IEM_MC_ADVANCE_RIP();
3985 IEM_MC_END();
3986 break;
3987
3988 case IEMMODE_32BIT:
3989 IEM_MC_BEGIN(0, 1);
3990 IEM_MC_LOCAL(uint32_t, u32Value);
3991 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
3992 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
3993 IEM_MC_ADVANCE_RIP();
3994 IEM_MC_END();
3995 break;
3996
3997 case IEMMODE_64BIT:
3998 IEM_MC_BEGIN(0, 1);
3999 IEM_MC_LOCAL(uint64_t, u64Value);
4000 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4001 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
4002 IEM_MC_ADVANCE_RIP();
4003 IEM_MC_END();
4004 break;
4005 }
4006 }
4007 else
4008 {
4009 /*
4010 * We're loading a register from memory.
4011 */
4012 switch (pVCpu->iem.s.enmEffOpSize)
4013 {
4014 case IEMMODE_16BIT:
4015 IEM_MC_BEGIN(0, 2);
4016 IEM_MC_LOCAL(uint16_t, u16Value);
4017 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4018 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4019 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4020 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4021 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value);
4022 IEM_MC_ADVANCE_RIP();
4023 IEM_MC_END();
4024 break;
4025
4026 case IEMMODE_32BIT:
4027 IEM_MC_BEGIN(0, 2);
4028 IEM_MC_LOCAL(uint32_t, u32Value);
4029 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4031 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4032 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4033 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value);
4034 IEM_MC_ADVANCE_RIP();
4035 IEM_MC_END();
4036 break;
4037
4038 case IEMMODE_64BIT:
4039 IEM_MC_BEGIN(0, 2);
4040 IEM_MC_LOCAL(uint64_t, u64Value);
4041 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4042 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4043 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4044 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4045 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value);
4046 IEM_MC_ADVANCE_RIP();
4047 IEM_MC_END();
4048 break;
4049 }
4050 }
4051 return VINF_SUCCESS;
4052}
4053
4054
4055/**
 * Opcode 0x63 dispatcher: ARPL Ew,Gw outside 64-bit mode, MOVSXD in
 * 64-bit mode.  In 64-bit mode with a non-64-bit effective operand size
 * the instruction degenerates to a plain MOV Gv,Ev (no sign extension
 * needed), so it is forwarded to that handler.
 *
 * opcode 0x63
4057 * @todo Table fixme
4058 */
4059FNIEMOP_DEF(iemOp_arpl_Ew_Gw_movsx_Gv_Ev)
4060{
4061 if (pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT)
4062 return FNIEMOP_CALL(iemOp_arpl_Ew_Gw);
4063 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT)
4064 return FNIEMOP_CALL(iemOp_mov_Gv_Ev);
4065 return FNIEMOP_CALL(iemOp_movsxd_Gv_Ev);
4066}
4067
4068
4069/**
 * MOV Ev,Sw - store a segment register into r/m.
 *
 * The ModR/M reg field selects the segment register (REX.R is ignored);
 * values above GS raise \#UD.  Register destinations honour the operand
 * size (zero-extending for 32/64-bit), while memory destinations are
 * always written as a 16-bit word regardless of operand-size prefixes.
 *
4070 * @opcode 0x8c
4071 */
4072FNIEMOP_DEF(iemOp_mov_Ev_Sw)
4073{
4074 IEMOP_MNEMONIC(mov_Ev_Sw, "mov Ev,Sw");
4075
4076 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4077
4078 /*
4079 * Check that the destination register exists. The REX.R prefix is ignored.
4080 */
4081 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4082 if ( iSegReg > X86_SREG_GS)
4083 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4084
4085 /*
4086 * If rm is denoting a register, no more instruction bytes.
4087 * In that case, the operand size is respected and the upper bits are
4088 * cleared (starting with some pentium).
4089 */
4090 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4091 {
4092 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4093 switch (pVCpu->iem.s.enmEffOpSize)
4094 {
4095 case IEMMODE_16BIT:
4096 IEM_MC_BEGIN(0, 1);
4097 IEM_MC_LOCAL(uint16_t, u16Value);
4098 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4099 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Value);
4100 IEM_MC_ADVANCE_RIP();
4101 IEM_MC_END();
4102 break;
4103
4104 case IEMMODE_32BIT:
4105 IEM_MC_BEGIN(0, 1);
4106 IEM_MC_LOCAL(uint32_t, u32Value);
4107 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iSegReg);
4108 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Value);
4109 IEM_MC_ADVANCE_RIP();
4110 IEM_MC_END();
4111 break;
4112
4113 case IEMMODE_64BIT:
4114 IEM_MC_BEGIN(0, 1);
4115 IEM_MC_LOCAL(uint64_t, u64Value);
4116 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iSegReg);
4117 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Value);
4118 IEM_MC_ADVANCE_RIP();
4119 IEM_MC_END();
4120 break;
4121 }
4122 }
4123 else
4124 {
4125 /*
4126 * We're saving the register to memory. The access is word sized
4127 * regardless of operand size prefixes.
4128 */
4129#if 0 /* not necessary */
4130 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4131#endif
4132 IEM_MC_BEGIN(0, 2);
4133 IEM_MC_LOCAL(uint16_t, u16Value);
4134 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4135 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4136 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4137 IEM_MC_FETCH_SREG_U16(u16Value, iSegReg);
4138 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Value);
4139 IEM_MC_ADVANCE_RIP();
4140 IEM_MC_END();
4141 }
4142 return VINF_SUCCESS;
4143}
4144
4145
4146
4147
4148/**
4149 * @opcode 0x8d
4150 */
4151FNIEMOP_DEF(iemOp_lea_Gv_M)
4152{
4153 IEMOP_MNEMONIC(lea_Gv_M, "lea Gv,M");
4154 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4155 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4156 return IEMOP_RAISE_INVALID_OPCODE(); /* no register form */
4157
4158 switch (pVCpu->iem.s.enmEffOpSize)
4159 {
4160 case IEMMODE_16BIT:
4161 IEM_MC_BEGIN(0, 2);
4162 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4163 IEM_MC_LOCAL(uint16_t, u16Cast);
4164 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4165 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4166 IEM_MC_ASSIGN_TO_SMALLER(u16Cast, GCPtrEffSrc);
4167 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Cast);
4168 IEM_MC_ADVANCE_RIP();
4169 IEM_MC_END();
4170 return VINF_SUCCESS;
4171
4172 case IEMMODE_32BIT:
4173 IEM_MC_BEGIN(0, 2);
4174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4175 IEM_MC_LOCAL(uint32_t, u32Cast);
4176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4178 IEM_MC_ASSIGN_TO_SMALLER(u32Cast, GCPtrEffSrc);
4179 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Cast);
4180 IEM_MC_ADVANCE_RIP();
4181 IEM_MC_END();
4182 return VINF_SUCCESS;
4183
4184 case IEMMODE_64BIT:
4185 IEM_MC_BEGIN(0, 1);
4186 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
4187 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
4188 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4189 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, GCPtrEffSrc);
4190 IEM_MC_ADVANCE_RIP();
4191 IEM_MC_END();
4192 return VINF_SUCCESS;
4193 }
4194 AssertFailedReturn(VERR_IEM_IPE_7);
4195}
4196
4197
4198/**
4199 * @opcode 0x8e
4200 */
4201FNIEMOP_DEF(iemOp_mov_Sw_Ev)
4202{
4203 IEMOP_MNEMONIC(mov_Sw_Ev, "mov Sw,Ev");
4204
4205 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4206
4207 /*
4208 * The practical operand size is 16-bit.
4209 */
4210#if 0 /* not necessary */
4211 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_16BIT;
4212#endif
4213
4214 /*
4215 * Check that the destination register exists and can be used with this
4216 * instruction. The REX.R prefix is ignored.
4217 */
4218 uint8_t const iSegReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
4219 if ( iSegReg == X86_SREG_CS
4220 || iSegReg > X86_SREG_GS)
4221 return IEMOP_RAISE_INVALID_OPCODE(); /** @todo should probably not be raised until we've fetched all the opcode bytes? */
4222
4223 /*
4224 * If rm is denoting a register, no more instruction bytes.
4225 */
4226 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
4227 {
4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4229 IEM_MC_BEGIN(2, 0);
4230 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4231 IEM_MC_ARG(uint16_t, u16Value, 1);
4232 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
4233 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4234 IEM_MC_END();
4235 }
4236 else
4237 {
4238 /*
4239 * We're loading the register from memory. The access is word sized
4240 * regardless of operand size prefixes.
4241 */
4242 IEM_MC_BEGIN(2, 1);
4243 IEM_MC_ARG_CONST(uint8_t, iSRegArg, iSegReg, 0);
4244 IEM_MC_ARG(uint16_t, u16Value, 1);
4245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
4246 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
4247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4248 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
4249 IEM_MC_CALL_CIMPL_2(iemCImpl_load_SReg, iSRegArg, u16Value);
4250 IEM_MC_END();
4251 }
4252 return VINF_SUCCESS;
4253}
4254
4255
/** Opcode 0x8f /0.  pop Ev - pop from the stack into a register or memory. */
FNIEMOP_DEF_1(iemOp_pop_Ev, uint8_t, bRm)
{
    /* This bugger is rather annoying as it requires rSP to be updated before
       doing the effective address calculations.  Will eventually require a
       split between the R/M+SIB decoding and the effective address
       calculation - which is something that is required for any attempt at
       reusing this code for a recompiler.  It may also be good to have if we
       need to delay #UD exception caused by invalid lock prefixes.

       For now, we'll do a mostly safe interpreter-only implementation here. */
    /** @todo What's the deal with the 'reg' field and pop Ev?  Ignoring it for
     *        now until tests show it's checked.. */
    IEMOP_MNEMONIC(pop_Ev, "pop Ev");

    /* Register access is relatively easy and can share code. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPopGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /*
     * Memory target.
     *
     * Intel says that RSP is incremented before it's used in any effective
     * address calcuations.  This means some serious extra annoyance here since
     * we decode and calculate the effective address in one step and like to
     * delay committing registers till everything is done.
     *
     * So, we'll decode and calculate the effective address twice.  This will
     * require some recoding if turned into a recompiler.
     */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* The common code does this differently. */

#ifndef TST_IEM_CHECK_MC
    /* Calc effective address with modified ESP. */
/** @todo testcase */
    RTGCPTR         GCPtrEff;
    VBOXSTRICTRC    rcStrict;
    /* The offset passed to the helper (2/4/8) pre-adjusts rSP by the pop size. */
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 2); break;
        case IEMMODE_32BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 4); break;
        case IEMMODE_64BIT: rcStrict = iemOpHlpCalcRmEffAddrEx(pVCpu, bRm, 0, &GCPtrEff, 8); break;
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict != VINF_SUCCESS)
        return rcStrict;
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Perform the operation - this should be CImpl.  rSP is only committed on
       full success via TmpRsp below. */
    RTUINT64U TmpRsp;
    TmpRsp.u = pVCpu->cpum.GstCtx.rsp;
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Value;
            rcStrict = iemMemStackPopU16Ex(pVCpu, &u16Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU16(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u16Value);
            break;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Value;
            rcStrict = iemMemStackPopU32Ex(pVCpu, &u32Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU32(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u32Value);
            break;
        }

        case IEMMODE_64BIT:
        {
            uint64_t u64Value;
            rcStrict = iemMemStackPopU64Ex(pVCpu, &u64Value, &TmpRsp);
            if (rcStrict == VINF_SUCCESS)
                rcStrict = iemMemStoreDataU64(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrEff, u64Value);
            break;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
    if (rcStrict == VINF_SUCCESS)
    {
        pVCpu->cpum.GstCtx.rsp = TmpRsp.u;
        iemRegUpdateRipAndClearRF(pVCpu);
    }
    return rcStrict;

#else
    return VERR_IEM_IPE_2;
#endif
}
4349
4350
4351/**
4352 * @opcode 0x8f
4353 */
4354FNIEMOP_DEF(iemOp_Grp1A__xop)
4355{
4356 /*
4357 * AMD has defined /1 thru /7 as XOP prefix. The prefix is similar to the
4358 * three byte VEX prefix, except that the mmmmm field cannot have the values
4359 * 0 thru 7, because it would then be confused with pop Ev (modrm.reg == 0).
4360 */
4361 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
4362 if ((bRm & X86_MODRM_REG_MASK) == (0 << X86_MODRM_REG_SHIFT)) /* /0 */
4363 return FNIEMOP_CALL_1(iemOp_pop_Ev, bRm);
4364
4365 IEMOP_MNEMONIC(xop, "xop");
4366 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXop)
4367 {
4368 /** @todo Test when exctly the XOP conformance checks kick in during
4369 * instruction decoding and fetching (using \#PF). */
4370 uint8_t bXop2; IEM_OPCODE_GET_NEXT_U8(&bXop2);
4371 uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
4372 if ( ( pVCpu->iem.s.fPrefixes
4373 & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_LOCK | IEM_OP_PRF_REX))
4374 == 0)
4375 {
4376 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_XOP;
4377 if ((bXop2 & 0x80 /* XOP.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
4378 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
4379 pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
4380 pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
4381 pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
4382 pVCpu->iem.s.uVex3rdReg = (~bXop2 >> 3) & 0xf;
4383 pVCpu->iem.s.uVexLength = (bXop2 >> 2) & 1;
4384 pVCpu->iem.s.idxPrefix = bXop2 & 0x3;
4385
4386 /** @todo XOP: Just use new tables and decoders. */
4387 switch (bRm & 0x1f)
4388 {
4389 case 8: /* xop opcode map 8. */
4390 IEMOP_BITCH_ABOUT_STUB();
4391 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4392
4393 case 9: /* xop opcode map 9. */
4394 IEMOP_BITCH_ABOUT_STUB();
4395 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4396
4397 case 10: /* xop opcode map 10. */
4398 IEMOP_BITCH_ABOUT_STUB();
4399 return VERR_IEM_INSTR_NOT_IMPLEMENTED;
4400
4401 default:
4402 Log(("XOP: Invalid vvvv value: %#x!\n", bRm & 0x1f));
4403 return IEMOP_RAISE_INVALID_OPCODE();
4404 }
4405 }
4406 else
4407 Log(("XOP: Invalid prefix mix!\n"));
4408 }
4409 else
4410 Log(("XOP: XOP support disabled!\n"));
4411 return IEMOP_RAISE_INVALID_OPCODE();
4412}
4413
4414
4415/**
4416 * Common 'xchg reg,rAX' helper.
4417 */
4418FNIEMOP_DEF_1(iemOpCommonXchgGRegRax, uint8_t, iReg)
4419{
4420 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4421
4422 iReg |= pVCpu->iem.s.uRexB;
4423 switch (pVCpu->iem.s.enmEffOpSize)
4424 {
4425 case IEMMODE_16BIT:
4426 IEM_MC_BEGIN(0, 2);
4427 IEM_MC_LOCAL(uint16_t, u16Tmp1);
4428 IEM_MC_LOCAL(uint16_t, u16Tmp2);
4429 IEM_MC_FETCH_GREG_U16(u16Tmp1, iReg);
4430 IEM_MC_FETCH_GREG_U16(u16Tmp2, X86_GREG_xAX);
4431 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp1);
4432 IEM_MC_STORE_GREG_U16(iReg, u16Tmp2);
4433 IEM_MC_ADVANCE_RIP();
4434 IEM_MC_END();
4435 return VINF_SUCCESS;
4436
4437 case IEMMODE_32BIT:
4438 IEM_MC_BEGIN(0, 2);
4439 IEM_MC_LOCAL(uint32_t, u32Tmp1);
4440 IEM_MC_LOCAL(uint32_t, u32Tmp2);
4441 IEM_MC_FETCH_GREG_U32(u32Tmp1, iReg);
4442 IEM_MC_FETCH_GREG_U32(u32Tmp2, X86_GREG_xAX);
4443 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp1);
4444 IEM_MC_STORE_GREG_U32(iReg, u32Tmp2);
4445 IEM_MC_ADVANCE_RIP();
4446 IEM_MC_END();
4447 return VINF_SUCCESS;
4448
4449 case IEMMODE_64BIT:
4450 IEM_MC_BEGIN(0, 2);
4451 IEM_MC_LOCAL(uint64_t, u64Tmp1);
4452 IEM_MC_LOCAL(uint64_t, u64Tmp2);
4453 IEM_MC_FETCH_GREG_U64(u64Tmp1, iReg);
4454 IEM_MC_FETCH_GREG_U64(u64Tmp2, X86_GREG_xAX);
4455 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp1);
4456 IEM_MC_STORE_GREG_U64(iReg, u64Tmp2);
4457 IEM_MC_ADVANCE_RIP();
4458 IEM_MC_END();
4459 return VINF_SUCCESS;
4460
4461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4462 }
4463}
4464
4465
4466/**
4467 * @opcode 0x90
4468 */
4469FNIEMOP_DEF(iemOp_nop)
4470{
4471 /* R8/R8D and RAX/EAX can be exchanged. */
4472 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_B)
4473 {
4474 IEMOP_MNEMONIC(xchg_r8_rAX, "xchg r8,rAX");
4475 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xAX);
4476 }
4477
4478 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
4479 {
4480 IEMOP_MNEMONIC(pause, "pause");
4481#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
4482 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fVmx)
4483 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmx_pause);
4484#endif
4485#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
4486 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSvm)
4487 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_svm_pause);
4488#endif
4489 }
4490 else
4491 IEMOP_MNEMONIC(nop, "nop");
4492 IEM_MC_BEGIN(0, 0);
4493 IEM_MC_ADVANCE_RIP();
4494 IEM_MC_END();
4495 return VINF_SUCCESS;
4496}
4497
4498
4499/**
4500 * @opcode 0x91
4501 */
4502FNIEMOP_DEF(iemOp_xchg_eCX_eAX)
4503{
4504 IEMOP_MNEMONIC(xchg_rCX_rAX, "xchg rCX,rAX");
4505 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xCX);
4506}
4507
4508
4509/**
4510 * @opcode 0x92
4511 */
4512FNIEMOP_DEF(iemOp_xchg_eDX_eAX)
4513{
4514 IEMOP_MNEMONIC(xchg_rDX_rAX, "xchg rDX,rAX");
4515 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDX);
4516}
4517
4518
4519/**
4520 * @opcode 0x93
4521 */
4522FNIEMOP_DEF(iemOp_xchg_eBX_eAX)
4523{
4524 IEMOP_MNEMONIC(xchg_rBX_rAX, "xchg rBX,rAX");
4525 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBX);
4526}
4527
4528
4529/**
4530 * @opcode 0x94
4531 */
4532FNIEMOP_DEF(iemOp_xchg_eSP_eAX)
4533{
4534 IEMOP_MNEMONIC(xchg_rSX_rAX, "xchg rSX,rAX");
4535 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSP);
4536}
4537
4538
4539/**
4540 * @opcode 0x95
4541 */
4542FNIEMOP_DEF(iemOp_xchg_eBP_eAX)
4543{
4544 IEMOP_MNEMONIC(xchg_rBP_rAX, "xchg rBP,rAX");
4545 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xBP);
4546}
4547
4548
4549/**
4550 * @opcode 0x96
4551 */
4552FNIEMOP_DEF(iemOp_xchg_eSI_eAX)
4553{
4554 IEMOP_MNEMONIC(xchg_rSI_rAX, "xchg rSI,rAX");
4555 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xSI);
4556}
4557
4558
4559/**
4560 * @opcode 0x97
4561 */
4562FNIEMOP_DEF(iemOp_xchg_eDI_eAX)
4563{
4564 IEMOP_MNEMONIC(xchg_rDI_rAX, "xchg rDI,rAX");
4565 return FNIEMOP_CALL_1(iemOpCommonXchgGRegRax, X86_GREG_xDI);
4566}
4567
4568
4569/**
4570 * @opcode 0x98
4571 */
4572FNIEMOP_DEF(iemOp_cbw)
4573{
4574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4575 switch (pVCpu->iem.s.enmEffOpSize)
4576 {
4577 case IEMMODE_16BIT:
4578 IEMOP_MNEMONIC(cbw, "cbw");
4579 IEM_MC_BEGIN(0, 1);
4580 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 7) {
4581 IEM_MC_OR_GREG_U16(X86_GREG_xAX, UINT16_C(0xff00));
4582 } IEM_MC_ELSE() {
4583 IEM_MC_AND_GREG_U16(X86_GREG_xAX, UINT16_C(0x00ff));
4584 } IEM_MC_ENDIF();
4585 IEM_MC_ADVANCE_RIP();
4586 IEM_MC_END();
4587 return VINF_SUCCESS;
4588
4589 case IEMMODE_32BIT:
4590 IEMOP_MNEMONIC(cwde, "cwde");
4591 IEM_MC_BEGIN(0, 1);
4592 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4593 IEM_MC_OR_GREG_U32(X86_GREG_xAX, UINT32_C(0xffff0000));
4594 } IEM_MC_ELSE() {
4595 IEM_MC_AND_GREG_U32(X86_GREG_xAX, UINT32_C(0x0000ffff));
4596 } IEM_MC_ENDIF();
4597 IEM_MC_ADVANCE_RIP();
4598 IEM_MC_END();
4599 return VINF_SUCCESS;
4600
4601 case IEMMODE_64BIT:
4602 IEMOP_MNEMONIC(cdqe, "cdqe");
4603 IEM_MC_BEGIN(0, 1);
4604 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4605 IEM_MC_OR_GREG_U64(X86_GREG_xAX, UINT64_C(0xffffffff00000000));
4606 } IEM_MC_ELSE() {
4607 IEM_MC_AND_GREG_U64(X86_GREG_xAX, UINT64_C(0x00000000ffffffff));
4608 } IEM_MC_ENDIF();
4609 IEM_MC_ADVANCE_RIP();
4610 IEM_MC_END();
4611 return VINF_SUCCESS;
4612
4613 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4614 }
4615}
4616
4617
4618/**
4619 * @opcode 0x99
4620 */
4621FNIEMOP_DEF(iemOp_cwd)
4622{
4623 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4624 switch (pVCpu->iem.s.enmEffOpSize)
4625 {
4626 case IEMMODE_16BIT:
4627 IEMOP_MNEMONIC(cwd, "cwd");
4628 IEM_MC_BEGIN(0, 1);
4629 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 15) {
4630 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, UINT16_C(0xffff));
4631 } IEM_MC_ELSE() {
4632 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xDX, 0);
4633 } IEM_MC_ENDIF();
4634 IEM_MC_ADVANCE_RIP();
4635 IEM_MC_END();
4636 return VINF_SUCCESS;
4637
4638 case IEMMODE_32BIT:
4639 IEMOP_MNEMONIC(cdq, "cdq");
4640 IEM_MC_BEGIN(0, 1);
4641 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 31) {
4642 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, UINT32_C(0xffffffff));
4643 } IEM_MC_ELSE() {
4644 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xDX, 0);
4645 } IEM_MC_ENDIF();
4646 IEM_MC_ADVANCE_RIP();
4647 IEM_MC_END();
4648 return VINF_SUCCESS;
4649
4650 case IEMMODE_64BIT:
4651 IEMOP_MNEMONIC(cqo, "cqo");
4652 IEM_MC_BEGIN(0, 1);
4653 IEM_MC_IF_GREG_BIT_SET(X86_GREG_xAX, 63) {
4654 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, UINT64_C(0xffffffffffffffff));
4655 } IEM_MC_ELSE() {
4656 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xDX, 0);
4657 } IEM_MC_ENDIF();
4658 IEM_MC_ADVANCE_RIP();
4659 IEM_MC_END();
4660 return VINF_SUCCESS;
4661
4662 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4663 }
4664}
4665
4666
4667/**
4668 * @opcode 0x9a
4669 */
4670FNIEMOP_DEF(iemOp_call_Ap)
4671{
4672 IEMOP_MNEMONIC(call_Ap, "call Ap");
4673 IEMOP_HLP_NO_64BIT();
4674
4675 /* Decode the far pointer address and pass it on to the far call C implementation. */
4676 uint32_t offSeg;
4677 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
4678 IEM_OPCODE_GET_NEXT_U32(&offSeg);
4679 else
4680 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
4681 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4683 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_callf, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
4684}
4685
4686
/** Opcode 0x9b. (aka fwait)
 * Checks for pending x87 exceptions (and CR0.MP/TS conditions) and otherwise
 * does nothing. */
FNIEMOP_DEF(iemOp_wait)
{
    IEMOP_MNEMONIC(wait, "wait");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
4700
4701
4702/**
4703 * @opcode 0x9c
4704 */
4705FNIEMOP_DEF(iemOp_pushf_Fv)
4706{
4707 IEMOP_MNEMONIC(pushf_Fv, "pushf Fv");
4708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4709 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4710 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_pushf, pVCpu->iem.s.enmEffOpSize);
4711}
4712
4713
4714/**
4715 * @opcode 0x9d
4716 */
4717FNIEMOP_DEF(iemOp_popf_Fv)
4718{
4719 IEMOP_MNEMONIC(popf_Fv, "popf Fv");
4720 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4721 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
4722 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_popf, pVCpu->iem.s.enmEffOpSize);
4723}
4724
4725
4726/**
4727 * @opcode 0x9e
4728 */
4729FNIEMOP_DEF(iemOp_sahf)
4730{
4731 IEMOP_MNEMONIC(sahf, "sahf");
4732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4733 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4734 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4735 return IEMOP_RAISE_INVALID_OPCODE();
4736 IEM_MC_BEGIN(0, 2);
4737 IEM_MC_LOCAL(uint32_t, u32Flags);
4738 IEM_MC_LOCAL(uint32_t, EFlags);
4739 IEM_MC_FETCH_EFLAGS(EFlags);
4740 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Flags, X86_GREG_xSP/*=AH*/);
4741 IEM_MC_AND_LOCAL_U32(u32Flags, X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF);
4742 IEM_MC_AND_LOCAL_U32(EFlags, UINT32_C(0xffffff00));
4743 IEM_MC_OR_LOCAL_U32(u32Flags, X86_EFL_1);
4744 IEM_MC_OR_2LOCS_U32(EFlags, u32Flags);
4745 IEM_MC_COMMIT_EFLAGS(EFlags);
4746 IEM_MC_ADVANCE_RIP();
4747 IEM_MC_END();
4748 return VINF_SUCCESS;
4749}
4750
4751
4752/**
4753 * @opcode 0x9f
4754 */
4755FNIEMOP_DEF(iemOp_lahf)
4756{
4757 IEMOP_MNEMONIC(lahf, "lahf");
4758 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4759 if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
4760 && !IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fLahfSahf)
4761 return IEMOP_RAISE_INVALID_OPCODE();
4762 IEM_MC_BEGIN(0, 1);
4763 IEM_MC_LOCAL(uint8_t, u8Flags);
4764 IEM_MC_FETCH_EFLAGS_U8(u8Flags);
4765 IEM_MC_STORE_GREG_U8(X86_GREG_xSP/*=AH*/, u8Flags);
4766 IEM_MC_ADVANCE_RIP();
4767 IEM_MC_END();
4768 return VINF_SUCCESS;
4769}
4770
4771
4772/**
4773 * Macro used by iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL and
4774 * iemOp_mov_Ov_rAX to fetch the moffsXX bit of the opcode and fend off lock
4775 * prefixes. Will return on failures.
4776 * @param a_GCPtrMemOff The variable to store the offset in.
4777 */
4778#define IEMOP_FETCH_MOFFS_XX(a_GCPtrMemOff) \
4779 do \
4780 { \
4781 switch (pVCpu->iem.s.enmEffAddrMode) \
4782 { \
4783 case IEMMODE_16BIT: \
4784 IEM_OPCODE_GET_NEXT_U16_ZX_U64(&(a_GCPtrMemOff)); \
4785 break; \
4786 case IEMMODE_32BIT: \
4787 IEM_OPCODE_GET_NEXT_U32_ZX_U64(&(a_GCPtrMemOff)); \
4788 break; \
4789 case IEMMODE_64BIT: \
4790 IEM_OPCODE_GET_NEXT_U64(&(a_GCPtrMemOff)); \
4791 break; \
4792 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
4793 } \
4794 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); \
4795 } while (0)
4796
4797/**
4798 * @opcode 0xa0
4799 */
4800FNIEMOP_DEF(iemOp_mov_AL_Ob)
4801{
4802 /*
4803 * Get the offset and fend off lock prefixes.
4804 */
4805 IEMOP_MNEMONIC(mov_AL_Ob, "mov AL,Ob");
4806 RTGCPTR GCPtrMemOff;
4807 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4808
4809 /*
4810 * Fetch AL.
4811 */
4812 IEM_MC_BEGIN(0,1);
4813 IEM_MC_LOCAL(uint8_t, u8Tmp);
4814 IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4815 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
4816 IEM_MC_ADVANCE_RIP();
4817 IEM_MC_END();
4818 return VINF_SUCCESS;
4819}
4820
4821
4822/**
4823 * @opcode 0xa1
4824 */
4825FNIEMOP_DEF(iemOp_mov_rAX_Ov)
4826{
4827 /*
4828 * Get the offset and fend off lock prefixes.
4829 */
4830 IEMOP_MNEMONIC(mov_rAX_Ov, "mov rAX,Ov");
4831 RTGCPTR GCPtrMemOff;
4832 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4833
4834 /*
4835 * Fetch rAX.
4836 */
4837 switch (pVCpu->iem.s.enmEffOpSize)
4838 {
4839 case IEMMODE_16BIT:
4840 IEM_MC_BEGIN(0,1);
4841 IEM_MC_LOCAL(uint16_t, u16Tmp);
4842 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4843 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
4844 IEM_MC_ADVANCE_RIP();
4845 IEM_MC_END();
4846 return VINF_SUCCESS;
4847
4848 case IEMMODE_32BIT:
4849 IEM_MC_BEGIN(0,1);
4850 IEM_MC_LOCAL(uint32_t, u32Tmp);
4851 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4852 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Tmp);
4853 IEM_MC_ADVANCE_RIP();
4854 IEM_MC_END();
4855 return VINF_SUCCESS;
4856
4857 case IEMMODE_64BIT:
4858 IEM_MC_BEGIN(0,1);
4859 IEM_MC_LOCAL(uint64_t, u64Tmp);
4860 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrMemOff);
4861 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Tmp);
4862 IEM_MC_ADVANCE_RIP();
4863 IEM_MC_END();
4864 return VINF_SUCCESS;
4865
4866 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4867 }
4868}
4869
4870
4871/**
4872 * @opcode 0xa2
4873 */
4874FNIEMOP_DEF(iemOp_mov_Ob_AL)
4875{
4876 /*
4877 * Get the offset and fend off lock prefixes.
4878 */
4879 IEMOP_MNEMONIC(mov_Ob_AL, "mov Ob,AL");
4880 RTGCPTR GCPtrMemOff;
4881 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4882
4883 /*
4884 * Store AL.
4885 */
4886 IEM_MC_BEGIN(0,1);
4887 IEM_MC_LOCAL(uint8_t, u8Tmp);
4888 IEM_MC_FETCH_GREG_U8(u8Tmp, X86_GREG_xAX);
4889 IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u8Tmp);
4890 IEM_MC_ADVANCE_RIP();
4891 IEM_MC_END();
4892 return VINF_SUCCESS;
4893}
4894
4895
4896/**
4897 * @opcode 0xa3
4898 */
4899FNIEMOP_DEF(iemOp_mov_Ov_rAX)
4900{
4901 /*
4902 * Get the offset and fend off lock prefixes.
4903 */
4904 IEMOP_MNEMONIC(mov_Ov_rAX, "mov Ov,rAX");
4905 RTGCPTR GCPtrMemOff;
4906 IEMOP_FETCH_MOFFS_XX(GCPtrMemOff);
4907
4908 /*
4909 * Store rAX.
4910 */
4911 switch (pVCpu->iem.s.enmEffOpSize)
4912 {
4913 case IEMMODE_16BIT:
4914 IEM_MC_BEGIN(0,1);
4915 IEM_MC_LOCAL(uint16_t, u16Tmp);
4916 IEM_MC_FETCH_GREG_U16(u16Tmp, X86_GREG_xAX);
4917 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u16Tmp);
4918 IEM_MC_ADVANCE_RIP();
4919 IEM_MC_END();
4920 return VINF_SUCCESS;
4921
4922 case IEMMODE_32BIT:
4923 IEM_MC_BEGIN(0,1);
4924 IEM_MC_LOCAL(uint32_t, u32Tmp);
4925 IEM_MC_FETCH_GREG_U32(u32Tmp, X86_GREG_xAX);
4926 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u32Tmp);
4927 IEM_MC_ADVANCE_RIP();
4928 IEM_MC_END();
4929 return VINF_SUCCESS;
4930
4931 case IEMMODE_64BIT:
4932 IEM_MC_BEGIN(0,1);
4933 IEM_MC_LOCAL(uint64_t, u64Tmp);
4934 IEM_MC_FETCH_GREG_U64(u64Tmp, X86_GREG_xAX);
4935 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrMemOff, u64Tmp);
4936 IEM_MC_ADVANCE_RIP();
4937 IEM_MC_END();
4938 return VINF_SUCCESS;
4939
4940 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4941 }
4942}
4943
/** Macro used by iemOp_movsb_Xb_Yb and iemOp_movswd_Xv_Yv.
 * Emits one non-rep movs iteration: load from DS(or seg-override):rSI, store
 * to ES:rDI, then advance/retreat both index registers per EFLAGS.DF. */
#define IEM_MOVS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
4962
4963/**
4964 * @opcode 0xa4
4965 */
4966FNIEMOP_DEF(iemOp_movsb_Xb_Yb)
4967{
4968 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
4969
4970 /*
4971 * Use the C implementation if a repeat prefix is encountered.
4972 */
4973 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
4974 {
4975 IEMOP_MNEMONIC(rep_movsb_Xb_Yb, "rep movsb Xb,Yb");
4976 switch (pVCpu->iem.s.enmEffAddrMode)
4977 {
4978 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr16, pVCpu->iem.s.iEffSeg);
4979 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr32, pVCpu->iem.s.iEffSeg);
4980 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op8_addr64, pVCpu->iem.s.iEffSeg);
4981 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4982 }
4983 }
4984 IEMOP_MNEMONIC(movsb_Xb_Yb, "movsb Xb,Yb");
4985
4986 /*
4987 * Sharing case implementation with movs[wdq] below.
4988 */
4989 switch (pVCpu->iem.s.enmEffAddrMode)
4990 {
4991 case IEMMODE_16BIT: IEM_MOVS_CASE(8, 16); break;
4992 case IEMMODE_32BIT: IEM_MOVS_CASE(8, 32); break;
4993 case IEMMODE_64BIT: IEM_MOVS_CASE(8, 64); break;
4994 IEM_NOT_REACHED_DEFAULT_CASE_RET();
4995 }
4996 return VINF_SUCCESS;
4997}
4998
4999
5000/**
5001 * @opcode 0xa5
5002 */
5003FNIEMOP_DEF(iemOp_movswd_Xv_Yv)
5004{
5005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5006
5007 /*
5008 * Use the C implementation if a repeat prefix is encountered.
5009 */
5010 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5011 {
5012 IEMOP_MNEMONIC(rep_movs_Xv_Yv, "rep movs Xv,Yv");
5013 switch (pVCpu->iem.s.enmEffOpSize)
5014 {
5015 case IEMMODE_16BIT:
5016 switch (pVCpu->iem.s.enmEffAddrMode)
5017 {
5018 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr16, pVCpu->iem.s.iEffSeg);
5019 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr32, pVCpu->iem.s.iEffSeg);
5020 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op16_addr64, pVCpu->iem.s.iEffSeg);
5021 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5022 }
5023 break;
5024 case IEMMODE_32BIT:
5025 switch (pVCpu->iem.s.enmEffAddrMode)
5026 {
5027 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr16, pVCpu->iem.s.iEffSeg);
5028 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr32, pVCpu->iem.s.iEffSeg);
5029 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op32_addr64, pVCpu->iem.s.iEffSeg);
5030 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5031 }
5032 case IEMMODE_64BIT:
5033 switch (pVCpu->iem.s.enmEffAddrMode)
5034 {
5035 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6);
5036 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr32, pVCpu->iem.s.iEffSeg);
5037 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_rep_movs_op64_addr64, pVCpu->iem.s.iEffSeg);
5038 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5039 }
5040 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5041 }
5042 }
5043 IEMOP_MNEMONIC(movs_Xv_Yv, "movs Xv,Yv");
5044
5045 /*
5046 * Annoying double switch here.
5047 * Using ugly macro for implementing the cases, sharing it with movsb.
5048 */
5049 switch (pVCpu->iem.s.enmEffOpSize)
5050 {
5051 case IEMMODE_16BIT:
5052 switch (pVCpu->iem.s.enmEffAddrMode)
5053 {
5054 case IEMMODE_16BIT: IEM_MOVS_CASE(16, 16); break;
5055 case IEMMODE_32BIT: IEM_MOVS_CASE(16, 32); break;
5056 case IEMMODE_64BIT: IEM_MOVS_CASE(16, 64); break;
5057 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5058 }
5059 break;
5060
5061 case IEMMODE_32BIT:
5062 switch (pVCpu->iem.s.enmEffAddrMode)
5063 {
5064 case IEMMODE_16BIT: IEM_MOVS_CASE(32, 16); break;
5065 case IEMMODE_32BIT: IEM_MOVS_CASE(32, 32); break;
5066 case IEMMODE_64BIT: IEM_MOVS_CASE(32, 64); break;
5067 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5068 }
5069 break;
5070
5071 case IEMMODE_64BIT:
5072 switch (pVCpu->iem.s.enmEffAddrMode)
5073 {
5074 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5075 case IEMMODE_32BIT: IEM_MOVS_CASE(64, 32); break;
5076 case IEMMODE_64BIT: IEM_MOVS_CASE(64, 64); break;
5077 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5078 }
5079 break;
5080 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5081 }
5082 return VINF_SUCCESS;
5083}
5084
5085#undef IEM_MOVS_CASE
5086
/** Macro used by iemOp_cmpsb_Xb_Yb and iemOp_cmpswd_Xv_Yv.
 * Emits one non-rep cmps iteration: compare [seg:rSI] with [ES:rDI] via
 * iemAImpl_cmp (updates EFLAGS only), then step both index registers by DF. */
#define IEM_CMPS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 3); \
        IEM_MC_ARG(uint##ValBits##_t *, puValue1, 0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue2,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags,  2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue1); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue1, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue2, X86_SREG_ES, uAddr); \
        IEM_MC_REF_LOCAL(puValue1, uValue1); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puValue1, uValue2, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5114/**
5115 * @opcode 0xa6
5116 */
5117FNIEMOP_DEF(iemOp_cmpsb_Xb_Yb)
5118{
5119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5120
5121 /*
5122 * Use the C implementation if a repeat prefix is encountered.
5123 */
5124 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5125 {
5126 IEMOP_MNEMONIC(repz_cmps_Xb_Yb, "repz cmps Xb,Yb");
5127 switch (pVCpu->iem.s.enmEffAddrMode)
5128 {
5129 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5130 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5131 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5132 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5133 }
5134 }
5135 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5136 {
5137 IEMOP_MNEMONIC(repnz_cmps_Xb_Yb, "repnz cmps Xb,Yb");
5138 switch (pVCpu->iem.s.enmEffAddrMode)
5139 {
5140 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr16, pVCpu->iem.s.iEffSeg);
5141 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr32, pVCpu->iem.s.iEffSeg);
5142 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op8_addr64, pVCpu->iem.s.iEffSeg);
5143 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5144 }
5145 }
5146 IEMOP_MNEMONIC(cmps_Xb_Yb, "cmps Xb,Yb");
5147
5148 /*
5149 * Sharing case implementation with cmps[wdq] below.
5150 */
5151 switch (pVCpu->iem.s.enmEffAddrMode)
5152 {
5153 case IEMMODE_16BIT: IEM_CMPS_CASE(8, 16); break;
5154 case IEMMODE_32BIT: IEM_CMPS_CASE(8, 32); break;
5155 case IEMMODE_64BIT: IEM_CMPS_CASE(8, 64); break;
5156 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5157 }
5158 return VINF_SUCCESS;
5159
5160}
5161
5162
5163/**
5164 * @opcode 0xa7
5165 */
5166FNIEMOP_DEF(iemOp_cmpswd_Xv_Yv)
5167{
5168 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5169
5170 /*
5171 * Use the C implementation if a repeat prefix is encountered.
5172 */
5173 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5174 {
5175 IEMOP_MNEMONIC(repe_cmps_Xv_Yv, "repe cmps Xv,Yv");
5176 switch (pVCpu->iem.s.enmEffOpSize)
5177 {
5178 case IEMMODE_16BIT:
5179 switch (pVCpu->iem.s.enmEffAddrMode)
5180 {
5181 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5182 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5183 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5184 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5185 }
5186 break;
5187 case IEMMODE_32BIT:
5188 switch (pVCpu->iem.s.enmEffAddrMode)
5189 {
5190 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5191 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5192 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5193 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5194 }
5195 case IEMMODE_64BIT:
5196 switch (pVCpu->iem.s.enmEffAddrMode)
5197 {
5198 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_4);
5199 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5200 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repe_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5201 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5202 }
5203 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5204 }
5205 }
5206
5207 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5208 {
5209 IEMOP_MNEMONIC(repne_cmps_Xv_Yv, "repne cmps Xv,Yv");
5210 switch (pVCpu->iem.s.enmEffOpSize)
5211 {
5212 case IEMMODE_16BIT:
5213 switch (pVCpu->iem.s.enmEffAddrMode)
5214 {
5215 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr16, pVCpu->iem.s.iEffSeg);
5216 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr32, pVCpu->iem.s.iEffSeg);
5217 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op16_addr64, pVCpu->iem.s.iEffSeg);
5218 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5219 }
5220 break;
5221 case IEMMODE_32BIT:
5222 switch (pVCpu->iem.s.enmEffAddrMode)
5223 {
5224 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr16, pVCpu->iem.s.iEffSeg);
5225 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr32, pVCpu->iem.s.iEffSeg);
5226 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op32_addr64, pVCpu->iem.s.iEffSeg);
5227 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5228 }
5229 case IEMMODE_64BIT:
5230 switch (pVCpu->iem.s.enmEffAddrMode)
5231 {
5232 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_2);
5233 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr32, pVCpu->iem.s.iEffSeg);
5234 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_repne_cmps_op64_addr64, pVCpu->iem.s.iEffSeg);
5235 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5236 }
5237 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5238 }
5239 }
5240
5241 IEMOP_MNEMONIC(cmps_Xv_Yv, "cmps Xv,Yv");
5242
5243 /*
5244 * Annoying double switch here.
5245 * Using ugly macro for implementing the cases, sharing it with cmpsb.
5246 */
5247 switch (pVCpu->iem.s.enmEffOpSize)
5248 {
5249 case IEMMODE_16BIT:
5250 switch (pVCpu->iem.s.enmEffAddrMode)
5251 {
5252 case IEMMODE_16BIT: IEM_CMPS_CASE(16, 16); break;
5253 case IEMMODE_32BIT: IEM_CMPS_CASE(16, 32); break;
5254 case IEMMODE_64BIT: IEM_CMPS_CASE(16, 64); break;
5255 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5256 }
5257 break;
5258
5259 case IEMMODE_32BIT:
5260 switch (pVCpu->iem.s.enmEffAddrMode)
5261 {
5262 case IEMMODE_16BIT: IEM_CMPS_CASE(32, 16); break;
5263 case IEMMODE_32BIT: IEM_CMPS_CASE(32, 32); break;
5264 case IEMMODE_64BIT: IEM_CMPS_CASE(32, 64); break;
5265 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5266 }
5267 break;
5268
5269 case IEMMODE_64BIT:
5270 switch (pVCpu->iem.s.enmEffAddrMode)
5271 {
5272 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5273 case IEMMODE_32BIT: IEM_CMPS_CASE(64, 32); break;
5274 case IEMMODE_64BIT: IEM_CMPS_CASE(64, 64); break;
5275 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5276 }
5277 break;
5278 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5279 }
5280 return VINF_SUCCESS;
5281
5282}
5283
5284#undef IEM_CMPS_CASE
5285
5286/**
5287 * @opcode 0xa8
5288 */
5289FNIEMOP_DEF(iemOp_test_AL_Ib)
5290{
5291 IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
5292 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5293 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_AL_Ib, &g_iemAImpl_test);
5294}
5295
5296
5297/**
5298 * @opcode 0xa9
5299 */
5300FNIEMOP_DEF(iemOp_test_eAX_Iz)
5301{
5302 IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
5303 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
5304 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rAX_Iz, &g_iemAImpl_test);
5305}
5306
5307
/** Macro used by iemOp_stosb_Yb_AL and iemOp_stoswd_Yv_eAX
 * Emits a complete IEM_MC block that stores the low ValBits of rAX to
 * ES:[rDI] and then advances rDI by ValBits/8, up or down per EFLAGS.DF. */
#define IEM_STOS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##ValBits(uValue, X86_GREG_xAX); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr,  X86_GREG_xDI); \
        IEM_MC_STORE_MEM_U##ValBits(X86_SREG_ES, uAddr, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END(); \
5323
5324/**
5325 * @opcode 0xaa
5326 */
5327FNIEMOP_DEF(iemOp_stosb_Yb_AL)
5328{
5329 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5330
5331 /*
5332 * Use the C implementation if a repeat prefix is encountered.
5333 */
5334 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5335 {
5336 IEMOP_MNEMONIC(rep_stos_Yb_al, "rep stos Yb,al");
5337 switch (pVCpu->iem.s.enmEffAddrMode)
5338 {
5339 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m16);
5340 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m32);
5341 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_al_m64);
5342 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5343 }
5344 }
5345 IEMOP_MNEMONIC(stos_Yb_al, "stos Yb,al");
5346
5347 /*
5348 * Sharing case implementation with stos[wdq] below.
5349 */
5350 switch (pVCpu->iem.s.enmEffAddrMode)
5351 {
5352 case IEMMODE_16BIT: IEM_STOS_CASE(8, 16); break;
5353 case IEMMODE_32BIT: IEM_STOS_CASE(8, 32); break;
5354 case IEMMODE_64BIT: IEM_STOS_CASE(8, 64); break;
5355 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5356 }
5357 return VINF_SUCCESS;
5358}
5359
5360
5361/**
5362 * @opcode 0xab
5363 */
5364FNIEMOP_DEF(iemOp_stoswd_Yv_eAX)
5365{
5366 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5367
5368 /*
5369 * Use the C implementation if a repeat prefix is encountered.
5370 */
5371 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5372 {
5373 IEMOP_MNEMONIC(rep_stos_Yv_rAX, "rep stos Yv,rAX");
5374 switch (pVCpu->iem.s.enmEffOpSize)
5375 {
5376 case IEMMODE_16BIT:
5377 switch (pVCpu->iem.s.enmEffAddrMode)
5378 {
5379 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m16);
5380 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m32);
5381 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_ax_m64);
5382 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5383 }
5384 break;
5385 case IEMMODE_32BIT:
5386 switch (pVCpu->iem.s.enmEffAddrMode)
5387 {
5388 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m16);
5389 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m32);
5390 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_eax_m64);
5391 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5392 }
5393 case IEMMODE_64BIT:
5394 switch (pVCpu->iem.s.enmEffAddrMode)
5395 {
5396 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_9);
5397 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m32);
5398 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stos_rax_m64);
5399 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5400 }
5401 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5402 }
5403 }
5404 IEMOP_MNEMONIC(stos_Yv_rAX, "stos Yv,rAX");
5405
5406 /*
5407 * Annoying double switch here.
5408 * Using ugly macro for implementing the cases, sharing it with stosb.
5409 */
5410 switch (pVCpu->iem.s.enmEffOpSize)
5411 {
5412 case IEMMODE_16BIT:
5413 switch (pVCpu->iem.s.enmEffAddrMode)
5414 {
5415 case IEMMODE_16BIT: IEM_STOS_CASE(16, 16); break;
5416 case IEMMODE_32BIT: IEM_STOS_CASE(16, 32); break;
5417 case IEMMODE_64BIT: IEM_STOS_CASE(16, 64); break;
5418 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5419 }
5420 break;
5421
5422 case IEMMODE_32BIT:
5423 switch (pVCpu->iem.s.enmEffAddrMode)
5424 {
5425 case IEMMODE_16BIT: IEM_STOS_CASE(32, 16); break;
5426 case IEMMODE_32BIT: IEM_STOS_CASE(32, 32); break;
5427 case IEMMODE_64BIT: IEM_STOS_CASE(32, 64); break;
5428 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5429 }
5430 break;
5431
5432 case IEMMODE_64BIT:
5433 switch (pVCpu->iem.s.enmEffAddrMode)
5434 {
5435 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5436 case IEMMODE_32BIT: IEM_STOS_CASE(64, 32); break;
5437 case IEMMODE_64BIT: IEM_STOS_CASE(64, 64); break;
5438 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5439 }
5440 break;
5441 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5442 }
5443 return VINF_SUCCESS;
5444}
5445
5446#undef IEM_STOS_CASE
5447
/** Macro used by iemOp_lodsb_AL_Xb and iemOp_lodswd_eAX_Xv
 * Emits a complete IEM_MC block that loads ValBits bits from
 * iEffSeg:[rSI] into the low part of rAX and then advances rSI by
 * ValBits/8, up or down per EFLAGS.DF. */
#define IEM_LODS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(0, 2); \
        IEM_MC_LOCAL(uint##ValBits##_t, uValue); \
        IEM_MC_LOCAL(RTGCPTR,  uAddr); \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xSI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, pVCpu->iem.s.iEffSeg, uAddr); \
        IEM_MC_STORE_GREG_U##ValBits(X86_GREG_xAX, uValue); \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xSI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5463
5464/**
5465 * @opcode 0xac
5466 */
5467FNIEMOP_DEF(iemOp_lodsb_AL_Xb)
5468{
5469 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5470
5471 /*
5472 * Use the C implementation if a repeat prefix is encountered.
5473 */
5474 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5475 {
5476 IEMOP_MNEMONIC(rep_lodsb_AL_Xb, "rep lodsb AL,Xb");
5477 switch (pVCpu->iem.s.enmEffAddrMode)
5478 {
5479 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m16, pVCpu->iem.s.iEffSeg);
5480 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m32, pVCpu->iem.s.iEffSeg);
5481 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_al_m64, pVCpu->iem.s.iEffSeg);
5482 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5483 }
5484 }
5485 IEMOP_MNEMONIC(lodsb_AL_Xb, "lodsb AL,Xb");
5486
5487 /*
5488 * Sharing case implementation with stos[wdq] below.
5489 */
5490 switch (pVCpu->iem.s.enmEffAddrMode)
5491 {
5492 case IEMMODE_16BIT: IEM_LODS_CASE(8, 16); break;
5493 case IEMMODE_32BIT: IEM_LODS_CASE(8, 32); break;
5494 case IEMMODE_64BIT: IEM_LODS_CASE(8, 64); break;
5495 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5496 }
5497 return VINF_SUCCESS;
5498}
5499
5500
5501/**
5502 * @opcode 0xad
5503 */
5504FNIEMOP_DEF(iemOp_lodswd_eAX_Xv)
5505{
5506 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5507
5508 /*
5509 * Use the C implementation if a repeat prefix is encountered.
5510 */
5511 if (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ))
5512 {
5513 IEMOP_MNEMONIC(rep_lods_rAX_Xv, "rep lods rAX,Xv");
5514 switch (pVCpu->iem.s.enmEffOpSize)
5515 {
5516 case IEMMODE_16BIT:
5517 switch (pVCpu->iem.s.enmEffAddrMode)
5518 {
5519 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m16, pVCpu->iem.s.iEffSeg);
5520 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m32, pVCpu->iem.s.iEffSeg);
5521 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_ax_m64, pVCpu->iem.s.iEffSeg);
5522 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5523 }
5524 break;
5525 case IEMMODE_32BIT:
5526 switch (pVCpu->iem.s.enmEffAddrMode)
5527 {
5528 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m16, pVCpu->iem.s.iEffSeg);
5529 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m32, pVCpu->iem.s.iEffSeg);
5530 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_eax_m64, pVCpu->iem.s.iEffSeg);
5531 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5532 }
5533 case IEMMODE_64BIT:
5534 switch (pVCpu->iem.s.enmEffAddrMode)
5535 {
5536 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_7);
5537 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m32, pVCpu->iem.s.iEffSeg);
5538 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_lods_rax_m64, pVCpu->iem.s.iEffSeg);
5539 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5540 }
5541 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5542 }
5543 }
5544 IEMOP_MNEMONIC(lods_rAX_Xv, "lods rAX,Xv");
5545
5546 /*
5547 * Annoying double switch here.
5548 * Using ugly macro for implementing the cases, sharing it with lodsb.
5549 */
5550 switch (pVCpu->iem.s.enmEffOpSize)
5551 {
5552 case IEMMODE_16BIT:
5553 switch (pVCpu->iem.s.enmEffAddrMode)
5554 {
5555 case IEMMODE_16BIT: IEM_LODS_CASE(16, 16); break;
5556 case IEMMODE_32BIT: IEM_LODS_CASE(16, 32); break;
5557 case IEMMODE_64BIT: IEM_LODS_CASE(16, 64); break;
5558 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5559 }
5560 break;
5561
5562 case IEMMODE_32BIT:
5563 switch (pVCpu->iem.s.enmEffAddrMode)
5564 {
5565 case IEMMODE_16BIT: IEM_LODS_CASE(32, 16); break;
5566 case IEMMODE_32BIT: IEM_LODS_CASE(32, 32); break;
5567 case IEMMODE_64BIT: IEM_LODS_CASE(32, 64); break;
5568 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5569 }
5570 break;
5571
5572 case IEMMODE_64BIT:
5573 switch (pVCpu->iem.s.enmEffAddrMode)
5574 {
5575 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5576 case IEMMODE_32BIT: IEM_LODS_CASE(64, 32); break;
5577 case IEMMODE_64BIT: IEM_LODS_CASE(64, 64); break;
5578 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5579 }
5580 break;
5581 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5582 }
5583 return VINF_SUCCESS;
5584}
5585
5586#undef IEM_LODS_CASE
5587
/** Macro used by iemOp_scasb_AL_Xb and iemOp_scaswd_eAX_Xv
 * Emits a complete IEM_MC block that compares the low ValBits of rAX with
 * the value at ES:[rDI] via the CMP worker (EFLAGS only; rAX is not
 * modified) and then advances rDI by ValBits/8, up or down per EFLAGS.DF. */
#define IEM_SCAS_CASE(ValBits, AddrBits) \
        IEM_MC_BEGIN(3, 2); \
        IEM_MC_ARG(uint##ValBits##_t *, puRax,   0); \
        IEM_MC_ARG(uint##ValBits##_t,   uValue,  1); \
        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
        IEM_MC_LOCAL(RTGCPTR,           uAddr); \
        \
        IEM_MC_FETCH_GREG_U##AddrBits##_ZX_U64(uAddr, X86_GREG_xDI); \
        IEM_MC_FETCH_MEM_U##ValBits(uValue, X86_SREG_ES, uAddr); \
        IEM_MC_REF_GREG_U##ValBits(puRax, X86_GREG_xAX); \
        IEM_MC_REF_EFLAGS(pEFlags); \
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cmp_u##ValBits, puRax, uValue, pEFlags); \
        \
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_DF) { \
            IEM_MC_SUB_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ELSE() { \
            IEM_MC_ADD_GREG_U##AddrBits(X86_GREG_xDI, ValBits / 8); \
        } IEM_MC_ENDIF(); \
        IEM_MC_ADVANCE_RIP(); \
        IEM_MC_END();
5609
5610/**
5611 * @opcode 0xae
5612 */
5613FNIEMOP_DEF(iemOp_scasb_AL_Xb)
5614{
5615 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5616
5617 /*
5618 * Use the C implementation if a repeat prefix is encountered.
5619 */
5620 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5621 {
5622 IEMOP_MNEMONIC(repe_scasb_AL_Xb, "repe scasb AL,Xb");
5623 switch (pVCpu->iem.s.enmEffAddrMode)
5624 {
5625 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m16);
5626 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m32);
5627 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_al_m64);
5628 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5629 }
5630 }
5631 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5632 {
5633 IEMOP_MNEMONIC(repone_scasb_AL_Xb, "repne scasb AL,Xb");
5634 switch (pVCpu->iem.s.enmEffAddrMode)
5635 {
5636 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m16);
5637 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m32);
5638 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_al_m64);
5639 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5640 }
5641 }
5642 IEMOP_MNEMONIC(scasb_AL_Xb, "scasb AL,Xb");
5643
5644 /*
5645 * Sharing case implementation with stos[wdq] below.
5646 */
5647 switch (pVCpu->iem.s.enmEffAddrMode)
5648 {
5649 case IEMMODE_16BIT: IEM_SCAS_CASE(8, 16); break;
5650 case IEMMODE_32BIT: IEM_SCAS_CASE(8, 32); break;
5651 case IEMMODE_64BIT: IEM_SCAS_CASE(8, 64); break;
5652 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5653 }
5654 return VINF_SUCCESS;
5655}
5656
5657
5658/**
5659 * @opcode 0xaf
5660 */
5661FNIEMOP_DEF(iemOp_scaswd_eAX_Xv)
5662{
5663 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5664
5665 /*
5666 * Use the C implementation if a repeat prefix is encountered.
5667 */
5668 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPZ)
5669 {
5670 IEMOP_MNEMONIC(repe_scas_rAX_Xv, "repe scas rAX,Xv");
5671 switch (pVCpu->iem.s.enmEffOpSize)
5672 {
5673 case IEMMODE_16BIT:
5674 switch (pVCpu->iem.s.enmEffAddrMode)
5675 {
5676 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m16);
5677 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m32);
5678 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_ax_m64);
5679 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5680 }
5681 break;
5682 case IEMMODE_32BIT:
5683 switch (pVCpu->iem.s.enmEffAddrMode)
5684 {
5685 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m16);
5686 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m32);
5687 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_eax_m64);
5688 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5689 }
5690 case IEMMODE_64BIT:
5691 switch (pVCpu->iem.s.enmEffAddrMode)
5692 {
5693 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_6); /** @todo It's this wrong, we can do 16-bit addressing in 64-bit mode, but not 32-bit. right? */
5694 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m32);
5695 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repe_scas_rax_m64);
5696 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5697 }
5698 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5699 }
5700 }
5701 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REPNZ)
5702 {
5703 IEMOP_MNEMONIC(repne_scas_rAX_Xv, "repne scas rAX,Xv");
5704 switch (pVCpu->iem.s.enmEffOpSize)
5705 {
5706 case IEMMODE_16BIT:
5707 switch (pVCpu->iem.s.enmEffAddrMode)
5708 {
5709 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m16);
5710 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m32);
5711 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_ax_m64);
5712 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5713 }
5714 break;
5715 case IEMMODE_32BIT:
5716 switch (pVCpu->iem.s.enmEffAddrMode)
5717 {
5718 case IEMMODE_16BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m16);
5719 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m32);
5720 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_eax_m64);
5721 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5722 }
5723 case IEMMODE_64BIT:
5724 switch (pVCpu->iem.s.enmEffAddrMode)
5725 {
5726 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_5);
5727 case IEMMODE_32BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m32);
5728 case IEMMODE_64BIT: return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_repne_scas_rax_m64);
5729 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5730 }
5731 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5732 }
5733 }
5734 IEMOP_MNEMONIC(scas_rAX_Xv, "scas rAX,Xv");
5735
5736 /*
5737 * Annoying double switch here.
5738 * Using ugly macro for implementing the cases, sharing it with scasb.
5739 */
5740 switch (pVCpu->iem.s.enmEffOpSize)
5741 {
5742 case IEMMODE_16BIT:
5743 switch (pVCpu->iem.s.enmEffAddrMode)
5744 {
5745 case IEMMODE_16BIT: IEM_SCAS_CASE(16, 16); break;
5746 case IEMMODE_32BIT: IEM_SCAS_CASE(16, 32); break;
5747 case IEMMODE_64BIT: IEM_SCAS_CASE(16, 64); break;
5748 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5749 }
5750 break;
5751
5752 case IEMMODE_32BIT:
5753 switch (pVCpu->iem.s.enmEffAddrMode)
5754 {
5755 case IEMMODE_16BIT: IEM_SCAS_CASE(32, 16); break;
5756 case IEMMODE_32BIT: IEM_SCAS_CASE(32, 32); break;
5757 case IEMMODE_64BIT: IEM_SCAS_CASE(32, 64); break;
5758 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5759 }
5760 break;
5761
5762 case IEMMODE_64BIT:
5763 switch (pVCpu->iem.s.enmEffAddrMode)
5764 {
5765 case IEMMODE_16BIT: AssertFailedReturn(VERR_IEM_IPE_1); /* cannot be encoded */ break;
5766 case IEMMODE_32BIT: IEM_SCAS_CASE(64, 32); break;
5767 case IEMMODE_64BIT: IEM_SCAS_CASE(64, 64); break;
5768 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5769 }
5770 break;
5771 IEM_NOT_REACHED_DEFAULT_CASE_RET();
5772 }
5773 return VINF_SUCCESS;
5774}
5775
5776#undef IEM_SCAS_CASE
5777
5778/**
5779 * Common 'mov r8, imm8' helper.
5780 */
5781FNIEMOP_DEF_1(iemOpCommonMov_r8_Ib, uint8_t, iReg)
5782{
5783 uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
5784 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5785
5786 IEM_MC_BEGIN(0, 1);
5787 IEM_MC_LOCAL_CONST(uint8_t, u8Value,/*=*/ u8Imm);
5788 IEM_MC_STORE_GREG_U8(iReg, u8Value);
5789 IEM_MC_ADVANCE_RIP();
5790 IEM_MC_END();
5791
5792 return VINF_SUCCESS;
5793}
5794
5795
5796/**
5797 * @opcode 0xb0
5798 */
5799FNIEMOP_DEF(iemOp_mov_AL_Ib)
5800{
5801 IEMOP_MNEMONIC(mov_AL_Ib, "mov AL,Ib");
5802 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5803}
5804
5805
5806/**
5807 * @opcode 0xb1
5808 */
5809FNIEMOP_DEF(iemOp_CL_Ib)
5810{
5811 IEMOP_MNEMONIC(mov_CL_Ib, "mov CL,Ib");
5812 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5813}
5814
5815
5816/**
5817 * @opcode 0xb2
5818 */
5819FNIEMOP_DEF(iemOp_DL_Ib)
5820{
5821 IEMOP_MNEMONIC(mov_DL_Ib, "mov DL,Ib");
5822 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5823}
5824
5825
5826/**
5827 * @opcode 0xb3
5828 */
5829FNIEMOP_DEF(iemOp_BL_Ib)
5830{
5831 IEMOP_MNEMONIC(mov_BL_Ib, "mov BL,Ib");
5832 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5833}
5834
5835
5836/**
5837 * @opcode 0xb4
5838 */
5839FNIEMOP_DEF(iemOp_mov_AH_Ib)
5840{
5841 IEMOP_MNEMONIC(mov_AH_Ib, "mov AH,Ib");
5842 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5843}
5844
5845
5846/**
5847 * @opcode 0xb5
5848 */
5849FNIEMOP_DEF(iemOp_CH_Ib)
5850{
5851 IEMOP_MNEMONIC(mov_CH_Ib, "mov CH,Ib");
5852 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5853}
5854
5855
5856/**
5857 * @opcode 0xb6
5858 */
5859FNIEMOP_DEF(iemOp_DH_Ib)
5860{
5861 IEMOP_MNEMONIC(mov_DH_Ib, "mov DH,Ib");
5862 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5863}
5864
5865
5866/**
5867 * @opcode 0xb7
5868 */
5869FNIEMOP_DEF(iemOp_BH_Ib)
5870{
5871 IEMOP_MNEMONIC(mov_BH_Ib, "mov BH,Ib");
5872 return FNIEMOP_CALL_1(iemOpCommonMov_r8_Ib, X86_GREG_xDI | pVCpu->iem.s.uRexB);
5873}
5874
5875
5876/**
5877 * Common 'mov regX,immX' helper.
5878 */
5879FNIEMOP_DEF_1(iemOpCommonMov_Rv_Iv, uint8_t, iReg)
5880{
5881 switch (pVCpu->iem.s.enmEffOpSize)
5882 {
5883 case IEMMODE_16BIT:
5884 {
5885 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
5886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5887
5888 IEM_MC_BEGIN(0, 1);
5889 IEM_MC_LOCAL_CONST(uint16_t, u16Value,/*=*/ u16Imm);
5890 IEM_MC_STORE_GREG_U16(iReg, u16Value);
5891 IEM_MC_ADVANCE_RIP();
5892 IEM_MC_END();
5893 break;
5894 }
5895
5896 case IEMMODE_32BIT:
5897 {
5898 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
5899 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5900
5901 IEM_MC_BEGIN(0, 1);
5902 IEM_MC_LOCAL_CONST(uint32_t, u32Value,/*=*/ u32Imm);
5903 IEM_MC_STORE_GREG_U32(iReg, u32Value);
5904 IEM_MC_ADVANCE_RIP();
5905 IEM_MC_END();
5906 break;
5907 }
5908 case IEMMODE_64BIT:
5909 {
5910 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_U64(&u64Imm); /* 64-bit immediate! */
5911 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
5912
5913 IEM_MC_BEGIN(0, 1);
5914 IEM_MC_LOCAL_CONST(uint64_t, u64Value,/*=*/ u64Imm);
5915 IEM_MC_STORE_GREG_U64(iReg, u64Value);
5916 IEM_MC_ADVANCE_RIP();
5917 IEM_MC_END();
5918 break;
5919 }
5920 }
5921
5922 return VINF_SUCCESS;
5923}
5924
5925
5926/**
5927 * @opcode 0xb8
5928 */
5929FNIEMOP_DEF(iemOp_eAX_Iv)
5930{
5931 IEMOP_MNEMONIC(mov_rAX_IV, "mov rAX,IV");
5932 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xAX | pVCpu->iem.s.uRexB);
5933}
5934
5935
5936/**
5937 * @opcode 0xb9
5938 */
5939FNIEMOP_DEF(iemOp_eCX_Iv)
5940{
5941 IEMOP_MNEMONIC(mov_rCX_IV, "mov rCX,IV");
5942 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xCX | pVCpu->iem.s.uRexB);
5943}
5944
5945
5946/**
5947 * @opcode 0xba
5948 */
5949FNIEMOP_DEF(iemOp_eDX_Iv)
5950{
5951 IEMOP_MNEMONIC(mov_rDX_IV, "mov rDX,IV");
5952 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDX | pVCpu->iem.s.uRexB);
5953}
5954
5955
5956/**
5957 * @opcode 0xbb
5958 */
5959FNIEMOP_DEF(iemOp_eBX_Iv)
5960{
5961 IEMOP_MNEMONIC(mov_rBX_IV, "mov rBX,IV");
5962 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBX | pVCpu->iem.s.uRexB);
5963}
5964
5965
5966/**
5967 * @opcode 0xbc
5968 */
5969FNIEMOP_DEF(iemOp_eSP_Iv)
5970{
5971 IEMOP_MNEMONIC(mov_rSP_IV, "mov rSP,IV");
5972 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSP | pVCpu->iem.s.uRexB);
5973}
5974
5975
5976/**
5977 * @opcode 0xbd
5978 */
5979FNIEMOP_DEF(iemOp_eBP_Iv)
5980{
5981 IEMOP_MNEMONIC(mov_rBP_IV, "mov rBP,IV");
5982 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xBP | pVCpu->iem.s.uRexB);
5983}
5984
5985
5986/**
5987 * @opcode 0xbe
5988 */
5989FNIEMOP_DEF(iemOp_eSI_Iv)
5990{
5991 IEMOP_MNEMONIC(mov_rSI_IV, "mov rSI,IV");
5992 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xSI | pVCpu->iem.s.uRexB);
5993}
5994
5995
5996/**
5997 * @opcode 0xbf
5998 */
5999FNIEMOP_DEF(iemOp_eDI_Iv)
6000{
6001 IEMOP_MNEMONIC(mov_rDI_IV, "mov rDI,IV");
6002 return FNIEMOP_CALL_1(iemOpCommonMov_Rv_Iv, X86_GREG_xDI | pVCpu->iem.s.uRexB);
6003}
6004
6005
6006/**
6007 * @opcode 0xc0
6008 */
6009FNIEMOP_DEF(iemOp_Grp2_Eb_Ib)
6010{
6011 IEMOP_HLP_MIN_186();
6012 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6013 PCIEMOPSHIFTSIZES pImpl;
6014 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6015 {
6016 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_Ib, "rol Eb,Ib"); break;
6017 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_Ib, "ror Eb,Ib"); break;
6018 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_Ib, "rcl Eb,Ib"); break;
6019 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_Ib, "rcr Eb,Ib"); break;
6020 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_Ib, "shl Eb,Ib"); break;
6021 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_Ib, "shr Eb,Ib"); break;
6022 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_Ib, "sar Eb,Ib"); break;
6023 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6024 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6025 }
6026 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6027
6028 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6029 {
6030 /* register */
6031 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6032 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6033 IEM_MC_BEGIN(3, 0);
6034 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6035 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6036 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6037 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6038 IEM_MC_REF_EFLAGS(pEFlags);
6039 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6040 IEM_MC_ADVANCE_RIP();
6041 IEM_MC_END();
6042 }
6043 else
6044 {
6045 /* memory */
6046 IEM_MC_BEGIN(3, 2);
6047 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
6048 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6049 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6050 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6051
6052 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6053 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6054 IEM_MC_ASSIGN(cShiftArg, cShift);
6055 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6056 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6057 IEM_MC_FETCH_EFLAGS(EFlags);
6058 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
6059
6060 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
6061 IEM_MC_COMMIT_EFLAGS(EFlags);
6062 IEM_MC_ADVANCE_RIP();
6063 IEM_MC_END();
6064 }
6065 return VINF_SUCCESS;
6066}
6067
6068
6069/**
6070 * @opcode 0xc1
6071 */
6072FNIEMOP_DEF(iemOp_Grp2_Ev_Ib)
6073{
6074 IEMOP_HLP_MIN_186();
6075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
6076 PCIEMOPSHIFTSIZES pImpl;
6077 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
6078 {
6079 case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_Ib, "rol Ev,Ib"); break;
6080 case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_Ib, "ror Ev,Ib"); break;
6081 case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_Ib, "rcl Ev,Ib"); break;
6082 case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_Ib, "rcr Ev,Ib"); break;
6083 case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_Ib, "shl Ev,Ib"); break;
6084 case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_Ib, "shr Ev,Ib"); break;
6085 case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_Ib, "sar Ev,Ib"); break;
6086 case 6: return IEMOP_RAISE_INVALID_OPCODE();
6087 IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
6088 }
6089 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);
6090
6091 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
6092 {
6093 /* register */
6094 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6095 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6096 switch (pVCpu->iem.s.enmEffOpSize)
6097 {
6098 case IEMMODE_16BIT:
6099 IEM_MC_BEGIN(3, 0);
6100 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6101 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6102 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6103 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6104 IEM_MC_REF_EFLAGS(pEFlags);
6105 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6106 IEM_MC_ADVANCE_RIP();
6107 IEM_MC_END();
6108 return VINF_SUCCESS;
6109
6110 case IEMMODE_32BIT:
6111 IEM_MC_BEGIN(3, 0);
6112 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6113 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6114 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6115 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6116 IEM_MC_REF_EFLAGS(pEFlags);
6117 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6118 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
6119 IEM_MC_ADVANCE_RIP();
6120 IEM_MC_END();
6121 return VINF_SUCCESS;
6122
6123 case IEMMODE_64BIT:
6124 IEM_MC_BEGIN(3, 0);
6125 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6126 IEM_MC_ARG_CONST(uint8_t, cShiftArg, cShift, 1);
6127 IEM_MC_ARG(uint32_t *, pEFlags, 2);
6128 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
6129 IEM_MC_REF_EFLAGS(pEFlags);
6130 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6131 IEM_MC_ADVANCE_RIP();
6132 IEM_MC_END();
6133 return VINF_SUCCESS;
6134
6135 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6136 }
6137 }
6138 else
6139 {
6140 /* memory */
6141 switch (pVCpu->iem.s.enmEffOpSize)
6142 {
6143 case IEMMODE_16BIT:
6144 IEM_MC_BEGIN(3, 2);
6145 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
6146 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6147 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6148 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6149
6150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6151 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6152 IEM_MC_ASSIGN(cShiftArg, cShift);
6153 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6154 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6155 IEM_MC_FETCH_EFLAGS(EFlags);
6156 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
6157
6158 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
6159 IEM_MC_COMMIT_EFLAGS(EFlags);
6160 IEM_MC_ADVANCE_RIP();
6161 IEM_MC_END();
6162 return VINF_SUCCESS;
6163
6164 case IEMMODE_32BIT:
6165 IEM_MC_BEGIN(3, 2);
6166 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
6167 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6168 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6169 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6170
6171 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6172 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6173 IEM_MC_ASSIGN(cShiftArg, cShift);
6174 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6175 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6176 IEM_MC_FETCH_EFLAGS(EFlags);
6177 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
6178
6179 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
6180 IEM_MC_COMMIT_EFLAGS(EFlags);
6181 IEM_MC_ADVANCE_RIP();
6182 IEM_MC_END();
6183 return VINF_SUCCESS;
6184
6185 case IEMMODE_64BIT:
6186 IEM_MC_BEGIN(3, 2);
6187 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
6188 IEM_MC_ARG(uint8_t, cShiftArg, 1);
6189 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
6190 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
6191
6192 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
6193 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift);
6194 IEM_MC_ASSIGN(cShiftArg, cShift);
6195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6196 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
6197 IEM_MC_FETCH_EFLAGS(EFlags);
6198 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
6199
6200 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
6201 IEM_MC_COMMIT_EFLAGS(EFlags);
6202 IEM_MC_ADVANCE_RIP();
6203 IEM_MC_END();
6204 return VINF_SUCCESS;
6205
6206 IEM_NOT_REACHED_DEFAULT_CASE_RET();
6207 }
6208 }
6209}
6210
6211
/**
 * @opcode 0xc2
 *
 * Near return, releasing Iw bytes of stack parameters: pops the return
 * address and then adds the immediate to the stack pointer.
 */
FNIEMOP_DEF(iemOp_retn_Iw)
{
    IEMOP_MNEMONIC(retn_Iw, "retn Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6223
6224
/**
 * @opcode 0xc3
 *
 * Plain near return; same C implementation as 0xC2 but with a zero
 * parameter-release count.
 */
FNIEMOP_DEF(iemOp_retn)
{
    IEMOP_MNEMONIC(retn, "retn");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); /* near return defaults to 64-bit operand size in long mode */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retn, pVCpu->iem.s.enmEffOpSize, 0);
}
6235
6236
/**
 * @opcode 0xc4
 *
 * Double duty: LES Gv,Mp in legacy/compatibility mode with a memory
 * operand, otherwise the 3-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_les_Gv_Mp__vex3)
{
    /* The LES instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R and REX.X to the two MOD bits, since the REX bits are ignored
       outside of 64-bit mode. VEX is not available in real or v86 mode. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT) )
    {
        IEMOP_MNEMONIC(vex3_prefix, "vex3");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bVex2; IEM_OPCODE_GET_NEXT_U8(&bVex2);
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            if ((bVex2 & 0x80 /* VEX.W */) && pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
                pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_SIZE_REX_W;
            /* VEX stores R/X/B/vvvv inverted; un-invert and shift into the REX fields. */
            pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uRexIndex = (~bRm >> (6 - 3)) & 0x8;
            pVCpu->iem.s.uRexB = (~bRm >> (5 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bVex2 >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bVex2 >> 2) & 1;
            pVCpu->iem.s.idxPrefix = bVex2 & 0x3;

            /* VEX.mmmmm selects the opcode map. */
            switch (bRm & 0x1f)
            {
                case 1: /* 0x0f lead opcode byte. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 2: /* 0x0f 0x38 lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap2[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                case 3: /* 0x0f 0x3a lead opcode bytes. */
#ifdef IEM_WITH_VEX
                    return FNIEMOP_CALL(g_apfnVexMap3[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
                    IEMOP_BITCH_ABOUT_STUB();
                    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif

                default:
                    Log(("VEX3: Invalid vvvv value: %#x!\n", bRm & 0x1f));
                    return IEMOP_RAISE_INVALID_OPCODE();
            }
        }
        Log(("VEX3: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy path: load ES:reg from the far pointer at the memory operand. */
    IEMOP_MNEMONIC(les_Gv_Mp, "les Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_ES, bRm);
}
6306
6307
/**
 * @opcode 0xc5
 *
 * Double duty: LDS Gv,Mp in legacy/compatibility mode with a memory
 * operand, otherwise the 2-byte VEX prefix.
 */
FNIEMOP_DEF(iemOp_lds_Gv_Mp__vex2)
{
    /* The LDS instruction is invalid 64-bit mode. In legacy and
       compatibility mode it is invalid with MOD=3.
       The use as a VEX prefix is made possible by assigning the inverted
       REX.R to the top MOD bit, and the top bit in the inverted register
       specifier to the bottom MOD bit, thereby effectively limiting 32-bit
       to accessing registers 0..7 in this VEX form. */
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ( pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT
        || (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_MNEMONIC(vex2_prefix, "vex2");
        if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
        {
            /* Note! The real mode, v8086 mode and invalid prefix checks are done once
               the instruction is fully decoded. Even when XCR0=3 and CR4.OSXSAVE=0. */
            uint8_t bOpcode; IEM_OPCODE_GET_NEXT_U8(&bOpcode);
            pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_VEX;
            /* 2-byte VEX packs R/vvvv/L/pp into the single byte; fields are inverted. */
            pVCpu->iem.s.uRexReg = (~bRm >> (7 - 3)) & 0x8;
            pVCpu->iem.s.uVex3rdReg = (~bRm >> 3) & 0xf;
            pVCpu->iem.s.uVexLength = (bRm >> 2) & 1;
            pVCpu->iem.s.idxPrefix = bRm & 0x3;

            /* 2-byte VEX always implies the 0x0f opcode map. */
#ifdef IEM_WITH_VEX
            return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
#else
            IEMOP_BITCH_ABOUT_STUB();
            return VERR_IEM_INSTR_NOT_IMPLEMENTED;
#endif
        }

        /** @todo does intel completely decode the sequence with SIB/disp before \#UD? */
        Log(("VEX2: AVX support disabled!\n"));
        return IEMOP_RAISE_INVALID_OPCODE();
    }

    /* Legacy path: load DS:reg from the far pointer at the memory operand. */
    IEMOP_MNEMONIC(lds_Gv_Mp, "lds Gv,Mp");
    return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_DS, bRm);
}
6351
6352
/**
 * @opcode 0xc6
 *
 * Group 11 byte form; only the /0 encoding (mov Eb,Ib) is defined, all
 * other reg-field values raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp11_Eb_Ib)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Eb,Ib in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Eb_Ib, "mov Eb,Ib");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(0, 0);
        IEM_MC_STORE_GREG_U8((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); /* 1 = immediate byte still to be fetched */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_STORE_MEM_U8(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u8Imm);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6387
6388
/**
 * @opcode 0xc7
 *
 * Group 11 word/dword/qword form; only the /0 encoding (mov Ev,Iz) is
 * defined, all other reg-field values raise \#UD.  The 64-bit form uses a
 * sign-extended 32-bit immediate.
 */
FNIEMOP_DEF(iemOp_Grp11_Ev_Iz)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    if ((bRm & X86_MODRM_REG_MASK) != (0 << X86_MODRM_REG_SHIFT)) /* only mov Ev,Iz in this group. */
        return IEMOP_RAISE_INVALID_OPCODE();
    IEMOP_MNEMONIC(mov_Ev_Iz, "mov Ev,Iz");

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 0);
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 0);
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 0);
                /* 64-bit mov Ev,Iz sign-extends a 32-bit immediate. */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory access. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2); /* 2 = immediate word still to be fetched */
                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate dword still to be fetched */
                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4); /* 4 = immediate dword still to be fetched */
                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Imm);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6476
6477
6478
6479
/**
 * @opcode 0xc8
 *
 * ENTER: create a stack frame of cbFrame bytes with u8NestingLevel nesting
 * levels.  Requires a 186 or later.
 */
FNIEMOP_DEF(iemOp_enter_Iw_Ib)
{
    IEMOP_MNEMONIC(enter_Iw_Ib, "enter Iw,Ib");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    uint16_t cbFrame; IEM_OPCODE_GET_NEXT_U16(&cbFrame);
    uint8_t u8NestingLevel; IEM_OPCODE_GET_NEXT_U8(&u8NestingLevel);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_enter, pVCpu->iem.s.enmEffOpSize, cbFrame, u8NestingLevel);
}
6493
6494
/**
 * @opcode 0xc9
 *
 * LEAVE: tear down the stack frame set up by ENTER.  Requires a 186 or
 * later.
 */
FNIEMOP_DEF(iemOp_leave)
{
    IEMOP_MNEMONIC(leave, "leave");
    IEMOP_HLP_MIN_186();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_leave, pVCpu->iem.s.enmEffOpSize);
}
6506
6507
/**
 * @opcode 0xca
 *
 * Far return, releasing Iw bytes of stack parameters after popping the
 * return CS:IP/EIP/RIP.
 */
FNIEMOP_DEF(iemOp_retf_Iw)
{
    IEMOP_MNEMONIC(retf_Iw, "retf Iw");
    uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
}
6519
6520
/**
 * @opcode 0xcb
 *
 * Plain far return; same C implementation as 0xCA with a zero
 * parameter-release count.
 */
FNIEMOP_DEF(iemOp_retf)
{
    IEMOP_MNEMONIC(retf, "retf");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
}
6531
6532
6533/**
6534 * @opcode 0xcc
6535 */
6536FNIEMOP_DEF(iemOp_int3)
6537{
6538 IEMOP_MNEMONIC(int3, "int3");
6539 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
6540 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
6541}
6542
6543
/**
 * @opcode 0xcd
 *
 * INT n: software interrupt through vector u8Int.
 */
FNIEMOP_DEF(iemOp_int_Ib)
{
    IEMOP_MNEMONIC(int_Ib, "int Ib");
    uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, u8Int, IEMINT_INTN);
}
6554
6555
/**
 * @opcode 0xce
 *
 * INTO: raise \#OF if the overflow flag is set; invalid in 64-bit mode.
 * Uses IEM_MC_CALL_CIMPL_2 (not a defer) since the OF check is done by the
 * C implementation.
 */
FNIEMOP_DEF(iemOp_into)
{
    IEMOP_MNEMONIC(into, "into");
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(2, 0);
    IEM_MC_ARG_CONST(uint8_t, u8Int, /*=*/ X86_XCPT_OF, 0);
    IEM_MC_ARG_CONST(IEMINT, enmInt, /*=*/ IEMINT_INTO, 1);
    IEM_MC_CALL_CIMPL_2(iemCImpl_int, u8Int, enmInt);
    IEM_MC_END();
    return VINF_SUCCESS;
}
6571
6572
/**
 * @opcode 0xcf
 *
 * IRET: interrupt return for the current effective operand size.
 */
FNIEMOP_DEF(iemOp_iret)
{
    IEMOP_MNEMONIC(iret, "iret");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
}
6582
6583
/**
 * @opcode 0xd0
 *
 * Group 2 rotates/shifts of a byte operand by a constant count of 1:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,1.  The /6 encoding is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_1, "rol Eb,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_1, "ror Eb,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_1, "rcl Eb,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_1, "rcr Eb,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_1, "shl Eb,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_1, "shr Eb,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_1, "sar Eb,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=*/1, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6641
6642
6643
/**
 * @opcode 0xd1
 *
 * Group 2 rotates/shifts of a word/dword/qword operand by a constant count
 * of 1: rol/ror/rcl/rcr/shl/shr/sar Ev,1.  The /6 encoding is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_1)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_1, "rol Ev,1"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_1, "ror Ev,1"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_1, "rcl Ev,1"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_1, "rcr Ev,1"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_1, "shl Ev,1"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_1, "shr Ev,1"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_1, "sar Ev,1"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe, well... */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG_CONST(uint8_t, cShiftArg,/*=1*/1, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6777
6778
/**
 * @opcode 0xd2
 *
 * Group 2 rotates/shifts of a byte operand by CL:
 * rol/ror/rcl/rcr/shl/shr/sar Eb,CL.  The /6 encoding is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Eb_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Eb_CL, "rol Eb,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Eb_CL, "ror Eb,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Eb_CL, "rcl Eb,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Eb_CL, "rcr Eb,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Eb_CL, "shl Eb,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Eb_CL, "shr Eb,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Eb_CL, "sar Eb,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc, grr. */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *, pu8Dst, 0);
        IEM_MC_ARG(uint8_t, cShiftArg, 1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, cShiftArg, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
6838
6839
/**
 * @opcode 0xd3
 *
 * Group 2 rotates/shifts of a word/dword/qword operand by CL:
 * rol/ror/rcl/rcr/shl/shr/sar Ev,CL.  The /6 encoding is undefined.
 */
FNIEMOP_DEF(iemOp_Grp2_Ev_CL)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    PCIEMOPSHIFTSIZES pImpl;
    /* The ModR/M reg field selects the operation. */
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rol_eflags); IEMOP_MNEMONIC(rol_Ev_CL, "rol Ev,CL"); break;
        case 1: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_ror_eflags); IEMOP_MNEMONIC(ror_Ev_CL, "ror Ev,CL"); break;
        case 2: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcl_eflags); IEMOP_MNEMONIC(rcl_Ev_CL, "rcl Ev,CL"); break;
        case 3: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_rcr_eflags); IEMOP_MNEMONIC(rcr_Ev_CL, "rcr Ev,CL"); break;
        case 4: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shl_eflags); IEMOP_MNEMONIC(shl_Ev_CL, "shl Ev,CL"); break;
        case 5: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_shr_eflags); IEMOP_MNEMONIC(shr_Ev_CL, "shr Ev,CL"); break;
        case 7: pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_sar_eflags); IEMOP_MNEMONIC(sar_Ev_CL, "sar Ev,CL"); break;
        case 6: return IEMOP_RAISE_INVALID_OPCODE();
        IEM_NOT_REACHED_DEFAULT_CASE_RET(); /* gcc maybe stupid */
    }
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); /* 32-bit writes zero bits 63:32 */
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* memory */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint8_t, cShiftArg, 1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); /* shift count comes from CL */
                IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_EFLAGS(EFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, cShiftArg, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
6979
/**
 * @opcode 0xd4
 *
 * AAM: ASCII adjust AX after multiply.  Invalid in 64-bit mode; a zero
 * immediate (the divisor) raises \#DE before deferring to the C
 * implementation.
 */
FNIEMOP_DEF(iemOp_aam_Ib)
{
    IEMOP_MNEMONIC(aam_Ib, "aam Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    if (!bImm)
        return IEMOP_RAISE_DIVIDE_ERROR(); /* AAM divides by the immediate */
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aam, bImm);
}
6993
6994
/**
 * @opcode 0xd5
 *
 * AAD: ASCII adjust AX before division.  Invalid in 64-bit mode; unlike
 * AAM, a zero immediate is allowed (it multiplies, not divides).
 */
FNIEMOP_DEF(iemOp_aad_Ib)
{
    IEMOP_MNEMONIC(aad_Ib, "aad Ib");
    uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_aad, bImm);
}
7006
7007
/**
 * @opcode 0xd6
 *
 * SALC (undocumented): set AL to 0xff if CF is set, else to 0x00.
 * Invalid in 64-bit mode.
 */
FNIEMOP_DEF(iemOp_salc)
{
    IEMOP_MNEMONIC(salc, "salc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEMOP_HLP_NO_64BIT();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0xff);
    } IEM_MC_ELSE() {
        IEM_MC_STORE_GREG_U8_CONST(X86_GREG_xAX, 0x00);
    } IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
7027
7028
/**
 * @opcode 0xd7
 *
 * XLAT: AL = [seg:rBX + zero-extended AL], with the address register width
 * selected by the effective address mode.
 */
FNIEMOP_DEF(iemOp_xlat)
{
    IEMOP_MNEMONIC(xlat, "xlat");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    switch (pVCpu->iem.s.enmEffAddrMode)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint16_t, u16Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U16(u16Addr, X86_GREG_xAX); /* table index = zero-extended AL */
            IEM_MC_ADD_GREG_U16_TO_LOCAL(u16Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM16_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u16Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint32_t, u32Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U32(u32Addr, X86_GREG_xAX); /* table index = zero-extended AL */
            IEM_MC_ADD_GREG_U32_TO_LOCAL(u32Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM32_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u32Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(2, 0);
            IEM_MC_LOCAL(uint8_t, u8Tmp);
            IEM_MC_LOCAL(uint64_t, u64Addr);
            IEM_MC_FETCH_GREG_U8_ZX_U64(u64Addr, X86_GREG_xAX); /* table index = zero-extended AL */
            IEM_MC_ADD_GREG_U64_TO_LOCAL(u64Addr, X86_GREG_xBX);
            IEM_MC_FETCH_MEM_U8(u8Tmp, pVCpu->iem.s.iEffSeg, u64Addr);
            IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Tmp);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
7077
7078
7079/**
7080 * Common worker for FPU instructions working on ST0 and STn, and storing the
7081 * result in ST0.
7082 *
7083 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7084 */
7085FNIEMOP_DEF_2(iemOpHlpFpu_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7086{
7087 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7088
7089 IEM_MC_BEGIN(3, 1);
7090 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7091 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7092 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7093 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7094
7095 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7096 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7097 IEM_MC_PREPARE_FPU_USAGE();
7098 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7099 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7100 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7101 IEM_MC_ELSE()
7102 IEM_MC_FPU_STACK_UNDERFLOW(0);
7103 IEM_MC_ENDIF();
7104 IEM_MC_ADVANCE_RIP();
7105
7106 IEM_MC_END();
7107 return VINF_SUCCESS;
7108}
7109
7110
7111/**
7112 * Common worker for FPU instructions working on ST0 and STn, and only affecting
7113 * flags.
7114 *
7115 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7116 */
7117FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7118{
7119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7120
7121 IEM_MC_BEGIN(3, 1);
7122 IEM_MC_LOCAL(uint16_t, u16Fsw);
7123 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7124 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7125 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7126
7127 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7128 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7129 IEM_MC_PREPARE_FPU_USAGE();
7130 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7131 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7132 IEM_MC_UPDATE_FSW(u16Fsw);
7133 IEM_MC_ELSE()
7134 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7135 IEM_MC_ENDIF();
7136 IEM_MC_ADVANCE_RIP();
7137
7138 IEM_MC_END();
7139 return VINF_SUCCESS;
7140}
7141
7142
7143/**
7144 * Common worker for FPU instructions working on ST0 and STn, only affecting
7145 * flags, and popping when done.
7146 *
7147 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7148 */
7149FNIEMOP_DEF_2(iemOpHlpFpuNoStore_st0_stN_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
7150{
7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7152
7153 IEM_MC_BEGIN(3, 1);
7154 IEM_MC_LOCAL(uint16_t, u16Fsw);
7155 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7156 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7157 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7158
7159 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7160 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7161 IEM_MC_PREPARE_FPU_USAGE();
7162 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
7163 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
7164 IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
7165 IEM_MC_ELSE()
7166 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(UINT8_MAX);
7167 IEM_MC_ENDIF();
7168 IEM_MC_ADVANCE_RIP();
7169
7170 IEM_MC_END();
7171 return VINF_SUCCESS;
7172}
7173
7174
/** Opcode 0xd8 11/0. FADD ST0,STn - dispatches to the common ST0/STn worker. */
FNIEMOP_DEF_1(iemOp_fadd_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_stN, "fadd st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fadd_r80_by_r80);
}


/** Opcode 0xd8 11/1. FMUL ST0,STn. */
FNIEMOP_DEF_1(iemOp_fmul_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_stN, "fmul st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fmul_r80_by_r80);
}


/** Opcode 0xd8 11/2. FCOM ST0,STn - flags only, no store. */
FNIEMOP_DEF_1(iemOp_fcom_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_stN, "fcom st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/3. FCOMP ST0,STn - reuses the FCOM worker via the popping helper. */
FNIEMOP_DEF_1(iemOp_fcomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_stN, "fcomp st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fcom_r80_by_r80);
}


/** Opcode 0xd8 11/4. FSUB ST0,STn. */
FNIEMOP_DEF_1(iemOp_fsub_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_stN, "fsub st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsub_r80_by_r80);
}


/** Opcode 0xd8 11/5. FSUBR ST0,STn. */
FNIEMOP_DEF_1(iemOp_fsubr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_stN, "fsubr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fsubr_r80_by_r80);
}


/** Opcode 0xd8 11/6. FDIV ST0,STn. */
FNIEMOP_DEF_1(iemOp_fdiv_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_stN, "fdiv st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdiv_r80_by_r80);
}


/** Opcode 0xd8 11/7. FDIVR ST0,STn. */
FNIEMOP_DEF_1(iemOp_fdivr_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_stN, "fdivr st0,stN");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, bRm, iemAImpl_fdivr_r80_by_r80);
}
7237
7238
7239/**
7240 * Common worker for FPU instructions working on ST0 and an m32r, and storing
7241 * the result in ST0.
7242 *
7243 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7244 */
7245FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32r, uint8_t, bRm, PFNIEMAIMPLFPUR32, pfnAImpl)
7246{
7247 IEM_MC_BEGIN(3, 3);
7248 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7249 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7250 IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
7251 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7252 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7253 IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);
7254
7255 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7257
7258 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7259 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7260 IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7261
7262 IEM_MC_PREPARE_FPU_USAGE();
7263 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
7264 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr32Val2);
7265 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7266 IEM_MC_ELSE()
7267 IEM_MC_FPU_STACK_UNDERFLOW(0);
7268 IEM_MC_ENDIF();
7269 IEM_MC_ADVANCE_RIP();
7270
7271 IEM_MC_END();
7272 return VINF_SUCCESS;
7273}
7274
7275
/** Opcode 0xd8 !11/0. FADD ST0,m32r - dispatches to the common ST0/m32r worker. */
FNIEMOP_DEF_1(iemOp_fadd_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_st0_m32r, "fadd st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fadd_r80_by_r32);
}


/** Opcode 0xd8 !11/1. FMUL ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fmul_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_st0_m32r, "fmul st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fmul_r80_by_r32);
}
7290
7291
/** Opcode 0xd8 !11/2. FCOM ST0,m32r - compares ST0 with a 32-bit real from
 *  memory, updating FSW only (open-coded because the FSW update records the
 *  memory operand, which the generic ST0/m32r worker does not do). */
FNIEMOP_DEF_1(iemOp_fcom_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m32r, "fcom st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7324
7325
/** Opcode 0xd8 !11/3. FCOMP ST0,m32r - same as iemOp_fcom_m32r but the FSW
 *  update / underflow paths additionally pop the FPU stack. */
FNIEMOP_DEF_1(iemOp_fcomp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m32r, "fcomp st0,m32r");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val2, r32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r32, pu16Fsw, pr80Value1, pr32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7358
7359
/** Opcode 0xd8 !11/4. FSUB ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fsub_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_st0_m32r, "fsub st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsub_r80_by_r32);
}


/** Opcode 0xd8 !11/5. FSUBR ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fsubr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_st0_m32r, "fsubr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fsubr_r80_by_r32);
}


/** Opcode 0xd8 !11/6. FDIV ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fdiv_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_st0_m32r, "fdiv st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdiv_r80_by_r32);
}


/** Opcode 0xd8 !11/7. FDIVR ST0,m32r. */
FNIEMOP_DEF_1(iemOp_fdivr_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_st0_m32r, "fdivr st0,m32r");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32r, bRm, iemAImpl_fdivr_r80_by_r32);
}
7390
7391
7392/**
7393 * @opcode 0xd8
7394 */
7395FNIEMOP_DEF(iemOp_EscF0)
7396{
7397 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
7398 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd8 & 0x7);
7399
7400 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
7401 {
7402 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7403 {
7404 case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN, bRm);
7405 case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN, bRm);
7406 case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN, bRm);
7407 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
7408 case 4: return FNIEMOP_CALL_1(iemOp_fsub_stN, bRm);
7409 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_stN, bRm);
7410 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_stN, bRm);
7411 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_stN, bRm);
7412 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7413 }
7414 }
7415 else
7416 {
7417 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
7418 {
7419 case 0: return FNIEMOP_CALL_1(iemOp_fadd_m32r, bRm);
7420 case 1: return FNIEMOP_CALL_1(iemOp_fmul_m32r, bRm);
7421 case 2: return FNIEMOP_CALL_1(iemOp_fcom_m32r, bRm);
7422 case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m32r, bRm);
7423 case 4: return FNIEMOP_CALL_1(iemOp_fsub_m32r, bRm);
7424 case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m32r, bRm);
7425 case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m32r, bRm);
7426 case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m32r, bRm);
7427 IEM_NOT_REACHED_DEFAULT_CASE_RET();
7428 }
7429 }
7430}
7431
7432
/** Opcode 0xd9 /0 mem32real
 *
 * FLD m32r - converts a 32-bit real from memory to 80-bit and pushes it.
 * The push requires ST7 (the register below the current top) to be free;
 * otherwise the stack push-overflow path is taken.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m32r, "fld m32r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT32U, r32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT32U, pr32Val, r32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R32(r32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7) /* pushing requires the register below TOP to be empty */
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r32_to_r80, pFpuRes, pr32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7465
7466
/** Opcode 0xd9 !11/2 mem32real
 *
 * FST m32r - stores ST0 to memory as a 32-bit real. The destination is
 * mapped for write up front; when ST0 is empty and the invalid-operation
 * exception is masked (FCW.IM), a negative QNaN is written instead before
 * taking the underflow path. */
FNIEMOP_DEF_1(iemOp_fst_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m32r, "fst m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked \#IA: write the default QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7501
7502
/** Opcode 0xd9 !11/3
 *
 * FSTP m32r - same as iemOp_fst_m32r but pops the FPU stack after the
 * store (or after the underflow handling). */
FNIEMOP_DEF_1(iemOp_fstp_m32r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m32r, "fstp m32r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT32U, pr32Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r32, pu16Fsw, pr32Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM() /* masked \#IA: write the default QNaN */
            IEM_MC_STORE_MEM_NEG_QNAN_R32_BY_REF(pr32Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
7537
7538
/** Opcode 0xd9 !11/4
 *
 * FLDENV m14/28byte - loads the FPU environment; the 14 vs. 28 byte layout
 * is selected by the effective operand size passed to the C implementation. */
FNIEMOP_DEF_1(iemOp_fldenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fldenv, "fldenv m14/28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fldenv, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7556
7557
7558/** Opcode 0xd9 !11/5 */
7559FNIEMOP_DEF_1(iemOp_fldcw, uint8_t, bRm)
7560{
7561 IEMOP_MNEMONIC(fldcw_m2byte, "fldcw m2byte");
7562 IEM_MC_BEGIN(1, 1);
7563 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
7564 IEM_MC_ARG(uint16_t, u16Fsw, 0);
7565 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
7566 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7567 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7568 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
7569 IEM_MC_FETCH_MEM_U16(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
7570 IEM_MC_CALL_CIMPL_1(iemCImpl_fldcw, u16Fsw);
7571 IEM_MC_END();
7572 return VINF_SUCCESS;
7573}
7574
7575
/** Opcode 0xd9 !11/6
 *
 * FNSTENV m14/m28byte - stores the FPU environment without checking for
 * pending exceptions (no-wait form; note there is no IEM_MC_MAYBE_RAISE_FPU_XCPT
 * here, unlike most other x87 ops in this file).
 * NOTE(review): the IEMOP_MNEMONIC stats name says "fstenv" while the function
 * implements the no-wait FNSTENV form - confirm whether the stats key should
 * be renamed before touching the string. */
FNIEMOP_DEF_1(iemOp_fnstenv, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstenv, "fstenv m14/m28byte");
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnstenv, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;
}
7593
7594
/** Opcode 0xd9 !11/7
 *
 * FNSTCW m2byte - stores the FPU control word to memory. No-wait form, so no
 * pending-exception check (no IEM_MC_MAYBE_RAISE_FPU_XCPT). */
FNIEMOP_DEF_1(iemOp_fnstcw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstcw_m2byte, "fnstcw m2byte");
    IEM_MC_BEGIN(2, 0);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fcw);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FCW(u16Fcw);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Fcw);
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7612
7613
/** Opcode 0xd9 0xd0, 0xd9 0xd8-0xdf, ++?.
 *
 * FNOP - x87 no-op; still checks CR0.EM/TS and pending FPU exceptions, and
 * updates the FPU opcode/IP last-instruction registers. */
FNIEMOP_DEF(iemOp_fnop)
{
    IEMOP_MNEMONIC(fnop, "fnop");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    /** @todo Testcase: looks like FNOP leaves FOP alone but updates FPUIP. Could be
     *        intel optimizations. Investigate. */
    IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ADVANCE_RIP(); /* C0-C3 are documented as undefined, we leave them unmodified. */
    IEM_MC_END();
    return VINF_SUCCESS;
}
7631
7632
/** Opcode 0xd9 11/0 stN
 *
 * FLD STn - pushes a copy of STn onto the stack; underflow path is taken when
 * STn is empty. */
FNIEMOP_DEF_1(iemOp_fld_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_stN, "fld stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_PUSH_FPU_RESULT(FpuRes);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_UNDERFLOW();
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7660
7661
/** Opcode 0xd9 11/3 stN
 *
 * FXCH STn - exchanges ST0 and STn. Done via a result with C1 set for ST0
 * plus a direct register store for STn; the empty-register case is deferred
 * to a C implementation (iemCImpl_fxch_underflow). */
FNIEMOP_DEF_1(iemOp_fxch_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fxch_stN, "fxch stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /** @todo Testcase: Check if this raises \#MF? Intel mentioned it not.  AMD
     *        indicates that it does. */
    IEM_MC_BEGIN(1, 3);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value2);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_CONST(uint8_t, iStReg, /*=*/ bRm & X86_MODRM_RM_MASK, 0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, bRm & X86_MODRM_RM_MASK)
        IEM_MC_SET_FPU_RESULT(FpuRes, X86_FSW_C1, pr80Value2);
        IEM_MC_STORE_FPUREG_R80_SRC_REF(bRm & X86_MODRM_RM_MASK, pr80Value1);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_CALL_CIMPL_1(iemCImpl_fxch_underflow, iStReg);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();

    return VINF_SUCCESS;
}
7692
7693
/** Opcode 0xd9 11/4, 0xdd 11/2.
 *
 * FSTP STn - copies ST0 to STn and pops. The STn==ST0 case is special-cased
 * because it is frequently used as an official 'ffreep st0' sequence (pop
 * without copying). */
FNIEMOP_DEF_1(iemOp_fstp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_st0_stN, "fstp st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* fstp st0, st0 is frequently used as an official 'ffreep st0' sequence. */
    uint8_t const iDstReg = bRm & X86_MODRM_RM_MASK;
    if (!iDstReg)
    {
        /* Destination is ST0 itself: just pop (FSW update with constant 0). */
        IEM_MC_BEGIN(0, 1);
        IEM_MC_LOCAL_CONST(uint16_t, u16Fsw, /*=*/ 0);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY(0)
            IEM_MC_UPDATE_FSW_THEN_POP(u16Fsw);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(0);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* General case: copy ST0 to ST(iDstReg), then pop. */
        IEM_MC_BEGIN(0, 2);
        IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
        IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
        IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
        IEM_MC_MAYBE_RAISE_FPU_XCPT();

        IEM_MC_PREPARE_FPU_USAGE();
        IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
            IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
            IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, iDstReg);
        IEM_MC_ELSE()
            IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(iDstReg);
        IEM_MC_ENDIF();

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
7740
7741
7742/**
7743 * Common worker for FPU instructions working on ST0 and replaces it with the
7744 * result, i.e. unary operators.
7745 *
7746 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7747 */
7748FNIEMOP_DEF_1(iemOpHlpFpu_st0, PFNIEMAIMPLFPUR80UNARY, pfnAImpl)
7749{
7750 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7751
7752 IEM_MC_BEGIN(2, 1);
7753 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7754 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7755 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7756
7757 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7758 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7759 IEM_MC_PREPARE_FPU_USAGE();
7760 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7761 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuRes, pr80Value);
7762 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
7763 IEM_MC_ELSE()
7764 IEM_MC_FPU_STACK_UNDERFLOW(0);
7765 IEM_MC_ENDIF();
7766 IEM_MC_ADVANCE_RIP();
7767
7768 IEM_MC_END();
7769 return VINF_SUCCESS;
7770}
7771
7772
/** Opcode 0xd9 0xe0. FCHS - change sign of ST0 (unary ST0 worker). */
FNIEMOP_DEF(iemOp_fchs)
{
    IEMOP_MNEMONIC(fchs_st0, "fchs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fchs_r80);
}


/** Opcode 0xd9 0xe1. FABS - absolute value of ST0 (unary ST0 worker). */
FNIEMOP_DEF(iemOp_fabs)
{
    IEMOP_MNEMONIC(fabs_st0, "fabs st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fabs_r80);
}
7787
7788
7789/**
7790 * Common worker for FPU instructions working on ST0 and only returns FSW.
7791 *
7792 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7793 */
7794FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0, PFNIEMAIMPLFPUR80UNARYFSW, pfnAImpl)
7795{
7796 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7797
7798 IEM_MC_BEGIN(2, 1);
7799 IEM_MC_LOCAL(uint16_t, u16Fsw);
7800 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
7801 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7802
7803 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7804 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7805 IEM_MC_PREPARE_FPU_USAGE();
7806 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7807 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pu16Fsw, pr80Value);
7808 IEM_MC_UPDATE_FSW(u16Fsw);
7809 IEM_MC_ELSE()
7810 IEM_MC_FPU_STACK_UNDERFLOW(UINT8_MAX);
7811 IEM_MC_ENDIF();
7812 IEM_MC_ADVANCE_RIP();
7813
7814 IEM_MC_END();
7815 return VINF_SUCCESS;
7816}
7817
7818
/** Opcode 0xd9 0xe4. FTST - test ST0 against 0.0, FSW only. */
FNIEMOP_DEF(iemOp_ftst)
{
    IEMOP_MNEMONIC(ftst_st0, "ftst st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_ftst_r80);
}


/** Opcode 0xd9 0xe5. FXAM - examine/classify ST0, FSW only. */
FNIEMOP_DEF(iemOp_fxam)
{
    IEMOP_MNEMONIC(fxam_st0, "fxam st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0, iemAImpl_fxam_r80);
}
7833
7834
7835/**
7836 * Common worker for FPU instructions pushing a constant onto the FPU stack.
7837 *
7838 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7839 */
7840FNIEMOP_DEF_1(iemOpHlpFpuPushConstant, PFNIEMAIMPLFPUR80LDCONST, pfnAImpl)
7841{
7842 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7843
7844 IEM_MC_BEGIN(1, 1);
7845 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7846 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7847
7848 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7849 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7850 IEM_MC_PREPARE_FPU_USAGE();
7851 IEM_MC_IF_FPUREG_IS_EMPTY(7)
7852 IEM_MC_CALL_FPU_AIMPL_1(pfnAImpl, pFpuRes);
7853 IEM_MC_PUSH_FPU_RESULT(FpuRes);
7854 IEM_MC_ELSE()
7855 IEM_MC_FPU_STACK_PUSH_OVERFLOW();
7856 IEM_MC_ENDIF();
7857 IEM_MC_ADVANCE_RIP();
7858
7859 IEM_MC_END();
7860 return VINF_SUCCESS;
7861}
7862
7863
/** Opcode 0xd9 0xe8. FLD1 - push +1.0 (constant-push worker). */
FNIEMOP_DEF(iemOp_fld1)
{
    IEMOP_MNEMONIC(fld1, "fld1");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fld1);
}


/** Opcode 0xd9 0xe9. FLDL2T - push log2(10). */
FNIEMOP_DEF(iemOp_fldl2t)
{
    IEMOP_MNEMONIC(fldl2t, "fldl2t");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2t);
}


/** Opcode 0xd9 0xea. FLDL2E - push log2(e). */
FNIEMOP_DEF(iemOp_fldl2e)
{
    IEMOP_MNEMONIC(fldl2e, "fldl2e");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldl2e);
}

/** Opcode 0xd9 0xeb. FLDPI - push pi. */
FNIEMOP_DEF(iemOp_fldpi)
{
    IEMOP_MNEMONIC(fldpi, "fldpi");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldpi);
}


/** Opcode 0xd9 0xec. FLDLG2 - push log10(2). */
FNIEMOP_DEF(iemOp_fldlg2)
{
    IEMOP_MNEMONIC(fldlg2, "fldlg2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldlg2);
}

/** Opcode 0xd9 0xed. FLDLN2 - push ln(2). */
FNIEMOP_DEF(iemOp_fldln2)
{
    IEMOP_MNEMONIC(fldln2, "fldln2");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldln2);
}


/** Opcode 0xd9 0xee. FLDZ - push +0.0. */
FNIEMOP_DEF(iemOp_fldz)
{
    IEMOP_MNEMONIC(fldz, "fldz");
    return FNIEMOP_CALL_1(iemOpHlpFpuPushConstant, iemAImpl_fldz);
}


/** Opcode 0xd9 0xf0. F2XM1 - replace ST0 (unary ST0 worker). */
FNIEMOP_DEF(iemOp_f2xm1)
{
    IEMOP_MNEMONIC(f2xm1_st0, "f2xm1 st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_f2xm1_r80);
}
7924
7925
7926/**
7927 * Common worker for FPU instructions working on STn and ST0, storing the result
7928 * in STn, and popping the stack unless IE, DE or ZE was raised.
7929 *
7930 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7931 */
7932FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0_pop, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
7933{
7934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7935
7936 IEM_MC_BEGIN(3, 1);
7937 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
7938 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
7939 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
7940 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
7941
7942 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7943 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7944
7945 IEM_MC_PREPARE_FPU_USAGE();
7946 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
7947 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
7948 IEM_MC_STORE_FPU_RESULT_THEN_POP(FpuRes, bRm & X86_MODRM_RM_MASK);
7949 IEM_MC_ELSE()
7950 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP(bRm & X86_MODRM_RM_MASK);
7951 IEM_MC_ENDIF();
7952 IEM_MC_ADVANCE_RIP();
7953
7954 IEM_MC_END();
7955 return VINF_SUCCESS;
7956}
7957
7958
/** Opcode 0xd9 0xf1. FYL2X st1,st0 - result to ST1 then pop; the literal 1
 *  passed as bRm selects ST1 in the stN/st0 worker. */
FNIEMOP_DEF(iemOp_fyl2x)
{
    IEMOP_MNEMONIC(fyl2x_st0, "fyl2x st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2x_r80_by_r80);
}
7965
7966
7967/**
7968 * Common worker for FPU instructions working on ST0 and having two outputs, one
7969 * replacing ST0 and one pushed onto the stack.
7970 *
7971 * @param pfnAImpl Pointer to the instruction implementation (assembly).
7972 */
7973FNIEMOP_DEF_1(iemOpHlpFpuReplace_st0_push, PFNIEMAIMPLFPUR80UNARYTWO, pfnAImpl)
7974{
7975 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
7976
7977 IEM_MC_BEGIN(2, 1);
7978 IEM_MC_LOCAL(IEMFPURESULTTWO, FpuResTwo);
7979 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULTTWO, pFpuResTwo, FpuResTwo, 0);
7980 IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 1);
7981
7982 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
7983 IEM_MC_MAYBE_RAISE_FPU_XCPT();
7984 IEM_MC_PREPARE_FPU_USAGE();
7985 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
7986 IEM_MC_CALL_FPU_AIMPL_2(pfnAImpl, pFpuResTwo, pr80Value);
7987 IEM_MC_PUSH_FPU_RESULT_TWO(FpuResTwo);
7988 IEM_MC_ELSE()
7989 IEM_MC_FPU_STACK_PUSH_UNDERFLOW_TWO();
7990 IEM_MC_ENDIF();
7991 IEM_MC_ADVANCE_RIP();
7992
7993 IEM_MC_END();
7994 return VINF_SUCCESS;
7995}
7996
7997
/** Opcode 0xd9 0xf2. FPTAN - two-output worker (replaces ST0, pushes). */
FNIEMOP_DEF(iemOp_fptan)
{
    IEMOP_MNEMONIC(fptan_st0, "fptan st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fptan_r80_r80);
}


/** Opcode 0xd9 0xf3. FPATAN st1,st0 - result to ST1 then pop (bRm=1 selects ST1). */
FNIEMOP_DEF(iemOp_fpatan)
{
    IEMOP_MNEMONIC(fpatan_st1_st0, "fpatan st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fpatan_r80_by_r80);
}


/** Opcode 0xd9 0xf4. FXTRACT - two-output worker (replaces ST0, pushes). */
FNIEMOP_DEF(iemOp_fxtract)
{
    IEMOP_MNEMONIC(fxtract_st0, "fxtract st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fxtract_r80_r80);
}


/** Opcode 0xd9 0xf5. FPREM1 st0,st1 - result to ST0 (bRm=1 selects ST1 as operand 2). */
FNIEMOP_DEF(iemOp_fprem1)
{
    IEMOP_MNEMONIC(fprem1_st0_st1, "fprem1 st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem1_r80_by_r80);
}
8028
8029
/** Opcode 0xd9 0xf6.
 *
 * FDECSTP - decrements the FPU stack TOP pointer; no register contents or
 * tags are changed. */
FNIEMOP_DEF(iemOp_fdecstp)
{
    IEMOP_MNEMONIC(fdecstp, "fdecstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_DEC_TOP();
    IEM_MC_UPDATE_FSW_CONST(0); /* clears C0-C3 (see note above) */

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8052
8053
/** Opcode 0xd9 0xf7.  Increments the FPU stack-top pointer (TOP) without
 *  touching register contents or tag words. */
FNIEMOP_DEF(iemOp_fincstp)
{
    IEMOP_MNEMONIC(fincstp, "fincstp");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C2 and C3 are documented as undefined, we clear them. */
    /** @todo Testcase: Check whether FOP, FPUIP and FPUCS are affected by
     *        FINCSTP and FDECSTP. */

    IEM_MC_BEGIN(0,0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_INC_TOP();
    /* Clears C0-C3 since the constant FSW value has them zero. */
    IEM_MC_UPDATE_FSW_CONST(0);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8076
8077
/** Opcode 0xd9 0xf8.  Truncating (8087-style) partial remainder of ST0/ST1, stored in ST0. */
FNIEMOP_DEF(iemOp_fprem)
{
    IEMOP_MNEMONIC(fprem_st0_st1, "fprem st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fprem_r80_by_r80);
}
8084
8085
/** Opcode 0xd9 0xf9.  ST1 = ST1 * log2(ST0 + 1), then pop. */
FNIEMOP_DEF(iemOp_fyl2xp1)
{
    IEMOP_MNEMONIC(fyl2xp1_st1_st0, "fyl2xp1 st1,st0");
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, 1, iemAImpl_fyl2xp1_r80_by_r80);
}
8092
8093
/** Opcode 0xd9 0xfa.  Square root of ST0, stored in ST0. */
FNIEMOP_DEF(iemOp_fsqrt)
{
    IEMOP_MNEMONIC(fsqrt_st0, "fsqrt st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsqrt_r80);
}
8100
8101
/** Opcode 0xd9 0xfb.  Replaces ST0 with sin(ST0) and pushes cos(ST0). */
FNIEMOP_DEF(iemOp_fsincos)
{
    IEMOP_MNEMONIC(fsincos_st0, "fsincos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpuReplace_st0_push, iemAImpl_fsincos_r80_r80);
}
8108
8109
/** Opcode 0xd9 0xfc.  Rounds ST0 to integer per the current FCW rounding mode. */
FNIEMOP_DEF(iemOp_frndint)
{
    IEMOP_MNEMONIC(frndint_st0, "frndint st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_frndint_r80);
}
8116
8117
/** Opcode 0xd9 0xfd.  Scales ST0 by powers of two taken from ST1. */
FNIEMOP_DEF(iemOp_fscale)
{
    IEMOP_MNEMONIC(fscale_st0_st1, "fscale st0,st1");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_stN, 1, iemAImpl_fscale_r80_by_r80);
}
8124
8125
/** Opcode 0xd9 0xfe.  Sine of ST0, stored in ST0. */
FNIEMOP_DEF(iemOp_fsin)
{
    IEMOP_MNEMONIC(fsin_st0, "fsin st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fsin_r80);
}
8132
8133
/** Opcode 0xd9 0xff.  Cosine of ST0, stored in ST0. */
FNIEMOP_DEF(iemOp_fcos)
{
    IEMOP_MNEMONIC(fcos_st0, "fcos st0");
    return FNIEMOP_CALL_1(iemOpHlpFpu_st0, iemAImpl_fcos_r80);
}
8140
8141
/** Used by iemOp_EscF1.
 * Dispatch table for 0xd9 with a register-form ModR/M byte in the 0xe0..0xff
 * range (reg fields 4-7); indexed by bRm - 0xe0. */
IEM_STATIC const PFNIEMOP g_apfnEscF1_E0toFF[32] =
{
    /* 0xe0 */  iemOp_fchs,
    /* 0xe1 */  iemOp_fabs,
    /* 0xe2 */  iemOp_Invalid,
    /* 0xe3 */  iemOp_Invalid,
    /* 0xe4 */  iemOp_ftst,
    /* 0xe5 */  iemOp_fxam,
    /* 0xe6 */  iemOp_Invalid,
    /* 0xe7 */  iemOp_Invalid,
    /* 0xe8 */  iemOp_fld1,
    /* 0xe9 */  iemOp_fldl2t,
    /* 0xea */  iemOp_fldl2e,
    /* 0xeb */  iemOp_fldpi,
    /* 0xec */  iemOp_fldlg2,
    /* 0xed */  iemOp_fldln2,
    /* 0xee */  iemOp_fldz,
    /* 0xef */  iemOp_Invalid,
    /* 0xf0 */  iemOp_f2xm1,
    /* 0xf1 */  iemOp_fyl2x,
    /* 0xf2 */  iemOp_fptan,
    /* 0xf3 */  iemOp_fpatan,
    /* 0xf4 */  iemOp_fxtract,
    /* 0xf5 */  iemOp_fprem1,
    /* 0xf6 */  iemOp_fdecstp,
    /* 0xf7 */  iemOp_fincstp,
    /* 0xf8 */  iemOp_fprem,
    /* 0xf9 */  iemOp_fyl2xp1,
    /* 0xfa */  iemOp_fsqrt,
    /* 0xfb */  iemOp_fsincos,
    /* 0xfc */  iemOp_frndint,
    /* 0xfd */  iemOp_fscale,
    /* 0xfe */  iemOp_fsin,
    /* 0xff */  iemOp_fcos
};
8178
8179
8180/**
8181 * @opcode 0xd9
8182 */
8183FNIEMOP_DEF(iemOp_EscF1)
8184{
8185 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8186 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xd9 & 0x7);
8187
8188 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8189 {
8190 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8191 {
8192 case 0: return FNIEMOP_CALL_1(iemOp_fld_stN, bRm);
8193 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm);
8194 case 2:
8195 if (bRm == 0xd0)
8196 return FNIEMOP_CALL(iemOp_fnop);
8197 return IEMOP_RAISE_INVALID_OPCODE();
8198 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved. Intel behavior seems to be FSTP ST(i) though. */
8199 case 4:
8200 case 5:
8201 case 6:
8202 case 7:
8203 Assert((unsigned)bRm - 0xe0U < RT_ELEMENTS(g_apfnEscF1_E0toFF));
8204 return FNIEMOP_CALL(g_apfnEscF1_E0toFF[bRm - 0xe0]);
8205 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8206 }
8207 }
8208 else
8209 {
8210 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8211 {
8212 case 0: return FNIEMOP_CALL_1(iemOp_fld_m32r, bRm);
8213 case 1: return IEMOP_RAISE_INVALID_OPCODE();
8214 case 2: return FNIEMOP_CALL_1(iemOp_fst_m32r, bRm);
8215 case 3: return FNIEMOP_CALL_1(iemOp_fstp_m32r, bRm);
8216 case 4: return FNIEMOP_CALL_1(iemOp_fldenv, bRm);
8217 case 5: return FNIEMOP_CALL_1(iemOp_fldcw, bRm);
8218 case 6: return FNIEMOP_CALL_1(iemOp_fnstenv, bRm);
8219 case 7: return FNIEMOP_CALL_1(iemOp_fnstcw, bRm);
8220 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8221 }
8222 }
8223}
8224
8225
/** Opcode 0xda 11/0.  FCMOVB: copies ST(i) to ST0 if CF is set.
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovb_st0_stN, "fcmovb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8252
8253
/** Opcode 0xda 11/1.  FCMOVE: copies ST(i) to ST0 if ZF is set.
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmove_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmove_st0_stN, "fcmove st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8280
8281
/** Opcode 0xda 11/2.  FCMOVBE: copies ST(i) to ST0 if CF or ZF is set.
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovbe_st0_stN, "fcmovbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8308
8309
/** Opcode 0xda 11/3.  FCMOVU: copies ST(i) to ST0 if PF is set (unordered).
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovu_st0_stN, "fcmovu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8336
8337
8338/**
8339 * Common worker for FPU instructions working on ST0 and STn, only affecting
8340 * flags, and popping twice when done.
8341 *
8342 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8343 */
8344FNIEMOP_DEF_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, PFNIEMAIMPLFPUR80FSW, pfnAImpl)
8345{
8346 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8347
8348 IEM_MC_BEGIN(3, 1);
8349 IEM_MC_LOCAL(uint16_t, u16Fsw);
8350 IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
8351 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8352 IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);
8353
8354 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8355 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8356
8357 IEM_MC_PREPARE_FPU_USAGE();
8358 IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, 0, pr80Value2, 1)
8359 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pu16Fsw, pr80Value1, pr80Value2);
8360 IEM_MC_UPDATE_FSW_THEN_POP_POP(u16Fsw);
8361 IEM_MC_ELSE()
8362 IEM_MC_FPU_STACK_UNDERFLOW_THEN_POP_POP();
8363 IEM_MC_ENDIF();
8364 IEM_MC_ADVANCE_RIP();
8365
8366 IEM_MC_END();
8367 return VINF_SUCCESS;
8368}
8369
8370
/** Opcode 0xda 0xe9.  Unordered compare ST0 with ST1, then pop both. */
FNIEMOP_DEF(iemOp_fucompp)
{
    IEMOP_MNEMONIC(fucompp_st0_stN, "fucompp st0,stN");
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fucom_r80_by_r80);
}
8377
8378
8379/**
8380 * Common worker for FPU instructions working on ST0 and an m32i, and storing
8381 * the result in ST0.
8382 *
8383 * @param pfnAImpl Pointer to the instruction implementation (assembly).
8384 */
8385FNIEMOP_DEF_2(iemOpHlpFpu_st0_m32i, uint8_t, bRm, PFNIEMAIMPLFPUI32, pfnAImpl)
8386{
8387 IEM_MC_BEGIN(3, 3);
8388 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
8389 IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
8390 IEM_MC_LOCAL(int32_t, i32Val2);
8391 IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
8392 IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
8393 IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);
8394
8395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
8396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
8397
8398 IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
8399 IEM_MC_MAYBE_RAISE_FPU_XCPT();
8400 IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
8401
8402 IEM_MC_PREPARE_FPU_USAGE();
8403 IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
8404 IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi32Val2);
8405 IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
8406 IEM_MC_ELSE()
8407 IEM_MC_FPU_STACK_UNDERFLOW(0);
8408 IEM_MC_ENDIF();
8409 IEM_MC_ADVANCE_RIP();
8410
8411 IEM_MC_END();
8412 return VINF_SUCCESS;
8413}
8414
8415
/** Opcode 0xda !11/0.  ST0 = ST0 + m32i. */
FNIEMOP_DEF_1(iemOp_fiadd_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m32i, "fiadd m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fiadd_r80_by_i32);
}
8422
8423
/** Opcode 0xda !11/1.  ST0 = ST0 * m32i. */
FNIEMOP_DEF_1(iemOp_fimul_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m32i, "fimul m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fimul_r80_by_i32);
}
8430
8431
/** Opcode 0xda !11/2.  Compares ST0 with a 32-bit integer memory operand;
 *  only FSW is updated, nothing is stored or popped. */
FNIEMOP_DEF_1(iemOp_ficom_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m32i, "ficom st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        /* Memory-operand form also records FDP/FDS in the FPU state. */
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8464
8465
/** Opcode 0xda !11/3.  Compares ST0 with a 32-bit integer memory operand,
 *  then pops ST0; only FSW is updated. */
FNIEMOP_DEF_1(iemOp_ficomp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m32i, "ficomp st0,m32i");

    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_LOCAL(int32_t,               i32Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw,  0);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value1,          1);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val2, i32Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        /* Same comparison worker as FICOM; the pop happens in the FSW update. */
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i32, pu16Fsw, pr80Value1, pi32Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8498
8499
/** Opcode 0xda !11/4.  ST0 = ST0 - m32i. */
FNIEMOP_DEF_1(iemOp_fisub_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m32i, "fisub m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisub_r80_by_i32);
}
8506
8507
/** Opcode 0xda !11/5.  ST0 = m32i - ST0 (reversed subtract). */
FNIEMOP_DEF_1(iemOp_fisubr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m32i, "fisubr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fisubr_r80_by_i32);
}
8514
8515
/** Opcode 0xda !11/6.  ST0 = ST0 / m32i. */
FNIEMOP_DEF_1(iemOp_fidiv_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m32i, "fidiv m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidiv_r80_by_i32);
}
8522
8523
/** Opcode 0xda !11/7.  ST0 = m32i / ST0 (reversed divide). */
FNIEMOP_DEF_1(iemOp_fidivr_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m32i, "fidivr m32i");
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m32i, bRm, iemAImpl_fidivr_r80_by_i32);
}
8530
8531
8532/**
8533 * @opcode 0xda
8534 */
8535FNIEMOP_DEF(iemOp_EscF2)
8536{
8537 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
8538 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xda & 0x7);
8539 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
8540 {
8541 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8542 {
8543 case 0: return FNIEMOP_CALL_1(iemOp_fcmovb_stN, bRm);
8544 case 1: return FNIEMOP_CALL_1(iemOp_fcmove_stN, bRm);
8545 case 2: return FNIEMOP_CALL_1(iemOp_fcmovbe_stN, bRm);
8546 case 3: return FNIEMOP_CALL_1(iemOp_fcmovu_stN, bRm);
8547 case 4: return IEMOP_RAISE_INVALID_OPCODE();
8548 case 5:
8549 if (bRm == 0xe9)
8550 return FNIEMOP_CALL(iemOp_fucompp);
8551 return IEMOP_RAISE_INVALID_OPCODE();
8552 case 6: return IEMOP_RAISE_INVALID_OPCODE();
8553 case 7: return IEMOP_RAISE_INVALID_OPCODE();
8554 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8555 }
8556 }
8557 else
8558 {
8559 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
8560 {
8561 case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m32i, bRm);
8562 case 1: return FNIEMOP_CALL_1(iemOp_fimul_m32i, bRm);
8563 case 2: return FNIEMOP_CALL_1(iemOp_ficom_m32i, bRm);
8564 case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m32i, bRm);
8565 case 4: return FNIEMOP_CALL_1(iemOp_fisub_m32i, bRm);
8566 case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m32i, bRm);
8567 case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m32i, bRm);
8568 case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m32i, bRm);
8569 IEM_NOT_REACHED_DEFAULT_CASE_RET();
8570 }
8571 }
8572}
8573
8574
/** Opcode 0xdb !11/0.  Loads a 32-bit signed integer from memory and pushes
 *  it onto the FPU stack (converted to R80).  Pushing requires ST7 (the
 *  register below TOP) to be empty, otherwise stack overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fild_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m32i, "fild m32i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(int32_t,               i32Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int32_t const *, pi32Val, i32Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I32(i32Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i32_to_r80, pFpuRes, pi32Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8606
8607
/** Opcode 0xdb !11/1.  FISTTP (SSE3): stores ST0 to m32i with truncation
 *  regardless of the FCW rounding mode, then pops.  On stack underflow with
 *  IM masked, the integer-indefinite value is written instead. */
FNIEMOP_DEF_1(iemOp_fisttp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m32i, "fisttp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8642
8643
/** Opcode 0xdb !11/2.  Stores ST0 to m32i using the FCW rounding mode,
 *  without popping.  On stack underflow with IM masked, the
 *  integer-indefinite value is written instead. */
FNIEMOP_DEF_1(iemOp_fist_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m32i, "fist m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8678
8679
/** Opcode 0xdb !11/3.  Stores ST0 to m32i using the FCW rounding mode,
 *  then pops.  On stack underflow with IM masked, the integer-indefinite
 *  value is written instead. */
FNIEMOP_DEF_1(iemOp_fistp_m32i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m32i, "fistp m32i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int32_t *,               pi32Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pi32Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i32, pu16Fsw, pi32Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi32Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I32_CONST_BY_REF(pi32Dst, INT32_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi32Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8714
8715
/** Opcode 0xdb !11/5.  Loads an 80-bit real from memory and pushes it onto
 *  the FPU stack.  Pushing requires ST7 (the register below TOP) to be
 *  empty, otherwise stack overflow is signalled. */
FNIEMOP_DEF_1(iemOp_fld_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m80r, "fld m80r");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT,          FpuRes);
    IEM_MC_LOCAL(RTFLOAT80U,            r80Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT80U,  pr80Val, r80Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R80(r80Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r80_from_r80, pFpuRes, pr80Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8747
8748
/** Opcode 0xdb !11/7.  Stores ST0 to an 80-bit real in memory, then pops.
 *  On stack underflow with IM masked, negative QNaN (real indefinite) is
 *  written instead. */
FNIEMOP_DEF_1(iemOp_fstp_m80r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m80r, "fstp m80r");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT80U,             pr80Dst,         1);
    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,       2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map the destination writable up front so memory faults precede FPU work. */
    IEM_MC_MEM_MAP(pr80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r80, pu16Fsw, pr80Dst, pr80Value);
        /* Commit is conditional on the FSW result (unmasked exceptions suppress the store). */
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R80_BY_REF(pr80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8783
8784
/** Opcode 0xdb 11/0.  FCMOVNB: copies ST(i) to ST0 if CF is clear.
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnb_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnb_st0_stN, "fcmovnb st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8811
8812
/** Opcode 0xdb 11/1.  FCMOVNE: copies ST(i) to ST0 if ZF is clear.
 *  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovne_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovne_st0_stN, "fcmovne st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8839
8840
/** Opcode 0xdb 11/2.  FCMOVNBE: copies ST(i) to ST0 if both CF and ZF are
 *  clear.  Underflow is raised if either register is empty. */
FNIEMOP_DEF_1(iemOp_fcmovnbe_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnbe_st0_stN, "fcmovnbe st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8867
8868
/** Opcode 0xdb 11/3.  FCMOVNU: copies ST(i) to ST0 if PF is clear (not
 *  unordered).  Underflow is raised if either register is empty.
 *  NOTE(review): the function/mnemonic name "fcmovnnu" has a doubled 'n'
 *  (Intel's mnemonic is FCMOVNU); renaming would also touch iemOp_EscF3. */
FNIEMOP_DEF_1(iemOp_fcmovnnu_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcmovnnu_st0_stN, "fcmovnnu st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80ValueN);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80_FIRST(pr80ValueN, bRm & X86_MODRM_RM_MASK, 0)
        IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)
            IEM_MC_STORE_FPUREG_R80_SRC_REF(0, pr80ValueN);
        IEM_MC_ENDIF();
        /* FOP/FPUIP are updated even when the condition is false. */
        IEM_MC_UPDATE_FPU_OPCODE_IP();
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
8895
8896
/** Opcode 0xdb 0xe0.  FNENI: 8087 interrupt-enable; a no-op (after the
 *  device-not-available check) on later FPUs. */
FNIEMOP_DEF(iemOp_fneni)
{
    IEMOP_MNEMONIC(fneni, "fneni (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8908
8909
/** Opcode 0xdb 0xe1.  FNDISI: 8087 interrupt-disable; a no-op (after the
 *  device-not-available check) on later FPUs. */
FNIEMOP_DEF(iemOp_fndisi)
{
    IEMOP_MNEMONIC(fndisi, "fndisi (8087/ign)");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8921
8922
/** Opcode 0xdb 0xe2.  FNCLEX: clears the FPU exception flags in FSW
 *  without checking for pending exceptions first. */
FNIEMOP_DEF(iemOp_fnclex)
{
    IEMOP_MNEMONIC(fnclex, "fnclex");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_CLEAR_FSW_EX();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8937
8938
/** Opcode 0xdb 0xe3.  FNINIT: re-initializes the FPU; the no-wait form,
 *  so pending exceptions are not checked (fCheckXcpts = false). */
FNIEMOP_DEF(iemOp_fninit)
{
    IEMOP_MNEMONIC(fninit, "fninit");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_finit, false /*fCheckXcpts*/);
}
8946
8947
/** Opcode 0xdb 0xe4.  FNSETPM: 80287 protected-mode switch; a no-op (after
 *  the device-not-available check) on later FPUs. */
FNIEMOP_DEF(iemOp_fnsetpm)
{
    IEMOP_MNEMONIC(fnsetpm, "fnsetpm (80287/ign)");   /* set protected mode on fpu. */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
8959
8960
/** Opcode 0xdb 0xe5.  FRSTPM: 80287XL return-to-real-mode; raises \#UD here
 *  since newer CPUs do not implement it (the ignore path is compiled out). */
FNIEMOP_DEF(iemOp_frstpm)
{
    IEMOP_MNEMONIC(frstpm, "frstpm (80287XL/ign)"); /* reset pm, back to real mode. */
#if 0 /* #UDs on newer CPUs */
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0,0);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
#else
    return IEMOP_RAISE_INVALID_OPCODE();
#endif
}
8976
8977
/** Opcode 0xdb 11/5.  FUCOMI: unordered compare ST0 with ST(i), setting
 *  EFLAGS; no pop (fPop = false). */
FNIEMOP_DEF_1(iemOp_fucomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomi_st0_stN, "fucomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fucomi_r80_by_r80, false /*fPop*/);
}
8984
8985
/** Opcode 0xdb 11/6.  FCOMI: ordered compare ST0 with ST(i), setting
 *  EFLAGS; no pop (fPop = false). */
FNIEMOP_DEF_1(iemOp_fcomi_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomi_st0_stN, "fcomi st0,stN");
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, false /*fPop*/);
}
8992
8993
8994/**
8995 * @opcode 0xdb
8996 */
8997FNIEMOP_DEF(iemOp_EscF3)
8998{
8999 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
9000 pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdb & 0x7);
9001 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
9002 {
9003 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9004 {
9005 case 0: return FNIEMOP_CALL_1(iemOp_fcmovnb_stN, bRm);
9006 case 1: return FNIEMOP_CALL_1(iemOp_fcmovne_stN, bRm);
9007 case 2: return FNIEMOP_CALL_1(iemOp_fcmovnbe_stN, bRm);
9008 case 3: return FNIEMOP_CALL_1(iemOp_fcmovnnu_stN, bRm);
9009 case 4:
9010 switch (bRm)
9011 {
9012 case 0xe0: return FNIEMOP_CALL(iemOp_fneni);
9013 case 0xe1: return FNIEMOP_CALL(iemOp_fndisi);
9014 case 0xe2: return FNIEMOP_CALL(iemOp_fnclex);
9015 case 0xe3: return FNIEMOP_CALL(iemOp_fninit);
9016 case 0xe4: return FNIEMOP_CALL(iemOp_fnsetpm);
9017 case 0xe5: return FNIEMOP_CALL(iemOp_frstpm);
9018 case 0xe6: return IEMOP_RAISE_INVALID_OPCODE();
9019 case 0xe7: return IEMOP_RAISE_INVALID_OPCODE();
9020 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9021 }
9022 break;
9023 case 5: return FNIEMOP_CALL_1(iemOp_fucomi_stN, bRm);
9024 case 6: return FNIEMOP_CALL_1(iemOp_fcomi_stN, bRm);
9025 case 7: return IEMOP_RAISE_INVALID_OPCODE();
9026 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9027 }
9028 }
9029 else
9030 {
9031 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
9032 {
9033 case 0: return FNIEMOP_CALL_1(iemOp_fild_m32i, bRm);
9034 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m32i,bRm);
9035 case 2: return FNIEMOP_CALL_1(iemOp_fist_m32i, bRm);
9036 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m32i, bRm);
9037 case 4: return IEMOP_RAISE_INVALID_OPCODE();
9038 case 5: return FNIEMOP_CALL_1(iemOp_fld_m80r, bRm);
9039 case 6: return IEMOP_RAISE_INVALID_OPCODE();
9040 case 7: return FNIEMOP_CALL_1(iemOp_fstp_m80r, bRm);
9041 IEM_NOT_REACHED_DEFAULT_CASE_RET();
9042 }
9043 }
9044}
9045
9046
/**
 * Common worker for FPU instructions working on STn and ST0, and storing the
 * result in STn unless IE, DE or ZE was raised.
 *
 * @param   bRm         The ModR/M byte; the low 3 bits select STn.
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_stN_st0, uint8_t, bRm, PFNIEMAIMPLFPUR80, pfnAImpl)
{
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 1);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value2, 2);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    /* Only invoke the assembly worker when both STn and ST0 hold values;
       otherwise signal stack underflow on STn. */
    IEM_MC_IF_TWO_FPUREGS_NOT_EMPTY_REF_R80(pr80Value1, bRm & X86_MODRM_RM_MASK, pr80Value2, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pr80Value2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9078
9079
/** Opcode 0xdc 11/0. */
FNIEMOP_DEF_1(iemOp_fadd_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_stN_st0, "fadd stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fadd_r80_by_r80);
}
9086
9087
/** Opcode 0xdc 11/1. */
FNIEMOP_DEF_1(iemOp_fmul_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_stN_st0, "fmul stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fmul_r80_by_r80);
}
9094
9095
/** Opcode 0xdc 11/4. */
FNIEMOP_DEF_1(iemOp_fsubr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_stN_st0, "fsubr stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsubr_r80_by_r80);
}
9102
9103
/** Opcode 0xdc 11/5. */
FNIEMOP_DEF_1(iemOp_fsub_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_stN_st0, "fsub stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fsub_r80_by_r80);
}
9110
9111
/** Opcode 0xdc 11/6. */
FNIEMOP_DEF_1(iemOp_fdivr_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_stN_st0, "fdivr stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdivr_r80_by_r80);
}
9118
9119
/** Opcode 0xdc 11/7. */
FNIEMOP_DEF_1(iemOp_fdiv_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_stN_st0, "fdiv stN,st0");
    /* Result is stored in STn (see iemOpHlpFpu_stN_st0). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0, bRm, iemAImpl_fdiv_r80_by_r80);
}
9126
9127
/**
 * Common worker for FPU instructions working on ST0 and a 64-bit floating point
 * memory operand, and storing the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnImpl     Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_ST0_m64r, uint8_t, bRm, PFNIEMAIMPLFPUR64, pfnImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Factor2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Factor1, 1);
    IEM_MC_ARG_LOCAL_REF(PRTFLOAT64U, pr64Factor2, r64Factor2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_R64(r64Factor2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Factor1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnImpl, pFpuRes, pr80Factor1, pr64Factor2);
        IEM_MC_STORE_FPU_RESULT_MEM_OP(FpuRes, 0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(0, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9162
9163
/** Opcode 0xdc !11/0. */
FNIEMOP_DEF_1(iemOp_fadd_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fadd_m64r, "fadd m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fadd_r80_by_r64);
}
9170
9171
/** Opcode 0xdc !11/1. */
FNIEMOP_DEF_1(iemOp_fmul_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmul_m64r, "fmul m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fmul_r80_by_r64);
}
9178
9179
/** Opcode 0xdc !11/2. */
FNIEMOP_DEF_1(iemOp_fcom_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcom_st0_m64r, "fcom st0,m64r");

    /* Compares ST0 with a 64-bit real memory operand; only FSW is updated,
       no FPU register is written. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9212
9213
/** Opcode 0xdc !11/3. */
FNIEMOP_DEF_1(iemOp_fcomp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomp_st0_m64r, "fcomp st0,m64r");

    /* Same as FCOM m64r, but pops the register stack afterwards
       (the _THEN_POP FSW/underflow variants below). */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val2, r64Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_R64(r64Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fcom_r80_by_r64, pu16Fsw, pr80Value1, pr64Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9246
9247
/** Opcode 0xdc !11/4. */
FNIEMOP_DEF_1(iemOp_fsub_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsub_m64r, "fsub m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsub_r80_by_r64);
}
9254
9255
/** Opcode 0xdc !11/5. */
FNIEMOP_DEF_1(iemOp_fsubr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubr_m64r, "fsubr m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fsubr_r80_by_r64);
}
9262
9263
/** Opcode 0xdc !11/6. */
FNIEMOP_DEF_1(iemOp_fdiv_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdiv_m64r, "fdiv m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdiv_r80_by_r64);
}
9270
9271
/** Opcode 0xdc !11/7. */
FNIEMOP_DEF_1(iemOp_fdivr_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivr_m64r, "fdivr m64r");
    /* Result is stored in ST0 (see iemOpHlpFpu_ST0_m64r). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_ST0_m64r, bRm, iemAImpl_fdivr_r80_by_r64);
}
9278
9279
/**
 * @opcode 0xdc
 *
 * Decoder for the 0xdc x87 escape byte: register forms operate on STn,ST0
 * (note the swapped sub/div encodings vs 0xd8), memory forms take an m64r.
 */
FNIEMOP_DEF(iemOp_EscF4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdc & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_stN_st0,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_stN_st0,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_stN,      bRm); /* Marked reserved, intel behavior is that of FCOM ST(i). */
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_stN,     bRm); /* Marked reserved, intel behavior is that of FCOMP ST(i). */
            case 4: return FNIEMOP_CALL_1(iemOp_fsubr_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsub_stN_st0,  bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivr_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdiv_stN_st0,  bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fadd_m64r,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmul_m64r,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcom_m64r,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fcomp_m64r, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fsub_m64r,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubr_m64r, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdiv_m64r,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivr_m64r, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9318
9319
/** Opcode 0xdd !11/0.
 * @sa iemOp_fld_m64r */
FNIEMOP_DEF_1(iemOp_fld_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fld_m64r, "fld m64r");

    /* Loads a 64-bit real from memory, converts it to 80-bit and pushes it
       onto the FPU register stack. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(RTFLOAT64U, r64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(PCRTFLOAT64U, pr64Val, r64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_FETCH_MEM_R64(r64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) to be free; otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fld_r64_to_r80, pFpuRes, pr64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9351
9352
/** Opcode 0xdd !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m64i, "fisttp m64i");
    /* Store ST0 as a truncated 64-bit integer and pop the stack. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9387
9388
/** Opcode 0xdd !11/2. */
FNIEMOP_DEF_1(iemOp_fst_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_m64r, "fst m64r");
    /* Store ST0 to memory as a 64-bit real without popping. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9423
9424
9425
9426
/** Opcode 0xdd !11/3. */
FNIEMOP_DEF_1(iemOp_fstp_m64r, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fstp_m64r, "fstp m64r");
    /* Same as FST m64r, but pops the register stack afterwards. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTFLOAT64U, pr64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pr64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_r64, pu16Fsw, pr64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pr64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the negative QNaN (real indefinite). */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_NEG_QNAN_R64_BY_REF(pr64Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pr64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9461
9462
/** Opcode 0xdd !11/4. */
FNIEMOP_DEF_1(iemOp_frstor, uint8_t, bRm)
{
    IEMOP_MNEMONIC(frstor, "frstor m94/108byte");
    /* Restores the full FPU state from memory; deferred to a C implementation
       since the image layout depends on the effective operand size. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_frstor, enmEffOpSize, iEffSeg, GCPtrEffSrc);
    IEM_MC_END();
    return VINF_SUCCESS;
}
9480
9481
/** Opcode 0xdd !11/6. */
FNIEMOP_DEF_1(iemOp_fnsave, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnsave, "fnsave m94/108byte");
    /* Saves the full FPU state to memory; deferred to a C implementation
       since the image layout depends on the effective operand size. */
    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, /*=*/ pVCpu->iem.s.enmEffOpSize, 0);
    IEM_MC_ARG(uint8_t, iEffSeg, 1);
    IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 2);
    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
    IEM_MC_CALL_CIMPL_3(iemCImpl_fnsave, enmEffOpSize, iEffSeg, GCPtrEffDst);
    IEM_MC_END();
    return VINF_SUCCESS;

}
9500
/** Opcode 0xdd !11/7. */
FNIEMOP_DEF_1(iemOp_fnstsw, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fnstsw_m16, "fnstsw m16");

    /* Stores the FPU status word to a 16-bit memory operand; no FPU
       exception check since FNSTSW executes with pending exceptions. */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();

    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp);
    IEM_MC_ADVANCE_RIP();

/** @todo Debug / drop a hint to the verifier that things may differ
 * from REM. Seen 0x4020 (iem) vs 0x4000 (rem) at 0008:801c6b88 booting
 * NT4SP1. (X86_FSW_PE) */
    IEM_MC_END();
    return VINF_SUCCESS;
}
9525
9526
/** Opcode 0xdd 11/0. */
FNIEMOP_DEF_1(iemOp_ffree_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffree_stN, "ffree stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    /* Note! C0, C1, C2 and C3 are documented as undefined, we leave them
             unmodified. */

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Marks the STn tag as empty; the register content is left alone. */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9548
9549
/** Opcode 0xdd 11/2. */
FNIEMOP_DEF_1(iemOp_fst_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fst_st0_stN, "fst st0,stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copies ST0 into STn (FSW portion of the result is zero). */
    IEM_MC_BEGIN(0, 2);
    IEM_MC_LOCAL(PCRTFLOAT80U, pr80Value);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_SET_FPU_RESULT(FpuRes, 0 /*FSW*/, pr80Value);
        IEM_MC_STORE_FPU_RESULT(FpuRes, bRm & X86_MODRM_RM_MASK);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(bRm & X86_MODRM_RM_MASK);
    IEM_MC_ENDIF();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9574
9575
/** Opcode 0xdd 11/4. */
FNIEMOP_DEF_1(iemOp_fucom_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucom_st0_stN, "fucom st0,stN");
    /* Unordered compare ST0 with STn; only FSW is updated, nothing stored. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN, bRm, iemAImpl_fucom_r80_by_r80);
}
9582
9583
/** Opcode 0xdd 11/5. */
FNIEMOP_DEF_1(iemOp_fucomp_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomp_st0_stN, "fucomp st0,stN");
    /* Unordered compare ST0 with STn, then pop the stack. */
    return FNIEMOP_CALL_2(iemOpHlpFpuNoStore_st0_stN_pop, bRm, iemAImpl_fucom_r80_by_r80);
}
9590
9591
/**
 * @opcode 0xdd
 *
 * Decoder for the 0xdd x87 escape byte: register forms (FFREE/FST/FSTP/
 * FUCOM/FUCOMP), memory forms (m64r load/store, FRSTOR/FNSAVE/FNSTSW).
 */
FNIEMOP_DEF(iemOp_EscF5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xdd & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_ffree_stN,   bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN,    bRm); /* Reserved, intel behavior is that of XCHG ST(i). */
            case 2: return FNIEMOP_CALL_1(iemOp_fst_stN,     bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN,    bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fucom_stN_st0,bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fucomp_stN,  bRm);
            case 6: return IEMOP_RAISE_INVALID_OPCODE();
            case 7: return IEMOP_RAISE_INVALID_OPCODE();
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fld_m64r,    bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m64i, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fst_m64r,    bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_fstp_m64r,   bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_frstor,      bRm);
            case 5: return IEMOP_RAISE_INVALID_OPCODE();
            case 6: return FNIEMOP_CALL_1(iemOp_fnsave,      bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fnstsw,      bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9630
9631
/** Opcode 0xde 11/0. */
FNIEMOP_DEF_1(iemOp_faddp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(faddp_stN_st0, "faddp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fadd_r80_by_r80);
}
9638
9639
/** Opcode 0xde 11/1. */
FNIEMOP_DEF_1(iemOp_fmulp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fmulp_stN_st0, "fmulp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fmul_r80_by_r80);
}
9646
9647
/** Opcode 0xde 0xd9. */
FNIEMOP_DEF(iemOp_fcompp)
{
    IEMOP_MNEMONIC(fcompp_st0_stN, "fcompp st0,stN");
    /* Compare ST0 with ST1, then pop the stack twice (no result stored). */
    return FNIEMOP_CALL_1(iemOpHlpFpuNoStore_st0_stN_pop_pop, iemAImpl_fcom_r80_by_r80);
}
9654
9655
/** Opcode 0xde 11/4. */
FNIEMOP_DEF_1(iemOp_fsubrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubrp_stN_st0, "fsubrp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsubr_r80_by_r80);
}
9662
9663
/** Opcode 0xde 11/5. */
FNIEMOP_DEF_1(iemOp_fsubp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fsubp_stN_st0, "fsubp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fsub_r80_by_r80);
}
9670
9671
/** Opcode 0xde 11/6. */
FNIEMOP_DEF_1(iemOp_fdivrp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivrp_stN_st0, "fdivrp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdivr_r80_by_r80);
}
9678
9679
/** Opcode 0xde 11/7. */
FNIEMOP_DEF_1(iemOp_fdivp_stN_st0, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fdivp_stN_st0, "fdivp stN,st0");
    /* Result is stored in STn, then the stack is popped. */
    return FNIEMOP_CALL_2(iemOpHlpFpu_stN_st0_pop, bRm, iemAImpl_fdiv_r80_by_r80);
}
9686
9687
/**
 * Common worker for FPU instructions working on ST0 and an m16i, and storing
 * the result in ST0.
 *
 * @param   bRm         The ModR/M byte (memory form).
 * @param   pfnAImpl    Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_2(iemOpHlpFpu_st0_m16i, uint8_t, bRm, PFNIEMAIMPLFPUI16, pfnAImpl)
{
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    /* Fetch the memory operand before touching the FPU state. */
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(pfnAImpl, pFpuRes, pr80Value1, pi16Val2);
        IEM_MC_STORE_FPU_RESULT(FpuRes, 0);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW(0);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9723
9724
/** Opcode 0xde !11/0. */
FNIEMOP_DEF_1(iemOp_fiadd_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fiadd_m16i, "fiadd m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fiadd_r80_by_i16);
}
9731
9732
/** Opcode 0xde !11/1. */
FNIEMOP_DEF_1(iemOp_fimul_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fimul_m16i, "fimul m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fimul_r80_by_i16);
}
9739
9740
/** Opcode 0xde !11/2. */
FNIEMOP_DEF_1(iemOp_ficom_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficom_st0_m16i, "ficom st0,m16i");

    /* Compares ST0 with a 16-bit integer memory operand; only FSW is
       updated, no FPU register is written. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9773
9774
/** Opcode 0xde !11/3. */
FNIEMOP_DEF_1(iemOp_ficomp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ficomp_st0_m16i, "ficomp st0,m16i");

    /* Same as FICOM m16i, but pops the register stack afterwards. */
    IEM_MC_BEGIN(3, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_LOCAL(int16_t, i16Val2);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value1, 1);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val2, i16Val2, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value1, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_ficom_r80_by_i16, pu16Fsw, pr80Value1, pi16Val2);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9807
9808
/** Opcode 0xde !11/4. */
FNIEMOP_DEF_1(iemOp_fisub_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisub_m16i, "fisub m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisub_r80_by_i16);
}
9815
9816
/** Opcode 0xde !11/5. */
FNIEMOP_DEF_1(iemOp_fisubr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisubr_m16i, "fisubr m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fisubr_r80_by_i16);
}
9823
9824
/** Opcode 0xde !11/6. */
FNIEMOP_DEF_1(iemOp_fidiv_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidiv_m16i, "fidiv m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidiv_r80_by_i16);
}
9831
9832
/** Opcode 0xde !11/7. */
FNIEMOP_DEF_1(iemOp_fidivr_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fidivr_m16i, "fidivr m16i");
    /* Result is stored in ST0 (see iemOpHlpFpu_st0_m16i). */
    return FNIEMOP_CALL_2(iemOpHlpFpu_st0_m16i, bRm, iemAImpl_fidivr_r80_by_i16);
}
9839
9840
/**
 * @opcode 0xde
 *
 * Decoder for the 0xde x87 escape byte: register forms are the pop variants
 * (FADDP..FDIVP, FCOMPP at 0xd9), memory forms operate on an m16i.
 */
FNIEMOP_DEF(iemOp_EscF6)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    /* Record the FPU opcode word (low 3 bits of the escape byte + ModR/M) for FOP. */
    pVCpu->iem.s.uFpuOpcode = RT_MAKE_U16(bRm, 0xde & 0x7);
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_faddp_stN_st0, bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fmulp_stN_st0, bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_fcomp_stN, bRm);
            case 3: if (bRm == 0xd9)
                        return FNIEMOP_CALL(iemOp_fcompp);
                    /* Only the 0xd9 encoding (FCOMPP) is valid in this group. */
                    return IEMOP_RAISE_INVALID_OPCODE();
            case 4: return FNIEMOP_CALL_1(iemOp_fsubrp_stN_st0, bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fsubp_stN_st0, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fdivrp_stN_st0, bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fdivp_stN_st0, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
        {
            case 0: return FNIEMOP_CALL_1(iemOp_fiadd_m16i,  bRm);
            case 1: return FNIEMOP_CALL_1(iemOp_fimul_m16i,  bRm);
            case 2: return FNIEMOP_CALL_1(iemOp_ficom_m16i,  bRm);
            case 3: return FNIEMOP_CALL_1(iemOp_ficomp_m16i, bRm);
            case 4: return FNIEMOP_CALL_1(iemOp_fisub_m16i,  bRm);
            case 5: return FNIEMOP_CALL_1(iemOp_fisubr_m16i, bRm);
            case 6: return FNIEMOP_CALL_1(iemOp_fidiv_m16i,  bRm);
            case 7: return FNIEMOP_CALL_1(iemOp_fidivr_m16i, bRm);
            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
9881
9882
/** Opcode 0xdf 11/0.
 * Undocumented instruction, assumed to work like ffree + fincstp. */
FNIEMOP_DEF_1(iemOp_ffreep_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(ffreep_stN, "ffreep stN");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(0, 0);

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Free the STn tag, then increment TOP (i.e. pop without storing). */
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE();
    IEM_MC_FPU_STACK_FREE(bRm & X86_MODRM_RM_MASK);
    IEM_MC_FPU_STACK_INC_TOP();
    IEM_MC_UPDATE_FPU_OPCODE_IP();

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9904
9905
/** Opcode 0xdf 0xe0. */
FNIEMOP_DEF(iemOp_fnstsw_ax)
{
    IEMOP_MNEMONIC(fnstsw_ax, "fnstsw ax");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    /* Copies the FPU status word into AX; no FPU exception check since
       FNSTSW executes with pending exceptions. */
    IEM_MC_BEGIN(0, 1);
    IEM_MC_LOCAL(uint16_t, u16Tmp);
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ();
    IEM_MC_FETCH_FSW(u16Tmp);
    IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Tmp);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
9922
9923
/** Opcode 0xdf 11/5. */
FNIEMOP_DEF_1(iemOp_fucomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fucomip_st0_stN, "fucomip st0,stN");
    /* NOTE(review): shares the FCOMI assembly worker with FCOMIP; the
       FUCOMI/FCOMI difference (QNaN #IA behavior) is presumably handled
       elsewhere or deemed acceptable here - confirm. fPop=true pops ST0. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9930
9931
/** Opcode 0xdf 11/6. */
FNIEMOP_DEF_1(iemOp_fcomip_st0_stN, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fcomip_st0_stN, "fcomip st0,stN");
    /* Shared C worker for FCOMI/FUCOMI variants; fPop=true pops ST0 after
       the compare. */
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_fcomi_fucomi, bRm & X86_MODRM_RM_MASK, iemAImpl_fcomi_r80_by_r80, true /*fPop*/);
}
9938
9939
/** Opcode 0xdf !11/0. */
FNIEMOP_DEF_1(iemOp_fild_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m16i, "fild m16i");

    /* Loads a 16-bit integer from memory, converts it to 80-bit real and
       pushes it onto the FPU register stack. */
    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int16_t, i16Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int16_t const *, pi16Val, i16Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I16(i16Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push requires ST(7) to be free; otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i16_to_r80, pFpuRes, pi16Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
9971
9972
/** Opcode 0xdf !11/1. */
FNIEMOP_DEF_1(iemOp_fisttp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fisttp_m16i, "fisttp m16i");
    /* Store ST0 as a truncated 16-bit integer and pop the stack. */
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fistt_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Stack underflow: with IM masked, store the integer indefinite value. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10007
10008
/** Opcode 0xdf !11/2.
 * FIST m16i - store ST(0) to memory as a 16-bit integer without popping.
 * Identical structure to FISTP m16i except the FSW update does not pop. */
FNIEMOP_DEF_1(iemOp_fist_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fist_m16i, "fist m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite if FCW.IM, report underflow. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10043
10044
/** Opcode 0xdf !11/3.
 * FISTP m16i - store ST(0) to memory as a 16-bit integer and pop the
 * register stack (same rounding helper as FIST m16i). */
FNIEMOP_DEF_1(iemOp_fistp_m16i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m16i, "fistp m16i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int16_t *, pi16Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi16Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i16, pu16Fsw, pi16Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi16Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite if FCW.IM, report underflow
           and still pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I16_CONST_BY_REF(pi16Dst, INT16_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi16Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10079
10080
/** Opcode 0xdf !11/4.
 * FBLD m80bcd - not implemented yet (stub). */
FNIEMOP_STUB_1(iemOp_fbld_m80d, uint8_t, bRm);
10083
10084
/** Opcode 0xdf !11/5.
 * FILD m64i - load a signed 64-bit integer from memory and push it onto the
 * FPU register stack as an 80-bit value. */
FNIEMOP_DEF_1(iemOp_fild_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fild_m64i, "fild m64i");

    IEM_MC_BEGIN(2, 3);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
    IEM_MC_LOCAL(IEMFPURESULT, FpuRes);
    IEM_MC_LOCAL(int64_t, i64Val);
    IEM_MC_ARG_LOCAL_REF(PIEMFPURESULT, pFpuRes, FpuRes, 0);
    IEM_MC_ARG_LOCAL_REF(int64_t const *, pi64Val, i64Val, 1);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();
    IEM_MC_FETCH_MEM_I64(i64Val, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);

    IEM_MC_PREPARE_FPU_USAGE();
    /* A push needs the register that becomes the new ST(0) - the current
       ST(7) - to be empty; otherwise it is a stack overflow. */
    IEM_MC_IF_FPUREG_IS_EMPTY(7)
        IEM_MC_CALL_FPU_AIMPL_2(iemAImpl_fild_i64_to_r80, pFpuRes, pi64Val);
        IEM_MC_PUSH_FPU_RESULT_MEM_OP(FpuRes, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ELSE()
        IEM_MC_FPU_STACK_PUSH_OVERFLOW_MEM_OP(pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10116
10117
/** Opcode 0xdf !11/6.
 * FBSTP m80bcd - store ST(0) to memory as an 80-bit packed BCD value
 * (iemAImpl_fst_r80_to_d80) and pop the register stack. */
FNIEMOP_DEF_1(iemOp_fbstp_m80d, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(PRTPBCD80U, pd80Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pd80Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store the BCD indefinite value if FCW.IM is set,
           then report underflow and pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10152
10153
/** Opcode 0xdf !11/7.
 * FISTP m64i - store ST(0) to memory as a 64-bit integer and pop the
 * register stack. */
FNIEMOP_DEF_1(iemOp_fistp_m64i, uint8_t, bRm)
{
    IEMOP_MNEMONIC(fistp_m64i, "fistp m64i");
    IEM_MC_BEGIN(3, 2);
    IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
    IEM_MC_LOCAL(uint16_t, u16Fsw);
    IEM_MC_ARG_LOCAL_REF(uint16_t *, pu16Fsw, u16Fsw, 0);
    IEM_MC_ARG(int64_t *, pi64Dst, 1);
    IEM_MC_ARG(PCRTFLOAT80U, pr80Value, 2);

    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
    IEM_MC_MAYBE_RAISE_FPU_XCPT();

    /* Map destination writable up front so access faults precede FPU work. */
    IEM_MC_MEM_MAP(pi64Dst, IEM_ACCESS_DATA_W, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1 /*arg*/);
    IEM_MC_PREPARE_FPU_USAGE();
    IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0)
        IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fist_r80_to_i64, pu16Fsw, pi64Dst, pr80Value);
        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pi64Dst, IEM_ACCESS_DATA_W, u16Fsw);
        IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ELSE()
        /* Empty ST(0): store integer indefinite if FCW.IM, report underflow
           and still pop. */
        IEM_MC_IF_FCW_IM()
            IEM_MC_STORE_MEM_I64_CONST_BY_REF(pi64Dst, INT64_MIN /* (integer indefinite) */);
            IEM_MC_MEM_COMMIT_AND_UNMAP(pi64Dst, IEM_ACCESS_DATA_W);
        IEM_MC_ENDIF();
        IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    IEM_MC_ENDIF();
    IEM_MC_ADVANCE_RIP();

    IEM_MC_END();
    return VINF_SUCCESS;
}
10188
10189
10190/**
10191 * @opcode 0xdf
10192 */
10193FNIEMOP_DEF(iemOp_EscF7)
10194{
10195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
10196 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10197 {
10198 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10199 {
10200 case 0: return FNIEMOP_CALL_1(iemOp_ffreep_stN, bRm); /* ffree + pop afterwards, since forever according to AMD. */
10201 case 1: return FNIEMOP_CALL_1(iemOp_fxch_stN, bRm); /* Reserved, behaves like FXCH ST(i) on intel. */
10202 case 2: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10203 case 3: return FNIEMOP_CALL_1(iemOp_fstp_stN, bRm); /* Reserved, behaves like FSTP ST(i) on intel. */
10204 case 4: if (bRm == 0xe0)
10205 return FNIEMOP_CALL(iemOp_fnstsw_ax);
10206 return IEMOP_RAISE_INVALID_OPCODE();
10207 case 5: return FNIEMOP_CALL_1(iemOp_fucomip_st0_stN, bRm);
10208 case 6: return FNIEMOP_CALL_1(iemOp_fcomip_st0_stN, bRm);
10209 case 7: return IEMOP_RAISE_INVALID_OPCODE();
10210 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10211 }
10212 }
10213 else
10214 {
10215 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
10216 {
10217 case 0: return FNIEMOP_CALL_1(iemOp_fild_m16i, bRm);
10218 case 1: return FNIEMOP_CALL_1(iemOp_fisttp_m16i, bRm);
10219 case 2: return FNIEMOP_CALL_1(iemOp_fist_m16i, bRm);
10220 case 3: return FNIEMOP_CALL_1(iemOp_fistp_m16i, bRm);
10221 case 4: return FNIEMOP_CALL_1(iemOp_fbld_m80d, bRm);
10222 case 5: return FNIEMOP_CALL_1(iemOp_fild_m64i, bRm);
10223 case 6: return FNIEMOP_CALL_1(iemOp_fbstp_m80d, bRm);
10224 case 7: return FNIEMOP_CALL_1(iemOp_fistp_m64i, bRm);
10225 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10226 }
10227 }
10228}
10229
10230
10231/**
10232 * @opcode 0xe0
10233 */
10234FNIEMOP_DEF(iemOp_loopne_Jb)
10235{
10236 IEMOP_MNEMONIC(loopne_Jb, "loopne Jb");
10237 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10239 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10240
10241 switch (pVCpu->iem.s.enmEffAddrMode)
10242 {
10243 case IEMMODE_16BIT:
10244 IEM_MC_BEGIN(0,0);
10245 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10246 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10247 IEM_MC_REL_JMP_S8(i8Imm);
10248 } IEM_MC_ELSE() {
10249 IEM_MC_ADVANCE_RIP();
10250 } IEM_MC_ENDIF();
10251 IEM_MC_END();
10252 return VINF_SUCCESS;
10253
10254 case IEMMODE_32BIT:
10255 IEM_MC_BEGIN(0,0);
10256 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10257 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10258 IEM_MC_REL_JMP_S8(i8Imm);
10259 } IEM_MC_ELSE() {
10260 IEM_MC_ADVANCE_RIP();
10261 } IEM_MC_ENDIF();
10262 IEM_MC_END();
10263 return VINF_SUCCESS;
10264
10265 case IEMMODE_64BIT:
10266 IEM_MC_BEGIN(0,0);
10267 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10268 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_NOT_SET(X86_EFL_ZF) {
10269 IEM_MC_REL_JMP_S8(i8Imm);
10270 } IEM_MC_ELSE() {
10271 IEM_MC_ADVANCE_RIP();
10272 } IEM_MC_ENDIF();
10273 IEM_MC_END();
10274 return VINF_SUCCESS;
10275
10276 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10277 }
10278}
10279
10280
10281/**
10282 * @opcode 0xe1
10283 */
10284FNIEMOP_DEF(iemOp_loope_Jb)
10285{
10286 IEMOP_MNEMONIC(loope_Jb, "loope Jb");
10287 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10288 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10289 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10290
10291 switch (pVCpu->iem.s.enmEffAddrMode)
10292 {
10293 case IEMMODE_16BIT:
10294 IEM_MC_BEGIN(0,0);
10295 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10296 IEM_MC_IF_CX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10297 IEM_MC_REL_JMP_S8(i8Imm);
10298 } IEM_MC_ELSE() {
10299 IEM_MC_ADVANCE_RIP();
10300 } IEM_MC_ENDIF();
10301 IEM_MC_END();
10302 return VINF_SUCCESS;
10303
10304 case IEMMODE_32BIT:
10305 IEM_MC_BEGIN(0,0);
10306 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10307 IEM_MC_IF_ECX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10308 IEM_MC_REL_JMP_S8(i8Imm);
10309 } IEM_MC_ELSE() {
10310 IEM_MC_ADVANCE_RIP();
10311 } IEM_MC_ENDIF();
10312 IEM_MC_END();
10313 return VINF_SUCCESS;
10314
10315 case IEMMODE_64BIT:
10316 IEM_MC_BEGIN(0,0);
10317 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10318 IEM_MC_IF_RCX_IS_NZ_AND_EFL_BIT_SET(X86_EFL_ZF) {
10319 IEM_MC_REL_JMP_S8(i8Imm);
10320 } IEM_MC_ELSE() {
10321 IEM_MC_ADVANCE_RIP();
10322 } IEM_MC_ENDIF();
10323 IEM_MC_END();
10324 return VINF_SUCCESS;
10325
10326 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10327 }
10328}
10329
10330
10331/**
10332 * @opcode 0xe2
10333 */
10334FNIEMOP_DEF(iemOp_loop_Jb)
10335{
10336 IEMOP_MNEMONIC(loop_Jb, "loop Jb");
10337 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10338 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10339 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10340
10341 /** @todo Check out the #GP case if EIP < CS.Base or EIP > CS.Limit when
10342 * using the 32-bit operand size override. How can that be restarted? See
10343 * weird pseudo code in intel manual. */
10344
10345 /** NB: At least Windows for Workgroups 3.11 (NDIS.386) and Windows 95 (NDIS.VXD, IOS)
10346 * use LOOP $-2 to implement NdisStallExecution and other CPU stall APIs. Shortcutting
10347 * the loop causes guest crashes, but when logging it's nice to skip a few million
10348 * lines of useless output. */
10349#if defined(LOG_ENABLED)
10350 if ((LogIs3Enabled() || LogIs4Enabled()) && (-(int8_t)IEM_GET_INSTR_LEN(pVCpu) == i8Imm))
10351 switch (pVCpu->iem.s.enmEffAddrMode)
10352 {
10353 case IEMMODE_16BIT:
10354 IEM_MC_BEGIN(0,0);
10355 IEM_MC_STORE_GREG_U16_CONST(X86_GREG_xCX, 0);
10356 IEM_MC_ADVANCE_RIP();
10357 IEM_MC_END();
10358 return VINF_SUCCESS;
10359
10360 case IEMMODE_32BIT:
10361 IEM_MC_BEGIN(0,0);
10362 IEM_MC_STORE_GREG_U32_CONST(X86_GREG_xCX, 0);
10363 IEM_MC_ADVANCE_RIP();
10364 IEM_MC_END();
10365 return VINF_SUCCESS;
10366
10367 case IEMMODE_64BIT:
10368 IEM_MC_BEGIN(0,0);
10369 IEM_MC_STORE_GREG_U64_CONST(X86_GREG_xCX, 0);
10370 IEM_MC_ADVANCE_RIP();
10371 IEM_MC_END();
10372 return VINF_SUCCESS;
10373
10374 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10375 }
10376#endif
10377
10378 switch (pVCpu->iem.s.enmEffAddrMode)
10379 {
10380 case IEMMODE_16BIT:
10381 IEM_MC_BEGIN(0,0);
10382
10383 IEM_MC_SUB_GREG_U16(X86_GREG_xCX, 1);
10384 IEM_MC_IF_CX_IS_NZ() {
10385 IEM_MC_REL_JMP_S8(i8Imm);
10386 } IEM_MC_ELSE() {
10387 IEM_MC_ADVANCE_RIP();
10388 } IEM_MC_ENDIF();
10389 IEM_MC_END();
10390 return VINF_SUCCESS;
10391
10392 case IEMMODE_32BIT:
10393 IEM_MC_BEGIN(0,0);
10394 IEM_MC_SUB_GREG_U32(X86_GREG_xCX, 1);
10395 IEM_MC_IF_ECX_IS_NZ() {
10396 IEM_MC_REL_JMP_S8(i8Imm);
10397 } IEM_MC_ELSE() {
10398 IEM_MC_ADVANCE_RIP();
10399 } IEM_MC_ENDIF();
10400 IEM_MC_END();
10401 return VINF_SUCCESS;
10402
10403 case IEMMODE_64BIT:
10404 IEM_MC_BEGIN(0,0);
10405 IEM_MC_SUB_GREG_U64(X86_GREG_xCX, 1);
10406 IEM_MC_IF_RCX_IS_NZ() {
10407 IEM_MC_REL_JMP_S8(i8Imm);
10408 } IEM_MC_ELSE() {
10409 IEM_MC_ADVANCE_RIP();
10410 } IEM_MC_ENDIF();
10411 IEM_MC_END();
10412 return VINF_SUCCESS;
10413
10414 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10415 }
10416}
10417
10418
10419/**
10420 * @opcode 0xe3
10421 */
10422FNIEMOP_DEF(iemOp_jecxz_Jb)
10423{
10424 IEMOP_MNEMONIC(jecxz_Jb, "jecxz Jb");
10425 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10427 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10428
10429 switch (pVCpu->iem.s.enmEffAddrMode)
10430 {
10431 case IEMMODE_16BIT:
10432 IEM_MC_BEGIN(0,0);
10433 IEM_MC_IF_CX_IS_NZ() {
10434 IEM_MC_ADVANCE_RIP();
10435 } IEM_MC_ELSE() {
10436 IEM_MC_REL_JMP_S8(i8Imm);
10437 } IEM_MC_ENDIF();
10438 IEM_MC_END();
10439 return VINF_SUCCESS;
10440
10441 case IEMMODE_32BIT:
10442 IEM_MC_BEGIN(0,0);
10443 IEM_MC_IF_ECX_IS_NZ() {
10444 IEM_MC_ADVANCE_RIP();
10445 } IEM_MC_ELSE() {
10446 IEM_MC_REL_JMP_S8(i8Imm);
10447 } IEM_MC_ENDIF();
10448 IEM_MC_END();
10449 return VINF_SUCCESS;
10450
10451 case IEMMODE_64BIT:
10452 IEM_MC_BEGIN(0,0);
10453 IEM_MC_IF_RCX_IS_NZ() {
10454 IEM_MC_ADVANCE_RIP();
10455 } IEM_MC_ELSE() {
10456 IEM_MC_REL_JMP_S8(i8Imm);
10457 } IEM_MC_ENDIF();
10458 IEM_MC_END();
10459 return VINF_SUCCESS;
10460
10461 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10462 }
10463}
10464
10465
/** Opcode 0xe4.
 * IN AL,Ib - read one byte from the immediate port into AL; deferred to the
 * iemCImpl_in C implementation (fImm=true, cbReg=1). */
FNIEMOP_DEF(iemOp_in_AL_Ib)
{
    IEMOP_MNEMONIC(in_AL_Ib, "in AL,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, 1);
}
10474
10475
/** Opcode 0xe5.
 * IN eAX,Ib - read 2 or 4 bytes (per effective operand size) from the
 * immediate port into AX/EAX; deferred to iemCImpl_in. */
FNIEMOP_DEF(iemOp_in_eAX_Ib)
{
    IEMOP_MNEMONIC(in_eAX_Ib, "in eAX,Ib");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_in, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10484
10485
/** Opcode 0xe6.
 * OUT Ib,AL - write AL to the immediate port; deferred to the iemCImpl_out
 * C implementation (fImm=true, cbReg=1). */
FNIEMOP_DEF(iemOp_out_Ib_AL)
{
    IEMOP_MNEMONIC(out_Ib_AL, "out Ib,AL");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, 1);
}
10494
10495
/** Opcode 0xe7.
 * OUT Ib,eAX - write AX/EAX (2 or 4 bytes per effective operand size) to
 * the immediate port; deferred to iemCImpl_out. */
FNIEMOP_DEF(iemOp_out_Ib_eAX)
{
    IEMOP_MNEMONIC(out_Ib_eAX, "out Ib,eAX");
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_out, u8Imm, true /* fImm */, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10504
10505
10506/**
10507 * @opcode 0xe8
10508 */
10509FNIEMOP_DEF(iemOp_call_Jv)
10510{
10511 IEMOP_MNEMONIC(call_Jv, "call Jv");
10512 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10513 switch (pVCpu->iem.s.enmEffOpSize)
10514 {
10515 case IEMMODE_16BIT:
10516 {
10517 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10518 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_16, (int16_t)u16Imm);
10519 }
10520
10521 case IEMMODE_32BIT:
10522 {
10523 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10524 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_32, (int32_t)u32Imm);
10525 }
10526
10527 case IEMMODE_64BIT:
10528 {
10529 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10530 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_call_rel_64, u64Imm);
10531 }
10532
10533 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10534 }
10535}
10536
10537
10538/**
10539 * @opcode 0xe9
10540 */
10541FNIEMOP_DEF(iemOp_jmp_Jv)
10542{
10543 IEMOP_MNEMONIC(jmp_Jv, "jmp Jv");
10544 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10545 switch (pVCpu->iem.s.enmEffOpSize)
10546 {
10547 case IEMMODE_16BIT:
10548 {
10549 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm);
10550 IEM_MC_BEGIN(0, 0);
10551 IEM_MC_REL_JMP_S16(i16Imm);
10552 IEM_MC_END();
10553 return VINF_SUCCESS;
10554 }
10555
10556 case IEMMODE_64BIT:
10557 case IEMMODE_32BIT:
10558 {
10559 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm);
10560 IEM_MC_BEGIN(0, 0);
10561 IEM_MC_REL_JMP_S32(i32Imm);
10562 IEM_MC_END();
10563 return VINF_SUCCESS;
10564 }
10565
10566 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10567 }
10568}
10569
10570
10571/**
10572 * @opcode 0xea
10573 */
10574FNIEMOP_DEF(iemOp_jmp_Ap)
10575{
10576 IEMOP_MNEMONIC(jmp_Ap, "jmp Ap");
10577 IEMOP_HLP_NO_64BIT();
10578
10579 /* Decode the far pointer address and pass it on to the far call C implementation. */
10580 uint32_t offSeg;
10581 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_16BIT)
10582 IEM_OPCODE_GET_NEXT_U32(&offSeg);
10583 else
10584 IEM_OPCODE_GET_NEXT_U16_ZX_U32(&offSeg);
10585 uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(&uSel);
10586 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10587 return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pVCpu->iem.s.enmEffOpSize);
10588}
10589
10590
10591/**
10592 * @opcode 0xeb
10593 */
10594FNIEMOP_DEF(iemOp_jmp_Jb)
10595{
10596 IEMOP_MNEMONIC(jmp_Jb, "jmp Jb");
10597 int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);
10598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10599 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
10600
10601 IEM_MC_BEGIN(0, 0);
10602 IEM_MC_REL_JMP_S8(i8Imm);
10603 IEM_MC_END();
10604 return VINF_SUCCESS;
10605}
10606
10607
/** Opcode 0xec.
 * IN AL,DX - read one byte from the port in DX into AL; deferred to the
 * iemCImpl_in_eAX_DX C implementation (cbReg=1). */
FNIEMOP_DEF(iemOp_in_AL_DX)
{
    IEMOP_MNEMONIC(in_AL_DX, "in AL,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, 1);
}
10615
10616
/** Opcode 0xed.
 * IN eAX,DX - read 2 or 4 bytes (per effective operand size) from the port
 * in DX into AX/EAX; deferred to iemCImpl_in_eAX_DX. */
FNIEMOP_DEF(iemOp_in_eAX_DX)
{
    IEMOP_MNEMONIC(in_eAX_DX, "in eAX,DX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_in_eAX_DX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10624
10625
/** Opcode 0xee.
 * OUT DX,AL - write AL to the port in DX; deferred to the
 * iemCImpl_out_DX_eAX C implementation (cbReg=1). */
FNIEMOP_DEF(iemOp_out_DX_AL)
{
    IEMOP_MNEMONIC(out_DX_AL, "out DX,AL");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, 1);
}
10633
10634
/** Opcode 0xef.
 * OUT DX,eAX - write AX/EAX (2 or 4 bytes per effective operand size) to
 * the port in DX; deferred to iemCImpl_out_DX_eAX. */
FNIEMOP_DEF(iemOp_out_DX_eAX)
{
    IEMOP_MNEMONIC(out_DX_eAX, "out DX,eAX");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_out_DX_eAX, pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT ? 2 : 4);
}
10642
10643
10644/**
10645 * @opcode 0xf0
10646 */
10647FNIEMOP_DEF(iemOp_lock)
10648{
10649 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
10650 if (!pVCpu->iem.s.fDisregardLock)
10651 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
10652
10653 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10654 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10655}
10656
10657
10658/**
10659 * @opcode 0xf1
10660 */
10661FNIEMOP_DEF(iemOp_int1)
10662{
10663 IEMOP_MNEMONIC(int1, "int1"); /* icebp */
10664 /** @todo Does not generate #UD on 286, or so they say... Was allegedly a
10665 * prefix byte on 8086 and/or/maybe 80286 without meaning according to the 286
10666 * LOADALL memo. Needs some testing. */
10667 IEMOP_HLP_MIN_386();
10668 /** @todo testcase! */
10669 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
10670}
10671
10672
10673/**
10674 * @opcode 0xf2
10675 */
10676FNIEMOP_DEF(iemOp_repne)
10677{
10678 /* This overrides any previous REPE prefix. */
10679 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPZ;
10680 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repne");
10681 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPNZ;
10682
10683 /* For the 4 entry opcode tables, REPNZ overrides any previous
10684 REPZ and operand size prefixes. */
10685 pVCpu->iem.s.idxPrefix = 3;
10686
10687 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10688 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10689}
10690
10691
10692/**
10693 * @opcode 0xf3
10694 */
10695FNIEMOP_DEF(iemOp_repe)
10696{
10697 /* This overrides any previous REPNE prefix. */
10698 pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REPNZ;
10699 IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("repe");
10700 pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_REPZ;
10701
10702 /* For the 4 entry opcode tables, REPNZ overrides any previous
10703 REPNZ and operand size prefixes. */
10704 pVCpu->iem.s.idxPrefix = 2;
10705
10706 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
10707 return FNIEMOP_CALL(g_apfnOneByteMap[b]);
10708}
10709
10710
10711/**
10712 * @opcode 0xf4
10713 */
10714FNIEMOP_DEF(iemOp_hlt)
10715{
10716 IEMOP_MNEMONIC(hlt, "hlt");
10717 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10718 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_hlt);
10719}
10720
10721
10722/**
10723 * @opcode 0xf5
10724 */
10725FNIEMOP_DEF(iemOp_cmc)
10726{
10727 IEMOP_MNEMONIC(cmc, "cmc");
10728 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10729 IEM_MC_BEGIN(0, 0);
10730 IEM_MC_FLIP_EFL_BIT(X86_EFL_CF);
10731 IEM_MC_ADVANCE_RIP();
10732 IEM_MC_END();
10733 return VINF_SUCCESS;
10734}
10735
10736
10737/**
10738 * Common implementation of 'inc/dec/not/neg Eb'.
10739 *
10740 * @param bRm The RM byte.
10741 * @param pImpl The instruction implementation.
10742 */
10743FNIEMOP_DEF_2(iemOpCommonUnaryEb, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10744{
10745 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10746 {
10747 /* register access */
10748 IEM_MC_BEGIN(2, 0);
10749 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10750 IEM_MC_ARG(uint32_t *, pEFlags, 1);
10751 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10752 IEM_MC_REF_EFLAGS(pEFlags);
10753 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10754 IEM_MC_ADVANCE_RIP();
10755 IEM_MC_END();
10756 }
10757 else
10758 {
10759 /* memory access. */
10760 IEM_MC_BEGIN(2, 2);
10761 IEM_MC_ARG(uint8_t *, pu8Dst, 0);
10762 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10763 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10764
10765 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10766 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10767 IEM_MC_FETCH_EFLAGS(EFlags);
10768 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10769 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU8, pu8Dst, pEFlags);
10770 else
10771 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU8, pu8Dst, pEFlags);
10772
10773 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW);
10774 IEM_MC_COMMIT_EFLAGS(EFlags);
10775 IEM_MC_ADVANCE_RIP();
10776 IEM_MC_END();
10777 }
10778 return VINF_SUCCESS;
10779}
10780
10781
10782/**
10783 * Common implementation of 'inc/dec/not/neg Ev'.
10784 *
10785 * @param bRm The RM byte.
10786 * @param pImpl The instruction implementation.
10787 */
10788FNIEMOP_DEF_2(iemOpCommonUnaryEv, uint8_t, bRm, PCIEMOPUNARYSIZES, pImpl)
10789{
10790 /* Registers are handled by a common worker. */
10791 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10792 return FNIEMOP_CALL_2(iemOpCommonUnaryGReg, pImpl, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10793
10794 /* Memory we do here. */
10795 switch (pVCpu->iem.s.enmEffOpSize)
10796 {
10797 case IEMMODE_16BIT:
10798 IEM_MC_BEGIN(2, 2);
10799 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10800 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10801 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10802
10803 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10804 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10805 IEM_MC_FETCH_EFLAGS(EFlags);
10806 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10807 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU16, pu16Dst, pEFlags);
10808 else
10809 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU16, pu16Dst, pEFlags);
10810
10811 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW);
10812 IEM_MC_COMMIT_EFLAGS(EFlags);
10813 IEM_MC_ADVANCE_RIP();
10814 IEM_MC_END();
10815 return VINF_SUCCESS;
10816
10817 case IEMMODE_32BIT:
10818 IEM_MC_BEGIN(2, 2);
10819 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10820 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10821 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10822
10823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10824 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10825 IEM_MC_FETCH_EFLAGS(EFlags);
10826 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10827 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU32, pu32Dst, pEFlags);
10828 else
10829 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU32, pu32Dst, pEFlags);
10830
10831 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW);
10832 IEM_MC_COMMIT_EFLAGS(EFlags);
10833 IEM_MC_ADVANCE_RIP();
10834 IEM_MC_END();
10835 return VINF_SUCCESS;
10836
10837 case IEMMODE_64BIT:
10838 IEM_MC_BEGIN(2, 2);
10839 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10840 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 1);
10841 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10842
10843 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
10844 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10845 IEM_MC_FETCH_EFLAGS(EFlags);
10846 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
10847 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnNormalU64, pu64Dst, pEFlags);
10848 else
10849 IEM_MC_CALL_VOID_AIMPL_2(pImpl->pfnLockedU64, pu64Dst, pEFlags);
10850
10851 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW);
10852 IEM_MC_COMMIT_EFLAGS(EFlags);
10853 IEM_MC_ADVANCE_RIP();
10854 IEM_MC_END();
10855 return VINF_SUCCESS;
10856
10857 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10858 }
10859}
10860
10861
/** Opcode 0xf6 /0.
 * TEST Eb,Ib - AND without writing back the destination; only EFLAGS are
 * updated, so the memory operand is mapped read-only. */
FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
{
    IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
    IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,       pu8Dst,                 0);
        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm,       1);
        IEM_MC_ARG(uint32_t *,      pEFlags,                2);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,       pu8Dst,          0);
        IEM_MC_ARG(uint8_t,         u8Src,           1);
        IEM_MC_ARG_LOCAL_EFLAGS(    pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR,       GCPtrEffDst);

        /* cbImm=1: one immediate byte follows the ModR/M bytes. */
        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
        IEM_MC_ASSIGN(u8Src, u8Imm);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        /* Read-only mapping: TEST never writes the destination. */
        IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_EFLAGS(EFlags);
        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_R);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
10908
10909
10910/** Opcode 0xf7 /0. */
10911FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
10912{
10913 IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
10914 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
10915
10916 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
10917 {
10918 /* register access */
10919 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10920 switch (pVCpu->iem.s.enmEffOpSize)
10921 {
10922 case IEMMODE_16BIT:
10923 {
10924 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10925 IEM_MC_BEGIN(3, 0);
10926 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10927 IEM_MC_ARG_CONST(uint16_t, u16Src,/*=*/u16Imm, 1);
10928 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10929 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10930 IEM_MC_REF_EFLAGS(pEFlags);
10931 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10932 IEM_MC_ADVANCE_RIP();
10933 IEM_MC_END();
10934 return VINF_SUCCESS;
10935 }
10936
10937 case IEMMODE_32BIT:
10938 {
10939 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
10940 IEM_MC_BEGIN(3, 0);
10941 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
10942 IEM_MC_ARG_CONST(uint32_t, u32Src,/*=*/u32Imm, 1);
10943 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10944 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10945 IEM_MC_REF_EFLAGS(pEFlags);
10946 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
10947 /* No clearing the high dword here - test doesn't write back the result. */
10948 IEM_MC_ADVANCE_RIP();
10949 IEM_MC_END();
10950 return VINF_SUCCESS;
10951 }
10952
10953 case IEMMODE_64BIT:
10954 {
10955 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
10956 IEM_MC_BEGIN(3, 0);
10957 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
10958 IEM_MC_ARG_CONST(uint64_t, u64Src,/*=*/u64Imm, 1);
10959 IEM_MC_ARG(uint32_t *, pEFlags, 2);
10960 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
10961 IEM_MC_REF_EFLAGS(pEFlags);
10962 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
10963 IEM_MC_ADVANCE_RIP();
10964 IEM_MC_END();
10965 return VINF_SUCCESS;
10966 }
10967
10968 IEM_NOT_REACHED_DEFAULT_CASE_RET();
10969 }
10970 }
10971 else
10972 {
10973 /* memory access. */
10974 switch (pVCpu->iem.s.enmEffOpSize)
10975 {
10976 case IEMMODE_16BIT:
10977 {
10978 IEM_MC_BEGIN(3, 2);
10979 IEM_MC_ARG(uint16_t *, pu16Dst, 0);
10980 IEM_MC_ARG(uint16_t, u16Src, 1);
10981 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
10982 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
10983
10984 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
10985 uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
10986 IEM_MC_ASSIGN(u16Src, u16Imm);
10987 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
10988 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
10989 IEM_MC_FETCH_EFLAGS(EFlags);
10990 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
10991
10992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_R);
10993 IEM_MC_COMMIT_EFLAGS(EFlags);
10994 IEM_MC_ADVANCE_RIP();
10995 IEM_MC_END();
10996 return VINF_SUCCESS;
10997 }
10998
10999 case IEMMODE_32BIT:
11000 {
11001 IEM_MC_BEGIN(3, 2);
11002 IEM_MC_ARG(uint32_t *, pu32Dst, 0);
11003 IEM_MC_ARG(uint32_t, u32Src, 1);
11004 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11005 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11006
11007 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
11008 uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
11009 IEM_MC_ASSIGN(u32Src, u32Imm);
11010 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11011 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11012 IEM_MC_FETCH_EFLAGS(EFlags);
11013 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
11014
11015 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_R);
11016 IEM_MC_COMMIT_EFLAGS(EFlags);
11017 IEM_MC_ADVANCE_RIP();
11018 IEM_MC_END();
11019 return VINF_SUCCESS;
11020 }
11021
11022 case IEMMODE_64BIT:
11023 {
11024 IEM_MC_BEGIN(3, 2);
11025 IEM_MC_ARG(uint64_t *, pu64Dst, 0);
11026 IEM_MC_ARG(uint64_t, u64Src, 1);
11027 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
11028 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11029
11030 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
11031 uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
11032 IEM_MC_ASSIGN(u64Src, u64Imm);
11033 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11034 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_R, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
11035 IEM_MC_FETCH_EFLAGS(EFlags);
11036 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
11037
11038 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_R);
11039 IEM_MC_COMMIT_EFLAGS(EFlags);
11040 IEM_MC_ADVANCE_RIP();
11041 IEM_MC_END();
11042 return VINF_SUCCESS;
11043 }
11044
11045 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11046 }
11047 }
11048}
11049
11050
11051/** Opcode 0xf6 /4, /5, /6 and /7. */
/** Opcode 0xf6 /4, /5, /6 and /7.
 *
 * Common worker for the byte-sized group 3 multiply/divide instructions
 * (MUL, IMUL, DIV, IDIV Eb).  The implicit operand is AX (AL as input for
 * mul/imul, AX for div/idiv; AX receives the result).  The assembly helper
 * returns non-zero to indicate a \#DE (divide error) condition, e.g.
 * division by zero or quotient overflow.
 *
 * @param   bRm     The ModR/M byte (already fetched).
 * @param   pfnU8   The 8-bit multiply/divide assembly implementation.
 */
FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEb, uint8_t, bRm, PFNIEMAIMPLMULDIVU8, pfnU8)
{
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* register access */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_FETCH_GREG_U8(u8Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        /* Non-zero rc from the helper means a divide error must be raised. */
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    else
    {
        /* memory access. */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint16_t *, pu16AX, 0);
        IEM_MC_ARG(uint8_t, u8Value, 1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
        IEM_MC_LOCAL(int32_t, rc);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_AIMPL_3(rc, pfnU8, pu16AX, u8Value, pEFlags);
        IEM_MC_IF_LOCAL_IS_Z(rc) {
            IEM_MC_ADVANCE_RIP();
        } IEM_MC_ELSE() {
            IEM_MC_RAISE_DIVIDE_ERROR();
        } IEM_MC_ENDIF();

        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
11102
11103
11104/** Opcode 0xf7 /4, /5, /6 and /7. */
11105FNIEMOP_DEF_2(iemOpCommonGrp3MulDivEv, uint8_t, bRm, PCIEMOPMULDIVSIZES, pImpl)
11106{
11107 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
11108
11109 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
11110 {
11111 /* register access */
11112 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11113 switch (pVCpu->iem.s.enmEffOpSize)
11114 {
11115 case IEMMODE_16BIT:
11116 {
11117 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11118 IEM_MC_BEGIN(4, 1);
11119 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11120 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11121 IEM_MC_ARG(uint16_t, u16Value, 2);
11122 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11123 IEM_MC_LOCAL(int32_t, rc);
11124
11125 IEM_MC_FETCH_GREG_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11126 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11127 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11128 IEM_MC_REF_EFLAGS(pEFlags);
11129 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11130 IEM_MC_IF_LOCAL_IS_Z(rc) {
11131 IEM_MC_ADVANCE_RIP();
11132 } IEM_MC_ELSE() {
11133 IEM_MC_RAISE_DIVIDE_ERROR();
11134 } IEM_MC_ENDIF();
11135
11136 IEM_MC_END();
11137 return VINF_SUCCESS;
11138 }
11139
11140 case IEMMODE_32BIT:
11141 {
11142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11143 IEM_MC_BEGIN(4, 1);
11144 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11145 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11146 IEM_MC_ARG(uint32_t, u32Value, 2);
11147 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11148 IEM_MC_LOCAL(int32_t, rc);
11149
11150 IEM_MC_FETCH_GREG_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11151 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11152 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11153 IEM_MC_REF_EFLAGS(pEFlags);
11154 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11155 IEM_MC_IF_LOCAL_IS_Z(rc) {
11156 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11157 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11158 IEM_MC_ADVANCE_RIP();
11159 } IEM_MC_ELSE() {
11160 IEM_MC_RAISE_DIVIDE_ERROR();
11161 } IEM_MC_ENDIF();
11162
11163 IEM_MC_END();
11164 return VINF_SUCCESS;
11165 }
11166
11167 case IEMMODE_64BIT:
11168 {
11169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11170 IEM_MC_BEGIN(4, 1);
11171 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11172 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11173 IEM_MC_ARG(uint64_t, u64Value, 2);
11174 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11175 IEM_MC_LOCAL(int32_t, rc);
11176
11177 IEM_MC_FETCH_GREG_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
11178 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11179 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11180 IEM_MC_REF_EFLAGS(pEFlags);
11181 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11182 IEM_MC_IF_LOCAL_IS_Z(rc) {
11183 IEM_MC_ADVANCE_RIP();
11184 } IEM_MC_ELSE() {
11185 IEM_MC_RAISE_DIVIDE_ERROR();
11186 } IEM_MC_ENDIF();
11187
11188 IEM_MC_END();
11189 return VINF_SUCCESS;
11190 }
11191
11192 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11193 }
11194 }
11195 else
11196 {
11197 /* memory access. */
11198 switch (pVCpu->iem.s.enmEffOpSize)
11199 {
11200 case IEMMODE_16BIT:
11201 {
11202 IEM_MC_BEGIN(4, 2);
11203 IEM_MC_ARG(uint16_t *, pu16AX, 0);
11204 IEM_MC_ARG(uint16_t *, pu16DX, 1);
11205 IEM_MC_ARG(uint16_t, u16Value, 2);
11206 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11207 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11208 IEM_MC_LOCAL(int32_t, rc);
11209
11210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11212 IEM_MC_FETCH_MEM_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11213 IEM_MC_REF_GREG_U16(pu16AX, X86_GREG_xAX);
11214 IEM_MC_REF_GREG_U16(pu16DX, X86_GREG_xDX);
11215 IEM_MC_REF_EFLAGS(pEFlags);
11216 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU16, pu16AX, pu16DX, u16Value, pEFlags);
11217 IEM_MC_IF_LOCAL_IS_Z(rc) {
11218 IEM_MC_ADVANCE_RIP();
11219 } IEM_MC_ELSE() {
11220 IEM_MC_RAISE_DIVIDE_ERROR();
11221 } IEM_MC_ENDIF();
11222
11223 IEM_MC_END();
11224 return VINF_SUCCESS;
11225 }
11226
11227 case IEMMODE_32BIT:
11228 {
11229 IEM_MC_BEGIN(4, 2);
11230 IEM_MC_ARG(uint32_t *, pu32AX, 0);
11231 IEM_MC_ARG(uint32_t *, pu32DX, 1);
11232 IEM_MC_ARG(uint32_t, u32Value, 2);
11233 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11234 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11235 IEM_MC_LOCAL(int32_t, rc);
11236
11237 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11238 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11239 IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11240 IEM_MC_REF_GREG_U32(pu32AX, X86_GREG_xAX);
11241 IEM_MC_REF_GREG_U32(pu32DX, X86_GREG_xDX);
11242 IEM_MC_REF_EFLAGS(pEFlags);
11243 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU32, pu32AX, pu32DX, u32Value, pEFlags);
11244 IEM_MC_IF_LOCAL_IS_Z(rc) {
11245 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32AX);
11246 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32DX);
11247 IEM_MC_ADVANCE_RIP();
11248 } IEM_MC_ELSE() {
11249 IEM_MC_RAISE_DIVIDE_ERROR();
11250 } IEM_MC_ENDIF();
11251
11252 IEM_MC_END();
11253 return VINF_SUCCESS;
11254 }
11255
11256 case IEMMODE_64BIT:
11257 {
11258 IEM_MC_BEGIN(4, 2);
11259 IEM_MC_ARG(uint64_t *, pu64AX, 0);
11260 IEM_MC_ARG(uint64_t *, pu64DX, 1);
11261 IEM_MC_ARG(uint64_t, u64Value, 2);
11262 IEM_MC_ARG(uint32_t *, pEFlags, 3);
11263 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
11264 IEM_MC_LOCAL(int32_t, rc);
11265
11266 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
11267 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11268 IEM_MC_FETCH_MEM_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
11269 IEM_MC_REF_GREG_U64(pu64AX, X86_GREG_xAX);
11270 IEM_MC_REF_GREG_U64(pu64DX, X86_GREG_xDX);
11271 IEM_MC_REF_EFLAGS(pEFlags);
11272 IEM_MC_CALL_AIMPL_4(rc, pImpl->pfnU64, pu64AX, pu64DX, u64Value, pEFlags);
11273 IEM_MC_IF_LOCAL_IS_Z(rc) {
11274 IEM_MC_ADVANCE_RIP();
11275 } IEM_MC_ELSE() {
11276 IEM_MC_RAISE_DIVIDE_ERROR();
11277 } IEM_MC_ENDIF();
11278
11279 IEM_MC_END();
11280 return VINF_SUCCESS;
11281 }
11282
11283 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11284 }
11285 }
11286}
11287
11288/**
11289 * @opcode 0xf6
11290 */
/**
 * @opcode 0xf6
 *
 * Group 3 with a byte-sized operand: dispatches on the ModR/M reg field to
 * TEST/NOT/NEG/MUL/IMUL/DIV/IDIV Eb (/1 is undefined and raises \#UD).
 */
FNIEMOP_DEF(iemOp_Grp3_Eb)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Eb, "not Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Eb, "neg Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Eb, "mul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_u8_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Eb, "imul Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_u8_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Eb, "div Eb");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_u8_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Eb, "idiv Eb");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEb, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_u8_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11326
11327
11328/**
11329 * @opcode 0xf7
11330 */
/**
 * @opcode 0xf7
 *
 * Group 3 with a word/dword/qword operand: dispatches on the ModR/M reg
 * field to TEST/NOT/NEG/MUL/IMUL/DIV/IDIV Ev (/1 is undefined, \#UD).
 */
FNIEMOP_DEF(iemOp_Grp3_Ev)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
        case 1:
/** @todo testcase: Present on <=386, most 486 (not early), Pentiums, and current CPUs too. CPUUNDOC.EXE */
            return IEMOP_RAISE_INVALID_OPCODE();
        case 2:
            IEMOP_MNEMONIC(not_Ev, "not Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_not);
        case 3:
            IEMOP_MNEMONIC(neg_Ev, "neg Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_neg);
        case 4:
            IEMOP_MNEMONIC(mul_Ev, "mul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_mul_eflags));
        case 5:
            IEMOP_MNEMONIC(imul_Ev, "imul Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_eflags));
        case 6:
            IEMOP_MNEMONIC(div_Ev, "div Ev");
            /* div/idiv additionally leave OF and CF undefined. */
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_div_eflags));
        case 7:
            IEMOP_MNEMONIC(idiv_Ev, "idiv Ev");
            IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF | X86_EFL_OF | X86_EFL_CF);
            return FNIEMOP_CALL_2(iemOpCommonGrp3MulDivEv, bRm, IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_idiv_eflags));
        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11366
11367
11368/**
11369 * @opcode 0xf8
11370 */
/**
 * @opcode 0xf8
 *
 * CLC - clear the carry flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_clc)
{
    IEMOP_MNEMONIC(clc, "clc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11381
11382
11383/**
11384 * @opcode 0xf9
11385 */
/**
 * @opcode 0xf9
 *
 * STC - set the carry flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_stc)
{
    IEMOP_MNEMONIC(stc, "stc");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_CF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11396
11397
11398/**
11399 * @opcode 0xfa
11400 */
/**
 * @opcode 0xfa
 *
 * CLI - clear the interrupt flag.  Deferred to a C implementation since it
 * involves privilege/IOPL checks beyond a simple flag update.
 */
FNIEMOP_DEF(iemOp_cli)
{
    IEMOP_MNEMONIC(cli, "cli");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cli);
}
11407
11408
/**
 * @opcode 0xfb
 *
 * STI - set the interrupt flag.  Deferred to a C implementation since it
 * involves privilege/IOPL checks and interrupt shadowing semantics.
 */
FNIEMOP_DEF(iemOp_sti)
{
    IEMOP_MNEMONIC(sti, "sti");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sti);
}
11415
11416
11417/**
11418 * @opcode 0xfc
11419 */
/**
 * @opcode 0xfc
 *
 * CLD - clear the direction flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_cld)
{
    IEMOP_MNEMONIC(cld, "cld");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_CLEAR_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11430
11431
11432/**
11433 * @opcode 0xfd
11434 */
/**
 * @opcode 0xfd
 *
 * STD - set the direction flag in EFLAGS.
 */
FNIEMOP_DEF(iemOp_std)
{
    IEMOP_MNEMONIC(std, "std");
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
    IEM_MC_BEGIN(0, 0);
    IEM_MC_SET_EFL_BIT(X86_EFL_DF);
    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
11445
11446
11447/**
11448 * @opcode 0xfe
11449 */
/**
 * @opcode 0xfe
 *
 * Group 4: INC/DEC Eb selected by the ModR/M reg field; reg values 2..7
 * are undefined and raise \#UD.
 */
FNIEMOP_DEF(iemOp_Grp4)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Eb, "inc Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Eb, "dec Eb");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEb, bRm, &g_iemAImpl_dec);
        default:
            IEMOP_MNEMONIC(grp4_ud, "grp4-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
}
11466
11467
11468/**
11469 * Opcode 0xff /2.
11470 * @param bRm The RM byte.
11471 */
/**
 * Opcode 0xff /2.  Near indirect call: the target RIP comes from a register
 * or memory operand of the effective operand size; the actual push of the
 * return address and RIP update is done by the iemCImpl_call_NN worker.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_calln_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(calln_Ev, "calln Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 0);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint16_t, u16Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_16, u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint32_t, u32Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_32, u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(1, 1);
                IEM_MC_ARG(uint64_t, u64Target, 0);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_CALL_CIMPL_1(iemCImpl_call_64, u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11552
/** C implementation type for a far branch (callf/jmpf): takes the new CS
 *  selector, the segment offset and the effective operand size. */
typedef IEM_CIMPL_DECL_TYPE_3(FNIEMCIMPLFARBRANCH, uint16_t, uSel, uint64_t, offSeg, IEMMODE, enmOpSize);
11554
11555FNIEMOP_DEF_2(iemOpHlp_Grp5_far_Ep, uint8_t, bRm, FNIEMCIMPLFARBRANCH *, pfnCImpl)
11556{
11557 /* Registers? How?? */
11558 if (RT_LIKELY((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)))
11559 { /* likely */ }
11560 else
11561 return IEMOP_RAISE_INVALID_OPCODE(); /* callf eax is not legal */
11562
11563 /* Far pointer loaded from memory. */
11564 switch (pVCpu->iem.s.enmEffOpSize)
11565 {
11566 case IEMMODE_16BIT:
11567 IEM_MC_BEGIN(3, 1);
11568 IEM_MC_ARG(uint16_t, u16Sel, 0);
11569 IEM_MC_ARG(uint16_t, offSeg, 1);
11570 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11571 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11572 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11573 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11574 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11575 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2);
11576 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11577 IEM_MC_END();
11578 return VINF_SUCCESS;
11579
11580 case IEMMODE_64BIT:
11581 /** @todo testcase: AMD does not seem to believe in the case (see bs-cpu-xcpt-1)
11582 * and will apparently ignore REX.W, at least for the jmp far qword [rsp]
11583 * and call far qword [rsp] encodings. */
11584 if (!IEM_IS_GUEST_CPU_AMD(pVCpu))
11585 {
11586 IEM_MC_BEGIN(3, 1);
11587 IEM_MC_ARG(uint16_t, u16Sel, 0);
11588 IEM_MC_ARG(uint64_t, offSeg, 1);
11589 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
11590 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11591 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11592 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11593 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11594 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8);
11595 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11596 IEM_MC_END();
11597 return VINF_SUCCESS;
11598 }
11599 /* AMD falls thru. */
11600 RT_FALL_THRU();
11601
11602 case IEMMODE_32BIT:
11603 IEM_MC_BEGIN(3, 1);
11604 IEM_MC_ARG(uint16_t, u16Sel, 0);
11605 IEM_MC_ARG(uint32_t, offSeg, 1);
11606 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
11607 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
11608 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
11609 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
11610 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
11611 IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4);
11612 IEM_MC_CALL_CIMPL_3(pfnCImpl, u16Sel, offSeg, enmEffOpSize);
11613 IEM_MC_END();
11614 return VINF_SUCCESS;
11615
11616 IEM_NOT_REACHED_DEFAULT_CASE_RET();
11617 }
11618}
11619
11620
11621/**
11622 * Opcode 0xff /3.
11623 * @param bRm The RM byte.
11624 */
/**
 * Opcode 0xff /3.  Far indirect call: loads sel:offset from memory via the
 * common far-branch worker and hands it to iemCImpl_callf.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_callf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(callf_Ep, "callf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_callf);
}
11630
11631
11632/**
11633 * Opcode 0xff /4.
11634 * @param bRm The RM byte.
11635 */
/**
 * Opcode 0xff /4.  Near indirect jump: the new RIP comes from a register or
 * memory operand of the effective operand size and is set directly (no
 * return address is pushed).
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpn_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpn_Ev, "jmpn Ev");
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();

    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* The new RIP is taken from a register. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_FETCH_GREG_U16(u16Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_FETCH_GREG_U32(u32Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 1);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_FETCH_GREG_U64(u64Target, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
    else
    {
        /* The new RIP is taken from a memory location. */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint16_t, u16Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U16(u16Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint32_t, u32Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U32(u32Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(0, 2);
                IEM_MC_LOCAL(uint64_t, u64Target);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
                IEM_MC_SET_RIP_U64(u64Target);
                IEM_MC_END()
                return VINF_SUCCESS;

            IEM_NOT_REACHED_DEFAULT_CASE_RET();
        }
    }
}
11716
11717
11718/**
11719 * Opcode 0xff /5.
11720 * @param bRm The RM byte.
11721 */
/**
 * Opcode 0xff /5.  Far indirect jump: loads sel:offset from memory via the
 * common far-branch worker and hands it to iemCImpl_FarJmp.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
{
    IEMOP_MNEMONIC(jmpf_Ep, "jmpf Ep");
    return FNIEMOP_CALL_2(iemOpHlp_Grp5_far_Ep, bRm, iemCImpl_FarJmp);
}
11727
11728
11729/**
11730 * Opcode 0xff /6.
11731 * @param bRm The RM byte.
11732 */
/**
 * Opcode 0xff /6.  PUSH Ev: pushes a word/dword/qword register or memory
 * operand; the register form is delegated to the common push-GReg worker.
 * @param bRm The RM byte.
 */
FNIEMOP_DEF_1(iemOp_Grp5_push_Ev, uint8_t, bRm)
{
    IEMOP_MNEMONIC(push_Ev, "push Ev");

    /* Registers are handled by a common worker. */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
        return FNIEMOP_CALL_1(iemOpCommonPushGReg, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);

    /* Memory we do here. */
    IEMOP_HLP_DEFAULT_64BIT_OP_SIZE();
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint16_t, u16Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U16(u16Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_32BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint32_t, u32Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U32(u32Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        case IEMMODE_64BIT:
            IEM_MC_BEGIN(0, 2);
            IEM_MC_LOCAL(uint64_t, u64Src);
            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
            IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
            IEM_MC_PUSH_U64(u64Src);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
11784
11785
11786/**
11787 * @opcode 0xff
11788 */
/**
 * @opcode 0xff
 *
 * Group 5: dispatches on the ModR/M reg field to INC/DEC Ev, near/far call,
 * near/far jump and PUSH Ev; /7 is undefined and raises \#UD.
 */
FNIEMOP_DEF(iemOp_Grp5)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
    switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)
    {
        case 0:
            IEMOP_MNEMONIC(inc_Ev, "inc Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_inc);
        case 1:
            IEMOP_MNEMONIC(dec_Ev, "dec Ev");
            return FNIEMOP_CALL_2(iemOpCommonUnaryEv, bRm, &g_iemAImpl_dec);
        case 2:
            return FNIEMOP_CALL_1(iemOp_Grp5_calln_Ev, bRm);
        case 3:
            return FNIEMOP_CALL_1(iemOp_Grp5_callf_Ep, bRm);
        case 4:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpn_Ev, bRm);
        case 5:
            return FNIEMOP_CALL_1(iemOp_Grp5_jmpf_Ep, bRm);
        case 6:
            return FNIEMOP_CALL_1(iemOp_Grp5_push_Ev, bRm);
        case 7:
            IEMOP_MNEMONIC(grp5_ud, "grp5-ud");
            return IEMOP_RAISE_INVALID_OPCODE();
    }
    /* The 3-bit reg field is exhaustively covered above. */
    AssertFailedReturn(VERR_IEM_IPE_3);
}
11816
11817
11818
/**
 * One-byte opcode dispatch table (0x00..0xff).
 *
 * Indexed by the first opcode byte; each entry is the decoder/emulator
 * function for that instruction, prefix or escape byte.
 */
const PFNIEMOP g_apfnOneByteMap[256] =
{
    /* 0x00 */ iemOp_add_Eb_Gb, iemOp_add_Ev_Gv, iemOp_add_Gb_Eb, iemOp_add_Gv_Ev,
    /* 0x04 */ iemOp_add_Al_Ib, iemOp_add_eAX_Iz, iemOp_push_ES, iemOp_pop_ES,
    /* 0x08 */ iemOp_or_Eb_Gb, iemOp_or_Ev_Gv, iemOp_or_Gb_Eb, iemOp_or_Gv_Ev,
    /* 0x0c */ iemOp_or_Al_Ib, iemOp_or_eAX_Iz, iemOp_push_CS, iemOp_2byteEscape,
    /* 0x10 */ iemOp_adc_Eb_Gb, iemOp_adc_Ev_Gv, iemOp_adc_Gb_Eb, iemOp_adc_Gv_Ev,
    /* 0x14 */ iemOp_adc_Al_Ib, iemOp_adc_eAX_Iz, iemOp_push_SS, iemOp_pop_SS,
    /* 0x18 */ iemOp_sbb_Eb_Gb, iemOp_sbb_Ev_Gv, iemOp_sbb_Gb_Eb, iemOp_sbb_Gv_Ev,
    /* 0x1c */ iemOp_sbb_Al_Ib, iemOp_sbb_eAX_Iz, iemOp_push_DS, iemOp_pop_DS,
    /* 0x20 */ iemOp_and_Eb_Gb, iemOp_and_Ev_Gv, iemOp_and_Gb_Eb, iemOp_and_Gv_Ev,
    /* 0x24 */ iemOp_and_Al_Ib, iemOp_and_eAX_Iz, iemOp_seg_ES, iemOp_daa,
    /* 0x28 */ iemOp_sub_Eb_Gb, iemOp_sub_Ev_Gv, iemOp_sub_Gb_Eb, iemOp_sub_Gv_Ev,
    /* 0x2c */ iemOp_sub_Al_Ib, iemOp_sub_eAX_Iz, iemOp_seg_CS, iemOp_das,
    /* 0x30 */ iemOp_xor_Eb_Gb, iemOp_xor_Ev_Gv, iemOp_xor_Gb_Eb, iemOp_xor_Gv_Ev,
    /* 0x34 */ iemOp_xor_Al_Ib, iemOp_xor_eAX_Iz, iemOp_seg_SS, iemOp_aaa,
    /* 0x38 */ iemOp_cmp_Eb_Gb, iemOp_cmp_Ev_Gv, iemOp_cmp_Gb_Eb, iemOp_cmp_Gv_Ev,
    /* 0x3c */ iemOp_cmp_Al_Ib, iemOp_cmp_eAX_Iz, iemOp_seg_DS, iemOp_aas,
    /* 0x40 */ iemOp_inc_eAX, iemOp_inc_eCX, iemOp_inc_eDX, iemOp_inc_eBX,
    /* 0x44 */ iemOp_inc_eSP, iemOp_inc_eBP, iemOp_inc_eSI, iemOp_inc_eDI,
    /* 0x48 */ iemOp_dec_eAX, iemOp_dec_eCX, iemOp_dec_eDX, iemOp_dec_eBX,
    /* 0x4c */ iemOp_dec_eSP, iemOp_dec_eBP, iemOp_dec_eSI, iemOp_dec_eDI,
    /* 0x50 */ iemOp_push_eAX, iemOp_push_eCX, iemOp_push_eDX, iemOp_push_eBX,
    /* 0x54 */ iemOp_push_eSP, iemOp_push_eBP, iemOp_push_eSI, iemOp_push_eDI,
    /* 0x58 */ iemOp_pop_eAX, iemOp_pop_eCX, iemOp_pop_eDX, iemOp_pop_eBX,
    /* 0x5c */ iemOp_pop_eSP, iemOp_pop_eBP, iemOp_pop_eSI, iemOp_pop_eDI,
    /* 0x60 */ iemOp_pusha, iemOp_popa__mvex, iemOp_bound_Gv_Ma__evex, iemOp_arpl_Ew_Gw_movsx_Gv_Ev,
    /* 0x64 */ iemOp_seg_FS, iemOp_seg_GS, iemOp_op_size, iemOp_addr_size,
    /* 0x68 */ iemOp_push_Iz, iemOp_imul_Gv_Ev_Iz, iemOp_push_Ib, iemOp_imul_Gv_Ev_Ib,
    /* 0x6c */ iemOp_insb_Yb_DX, iemOp_inswd_Yv_DX, iemOp_outsb_Yb_DX, iemOp_outswd_Yv_DX,
    /* 0x70 */ iemOp_jo_Jb, iemOp_jno_Jb, iemOp_jc_Jb, iemOp_jnc_Jb,
    /* 0x74 */ iemOp_je_Jb, iemOp_jne_Jb, iemOp_jbe_Jb, iemOp_jnbe_Jb,
    /* 0x78 */ iemOp_js_Jb, iemOp_jns_Jb, iemOp_jp_Jb, iemOp_jnp_Jb,
    /* 0x7c */ iemOp_jl_Jb, iemOp_jnl_Jb, iemOp_jle_Jb, iemOp_jnle_Jb,
    /* 0x80 */ iemOp_Grp1_Eb_Ib_80, iemOp_Grp1_Ev_Iz, iemOp_Grp1_Eb_Ib_82, iemOp_Grp1_Ev_Ib,
    /* 0x84 */ iemOp_test_Eb_Gb, iemOp_test_Ev_Gv, iemOp_xchg_Eb_Gb, iemOp_xchg_Ev_Gv,
    /* 0x88 */ iemOp_mov_Eb_Gb, iemOp_mov_Ev_Gv, iemOp_mov_Gb_Eb, iemOp_mov_Gv_Ev,
    /* 0x8c */ iemOp_mov_Ev_Sw, iemOp_lea_Gv_M, iemOp_mov_Sw_Ev, iemOp_Grp1A__xop,
    /* 0x90 */ iemOp_nop, iemOp_xchg_eCX_eAX, iemOp_xchg_eDX_eAX, iemOp_xchg_eBX_eAX,
    /* 0x94 */ iemOp_xchg_eSP_eAX, iemOp_xchg_eBP_eAX, iemOp_xchg_eSI_eAX, iemOp_xchg_eDI_eAX,
    /* 0x98 */ iemOp_cbw, iemOp_cwd, iemOp_call_Ap, iemOp_wait,
    /* 0x9c */ iemOp_pushf_Fv, iemOp_popf_Fv, iemOp_sahf, iemOp_lahf,
    /* 0xa0 */ iemOp_mov_AL_Ob, iemOp_mov_rAX_Ov, iemOp_mov_Ob_AL, iemOp_mov_Ov_rAX,
    /* 0xa4 */ iemOp_movsb_Xb_Yb, iemOp_movswd_Xv_Yv, iemOp_cmpsb_Xb_Yb, iemOp_cmpswd_Xv_Yv,
    /* 0xa8 */ iemOp_test_AL_Ib, iemOp_test_eAX_Iz, iemOp_stosb_Yb_AL, iemOp_stoswd_Yv_eAX,
    /* 0xac */ iemOp_lodsb_AL_Xb, iemOp_lodswd_eAX_Xv, iemOp_scasb_AL_Xb, iemOp_scaswd_eAX_Xv,
    /* 0xb0 */ iemOp_mov_AL_Ib, iemOp_CL_Ib, iemOp_DL_Ib, iemOp_BL_Ib,
    /* 0xb4 */ iemOp_mov_AH_Ib, iemOp_CH_Ib, iemOp_DH_Ib, iemOp_BH_Ib,
    /* 0xb8 */ iemOp_eAX_Iv, iemOp_eCX_Iv, iemOp_eDX_Iv, iemOp_eBX_Iv,
    /* 0xbc */ iemOp_eSP_Iv, iemOp_eBP_Iv, iemOp_eSI_Iv, iemOp_eDI_Iv,
    /* 0xc0 */ iemOp_Grp2_Eb_Ib, iemOp_Grp2_Ev_Ib, iemOp_retn_Iw, iemOp_retn,
    /* 0xc4 */ iemOp_les_Gv_Mp__vex3, iemOp_lds_Gv_Mp__vex2, iemOp_Grp11_Eb_Ib, iemOp_Grp11_Ev_Iz,
    /* 0xc8 */ iemOp_enter_Iw_Ib, iemOp_leave, iemOp_retf_Iw, iemOp_retf,
    /* 0xcc */ iemOp_int3, iemOp_int_Ib, iemOp_into, iemOp_iret,
    /* 0xd0 */ iemOp_Grp2_Eb_1, iemOp_Grp2_Ev_1, iemOp_Grp2_Eb_CL, iemOp_Grp2_Ev_CL,
    /* 0xd4 */ iemOp_aam_Ib, iemOp_aad_Ib, iemOp_salc, iemOp_xlat,
    /* 0xd8 */ iemOp_EscF0, iemOp_EscF1, iemOp_EscF2, iemOp_EscF3,
    /* 0xdc */ iemOp_EscF4, iemOp_EscF5, iemOp_EscF6, iemOp_EscF7,
    /* 0xe0 */ iemOp_loopne_Jb, iemOp_loope_Jb, iemOp_loop_Jb, iemOp_jecxz_Jb,
    /* 0xe4 */ iemOp_in_AL_Ib, iemOp_in_eAX_Ib, iemOp_out_Ib_AL, iemOp_out_Ib_eAX,
    /* 0xe8 */ iemOp_call_Jv, iemOp_jmp_Jv, iemOp_jmp_Ap, iemOp_jmp_Jb,
    /* 0xec */ iemOp_in_AL_DX, iemOp_in_eAX_DX, iemOp_out_DX_AL, iemOp_out_DX_eAX,
    /* 0xf0 */ iemOp_lock, iemOp_int1, iemOp_repne, iemOp_repe,
    /* 0xf4 */ iemOp_hlt, iemOp_cmc, iemOp_Grp3_Eb, iemOp_Grp3_Ev,
    /* 0xf8 */ iemOp_clc, iemOp_stc, iemOp_cli, iemOp_sti,
    /* 0xfc */ iemOp_cld, iemOp_std, iemOp_Grp4, iemOp_Grp5,
};
11886
11887
11888/** @} */
11889
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette