VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsInterpretOnly.cpp@ 95499

Last change on this file since 95499 was 95499, checked in by vboxsync, 2 years ago

VMM/IEM: [v]pshufb. bugref:9898

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 62.1 KB
Line 
1/* $Id: IEMAllInstructionsInterpretOnly.cpp 95499 2022-07-04 12:52:29Z vboxsync $ */
2/** @file
3 * IEM - Instruction Decoding and Emulation.
4 */
5
6/*
7 * Copyright (C) 2011-2022 Oracle Corporation
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 */
17
18
19/*********************************************************************************************************************************
20* Header Files *
21*********************************************************************************************************************************/
22#ifndef LOG_GROUP /* defined when included by tstIEMCheckMc.cpp */
23# define LOG_GROUP LOG_GROUP_IEM
24#endif
25#define VMCPU_INCL_CPUM_GST_CTX
26#include <VBox/vmm/iem.h>
27#include <VBox/vmm/cpum.h>
28#include <VBox/vmm/apic.h>
29#include <VBox/vmm/pdm.h>
30#include <VBox/vmm/pgm.h>
31#include <VBox/vmm/iom.h>
32#include <VBox/vmm/em.h>
33#include <VBox/vmm/hm.h>
34#include <VBox/vmm/nem.h>
35#include <VBox/vmm/gim.h>
36#ifdef VBOX_WITH_NESTED_HWVIRT_SVM
37# include <VBox/vmm/em.h>
38# include <VBox/vmm/hm_svm.h>
39#endif
40#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
41# include <VBox/vmm/hmvmxinline.h>
42#endif
43#include <VBox/vmm/tm.h>
44#include <VBox/vmm/dbgf.h>
45#include <VBox/vmm/dbgftrace.h>
46#ifndef TST_IEM_CHECK_MC
47# include "IEMInternal.h"
48#endif
49#include <VBox/vmm/vmcc.h>
50#include <VBox/log.h>
51#include <VBox/err.h>
52#include <VBox/param.h>
53#include <VBox/dis.h>
54#include <VBox/disopcode.h>
55#include <iprt/asm-math.h>
56#include <iprt/assert.h>
57#include <iprt/string.h>
58#include <iprt/x86.h>
59
60#ifndef TST_IEM_CHECK_MC
61# include "IEMInline.h"
62# include "IEMOpHlp.h"
63# include "IEMMc.h"
64#endif
65
66
67#ifdef _MSC_VER
68# pragma warning(push)
69# pragma warning(disable: 4702) /* Unreachable code like return in iemOp_Grp6_lldt. */
70#endif
71
72
73/*********************************************************************************************************************************
74* Global Variables *
75*********************************************************************************************************************************/
#ifndef TST_IEM_CHECK_MC
/** Function table for the ADD instruction.
 * @note Every IEMOPBINSIZES table below holds normal/locked worker pairs
 *       for the 8, 16, 32 and 64-bit operand sizes, in that order. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_add =
{
    iemAImpl_add_u8,  iemAImpl_add_u8_locked,
    iemAImpl_add_u16, iemAImpl_add_u16_locked,
    iemAImpl_add_u32, iemAImpl_add_u32_locked,
    iemAImpl_add_u64, iemAImpl_add_u64_locked
};

/** Function table for the ADC instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_adc =
{
    iemAImpl_adc_u8,  iemAImpl_adc_u8_locked,
    iemAImpl_adc_u16, iemAImpl_adc_u16_locked,
    iemAImpl_adc_u32, iemAImpl_adc_u32_locked,
    iemAImpl_adc_u64, iemAImpl_adc_u64_locked
};

/** Function table for the SUB instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_sub =
{
    iemAImpl_sub_u8,  iemAImpl_sub_u8_locked,
    iemAImpl_sub_u16, iemAImpl_sub_u16_locked,
    iemAImpl_sub_u32, iemAImpl_sub_u32_locked,
    iemAImpl_sub_u64, iemAImpl_sub_u64_locked
};

/** Function table for the SBB instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_sbb =
{
    iemAImpl_sbb_u8,  iemAImpl_sbb_u8_locked,
    iemAImpl_sbb_u16, iemAImpl_sbb_u16_locked,
    iemAImpl_sbb_u32, iemAImpl_sbb_u32_locked,
    iemAImpl_sbb_u64, iemAImpl_sbb_u64_locked
};

/** Function table for the OR instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_or =
{
    iemAImpl_or_u8,  iemAImpl_or_u8_locked,
    iemAImpl_or_u16, iemAImpl_or_u16_locked,
    iemAImpl_or_u32, iemAImpl_or_u32_locked,
    iemAImpl_or_u64, iemAImpl_or_u64_locked
};

/** Function table for the XOR instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_xor =
{
    iemAImpl_xor_u8,  iemAImpl_xor_u8_locked,
    iemAImpl_xor_u16, iemAImpl_xor_u16_locked,
    iemAImpl_xor_u32, iemAImpl_xor_u32_locked,
    iemAImpl_xor_u64, iemAImpl_xor_u64_locked
};

/** Function table for the AND instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_and =
{
    iemAImpl_and_u8,  iemAImpl_and_u8_locked,
    iemAImpl_and_u16, iemAImpl_and_u16_locked,
    iemAImpl_and_u32, iemAImpl_and_u32_locked,
    iemAImpl_and_u64, iemAImpl_and_u64_locked
};

/** Function table for the CMP instruction.
 * @remarks Making operand order ASSUMPTIONS.
 */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_cmp =
{
    iemAImpl_cmp_u8,  NULL,  /* no locked variants (no destination write) */
    iemAImpl_cmp_u16, NULL,
    iemAImpl_cmp_u32, NULL,
    iemAImpl_cmp_u64, NULL
};

/** Function table for the TEST instruction.
 * @remarks Making operand order ASSUMPTIONS.
 */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_test =
{
    iemAImpl_test_u8,  NULL,  /* no locked variants (no destination write) */
    iemAImpl_test_u16, NULL,
    iemAImpl_test_u32, NULL,
    iemAImpl_test_u64, NULL
};
161
162
/** Function table for the BT instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bt =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_bt_u16, NULL,   /* BT only reads, so no locked variants */
    iemAImpl_bt_u32, NULL,
    iemAImpl_bt_u64, NULL
};

/** Function table for the BTC instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_btc =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_btc_u16, iemAImpl_btc_u16_locked,
    iemAImpl_btc_u32, iemAImpl_btc_u32_locked,
    iemAImpl_btc_u64, iemAImpl_btc_u64_locked
};

/** Function table for the BTR instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_btr =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_btr_u16, iemAImpl_btr_u16_locked,
    iemAImpl_btr_u32, iemAImpl_btr_u32_locked,
    iemAImpl_btr_u64, iemAImpl_btr_u64_locked
};

/** Function table for the BTS instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bts =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_bts_u16, iemAImpl_bts_u16_locked,
    iemAImpl_bts_u32, iemAImpl_bts_u32_locked,
    iemAImpl_bts_u64, iemAImpl_bts_u64_locked
};

/** Function table for the BSF instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsf =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_bsf_u16, NULL,
    iemAImpl_bsf_u32, NULL,
    iemAImpl_bsf_u64, NULL
};

/** Function table for the BSF instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsf_amd =
{
    NULL,  NULL,
    iemAImpl_bsf_u16_amd, NULL,
    iemAImpl_bsf_u32_amd, NULL,
    iemAImpl_bsf_u64_amd, NULL
};

/** Function table for the BSF instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsf_intel =
{
    NULL,  NULL,
    iemAImpl_bsf_u16_intel, NULL,
    iemAImpl_bsf_u32_intel, NULL,
    iemAImpl_bsf_u64_intel, NULL
};

/** EFLAGS variation selection table for the BSF instruction.
 * @note Index order for all *_eflags tables: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPBINSIZES * const g_iemAImpl_bsf_eflags[] =
{
    &g_iemAImpl_bsf,
    &g_iemAImpl_bsf_intel,
    &g_iemAImpl_bsf_amd,
    &g_iemAImpl_bsf,
};

/** Function table for the BSR instruction. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsr =
{
    NULL,  NULL,             /* no 8-bit operand size */
    iemAImpl_bsr_u16, NULL,
    iemAImpl_bsr_u32, NULL,
    iemAImpl_bsr_u64, NULL
};

/** Function table for the BSR instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsr_amd =
{
    NULL,  NULL,
    iemAImpl_bsr_u16_amd, NULL,
    iemAImpl_bsr_u32_amd, NULL,
    iemAImpl_bsr_u64_amd, NULL
};

/** Function table for the BSR instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_bsr_intel =
{
    NULL,  NULL,
    iemAImpl_bsr_u16_intel, NULL,
    iemAImpl_bsr_u32_intel, NULL,
    iemAImpl_bsr_u64_intel, NULL
};

/** EFLAGS variation selection table for the BSR instruction. */
IEM_STATIC const IEMOPBINSIZES * const g_iemAImpl_bsr_eflags[] =
{
    &g_iemAImpl_bsr,
    &g_iemAImpl_bsr_intel,
    &g_iemAImpl_bsr_amd,
    &g_iemAImpl_bsr,
};
270
/** Function table for the IMUL instruction (the two-operand form). */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_imul_two =
{
    NULL,  NULL,             /* no 8-bit operand size for the two-operand form */
    iemAImpl_imul_two_u16, NULL,
    iemAImpl_imul_two_u32, NULL,
    iemAImpl_imul_two_u64, NULL
};

/** Function table for the IMUL instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_imul_two_amd =
{
    NULL,  NULL,
    iemAImpl_imul_two_u16_amd, NULL,
    iemAImpl_imul_two_u32_amd, NULL,
    iemAImpl_imul_two_u64_amd, NULL
};

/** Function table for the IMUL instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPBINSIZES g_iemAImpl_imul_two_intel =
{
    NULL,  NULL,
    iemAImpl_imul_two_u16_intel, NULL,
    iemAImpl_imul_two_u32_intel, NULL,
    iemAImpl_imul_two_u64_intel, NULL
};

/** EFLAGS variation selection table for the IMUL instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPBINSIZES * const g_iemAImpl_imul_two_eflags[] =
{
    &g_iemAImpl_imul_two,
    &g_iemAImpl_imul_two_intel,
    &g_iemAImpl_imul_two_amd,
    &g_iemAImpl_imul_two,
};

/** EFLAGS variation selection table for the 16-bit IMUL instruction. */
IEM_STATIC PFNIEMAIMPLBINU16 const g_iemAImpl_imul_two_u16_eflags[] =
{
    iemAImpl_imul_two_u16,
    iemAImpl_imul_two_u16_intel,
    iemAImpl_imul_two_u16_amd,
    iemAImpl_imul_two_u16,
};

/** EFLAGS variation selection table for the 32-bit IMUL instruction. */
IEM_STATIC PFNIEMAIMPLBINU32 const g_iemAImpl_imul_two_u32_eflags[] =
{
    iemAImpl_imul_two_u32,
    iemAImpl_imul_two_u32_intel,
    iemAImpl_imul_two_u32_amd,
    iemAImpl_imul_two_u32,
};

/** EFLAGS variation selection table for the 64-bit IMUL instruction. */
IEM_STATIC PFNIEMAIMPLBINU64 const g_iemAImpl_imul_two_u64_eflags[] =
{
    iemAImpl_imul_two_u64,
    iemAImpl_imul_two_u64_intel,
    iemAImpl_imul_two_u64_amd,
    iemAImpl_imul_two_u64,
};
333
/** Group 1 /r lookup table.
 * Indexed by the ModR/M reg field (0..7); entry order fixes the opcode
 * extension mapping: 0=ADD, 1=OR, 2=ADC, 3=SBB, 4=AND, 5=SUB, 6=XOR, 7=CMP. */
IEM_STATIC const PCIEMOPBINSIZES g_apIemImplGrp1[8] =
{
    &g_iemAImpl_add,
    &g_iemAImpl_or,
    &g_iemAImpl_adc,
    &g_iemAImpl_sbb,
    &g_iemAImpl_and,
    &g_iemAImpl_sub,
    &g_iemAImpl_xor,
    &g_iemAImpl_cmp
};
346
/** Function table for the INC instruction.
 * @note IEMOPUNARYSIZES tables hold normal/locked worker pairs for the
 *       8, 16, 32 and 64-bit operand sizes, in that order. */
IEM_STATIC const IEMOPUNARYSIZES g_iemAImpl_inc =
{
    iemAImpl_inc_u8,  iemAImpl_inc_u8_locked,
    iemAImpl_inc_u16, iemAImpl_inc_u16_locked,
    iemAImpl_inc_u32, iemAImpl_inc_u32_locked,
    iemAImpl_inc_u64, iemAImpl_inc_u64_locked
};

/** Function table for the DEC instruction. */
IEM_STATIC const IEMOPUNARYSIZES g_iemAImpl_dec =
{
    iemAImpl_dec_u8,  iemAImpl_dec_u8_locked,
    iemAImpl_dec_u16, iemAImpl_dec_u16_locked,
    iemAImpl_dec_u32, iemAImpl_dec_u32_locked,
    iemAImpl_dec_u64, iemAImpl_dec_u64_locked
};

/** Function table for the NEG instruction. */
IEM_STATIC const IEMOPUNARYSIZES g_iemAImpl_neg =
{
    iemAImpl_neg_u8,  iemAImpl_neg_u8_locked,
    iemAImpl_neg_u16, iemAImpl_neg_u16_locked,
    iemAImpl_neg_u32, iemAImpl_neg_u32_locked,
    iemAImpl_neg_u64, iemAImpl_neg_u64_locked
};

/** Function table for the NOT instruction. */
IEM_STATIC const IEMOPUNARYSIZES g_iemAImpl_not =
{
    iemAImpl_not_u8,  iemAImpl_not_u8_locked,
    iemAImpl_not_u16, iemAImpl_not_u16_locked,
    iemAImpl_not_u32, iemAImpl_not_u32_locked,
    iemAImpl_not_u64, iemAImpl_not_u64_locked
};
382
383
/** Function table for the ROL instruction.
 * @note IEMOPSHIFTSIZES tables hold one worker each for the 8, 16, 32 and
 *       64-bit operand sizes, in that order. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rol =
{
    iemAImpl_rol_u8,
    iemAImpl_rol_u16,
    iemAImpl_rol_u32,
    iemAImpl_rol_u64
};

/** Function table for the ROL instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rol_amd =
{
    iemAImpl_rol_u8_amd,
    iemAImpl_rol_u16_amd,
    iemAImpl_rol_u32_amd,
    iemAImpl_rol_u64_amd
};

/** Function table for the ROL instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rol_intel =
{
    iemAImpl_rol_u8_intel,
    iemAImpl_rol_u16_intel,
    iemAImpl_rol_u32_intel,
    iemAImpl_rol_u64_intel
};

/** EFLAGS variation selection table for the ROL instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_rol_eflags[] =
{
    &g_iemAImpl_rol,
    &g_iemAImpl_rol_intel,
    &g_iemAImpl_rol_amd,
    &g_iemAImpl_rol,
};


/** Function table for the ROR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_ror =
{
    iemAImpl_ror_u8,
    iemAImpl_ror_u16,
    iemAImpl_ror_u32,
    iemAImpl_ror_u64
};

/** Function table for the ROR instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_ror_amd =
{
    iemAImpl_ror_u8_amd,
    iemAImpl_ror_u16_amd,
    iemAImpl_ror_u32_amd,
    iemAImpl_ror_u64_amd
};

/** Function table for the ROR instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_ror_intel =
{
    iemAImpl_ror_u8_intel,
    iemAImpl_ror_u16_intel,
    iemAImpl_ror_u32_intel,
    iemAImpl_ror_u64_intel
};

/** EFLAGS variation selection table for the ROR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_ror_eflags[] =
{
    &g_iemAImpl_ror,
    &g_iemAImpl_ror_intel,
    &g_iemAImpl_ror_amd,
    &g_iemAImpl_ror,
};


/** Function table for the RCL instruction. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcl =
{
    iemAImpl_rcl_u8,
    iemAImpl_rcl_u16,
    iemAImpl_rcl_u32,
    iemAImpl_rcl_u64
};

/** Function table for the RCL instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcl_amd =
{
    iemAImpl_rcl_u8_amd,
    iemAImpl_rcl_u16_amd,
    iemAImpl_rcl_u32_amd,
    iemAImpl_rcl_u64_amd
};

/** Function table for the RCL instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcl_intel =
{
    iemAImpl_rcl_u8_intel,
    iemAImpl_rcl_u16_intel,
    iemAImpl_rcl_u32_intel,
    iemAImpl_rcl_u64_intel
};

/** EFLAGS variation selection table for the RCL instruction. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_rcl_eflags[] =
{
    &g_iemAImpl_rcl,
    &g_iemAImpl_rcl_intel,
    &g_iemAImpl_rcl_amd,
    &g_iemAImpl_rcl,
};


/** Function table for the RCR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcr =
{
    iemAImpl_rcr_u8,
    iemAImpl_rcr_u16,
    iemAImpl_rcr_u32,
    iemAImpl_rcr_u64
};

/** Function table for the RCR instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcr_amd =
{
    iemAImpl_rcr_u8_amd,
    iemAImpl_rcr_u16_amd,
    iemAImpl_rcr_u32_amd,
    iemAImpl_rcr_u64_amd
};

/** Function table for the RCR instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_rcr_intel =
{
    iemAImpl_rcr_u8_intel,
    iemAImpl_rcr_u16_intel,
    iemAImpl_rcr_u32_intel,
    iemAImpl_rcr_u64_intel
};

/** EFLAGS variation selection table for the RCR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_rcr_eflags[] =
{
    &g_iemAImpl_rcr,
    &g_iemAImpl_rcr_intel,
    &g_iemAImpl_rcr_amd,
    &g_iemAImpl_rcr,
};
530
531
/** Function table for the SHL instruction.
 * @note One worker each for the 8, 16, 32 and 64-bit operand sizes. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shl =
{
    iemAImpl_shl_u8,
    iemAImpl_shl_u16,
    iemAImpl_shl_u32,
    iemAImpl_shl_u64
};

/** Function table for the SHL instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shl_amd =
{
    iemAImpl_shl_u8_amd,
    iemAImpl_shl_u16_amd,
    iemAImpl_shl_u32_amd,
    iemAImpl_shl_u64_amd
};

/** Function table for the SHL instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shl_intel =
{
    iemAImpl_shl_u8_intel,
    iemAImpl_shl_u16_intel,
    iemAImpl_shl_u32_intel,
    iemAImpl_shl_u64_intel
};

/** EFLAGS variation selection table for the SHL instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_shl_eflags[] =
{
    &g_iemAImpl_shl,
    &g_iemAImpl_shl_intel,
    &g_iemAImpl_shl_amd,
    &g_iemAImpl_shl,
};


/** Function table for the SHR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shr =
{
    iemAImpl_shr_u8,
    iemAImpl_shr_u16,
    iemAImpl_shr_u32,
    iemAImpl_shr_u64
};

/** Function table for the SHR instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shr_amd =
{
    iemAImpl_shr_u8_amd,
    iemAImpl_shr_u16_amd,
    iemAImpl_shr_u32_amd,
    iemAImpl_shr_u64_amd
};

/** Function table for the SHR instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_shr_intel =
{
    iemAImpl_shr_u8_intel,
    iemAImpl_shr_u16_intel,
    iemAImpl_shr_u32_intel,
    iemAImpl_shr_u64_intel
};

/** EFLAGS variation selection table for the SHR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_shr_eflags[] =
{
    &g_iemAImpl_shr,
    &g_iemAImpl_shr_intel,
    &g_iemAImpl_shr_amd,
    &g_iemAImpl_shr,
};


/** Function table for the SAR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_sar =
{
    iemAImpl_sar_u8,
    iemAImpl_sar_u16,
    iemAImpl_sar_u32,
    iemAImpl_sar_u64
};

/** Function table for the SAR instruction, AMD EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_sar_amd =
{
    iemAImpl_sar_u8_amd,
    iemAImpl_sar_u16_amd,
    iemAImpl_sar_u32_amd,
    iemAImpl_sar_u64_amd
};

/** Function table for the SAR instruction, Intel EFLAGS variant. */
IEM_STATIC const IEMOPSHIFTSIZES g_iemAImpl_sar_intel =
{
    iemAImpl_sar_u8_intel,
    iemAImpl_sar_u16_intel,
    iemAImpl_sar_u32_intel,
    iemAImpl_sar_u64_intel
};

/** EFLAGS variation selection table for the SAR instruction. */
IEM_STATIC const IEMOPSHIFTSIZES * const g_iemAImpl_sar_eflags[] =
{
    &g_iemAImpl_sar,
    &g_iemAImpl_sar_intel,
    &g_iemAImpl_sar_amd,
    &g_iemAImpl_sar,
};
641
642
/** Function table for the MUL instruction.
 * @note IEMOPMULDIVSIZES tables hold one worker each for the 8, 16, 32 and
 *       64-bit operand sizes, in that order. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_mul =
{
    iemAImpl_mul_u8,
    iemAImpl_mul_u16,
    iemAImpl_mul_u32,
    iemAImpl_mul_u64
};

/** Function table for the MUL instruction, AMD EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_mul_amd =
{
    iemAImpl_mul_u8_amd,
    iemAImpl_mul_u16_amd,
    iemAImpl_mul_u32_amd,
    iemAImpl_mul_u64_amd
};

/** Function table for the MUL instruction, Intel EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_mul_intel =
{
    iemAImpl_mul_u8_intel,
    iemAImpl_mul_u16_intel,
    iemAImpl_mul_u32_intel,
    iemAImpl_mul_u64_intel
};

/** EFLAGS variation selection table for the MUL instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPMULDIVSIZES * const g_iemAImpl_mul_eflags[] =
{
    &g_iemAImpl_mul,
    &g_iemAImpl_mul_intel,
    &g_iemAImpl_mul_amd,
    &g_iemAImpl_mul,
};

/** EFLAGS variation selection table for the 8-bit MUL instruction. */
IEM_STATIC PFNIEMAIMPLMULDIVU8 const g_iemAImpl_mul_u8_eflags[] =
{
    iemAImpl_mul_u8,
    iemAImpl_mul_u8_intel,
    iemAImpl_mul_u8_amd,
    iemAImpl_mul_u8
};


/** Function table for the IMUL instruction working implicitly on rAX. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_imul =
{
    iemAImpl_imul_u8,
    iemAImpl_imul_u16,
    iemAImpl_imul_u32,
    iemAImpl_imul_u64
};

/** Function table for the IMUL instruction working implicitly on rAX, AMD EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_imul_amd =
{
    iemAImpl_imul_u8_amd,
    iemAImpl_imul_u16_amd,
    iemAImpl_imul_u32_amd,
    iemAImpl_imul_u64_amd
};

/** Function table for the IMUL instruction working implicitly on rAX, Intel EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_imul_intel =
{
    iemAImpl_imul_u8_intel,
    iemAImpl_imul_u16_intel,
    iemAImpl_imul_u32_intel,
    iemAImpl_imul_u64_intel
};

/** EFLAGS variation selection table for the IMUL instruction. */
IEM_STATIC const IEMOPMULDIVSIZES * const g_iemAImpl_imul_eflags[] =
{
    &g_iemAImpl_imul,
    &g_iemAImpl_imul_intel,
    &g_iemAImpl_imul_amd,
    &g_iemAImpl_imul,
};

/** EFLAGS variation selection table for the 8-bit IMUL instruction. */
IEM_STATIC PFNIEMAIMPLMULDIVU8 const g_iemAImpl_imul_u8_eflags[] =
{
    iemAImpl_imul_u8,
    iemAImpl_imul_u8_intel,
    iemAImpl_imul_u8_amd,
    iemAImpl_imul_u8
};
733
734
/** Function table for the DIV instruction.
 * @note One worker each for the 8, 16, 32 and 64-bit operand sizes. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_div =
{
    iemAImpl_div_u8,
    iemAImpl_div_u16,
    iemAImpl_div_u32,
    iemAImpl_div_u64
};

/** Function table for the DIV instruction, AMD EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_div_amd =
{
    iemAImpl_div_u8_amd,
    iemAImpl_div_u16_amd,
    iemAImpl_div_u32_amd,
    iemAImpl_div_u64_amd
};

/** Function table for the DIV instruction, Intel EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_div_intel =
{
    iemAImpl_div_u8_intel,
    iemAImpl_div_u16_intel,
    iemAImpl_div_u32_intel,
    iemAImpl_div_u64_intel
};

/** EFLAGS variation selection table for the DIV instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPMULDIVSIZES * const g_iemAImpl_div_eflags[] =
{
    &g_iemAImpl_div,
    &g_iemAImpl_div_intel,
    &g_iemAImpl_div_amd,
    &g_iemAImpl_div,
};

/** EFLAGS variation selection table for the 8-bit DIV instruction. */
IEM_STATIC PFNIEMAIMPLMULDIVU8 const g_iemAImpl_div_u8_eflags[] =
{
    iemAImpl_div_u8,
    iemAImpl_div_u8_intel,
    iemAImpl_div_u8_amd,
    iemAImpl_div_u8
};


/** Function table for the IDIV instruction. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_idiv =
{
    iemAImpl_idiv_u8,
    iemAImpl_idiv_u16,
    iemAImpl_idiv_u32,
    iemAImpl_idiv_u64
};

/** Function table for the IDIV instruction, AMD EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_idiv_amd =
{
    iemAImpl_idiv_u8_amd,
    iemAImpl_idiv_u16_amd,
    iemAImpl_idiv_u32_amd,
    iemAImpl_idiv_u64_amd
};

/** Function table for the IDIV instruction, Intel EFLAGS variation. */
IEM_STATIC const IEMOPMULDIVSIZES g_iemAImpl_idiv_intel =
{
    iemAImpl_idiv_u8_intel,
    iemAImpl_idiv_u16_intel,
    iemAImpl_idiv_u32_intel,
    iemAImpl_idiv_u64_intel
};

/** EFLAGS variation selection table for the IDIV instruction. */
IEM_STATIC const IEMOPMULDIVSIZES * const g_iemAImpl_idiv_eflags[] =
{
    &g_iemAImpl_idiv,
    &g_iemAImpl_idiv_intel,
    &g_iemAImpl_idiv_amd,
    &g_iemAImpl_idiv,
};

/** EFLAGS variation selection table for the 8-bit IDIV instruction. */
IEM_STATIC PFNIEMAIMPLMULDIVU8 const g_iemAImpl_idiv_u8_eflags[] =
{
    iemAImpl_idiv_u8,
    iemAImpl_idiv_u8_intel,
    iemAImpl_idiv_u8_amd,
    iemAImpl_idiv_u8
};
825
826
/** Function table for the SHLD instruction.
 * @note IEMOPSHIFTDBLSIZES tables hold one worker each for the 16, 32 and
 *       64-bit operand sizes only (no 8-bit double-shift form exists). */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shld =
{
    iemAImpl_shld_u16,
    iemAImpl_shld_u32,
    iemAImpl_shld_u64,
};

/** Function table for the SHLD instruction, AMD EFLAGS variation. */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shld_amd =
{
    iemAImpl_shld_u16_amd,
    iemAImpl_shld_u32_amd,
    iemAImpl_shld_u64_amd
};

/** Function table for the SHLD instruction, Intel EFLAGS variation. */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shld_intel =
{
    iemAImpl_shld_u16_intel,
    iemAImpl_shld_u32_intel,
    iemAImpl_shld_u64_intel
};

/** EFLAGS variation selection table for the SHLD instruction.
 * Index order: 0=plain, 1=Intel, 2=AMD, 3=plain. */
IEM_STATIC const IEMOPSHIFTDBLSIZES * const g_iemAImpl_shld_eflags[] =
{
    &g_iemAImpl_shld,
    &g_iemAImpl_shld_intel,
    &g_iemAImpl_shld_amd,
    &g_iemAImpl_shld
};

/** Function table for the SHRD instruction. */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shrd =
{
    iemAImpl_shrd_u16,
    iemAImpl_shrd_u32,
    iemAImpl_shrd_u64
};

/** Function table for the SHRD instruction, AMD EFLAGS variation. */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shrd_amd =
{
    iemAImpl_shrd_u16_amd,
    iemAImpl_shrd_u32_amd,
    iemAImpl_shrd_u64_amd
};

/** Function table for the SHRD instruction, Intel EFLAGS variation. */
IEM_STATIC const IEMOPSHIFTDBLSIZES g_iemAImpl_shrd_intel =
{
    iemAImpl_shrd_u16_intel,
    iemAImpl_shrd_u32_intel,
    iemAImpl_shrd_u64_intel
};

/** EFLAGS variation selection table for the SHRD instruction. */
IEM_STATIC const IEMOPSHIFTDBLSIZES * const g_iemAImpl_shrd_eflags[] =
{
    &g_iemAImpl_shrd,
    &g_iemAImpl_shrd_intel,
    &g_iemAImpl_shrd_amd,
    &g_iemAImpl_shrd
};
892
893
/** Function table for the PUNPCKLBW instruction */
IEM_STATIC const IEMOPMEDIAF1L1 g_iemAImpl_punpcklbw  = { iemAImpl_punpcklbw_u64,  iemAImpl_punpcklbw_u128 };
/** Function table for the PUNPCKLWD instruction */
IEM_STATIC const IEMOPMEDIAF1L1 g_iemAImpl_punpcklwd  = { iemAImpl_punpcklwd_u64,  iemAImpl_punpcklwd_u128 };
/** Function table for the PUNPCKLDQ instruction */
IEM_STATIC const IEMOPMEDIAF1L1 g_iemAImpl_punpckldq  = { iemAImpl_punpckldq_u64,  iemAImpl_punpckldq_u128 };
/** Function table for the PUNPCKLQDQ instruction */
IEM_STATIC const IEMOPMEDIAF1L1 g_iemAImpl_punpcklqdq = { NULL /* no 64-bit (MMX) form */, iemAImpl_punpcklqdq_u128 };

/** Function table for the PUNPCKHBW instruction */
IEM_STATIC const IEMOPMEDIAF1H1 g_iemAImpl_punpckhbw  = { iemAImpl_punpckhbw_u64,  iemAImpl_punpckhbw_u128 };
/** Function table for the PUNPCKHWD instruction */
IEM_STATIC const IEMOPMEDIAF1H1 g_iemAImpl_punpckhwd  = { iemAImpl_punpckhwd_u64,  iemAImpl_punpckhwd_u128 };
/** Function table for the PUNPCKHDQ instruction */
IEM_STATIC const IEMOPMEDIAF1H1 g_iemAImpl_punpckhdq  = { iemAImpl_punpckhdq_u64,  iemAImpl_punpckhdq_u128 };
/** Function table for the PUNPCKHQDQ instruction */
IEM_STATIC const IEMOPMEDIAF1H1 g_iemAImpl_punpckhqdq = { NULL /* no 64-bit (MMX) form */, iemAImpl_punpckhqdq_u128 };
911
# ifndef IEM_WITHOUT_ASSEMBLY
/** Function table for the PSHUFB instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pshufb  = { iemAImpl_pshufb_u64,  iemAImpl_pshufb_u128 };
# endif
/** Function table for the PSHUFB instruction, software fallback. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pshufb_fallback = { iemAImpl_pshufb_u64_fallback, iemAImpl_pshufb_u128_fallback };
/** Function table for the PAND instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pand    = { iemAImpl_pand_u64,    iemAImpl_pand_u128 };
/** Function table for the PANDN instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pandn   = { iemAImpl_pandn_u64,   iemAImpl_pandn_u128 };
/** Function table for the POR instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_por     = { iemAImpl_por_u64,     iemAImpl_por_u128 };
/** Function table for the PXOR instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pxor    = { iemAImpl_pxor_u64,    iemAImpl_pxor_u128 };
/** Function table for the PCMPEQB instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpeqb = { iemAImpl_pcmpeqb_u64, iemAImpl_pcmpeqb_u128 };
/** Function table for the PCMPEQW instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpeqw = { iemAImpl_pcmpeqw_u64, iemAImpl_pcmpeqw_u128 };
/** Function table for the PCMPEQD instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpeqd = { iemAImpl_pcmpeqd_u64, iemAImpl_pcmpeqd_u128 };
# ifndef IEM_WITHOUT_ASSEMBLY
/** Function table for the PCMPEQQ instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpeqq = { NULL /* SSE4.1 only, no MMX form */, iemAImpl_pcmpeqq_u128 };
# endif
/** Function table for the PCMPEQQ instruction, software fallback. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpeqq_fallback = { NULL, iemAImpl_pcmpeqq_u128_fallback };
/** Function table for the PCMPGTB instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpgtb = { iemAImpl_pcmpgtb_u64, iemAImpl_pcmpgtb_u128 };
/** Function table for the PCMPGTW instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpgtw = { iemAImpl_pcmpgtw_u64, iemAImpl_pcmpgtw_u128 };
/** Function table for the PCMPGTD instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpgtd = { iemAImpl_pcmpgtd_u64, iemAImpl_pcmpgtd_u128 };
# ifndef IEM_WITHOUT_ASSEMBLY
/** Function table for the PCMPGTQ instruction */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpgtq = { NULL /* SSE4.2 only, no MMX form */, iemAImpl_pcmpgtq_u128 };
# endif
/** Function table for the PCMPGTQ instruction, software fallback. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_pcmpgtq_fallback = { NULL, iemAImpl_pcmpgtq_u128_fallback };
/** Function table for the PADDB instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_paddb   = { iemAImpl_paddb_u64,   iemAImpl_paddb_u128 };
/** Function table for the PADDW instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_paddw   = { iemAImpl_paddw_u64,   iemAImpl_paddw_u128 };
/** Function table for the PADDD instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_paddd   = { iemAImpl_paddd_u64,   iemAImpl_paddd_u128 };
/** Function table for the PADDQ instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_paddq   = { iemAImpl_paddq_u64,   iemAImpl_paddq_u128 };
/** Function table for the PSUBB instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_psubb   = { iemAImpl_psubb_u64,   iemAImpl_psubb_u128 };
/** Function table for the PSUBW instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_psubw   = { iemAImpl_psubw_u64,   iemAImpl_psubw_u128 };
/** Function table for the PSUBD instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_psubd   = { iemAImpl_psubd_u64,   iemAImpl_psubd_u128 };
/** Function table for the PSUBQ instruction. */
IEM_STATIC const IEMOPMEDIAF2 g_iemAImpl_psubq   = { iemAImpl_psubq_u64,   iemAImpl_psubq_u128 };
966
# ifndef IEM_WITHOUT_ASSEMBLY
/** Function table for the VPSHUFB instruction.
 * @note IEMOPMEDIAF3 tables hold the 128-bit and 256-bit workers, in that order. */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpshufb  = { iemAImpl_vpshufb_u128,  iemAImpl_vpshufb_u256 };
/** Function table for the VPAND instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpand    = { iemAImpl_vpand_u128,    iemAImpl_vpand_u256 };
/** Function table for the VPANDN instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpandn   = { iemAImpl_vpandn_u128,   iemAImpl_vpandn_u256 };
/** Function table for the VPOR instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpor     = { iemAImpl_vpor_u128,     iemAImpl_vpor_u256 };
/** Function table for the VPXOR instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpxor    = { iemAImpl_vpxor_u128,    iemAImpl_vpxor_u256 };
/** Function table for the VPCMPEQB instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqb = { iemAImpl_vpcmpeqb_u128, iemAImpl_vpcmpeqb_u256 };
/** Function table for the VPCMPEQW instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqw = { iemAImpl_vpcmpeqw_u128, iemAImpl_vpcmpeqw_u256 };
/** Function table for the VPCMPEQD instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqd = { iemAImpl_vpcmpeqd_u128, iemAImpl_vpcmpeqd_u256 };
/** Function table for the VPCMPEQQ instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqq = { iemAImpl_vpcmpeqq_u128, iemAImpl_vpcmpeqq_u256 };
/** Function table for the VPCMPGTB instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtb = { iemAImpl_vpcmpgtb_u128, iemAImpl_vpcmpgtb_u256 };
/** Function table for the VPCMPGTW instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtw = { iemAImpl_vpcmpgtw_u128, iemAImpl_vpcmpgtw_u256 };
/** Function table for the VPCMPGTD instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtd = { iemAImpl_vpcmpgtd_u128, iemAImpl_vpcmpgtd_u256 };
/** Function table for the VPCMPGTQ instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtq = { iemAImpl_vpcmpgtq_u128, iemAImpl_vpcmpgtq_u256 };
/** Function table for the VPADDB instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddb   = { iemAImpl_vpaddb_u128,   iemAImpl_vpaddb_u256 };
/** Function table for the VPADDW instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddw   = { iemAImpl_vpaddw_u128,   iemAImpl_vpaddw_u256 };
/** Function table for the VPADDD instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddd   = { iemAImpl_vpaddd_u128,   iemAImpl_vpaddd_u256 };
/** Function table for the VPADDQ instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddq   = { iemAImpl_vpaddq_u128,   iemAImpl_vpaddq_u256 };
/** Function table for the VPSUBB instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubb   = { iemAImpl_vpsubb_u128,   iemAImpl_vpsubb_u256 };
/** Function table for the VPSUBW instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubw   = { iemAImpl_vpsubw_u128,   iemAImpl_vpsubw_u256 };
/** Function table for the VPSUBD instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubd   = { iemAImpl_vpsubd_u128,   iemAImpl_vpsubd_u256 };
/** Function table for the VPSUBQ instruction */
IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubq   = { iemAImpl_vpsubq_u128,   iemAImpl_vpsubq_u256 };
# endif
1011
1012/** Function table for the VPSHUFB instruction, software fallback. */
1013IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpshufb_fallback = { iemAImpl_vpshufb_u128_fallback, iemAImpl_vpshufb_u256_fallback };
1014/** Function table for the VPAND instruction, software fallback. */
1015IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpand_fallback = { iemAImpl_vpand_u128_fallback, iemAImpl_vpand_u256_fallback };
1016/** Function table for the VPANDN instruction, software fallback. */
1017IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpandn_fallback= { iemAImpl_vpandn_u128_fallback, iemAImpl_vpandn_u256_fallback };
1018/** Function table for the VPOR instruction, software fallback. */
1019IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpor_fallback = { iemAImpl_vpor_u128_fallback, iemAImpl_vpor_u256_fallback };
1020/** Function table for the VPXOR instruction, software fallback. */
1021IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpxor_fallback = { iemAImpl_vpxor_u128_fallback, iemAImpl_vpxor_u256_fallback };
1022/** Function table for the VPCMPEQB instruction, software fallback. */
1023IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqb_fallback = { iemAImpl_vpcmpeqb_u128_fallback, iemAImpl_vpcmpeqb_u256_fallback };
1024/** Function table for the VPCMPEQW instruction, software fallback. */
1025IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqw_fallback = { iemAImpl_vpcmpeqw_u128_fallback, iemAImpl_vpcmpeqw_u256_fallback };
1026/** Function table for the VPCMPEQD instruction, software fallback. */
1027IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqd_fallback = { iemAImpl_vpcmpeqd_u128_fallback, iemAImpl_vpcmpeqd_u256_fallback };
1028/** Function table for the VPCMPEQQ instruction, software fallback. */
1029IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpeqq_fallback = { iemAImpl_vpcmpeqq_u128_fallback, iemAImpl_vpcmpeqq_u256_fallback };
1030/** Function table for the VPCMPGTB instruction, software fallback. */
1031IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtb_fallback = { iemAImpl_vpcmpgtb_u128_fallback, iemAImpl_vpcmpgtb_u256_fallback };
1032/** Function table for the VPCMPGTW instruction, software fallback. */
1033IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtw_fallback = { iemAImpl_vpcmpgtw_u128_fallback, iemAImpl_vpcmpgtw_u256_fallback };
1034/** Function table for the VPCMPGTD instruction, software fallback. */
1035IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtd_fallback = { iemAImpl_vpcmpgtd_u128_fallback, iemAImpl_vpcmpgtd_u256_fallback };
1036/** Function table for the VPCMPGTQ instruction, software fallback. */
1037IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpcmpgtq_fallback = { iemAImpl_vpcmpgtq_u128_fallback, iemAImpl_vpcmpgtq_u256_fallback };
1038/** Function table for the VPADDB instruction, software fallback. */
1039IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddb_fallback = { iemAImpl_vpaddb_u128_fallback, iemAImpl_vpaddb_u256_fallback };
1040/** Function table for the VPADDW instruction, software fallback. */
1041IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddw_fallback = { iemAImpl_vpaddw_u128_fallback, iemAImpl_vpaddw_u256_fallback };
1042/** Function table for the VPADDD instruction, software fallback. */
1043IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddd_fallback = { iemAImpl_vpaddd_u128_fallback, iemAImpl_vpaddd_u256_fallback };
1044/** Function table for the VPADDQ instruction, software fallback. */
1045IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpaddq_fallback = { iemAImpl_vpaddq_u128_fallback, iemAImpl_vpaddq_u256_fallback };
1046/** Function table for the VPSUBB instruction, software fallback. */
1047IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubb_fallback = { iemAImpl_vpsubb_u128_fallback, iemAImpl_vpsubb_u256_fallback };
1048/** Function table for the VPSUBW instruction, software fallback. */
1049IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubw_fallback = { iemAImpl_vpsubw_u128_fallback, iemAImpl_vpsubw_u256_fallback };
1050/** Function table for the VPSUBD instruction, software fallback. */
1051IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubd_fallback = { iemAImpl_vpsubd_u128_fallback, iemAImpl_vpsubd_u256_fallback };
1052/** Function table for the VPSUBQ instruction, software fallback. */
1053IEM_STATIC const IEMOPMEDIAF3 g_iemAImpl_vpsubq_fallback = { iemAImpl_vpsubq_u128_fallback, iemAImpl_vpsubq_u256_fallback };
1054
1055#endif /* !TST_IEM_CHECK_MC */
1056
1057
/**
 * Common worker for instructions like ADD, AND, OR, ++ with a byte
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and dispatches to either the register-destination
 * or the memory-destination microcode path.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 *                  A NULL pfnLockedU8 marks read-only instructions (CMP,
 *                  TEST per the comment below): no LOCK prefix is accepted
 *                  and the destination is mapped read-only.
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_r8, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        /* Register destination: LOCK is never valid here. */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is reg field (REX.R extended), destination is r/m field (REX.B extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R; /* CMP,TEST */
        IEM_MC_BEGIN(3, 2);
        IEM_MC_ARG(uint8_t *,  pu8Dst,           0);
        IEM_MC_ARG(uint8_t,    u8Src,            1);
        IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        /* Read-only instructions (no locked variant) reject the LOCK prefix. */
        if (!pImpl->pfnLockedU8)
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_MEM_MAP(pu8Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
        IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_FETCH_EFLAGS(EFlags);
        /* Dispatch to the locked worker when a LOCK prefix is present. */
        if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);
        else
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, fAccess);
        IEM_MC_COMMIT_EFLAGS(EFlags);
        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1120
1121
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with
 * memory/register as the destination.
 *
 * Decodes the ModR/M byte and dispatches on the effective operand size
 * (16/32/64 bits) for both the register and the memory destination paths.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 *                  A NULL pfnLockedU8 marks read-only instructions (CMP,
 *                  TEST); the locked variants exist for all sizes or none,
 *                  so pfnLockedU8 doubles as the access-mode indicator.
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rm_rv, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit writes zero the upper half of the 64-bit GPR; TEST
                   doesn't write its destination so it must not clear it. */
                if (pImpl != &g_iemAImpl_test)
                    IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         * Note! We're putting the eflags on the stack here so we can commit them
         *       after the memory.
         */
        uint32_t const fAccess = pImpl->pfnLockedU8 ? IEM_ACCESS_DATA_RW : IEM_ACCESS_DATA_R /* CMP,TEST */;
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint16_t *, pu16Dst,          0);
                IEM_MC_ARG(uint16_t,   u16Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU16)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint32_t *, pu32Dst,          0);
                IEM_MC_ARG(uint32_t,   u32Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU32)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 2);
                IEM_MC_ARG(uint64_t *, pu64Dst,          0);
                IEM_MC_ARG(uint64_t,   u64Src,           1);
                IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                if (!pImpl->pfnLockedU64)
                    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_FETCH_EFLAGS(EFlags);
                if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);
                else
                    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess);
                IEM_MC_COMMIT_EFLAGS(EFlags);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
1274
1275
/**
 * Common worker for byte instructions like ADD, AND, OR, ++ with a register as
 * the destination.
 *
 * Since the destination is always a register, a LOCK prefix is never valid
 * and there is no read-modify-write memory mapping; memory operands are only
 * fetched.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_r8_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_BEGIN(3, 0);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);

        /* Source is r/m field (REX.B extended), destination is reg field (REX.R extended). */
        IEM_MC_FETCH_GREG_U8(u8Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    else
    {
        /*
         * We're accessing memory.
         */
        IEM_MC_BEGIN(3, 1);
        IEM_MC_ARG(uint8_t *,  pu8Dst,  0);
        IEM_MC_ARG(uint8_t,    u8Src,   1);
        IEM_MC_ARG(uint32_t *, pEFlags, 2);
        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        IEM_MC_FETCH_MEM_U8(u8Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
        IEM_MC_REF_GREG_U8(pu8Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
        IEM_MC_REF_EFLAGS(pEFlags);
        IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

        IEM_MC_ADVANCE_RIP();
        IEM_MC_END();
    }
    return VINF_SUCCESS;
}
1328
1329
/**
 * Common worker for word/dword/qword instructions like ADD, AND, OR, ++ with a
 * register as the destination.
 *
 * Register destination means no LOCK prefix and no read-modify-write memory
 * mapping; memory operands are only fetched.  Dispatches on the effective
 * operand size (16/32/64 bits).
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rv_rm, PCIEMOPBINSIZES, pImpl)
{
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);

    /*
     * If rm is denoting a register, no more instruction bytes.
     */
    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
    {
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U16(u16Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U32(u32Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                /* 32-bit destination writes zero the upper half of the GPR.
                   (No TEST special case here: in this r,r/m direction the
                   instruction set visible in this worker always writes the
                   destination register.) */
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 0);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);

                IEM_MC_FETCH_GREG_U64(u64Src, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    else
    {
        /*
         * We're accessing memory.
         */
        switch (pVCpu->iem.s.enmEffOpSize)
        {
            case IEMMODE_16BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint16_t *, pu16Dst, 0);
                IEM_MC_ARG(uint16_t,   u16Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U16(u16Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_32BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint32_t *, pu32Dst, 0);
                IEM_MC_ARG(uint32_t,   u32Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U32(u32Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U32(pu32Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;

            case IEMMODE_64BIT:
                IEM_MC_BEGIN(3, 1);
                IEM_MC_ARG(uint64_t *, pu64Dst, 0);
                IEM_MC_ARG(uint64_t,   u64Src,  1);
                IEM_MC_ARG(uint32_t *, pEFlags, 2);
                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);

                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
                IEM_MC_FETCH_MEM_U64(u64Src, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
                IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
                IEM_MC_REF_EFLAGS(pEFlags);
                IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

                IEM_MC_ADVANCE_RIP();
                IEM_MC_END();
                break;
        }
    }
    return VINF_SUCCESS;
}
1460
1461
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on AL with
 * a byte immediate.
 *
 * Fetches the imm8 operand, then applies the operation to AL in place.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_AL_Ib, PCIEMOPBINSIZES, pImpl)
{
    uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

    IEM_MC_BEGIN(3, 0);
    IEM_MC_ARG(uint8_t *,        pu8Dst,             0);
    IEM_MC_ARG_CONST(uint8_t,    u8Src,/*=*/ u8Imm,  1);
    IEM_MC_ARG(uint32_t *,       pEFlags,            2);

    IEM_MC_REF_GREG_U8(pu8Dst, X86_GREG_xAX);
    IEM_MC_REF_EFLAGS(pEFlags);
    IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU8, pu8Dst, u8Src, pEFlags);

    IEM_MC_ADVANCE_RIP();
    IEM_MC_END();
    return VINF_SUCCESS;
}
1486
1487
/**
 * Common worker for instructions like ADD, AND, OR, ++ with working on
 * AX/EAX/RAX with a word/dword immediate.
 *
 * Dispatches on the effective operand size.  Note that in 64-bit mode the
 * immediate is a dword that is sign-extended to 64 bits
 * (IEM_OPCODE_GET_NEXT_S32_SX_U64), matching the Iz encoding.
 *
 * @param   pImpl   Pointer to the instruction implementation (assembly).
 */
FNIEMOP_DEF_1(iemOpHlpBinaryOperator_rAX_Iz, PCIEMOPBINSIZES, pImpl)
{
    switch (pVCpu->iem.s.enmEffOpSize)
    {
        case IEMMODE_16BIT:
        {
            uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint16_t *,      pu16Dst,                0);
            IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/ u16Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U16(pu16Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_32BIT:
        {
            uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint32_t *,      pu32Dst,                0);
            IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/ u32Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U32(pu32Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags);

            /* 32-bit writes zero the upper half of RAX; TEST doesn't write
               its destination so it must not clear it. */
            if (pImpl != &g_iemAImpl_test)
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst);
            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        case IEMMODE_64BIT:
        {
            /* Iz in 64-bit mode: imm32 sign-extended to 64 bits. */
            uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();

            IEM_MC_BEGIN(3, 0);
            IEM_MC_ARG(uint64_t *,      pu64Dst,                0);
            IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/ u64Imm,    1);
            IEM_MC_ARG(uint32_t *,      pEFlags,                2);

            IEM_MC_REF_GREG_U64(pu64Dst, X86_GREG_xAX);
            IEM_MC_REF_EFLAGS(pEFlags);
            IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags);

            IEM_MC_ADVANCE_RIP();
            IEM_MC_END();
            return VINF_SUCCESS;
        }

        IEM_NOT_REACHED_DEFAULT_CASE_RET();
    }
}
1560
1561
/** Opcodes 0xf1, 0xd6 - plain invalid opcode: raise \#UD without consuming
 *  any further instruction bytes. */
FNIEMOP_DEF(iemOp_Invalid)
{
    IEMOP_MNEMONIC(Invalid, "Invalid");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1568
1569
/** Invalid with RM byte.  The ModR/M byte has already been fetched by the
 *  table dispatcher (FNIEMOPRM); no further bytes are consumed. */
FNIEMOPRM_DEF(iemOp_InvalidWithRM)
{
    RT_NOREF_PV(bRm);
    IEMOP_MNEMONIC(InvalidWithRm, "InvalidWithRM");
    return IEMOP_RAISE_INVALID_OPCODE();
}
1577
1578
/** Invalid with RM byte where intel decodes any additional address encoding
 *  bytes (SIB, displacement) before raising \#UD.  On AMD the extra bytes are
 *  not consumed. */
FNIEMOPRM_DEF(iemOp_InvalidWithRMNeedDecode)
{
    IEMOP_MNEMONIC(InvalidWithRMNeedDecode, "InvalidWithRMNeedDecode");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
#ifndef TST_IEM_CHECK_MC
        /* Memory forms: walk the effective address encoding purely to advance
           the instruction pointer over SIB/displacement bytes. */
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
    }
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1599
1600
/** Invalid with RM byte where both AMD and Intel decode any additional
 *  address encoding bytes (SIB, displacement) before raising \#UD. */
FNIEMOPRM_DEF(iemOp_InvalidWithRMAllNeeded)
{
    IEMOP_MNEMONIC(InvalidWithRMAllNeeded, "InvalidWithRMAllNeeded");
#ifndef TST_IEM_CHECK_MC
    /* Memory forms: consume SIB/displacement bytes; address itself is unused. */
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
#endif
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1618
1619
/** Invalid with RM byte where intel requires an 8-bit immediate.
 *  Intel will also need SIB and displacement if bRm indicates memory. */
FNIEMOPRM_DEF(iemOp_InvalidWithRMNeedImm8)
{
    IEMOP_MNEMONIC(InvalidWithRMNeedImm8, "InvalidWithRMNeedImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        /* Consume the trailing imm8 so RIP advances past it. */
        uint8_t bImm8;  IEM_OPCODE_GET_NEXT_U8(&bImm8);  RT_NOREF(bRm);
    }
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1641
1642
/** Invalid with RM byte where both AMD and Intel require an 8-bit immediate.
 *  Both also need SIB and displacement according to bRm. */
FNIEMOPRM_DEF(iemOp_InvalidWithRMAllNeedImm8)
{
    IEMOP_MNEMONIC(InvalidWithRMAllNeedImm8, "InvalidWithRMAllNeedImm8");
#ifndef TST_IEM_CHECK_MC
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
#endif
    /* Consume the trailing imm8 so RIP advances past it. */
    uint8_t bImm8;  IEM_OPCODE_GET_NEXT_U8(&bImm8);  RT_NOREF(bRm);
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1661
1662
/** Invalid opcode where intel requires a Mod R/M sequence: the ModR/M byte
 *  (and any SIB/displacement) is fetched before raising \#UD on Intel; on
 *  other vendors nothing further is consumed. */
FNIEMOP_DEF(iemOp_InvalidNeedRM)
{
    IEMOP_MNEMONIC(InvalidNeedRM, "InvalidNeedRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
    }
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1683
1684
/** Invalid opcode where both AMD and Intel require a Mod R/M sequence:
 *  the ModR/M byte and any SIB/displacement are consumed before \#UD. */
FNIEMOP_DEF(iemOp_InvalidAllNeedRM)
{
    IEMOP_MNEMONIC(InvalidAllNeedRM, "InvalidAllNeedRM");
    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
    if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
    {
        RTGCPTR      GCPtrEff;
        VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;
    }
#endif
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1702
1703
/** Invalid opcode where intel requires a Mod R/M sequence and an 8-bit
 *  immediate. */
FNIEMOP_DEF(iemOp_InvalidNeedRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeedRMImm8, "InvalidNeedRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        /* Consume the trailing imm8 so RIP advances past it. */
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);  RT_NOREF(bImm);
    }
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1726
1727
/** Invalid opcode where intel requires a 3rd escape byte and a Mod R/M
 *  sequence: the escape byte, ModR/M and any SIB/displacement are consumed
 *  before raising \#UD on Intel. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRM)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRM, "InvalidNeed3ByteEscRM");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 0, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
    }
    IEMOP_HLP_DONE_DECODING();
    return IEMOP_RAISE_INVALID_OPCODE();
}
1750
1751
/** Invalid opcode where intel requires a 3rd escape byte, Mod R/M sequence,
 *  and an 8-bit immediate. */
FNIEMOP_DEF(iemOp_InvalidNeed3ByteEscRMImm8)
{
    IEMOP_MNEMONIC(InvalidNeed3ByteEscRMImm8, "InvalidNeed3ByteEscRMImm8");
    if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL)
    {
        uint8_t b3rd; IEM_OPCODE_GET_NEXT_U8(&b3rd); RT_NOREF(b3rd);
        uint8_t bRm;  IEM_OPCODE_GET_NEXT_U8(&bRm);  RT_NOREF(bRm);
#ifndef TST_IEM_CHECK_MC
        if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT))
        {
            RTGCPTR      GCPtrEff;
            /* NOTE(review): cbImm=1 here (the imm8 following the operands)
               whereas the sibling workers pass 0 before fetching an imm8 —
               verify which convention is intended for RIP-relative forms. */
            VBOXSTRICTRC rcStrict = iemOpHlpCalcRmEffAddr(pVCpu, bRm, 1, &GCPtrEff);
            if (rcStrict != VINF_SUCCESS)
                return rcStrict;
        }
#endif
        uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm);  RT_NOREF(bImm);
        /* NOTE(review): DONE_DECODING sits inside the Intel branch here,
           unlike the sibling workers where it runs unconditionally — confirm
           this asymmetry is intentional. */
        IEMOP_HLP_DONE_DECODING();
    }
    return IEMOP_RAISE_INVALID_OPCODE();
}
1775
1776
/** Repeats a_fn four times.  For decoding tables (e.g. one entry per
 *  mandatory-prefix column). */
#define IEMOP_X4(a_fn) a_fn, a_fn, a_fn, a_fn
1779
1780/*
1781 * Include the tables.
1782 */
1783#ifdef IEM_WITH_3DNOW
1784# include "IEMAllInstructions3DNow.cpp.h"
1785#endif
1786#ifdef IEM_WITH_THREE_0F_38
1787# include "IEMAllInstructionsThree0f38.cpp.h"
1788#endif
1789#ifdef IEM_WITH_THREE_0F_3A
1790# include "IEMAllInstructionsThree0f3a.cpp.h"
1791#endif
1792#include "IEMAllInstructionsTwoByte0f.cpp.h"
1793#ifdef IEM_WITH_VEX
1794# include "IEMAllInstructionsVexMap1.cpp.h"
1795# include "IEMAllInstructionsVexMap2.cpp.h"
1796# include "IEMAllInstructionsVexMap3.cpp.h"
1797#endif
1798#include "IEMAllInstructionsOneByte.cpp.h"
1799
1800
1801#ifdef _MSC_VER
1802# pragma warning(pop)
1803#endif
1804
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette