VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/target-x86/IEMOpHlp-x86.h

Last change on this file was 108278, checked in by vboxsync, 7 weeks ago

VMM/IEM: Removed the #ifndef IEM_WITH_SETJMP code. We've had IEM_WITH_SETJMP defined unconditionally since 7.0 and the code probably doesn't even compile w/o it, so best remove the unused code. jiraref:VBP-1531

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 25.2 KB
Line 
1/* $Id: IEMOpHlp-x86.h 108278 2025-02-18 15:46:53Z vboxsync $ */
2/** @file
3 * IEM - Interpreted Execution Manager - Opcode Helpers.
4 */
5
6/*
7 * Copyright (C) 2011-2024 Oracle and/or its affiliates.
8 *
9 * This file is part of VirtualBox base platform packages, as
10 * available from https://www.virtualbox.org.
11 *
12 * This program is free software; you can redistribute it and/or
13 * modify it under the terms of the GNU General Public License
14 * as published by the Free Software Foundation, in version 3 of the
15 * License.
16 *
17 * This program is distributed in the hope that it will be useful, but
18 * WITHOUT ANY WARRANTY; without even the implied warranty of
19 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
20 * General Public License for more details.
21 *
22 * You should have received a copy of the GNU General Public License
23 * along with this program; if not, see <https://www.gnu.org/licenses>.
24 *
25 * SPDX-License-Identifier: GPL-3.0-only
26 */
27
28#ifndef VMM_INCLUDED_SRC_VMMAll_target_x86_IEMOpHlp_x86_h
29#define VMM_INCLUDED_SRC_VMMAll_target_x86_IEMOpHlp_x86_h
30#ifndef RT_WITHOUT_PRAGMA_ONCE
31# pragma once
32#endif
33
34
35/** @name Opcode Debug Helpers.
36 * @{
37 */
#ifdef DEBUG
/** Bumps the per-instruction statistics counter and logs the decoded mnemonic
 * together with CS:RIP, any lock prefix, and the instruction count.
 * Debug builds only; the release variant below keeps just the statistics. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) \
    do { \
        IEMOP_INC_STATS(a_Stats); \
        Log4(("decode - %04x:%RGv %s%s [#%u]\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, \
              pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, pVCpu->iem.s.cInstructions)); \
    } while (0)

/** Debug variant of the no-operand mnemonic macro.  The (void) casts merely
 * reference the IEMOPFORM_/OP_ constants and hint expressions so that typos
 * in a_Form/a_Upper and malformed hints are caught at compile time;
 * a_Lower is unused here (it is consumed by the non-EX wrappers). */
# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Debug variant of the one-operand mnemonic macro; additionally compile-time
 * checks the OP_PARM_ constant for the operand form. */
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Debug variant of the two-operand mnemonic macro. */
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Debug variant of the three-operand mnemonic macro. */
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

/** Debug variant of the four-operand mnemonic macro. */
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    do { \
        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
        (void)RT_CONCAT(OP_,a_Upper); \
        (void)RT_CONCAT(OP_PARM_,a_Op1); \
        (void)RT_CONCAT(OP_PARM_,a_Op2); \
        (void)RT_CONCAT(OP_PARM_,a_Op3); \
        (void)RT_CONCAT(OP_PARM_,a_Op4); \
        (void)(a_fDisHints); \
        (void)(a_fIemHints); \
    } while (0)

#else
/* Release builds: only the statistics counter survives; the form/operand
   compile-time checking and the Log4 output are compiled out. */
# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) IEMOP_INC_STATS(a_Stats)

# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC(a_Stats, a_szMnemonic)

#endif
116
/** Mnemonic macro for a no-operand instruction.  Derives the statistics
 * member name directly from the lower-case mnemonic and the log string by
 * stringification, then forwards to IEMOP_MNEMONIC0EX. */
#define IEMOP_MNEMONIC0(a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC0EX(a_Lower, \
                      #a_Lower, \
                      a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints)
/** One-operand variant; stats member is <mnemonic>_<op1>. */
#define IEMOP_MNEMONIC1(a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC1EX(RT_CONCAT3(a_Lower,_,a_Op1), \
                      #a_Lower " " #a_Op1, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints)
/** Two-operand variant; stats member is <mnemonic>_<op1>_<op2>. */
#define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC2EX(RT_CONCAT5(a_Lower,_,a_Op1,_,a_Op2), \
                      #a_Lower " " #a_Op1 "," #a_Op2, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints)
/** Three-operand variant; stats member is <mnemonic>_<op1>_<op2>_<op3>. */
#define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC3EX(RT_CONCAT7(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints)
/** Four-operand variant; stats member is <mnemonic>_<op1>_<op2>_<op3>_<op4>. */
#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
    IEMOP_MNEMONIC4EX(RT_CONCAT9(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3,_,a_Op4), \
                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3 "," #a_Op4, \
                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints)
137
138/** @} */
139
140
141/** @name Opcode Helpers.
142 * @{
143 */
144
/* The IEMOP_HLP_MIN_XXX macros compile to nothing when the configured target
   CPU (IEM_CFG_TARGET_CPU) already satisfies the requirement at compile time;
   otherwise they expand to a runtime IEMOP_HLP_MIN_CPU check with an
   always-true condition (except the _EX variant, which takes the condition). */

/** The instruction requires a 186 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_186
# define IEMOP_HLP_MIN_186() do { } while (0)
#else
# define IEMOP_HLP_MIN_186() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_186, true)
#endif

/** The instruction requires a 286 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_286
# define IEMOP_HLP_MIN_286() do { } while (0)
#else
# define IEMOP_HLP_MIN_286() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_286, true)
#endif

/** The instruction requires a 386 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386() do { } while (0)
#else
# define IEMOP_HLP_MIN_386() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
#endif

/** The instruction requires a 386 or later if the given expression is true. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) do { } while (0)
#else
# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, a_fOnlyIf)
#endif

/** The instruction requires a 486 or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_486
# define IEMOP_HLP_MIN_486() do { } while (0)
#else
# define IEMOP_HLP_MIN_486() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_486, true)
#endif

/** The instruction requires a Pentium (586) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PENTIUM
# define IEMOP_HLP_MIN_586() do { } while (0)
#else
# define IEMOP_HLP_MIN_586() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PENTIUM, true)
#endif

/** The instruction requires a PentiumPro (686) or later. */
#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PPRO
# define IEMOP_HLP_MIN_686() do { } while (0)
#else
# define IEMOP_HLP_MIN_686() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PPRO, true)
#endif
193
194
/** The instruction raises an \#UD in real and V8086 mode (i.e. it requires
 * protected mode or long mode). */
#define IEMOP_HLP_NO_REAL_OR_V86_MODE() \
    do \
    { \
        if (!IEM_IS_REAL_OR_V86_MODE(pVCpu)) { /* likely */ } \
        else IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
202
#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
/** This instruction raises an \#UD in real and V8086 mode or when not using a
 * 64-bit code segment when in long mode (applicable to all VMX instructions
 * except VMCALL).
 *
 * @todo r=bird: This is not recompiler friendly. The scenario with
 *       16-bit/32-bit code running in long mode doesn't fit at all.
 */
# define IEMOP_HLP_VMX_INSTR(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        /* Fast path: protected mode, and either not in long mode or using a 64-bit CS. */ \
        if (   !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
            && (   !IEM_IS_LONG_MODE(pVCpu) \
                || IEM_IS_64BIT_CODE(pVCpu))) \
        { /* likely */ } \
        else \
        { \
            /* Slow path: re-test the individual conditions so the correct VMX \
               diagnostic is recorded before raising the exception. */ \
            if (IEM_IS_REAL_OR_V86_MODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_RealOrV86Mode; \
                Log5((a_szInstr ": Real or v8086 mode -> #UD\n")); \
                IEMOP_RAISE_INVALID_OPCODE_RET(); \
            } \
            if (IEM_IS_LONG_MODE(pVCpu) && !IEM_IS_64BIT_CODE(pVCpu)) \
            { \
                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_LongModeCS; \
                Log5((a_szInstr ": Long mode without 64-bit code segment -> #UD\n")); \
                IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET(); /** @todo This doesn't work. */ \
            } \
        } \
    } while (0)

/** The instruction can only be executed in VMX operation (VMX root mode and
 * non-root mode).
 *
 * @note Update IEM_VMX_IN_VMX_OPERATION if changes are made here.
 *
 * @todo r=bird: This is absolutely *INCORRECT* since IEM_VMX_IS_ROOT_MODE
 *       is a complicated runtime state (calls CPUMIsGuestInVmxRootMode), and
 *       not something we can decide while decoding.  Convert to an IEM_MC!
 */
# define IEMOP_HLP_IN_VMX_OPERATION(a_szInstr, a_InsDiagPrefix) \
    do \
    { \
        if (IEM_VMX_IS_ROOT_MODE(pVCpu)) { /* likely */ } \
        else \
        { \
            pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_VmxRoot; \
            Log5((a_szInstr ": Not in VMX operation (root mode) -> #UD\n")); \
            IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET(); /** @todo This doesn't work. */ \
        } \
    } while (0)
#endif /* VBOX_WITH_NESTED_HWVIRT_VMX */
256
/** The instruction is not available in 64-bit mode, throw \#UD if we're in
 * 64-bit mode. */
#define IEMOP_HLP_NO_64BIT() \
    do \
    { \
        if (!IEM_IS_64BIT_CODE(pVCpu)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/** The instruction is only available in 64-bit mode, throw \#UD if we're not in
 * 64-bit mode. */
#define IEMOP_HLP_ONLY_64BIT() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/** The instruction defaults to 64-bit operand size if 64-bit mode. */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            iemRecalEffOpSize64Default(pVCpu); \
    } while (0)

/** The instruction defaults to 64-bit operand size if 64-bit mode and intel
 * CPUs ignore the operand size prefix completely (e.g. relative jumps). */
#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            iemRecalEffOpSize64DefaultAndIntelIgnoresOpSizePrefix(pVCpu); \
    } while (0)

/** The instruction has 64-bit operand size if 64-bit mode (sets both the
 * default and effective operand size; no prefix recalculation). */
#define IEMOP_HLP_64BIT_OP_SIZE() \
    do \
    { \
        if (IEM_IS_64BIT_CODE(pVCpu)) \
            pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; \
    } while (0)
303
/** Only a REX prefix immediately preceding the first opcode byte takes
 * effect. This macro helps ensuring this as well as logging bad guest code.
 * Clears all REX state (flags, B/X/R bits) and recalculates the effective
 * operand size since REX.W may have been dropped.
 * NOTE(review): the %RX16 log format only shows the low 16 bits of the
 * 64-bit RIP -- presumably deliberate brevity, worth confirming. */
#define IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE(a_szPrf) \
    do \
    { \
        if (RT_UNLIKELY(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX)) \
        { \
            Log5((a_szPrf ": Overriding REX prefix at %RX16! fPrefixes=%#x\n", pVCpu->cpum.GstCtx.rip, pVCpu->iem.s.fPrefixes)); \
            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REX_MASK; \
            pVCpu->iem.s.uRexB = 0; \
            pVCpu->iem.s.uRexIndex = 0; \
            pVCpu->iem.s.uRexReg = 0; \
            iemRecalEffOpSize(pVCpu); \
        } \
    } while (0)

/** The instruction ignores any REX.W/VEX.W prefix if not in 64-bit mode. */
#define IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT() \
    do \
    { \
        if (!IEM_IS_64BIT_CODE(pVCpu)) \
            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_SIZE_REX_W; \
    } while (0)
327
/**
 * Done decoding.
 */
#define IEMOP_HLP_DONE_DECODING() \
    do \
    { \
        /*nothing for now, maybe later... */ \
    } while (0)

/**
 * Done decoding, raise \#UD exception if the a_fFeature is not present in the
 * guest CPU.
 * @note NOTE(review): raises the \#UD via IEMOP_RAISE_INVALID_LOCK_PREFIX_RET
 *       even though no lock prefix is involved -- this skews the lock-prefix
 *       statistics for a missing-feature case; confirm whether
 *       IEMOP_RAISE_INVALID_OPCODE_RET was intended.
 */
#define IEMOP_HLP_DONE_DECODING_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if lock prefix present, or if neither
 * a_fFeature1 nor a_fFeature2 is present in the guest CPU.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(a_fFeature1, a_fFeature2) \
    do \
    { \
        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
                      && (   IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature1 \
                          || IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2) )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
    } while (0)
386
387
/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not zero, or if the a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not zero, or if either a_fFeature or a_fFeature2 is not present in the
 * guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(a_fFeature, a_fFeature2) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, if in real or v8086 mode, if VEX.L is
 * not one, or if the a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L1_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && pVCpu->iem.s.uVexLength == 1 \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if VEX.W is one, or if in real or
 * v8086 mode, or if the a_fFeature is not present in the guest CPU.
 * (VEX.W is carried in the IEM_OP_PRF_SIZE_REX_W prefix flag.)
 */
#define IEMOP_HLP_DONE_VEX_DECODING_W0_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (  IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX \
                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
 * register 0, if in real or v8086 mode, or if the a_fFeature is not present in
 * the guest CPU.  (uVex3rdReg holds the already un-inverted VVVV value.)
 */
#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
                      && !pVCpu->iem.s.uVex3rdReg \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
 * repnz or size prefixes are present, or if VEX.W is one, or if the VEX.VVVV
 * field doesn't indicate register 0, if in real or v8086 mode, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (  IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX \
                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
                      && !pVCpu->iem.s.uVex3rdReg \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
516
/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf (i.e. uVex3rdReg != 0 after
 * un-inverting), if VEX.L!=0, or if the a_fFeature is not present in the
 * guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX, no V, L=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the
 * a_fFeature or a_fFeature2 is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(a_fFeature, a_fFeature2) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX, no V, L=1.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=1, or if the a_fFeature
 * is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                      && pVCpu->iem.s.uVexLength == 1 \
                      && pVCpu->iem.s.uVex3rdReg == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding VEX, L=0 and W=0.
 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present,
 * if we're in real or v8086 mode, if VEX.L!=0, if VEX.W!=0, or if the
 * a_fFeature is not present in the guest CPU.
 */
#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_W0_EX(a_fFeature) \
    do \
    { \
        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                           & (  IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX \
                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
                      && pVCpu->iem.s.uVexLength == 0 \
                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
597
598
/** Done decoding; raise \#UD if a lock prefix is present.  The disassembler
 * detail parameters (opcode number, IEM flags, operand, operand type) are not
 * used at runtime -- they are only NOREF'ed in the error path to keep them
 * referenced and warning-free. */
#define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_fDisOpType); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } while (0)
/** Two-operand variant of IEMOP_HLP_DECODED_NL_1. */
#define IEMOP_HLP_DECODED_NL_2(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_uDisParam1, a_fDisOpType) \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
        { /* likely */ } \
        else \
        { \
            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_uDisParam1); NOREF(a_fDisOpType); \
            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
        } \
    } while (0)

/**
 * Done decoding, raise \#UD exception if any lock, repz or repnz prefixes
 * are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)

/**
 * Done decoding, raise \#UD exception if any operand-size override, repz or
 * repnz prefixes are present.
 */
#define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() \
    do \
    { \
        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
        { /* likely */ } \
        else \
            IEMOP_RAISE_INVALID_OPCODE_RET(); \
    } while (0)
647
/**
 * Used by the threaded code generator to check if a jump stays within the same
 * guest page in 64-bit code: compares the page number of the jump target
 * (RIP + instruction length + displacement) against the page number of the
 * current RIP.
 */
#define IEMOP_HLP_PC64_IS_JMP_REL_WITHIN_PAGE(a_offDisp) \
    (   ((pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (a_offDisp)) >> GUEST_PAGE_SHIFT) \
     == (pVCpu->cpum.GstCtx.rip >> GUEST_PAGE_SHIFT))

/** ModR/M effective-address calculation helper taking the ModR/M byte and a
 * packed immediate-size/RSP-offset argument; may longjmp on a decode fault. */
RTGCPTR iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset) IEM_NOEXCEPT_MAY_LONGJMP;
/** Like iemOpHlpCalcRmEffAddrJmp, but additionally returns extra addressing
 * information via *puInfo. */
RTGCPTR iemOpHlpCalcRmEffAddrJmpEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, uint64_t *puInfo) IEM_NOEXCEPT_MAY_LONGJMP;
658
659/** @} */
660
661#endif /* !VMM_INCLUDED_SRC_VMMAll_target_x86_IEMOpHlp_x86_h */
Note: See TracBrowser for help on using the repository browser.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette