VirtualBox: Changeset r108261

Timestamp: Feb 17, 2025 3:51:04 PM
Author: vboxsync
svn:sync-xref-src-repo-rev: 167583
Message: VMM/IEM: Splitting up IEMOpHlp.h. jiraref:VBP-1531
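This changeset moves the x86 opcode-decoder helper macros out of the generic IEMOpHlp.h into a target-specific header, VMMAll/target-x86/IEMOpHlp-x86.h, which the generic header now includes when VBOX_VMM_TARGET_X86 is defined (see the final hunk below). As a rough sketch of how the helpers being moved are used in practice — the decoder body below is illustrative only, not the real mov Eb,Gb implementation, though FNIEMOP_DEF, IEM_OPCODE_GET_NEXT_U8 and the helper macros themselves come from the IEM sources:

    /* Illustrative decoder showing the helper-macro pattern (not actual VBox code). */
    FNIEMOP_DEF(iemOp_mov_Eb_Gb)
    {
        IEMOP_MNEMONIC2(MR, MOV, mov, Eb, Gb, DISOPTYPE_HARMLESS, 0); /* stats counter + debug decode log */
        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);                    /* fetch the ModR/M byte */
        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();                     /* raise #UD if a LOCK prefix is present */
        /* ... IEM_MC_* micro-code implementing the actual operation ... */
    }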

File: 1 edited

Legend: lines prefixed with '-' were removed, lines prefixed with '+' were added; unprefixed lines are unchanged context.
  • trunk/src/VBox/VMM/include/IEMOpHlp.h

r106061 → r108261

@@ -35,5 +35,4 @@
  * @{
  */
-void iemOpStubMsg2(PVMCPUCC pVCpu) RT_NOEXCEPT;
 
 /**
     
@@ -43,14 +42,5 @@
  * working on IEM.
  */
-#if 0
-# define IEMOP_BITCH_ABOUT_STUB() \
-    do { \
-        RTAssertMsg1(NULL, __LINE__, __FILE__, __FUNCTION__); \
-        iemOpStubMsg2(pVCpu); \
-        RTAssertPanic(); \
-    } while (0)
-#else
-# define IEMOP_BITCH_ABOUT_STUB() Log(("Stub: %s (line %d)\n", __FUNCTION__, __LINE__));
-#endif
+#define IEMOP_BITCH_ABOUT_STUB() Log(("Stub: %s (line %d)\n", __FUNCTION__, __LINE__));
 
 /** Stubs an opcode. */
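The hunk above drops the never-enabled assert-and-panic variant (the #if 0 arm) and keeps only the logging form, which is also what lets the iemOpStubMsg2() declaration go in the first hunk. For orientation, a stubbed opcode built on this macro looks roughly like the following sketch; the real IEMOP_STUB expansion follows the "Stubs an opcode" comment and is not shown in this hunk:

    /* Hypothetical stub body (illustrative; see IEMOP_STUB in the full header). */
    FNIEMOP_DEF(iemOp_SomeUnimplementedOpcode)
    {
        IEMOP_BITCH_ABOUT_STUB();          /* logs "Stub: <function> (line <n>)" */
        IEMOP_RAISE_INVALID_OPCODE_RET();  /* fail the instruction with #UD */
    }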
     
@@ -111,104 +101,4 @@
 #endif
 
-#ifdef DEBUG
-# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) \
-    do { \
-        IEMOP_INC_STATS(a_Stats); \
-        Log4(("decode - %04x:%RGv %s%s [#%u]\n", pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, \
-              pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK ? "lock " : "", a_szMnemonic, pVCpu->iem.s.cInstructions)); \
-    } while (0)
-
-# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
-    do { \
-        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
-        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
-        (void)RT_CONCAT(OP_,a_Upper); \
-        (void)(a_fDisHints); \
-        (void)(a_fIemHints); \
-    } while (0)
-
-# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
-    do { \
-        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
-        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
-        (void)RT_CONCAT(OP_,a_Upper); \
-        (void)RT_CONCAT(OP_PARM_,a_Op1); \
-        (void)(a_fDisHints); \
-        (void)(a_fIemHints); \
-    } while (0)
-
-# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
-    do { \
-        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
-        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
-        (void)RT_CONCAT(OP_,a_Upper); \
-        (void)RT_CONCAT(OP_PARM_,a_Op1); \
-        (void)RT_CONCAT(OP_PARM_,a_Op2); \
-        (void)(a_fDisHints); \
-        (void)(a_fIemHints); \
-    } while (0)
-
-# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
-    do { \
-        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
-        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
-        (void)RT_CONCAT(OP_,a_Upper); \
-        (void)RT_CONCAT(OP_PARM_,a_Op1); \
-        (void)RT_CONCAT(OP_PARM_,a_Op2); \
-        (void)RT_CONCAT(OP_PARM_,a_Op3); \
-        (void)(a_fDisHints); \
-        (void)(a_fIemHints); \
-    } while (0)
-
-# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
-    do { \
-        IEMOP_MNEMONIC(a_Stats, a_szMnemonic); \
-        (void)RT_CONCAT(IEMOPFORM_, a_Form); \
-        (void)RT_CONCAT(OP_,a_Upper); \
-        (void)RT_CONCAT(OP_PARM_,a_Op1); \
-        (void)RT_CONCAT(OP_PARM_,a_Op2); \
-        (void)RT_CONCAT(OP_PARM_,a_Op3); \
-        (void)RT_CONCAT(OP_PARM_,a_Op4); \
-        (void)(a_fDisHints); \
-        (void)(a_fIemHints); \
-    } while (0)
-
-#else
-# define IEMOP_MNEMONIC(a_Stats, a_szMnemonic) IEMOP_INC_STATS(a_Stats)
-
-# define IEMOP_MNEMONIC0EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
-         IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
-# define IEMOP_MNEMONIC1EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
-         IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
-# define IEMOP_MNEMONIC2EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
-         IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
-# define IEMOP_MNEMONIC3EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
-         IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
-# define IEMOP_MNEMONIC4EX(a_Stats, a_szMnemonic, a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
-         IEMOP_MNEMONIC(a_Stats, a_szMnemonic)
-
-#endif
-
-#define IEMOP_MNEMONIC0(a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints) \
-    IEMOP_MNEMONIC0EX(a_Lower, \
-                      #a_Lower, \
-                      a_Form, a_Upper, a_Lower, a_fDisHints, a_fIemHints)
-#define IEMOP_MNEMONIC1(a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints) \
-    IEMOP_MNEMONIC1EX(RT_CONCAT3(a_Lower,_,a_Op1), \
-                      #a_Lower " " #a_Op1, \
-                      a_Form, a_Upper, a_Lower, a_Op1, a_fDisHints, a_fIemHints)
-#define IEMOP_MNEMONIC2(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints) \
-    IEMOP_MNEMONIC2EX(RT_CONCAT5(a_Lower,_,a_Op1,_,a_Op2), \
-                      #a_Lower " " #a_Op1 "," #a_Op2, \
-                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_fDisHints, a_fIemHints)
-#define IEMOP_MNEMONIC3(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints) \
-    IEMOP_MNEMONIC3EX(RT_CONCAT7(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3), \
-                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3, \
-                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_fDisHints, a_fIemHints)
-#define IEMOP_MNEMONIC4(a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints) \
-    IEMOP_MNEMONIC4EX(RT_CONCAT9(a_Lower,_,a_Op1,_,a_Op2,_,a_Op3,_,a_Op4), \
-                      #a_Lower " " #a_Op1 "," #a_Op2 "," #a_Op3 "," #a_Op4, \
-                      a_Form, a_Upper, a_Lower, a_Op1, a_Op2, a_Op3, a_Op4, a_fDisHints, a_fIemHints)
-
 /** @} */
 
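The removed IEMOP_MNEMONICn wrappers glue a statistics symbol and a log string together from their arguments. Based purely on the macro text above, a call such as IEMOP_MNEMONIC2(MR, MOV, mov, Eb, Gb, DISOPTYPE_HARMLESS, 0) (form and operand names illustrative) expands to:

    IEMOP_MNEMONIC2EX(mov_Eb_Gb,   /* RT_CONCAT5(mov,_,Eb,_,Gb): the stats counter symbol */
                      "mov Eb,Gb", /* #a_Lower " " #a_Op1 "," #a_Op2: the debug-log text  */
                      MR, MOV, mov, Eb, Gb, DISOPTYPE_HARMLESS, 0);

In DEBUG builds that bumps the counter and Log4()s the decode with CS:RIP, and the (void) casts reference IEMOPFORM_MR, OP_MOV, OP_PARM_Eb and OP_PARM_Gb so a misspelled form or operand fails to compile; in release builds it reduces to IEMOP_INC_STATS(mov_Eb_Gb).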
     
@@ -236,506 +126,4 @@
 #endif
 
-/** The instruction requires a 186 or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_186
-# define IEMOP_HLP_MIN_186() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_186() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_186, true)
-#endif
-
-/** The instruction requires a 286 or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_286
-# define IEMOP_HLP_MIN_286() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_286() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_286, true)
-#endif
-
-/** The instruction requires a 386 or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
-# define IEMOP_HLP_MIN_386() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_386() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
-#endif
-
-/** The instruction requires a 386 or later if the given expression is true. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386
-# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) do { } while (0)
-#else
-# define IEMOP_HLP_MIN_386_EX(a_fOnlyIf) IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, a_fOnlyIf)
-#endif
-
-/** The instruction requires a 486 or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_486
-# define IEMOP_HLP_MIN_486() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_486() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_486, true)
-#endif
-
-/** The instruction requires a Pentium (586) or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PENTIUM
-# define IEMOP_HLP_MIN_586() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_586() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PENTIUM, true)
-#endif
-
-/** The instruction requires a PentiumPro (686) or later. */
-#if IEM_CFG_TARGET_CPU >= IEMTARGETCPU_PPRO
-# define IEMOP_HLP_MIN_686() do { } while (0)
-#else
-# define IEMOP_HLP_MIN_686() IEMOP_HLP_MIN_CPU(IEMTARGETCPU_PPRO, true)
-#endif
-
-
-/** The instruction raises an \#UD in real and V8086 mode. */
-#define IEMOP_HLP_NO_REAL_OR_V86_MODE() \
-    do \
-    { \
-        if (!IEM_IS_REAL_OR_V86_MODE(pVCpu)) { /* likely */ } \
-        else IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-#ifdef VBOX_WITH_NESTED_HWVIRT_VMX
-/** This instruction raises an \#UD in real and V8086 mode or when not using a
- * 64-bit code segment when in long mode (applicable to all VMX instructions
- * except VMCALL).
- *
- * @todo r=bird: This is not recompiler friendly. The scenario with
- *       16-bit/32-bit code running in long mode doesn't fit at all.
- */
-# define IEMOP_HLP_VMX_INSTR(a_szInstr, a_InsDiagPrefix) \
-    do \
-    { \
-        if (   !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-            && (  !IEM_IS_LONG_MODE(pVCpu) \
-                || IEM_IS_64BIT_CODE(pVCpu))) \
-        { /* likely */ } \
-        else \
-        { \
-            if (IEM_IS_REAL_OR_V86_MODE(pVCpu)) \
-            { \
-                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_RealOrV86Mode; \
-                Log5((a_szInstr ": Real or v8086 mode -> #UD\n")); \
-                IEMOP_RAISE_INVALID_OPCODE_RET(); \
-            } \
-            if (IEM_IS_LONG_MODE(pVCpu) && !IEM_IS_64BIT_CODE(pVCpu)) \
-            { \
-                pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_LongModeCS; \
-                Log5((a_szInstr ": Long mode without 64-bit code segment -> #UD\n")); \
-                IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET(); /** @todo This doesn't work. */ \
-            } \
-        } \
-    } while (0)
-
-/** The instruction can only be executed in VMX operation (VMX root mode and
- * non-root mode).
- *
- *  @note Update IEM_VMX_IN_VMX_OPERATION if changes are made here.
- *
- * @todo r=bird: This is absolutely *INCORRECT* since IEM_VMX_IS_ROOT_MODE
- *       is a complicated runtime state (calls CPUMIsGuestInVmxRootMode), and
- *       not something we can decide while decoding.  Convert to an IEM_MC!
- */
-# define IEMOP_HLP_IN_VMX_OPERATION(a_szInstr, a_InsDiagPrefix) \
-    do \
-    { \
-        if (IEM_VMX_IS_ROOT_MODE(pVCpu)) { /* likely */ } \
-        else \
-        { \
-            pVCpu->cpum.GstCtx.hwvirt.vmx.enmDiag = a_InsDiagPrefix##_VmxRoot; \
-            Log5((a_szInstr ": Not in VMX operation (root mode) -> #UD\n")); \
-            IEMOP_RAISE_INVALID_OPCODE_RUNTIME_RET(); /** @todo This doesn't work. */ \
-        } \
-    } while (0)
-#endif /* VBOX_WITH_NESTED_HWVIRT_VMX */
-
-/** The instruction is not available in 64-bit mode, throw \#UD if we're in
- * 64-bit mode. */
-#define IEMOP_HLP_NO_64BIT() \
-    do \
-    { \
-        if (!IEM_IS_64BIT_CODE(pVCpu)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/** The instruction is only available in 64-bit mode, throw \#UD if we're not in
- * 64-bit mode. */
-#define IEMOP_HLP_ONLY_64BIT() \
-    do \
-    { \
-        if (IEM_IS_64BIT_CODE(pVCpu)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/** The instruction defaults to 64-bit operand size if 64-bit mode. */
-#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE() \
-    do \
-    { \
-        if (IEM_IS_64BIT_CODE(pVCpu)) \
-            iemRecalEffOpSize64Default(pVCpu); \
-    } while (0)
-
-/** The instruction defaults to 64-bit operand size if 64-bit mode and intel
- *  CPUs ignore the operand size prefix complete (e.g. relative jumps). */
-#define IEMOP_HLP_DEFAULT_64BIT_OP_SIZE_AND_INTEL_IGNORES_OP_SIZE_PREFIX() \
-    do \
-    { \
-        if (IEM_IS_64BIT_CODE(pVCpu)) \
-            iemRecalEffOpSize64DefaultAndIntelIgnoresOpSizePrefix(pVCpu); \
-    } while (0)
-
-/** The instruction has 64-bit operand size if 64-bit mode. */
-#define IEMOP_HLP_64BIT_OP_SIZE() \
-    do \
-    { \
-        if (IEM_IS_64BIT_CODE(pVCpu)) \
-            pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; \
-    } while (0)
-
-/** Only a REX prefix immediately preceeding the first opcode byte takes
- * effect. This macro helps ensuring this as well as logging bad guest code.  */
-#define IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE(a_szPrf) \
-    do \
-    { \
-        if (RT_UNLIKELY(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX)) \
-        { \
-            Log5((a_szPrf ": Overriding REX prefix at %RX16! fPrefixes=%#x\n", pVCpu->cpum.GstCtx.rip, pVCpu->iem.s.fPrefixes)); \
-            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_REX_MASK; \
-            pVCpu->iem.s.uRexB     = 0; \
-            pVCpu->iem.s.uRexIndex = 0; \
-            pVCpu->iem.s.uRexReg   = 0; \
-            iemRecalEffOpSize(pVCpu); \
-        } \
-    } while (0)
-
-/** The instruction ignores any REX.W/VEX.W prefix if not in 64-bit mode. */
-#define IEMOP_HLP_IGNORE_VEX_W_PREFIX_IF_NOT_IN_64BIT() \
-    do \
-    { \
-        if (!IEM_IS_64BIT_CODE(pVCpu)) \
-            pVCpu->iem.s.fPrefixes &= ~IEM_OP_PRF_SIZE_REX_W; \
-    } while (0)
-
-/**
- * Done decoding.
- */
-#define IEMOP_HLP_DONE_DECODING() \
-    do \
-    { \
-        /*nothing for now, maybe later... */ \
-    } while (0)
-
-#define IEMOP_HLP_DONE_DECODING_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-    } while (0)
-
-/**
- * Done decoding, raise \#UD exception if lock prefix present.
- */
-#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX() \
-    do \
-    { \
-        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-    } while (0)
-
-/**
- * Done decoding, raise \#UD exception if lock prefix present, or if the
- * a_fFeature is present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-    } while (0)
-
-/**
- * Done decoding, raise \#UD exception if lock prefix present, or if the
- * a_fFeature is present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX_2_OR(a_fFeature1, a_fFeature2) \
-    do \
-    { \
-        if (RT_LIKELY(   !(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) \
-                      && (   IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature1 \
-                          || IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2) )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-    } while (0)
-
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, if in real or v8086 mode, or if the
- * a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if in real or v8086 mode, or if the
- * a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && pVCpu->iem.s.uVexLength == 0 \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if in real or v8086 mode, or if the
- * a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L0_EX_2(a_fFeature, a_fFeature2) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && pVCpu->iem.s.uVexLength == 0 \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if in real or v8086 mode, or if the
- * a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L1_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && pVCpu->iem.s.uVexLength == 1 \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if VEX.W is one, or if in real or
- * v8086 mode, or if the a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_W0_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX \
-                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if the VEX.VVVV field doesn't indicate
- * register 0, if in real or v8086 mode, or if the a_fFeature is not present in
- * the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX)) \
-                      && !pVCpu->iem.s.uVex3rdReg \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX instruction, raise \#UD exception if any lock, rex, repz,
- * repnz or size prefixes are present, or if VEX.W is one, or if the VEX.VVVV field doesn't indicate
- * register 0, if in real or v8086 mode, or if the a_fFeature is not present in
- * the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_W0_AND_NO_VVVV_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REX \
-                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
-                      && !pVCpu->iem.s.uVex3rdReg \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX, no V, L=0.
- * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
- * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature
- * is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
-                      && pVCpu->iem.s.uVexLength == 0 \
-                      && pVCpu->iem.s.uVex3rdReg == 0 \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX, no V, L=0.
- * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
- * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature or a_fFeature2
- * is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX_2(a_fFeature, a_fFeature2) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
-                      && pVCpu->iem.s.uVexLength == 0 \
-                      && pVCpu->iem.s.uVex3rdReg == 0 \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature2)) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX, no V, L=1.
- * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
- * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=1, or if the a_fFeature
- * is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
-                      && pVCpu->iem.s.uVexLength == 1 \
-                      && pVCpu->iem.s.uVex3rdReg == 0 \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding VEX, L=0 and W=0.
- * Raises \#UD exception if rex, rep, opsize or lock prefixes are present,
- * if we're in real or v8086 mode, if VEX.L!=0, if VEX.W!=0, or if the
- * a_fFeature is not present in the guest CPU.
- */
-#define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_W0_EX(a_fFeature) \
-    do \
-    { \
-        if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
-                           & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX \
-                              | IEM_OP_PRF_SIZE_REX_W /*VEX.W*/)) \
-                      && pVCpu->iem.s.uVexLength == 0 \
-                      && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
-                      && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-
-#define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \
-    do \
-    { \
-        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
-        { /* likely */ } \
-        else \
-        { \
-            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_fDisOpType); \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-        } \
-    } while (0)
-#define IEMOP_HLP_DECODED_NL_2(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_uDisParam1, a_fDisOpType) \
-    do \
-    { \
-        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK))) \
-        { /* likely */ } \
-        else \
-        { \
-            NOREF(a_uDisOpNo); NOREF(a_fIemOpFlags); NOREF(a_uDisParam0); NOREF(a_uDisParam1); NOREF(a_fDisOpType); \
-            IEMOP_RAISE_INVALID_LOCK_PREFIX_RET(); \
-        } \
-    } while (0)
-
-/**
- * Done decoding, raise \#UD exception if any lock, repz or repnz prefixes
- * are present.
- */
-#define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() \
-    do \
-    { \
-        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_LOCK | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
-
-/**
- * Done decoding, raise \#UD exception if any operand-size override, repz or repnz
- * prefixes are present.
- */
-#define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() \
-    do \
-    { \
-        if (RT_LIKELY(!(pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)))) \
-        { /* likely */ } \
-        else \
-            IEMOP_RAISE_INVALID_OPCODE_RET(); \
-    } while (0)
 
 /**
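The IEMOP_HLP_MIN_* family at the top of this hunk compiles away entirely when the configured target CPU already satisfies the requirement, so the per-instruction CPU check costs nothing in the common build configuration. A hedged usage sketch (the decoder function and its body are illustrative; movzx is indeed a 386+ instruction):

    /* When IEM_CFG_TARGET_CPU >= IEMTARGETCPU_386 the check below is an empty
       do-while; otherwise it expands to IEMOP_HLP_MIN_CPU(IEMTARGETCPU_386, true)
       and raises #UD when the guest CPU model predates the 386. */
    FNIEMOP_DEF(iemOp_movzx_Gv_Eb)   /* illustrative name */
    {
        IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb");
        IEMOP_HLP_MIN_386();
        /* ... decode ModR/M and emit the micro-ops ... */
    }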
     
@@ -751,20 +139,12 @@
     } while (0)
 
-/**
- * Used the threaded code generator to check if a jump stays within the same
- * page in 64-bit code.
+/** @}  */
+
+/*
+ * Include the target specific header.
  */
-#define IEMOP_HLP_PC64_IS_JMP_REL_WITHIN_PAGE(a_offDisp) \
-     (   ((pVCpu->cpum.GstCtx.rip + IEM_GET_INSTR_LEN(pVCpu) + (a_offDisp)) >> GUEST_PAGE_SHIFT) \
-      == (pVCpu->cpum.GstCtx.rip >> GUEST_PAGE_SHIFT))
-
-VBOXSTRICTRC    iemOpHlpCalcRmEffAddr(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff) RT_NOEXCEPT;
-VBOXSTRICTRC    iemOpHlpCalcRmEffAddrEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, PRTGCPTR pGCPtrEff, uint64_t *puInfo) RT_NOEXCEPT;
-#ifdef IEM_WITH_SETJMP
-RTGCPTR         iemOpHlpCalcRmEffAddrJmp(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset) IEM_NOEXCEPT_MAY_LONGJMP;
-RTGCPTR         iemOpHlpCalcRmEffAddrJmpEx(PVMCPUCC pVCpu, uint8_t bRm, uint32_t cbImmAndRspOffset, uint64_t *puInfo) IEM_NOEXCEPT_MAY_LONGJMP;
+#ifdef VBOX_VMM_TARGET_X86
+# include "VMMAll/target-x86/IEMOpHlp-x86.h"
 #endif
 
-/** @}  */
-
 #endif /* !VMM_INCLUDED_SRC_include_IEMOpHlp_h */
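The removed IEMOP_HLP_PC64_IS_JMP_REL_WITHIN_PAGE macro (now in the x86 header) decides whether a relative jump target lands on the same guest page as the current RIP, by comparing the two addresses shifted right by GUEST_PAGE_SHIFT. A standalone restatement of that logic, assuming 4 KiB pages (GUEST_PAGE_SHIFT = 12) and made-up register values:

    #include <stdint.h>
    #include <stdio.h>

    /* Same computation as the macro: page of (rip + instr length + disp) vs page of rip. */
    static int IsJmpRelWithinPage(uint64_t uRip, uint8_t cbInstr, int64_t offDisp)
    {
        return ((uRip + cbInstr + (uint64_t)offDisp) >> 12) == (uRip >> 12);
    }

    int main(void)
    {
        /* 0x1000FF0 + 5 + 0x20 = 0x1001015: page 0x1001 vs page 0x1000 -> crosses. */
        printf("%d\n", IsJmpRelWithinPage(UINT64_C(0x1000FF0), 5, 0x20)); /* prints 0 */
        /* 0x1000FF0 + 5 + 0x08 = 0x1000FFD: page 0x1000 vs page 0x1000 -> stays.   */
        printf("%d\n", IsJmpRelWithinPage(UINT64_C(0x1000FF0), 5, 0x08)); /* prints 1 */
        return 0;
    }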