Changeset 104378 in vbox for trunk/src/VBox/VMM/include
- Timestamp:
- Apr 19, 2024 2:43:14 PM (10 months ago)
- Location:
- trunk/src/VBox/VMM/include
- Files:
- 2 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/include/IEMInternal.h
r104367 r104378 53 53 */ 54 54 55 /* Make doxygen happy w/o overcomplicating the #if checks. */ 56 #ifdef DOXYGEN_RUNNING 57 # define IEM_WITH_THROW_CATCH 58 # define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP 59 #endif 60 55 61 /** For expanding symbol in slickedit and other products tagging and 56 62 * crossreferencing IEM symbols. */ … … 83 89 * Linux, but it should be quite a bit faster for normal code. 84 90 */ 85 #if (defined(__cplusplus) && defined(IEM_WITH_SETJMP) && defined(IN_RING3) && (defined(__GNUC__) || defined(_MSC_VER))) \ 86 || defined(DOXYGEN_RUNNING) 91 #if defined(__cplusplus) && defined(IEM_WITH_SETJMP) && defined(IN_RING3) && (defined(__GNUC__) || defined(_MSC_VER)) /* ASM-NOINC-START */ 87 92 # define IEM_WITH_THROW_CATCH 88 #endif 93 #endif /*ASM-NOINC-END*/ 89 94 90 95 /** @def IEMNATIVE_WITH_DELAYED_PC_UPDATING … … 117 122 * non-volatile (and does something even more crazy for ARM), this probably 118 123 * won't work reliably on Windows. */ 119 #if defined(DOXYGEN_RUNNING) || (!defined(RT_OS_WINDOWS) && (defined(RT_ARCH_ARM64) /*|| defined(_RT_ARCH_AMD64)*/)) 120 # define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP 121 #endif 124 #ifdef RT_ARCH_ARM64 125 # ifndef RT_OS_WINDOWS 126 # define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP 127 # endif 128 #endif 129 /* ASM-NOINC-START */ 122 130 #ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP 123 131 # if !defined(IN_RING3) \ … … 189 197 # define IEM_NOEXCEPT_MAY_LONGJMP RT_NOEXCEPT 190 198 #endif 199 /* ASM-NOINC-END */ 191 200 192 201 #define IEM_IMPLEMENTS_TASKSWITCH … … 194 203 /** @def IEM_WITH_3DNOW 195 204 * Includes the 3DNow decoding. */ 196 #if (!defined(IEM_WITH_3DNOW) && !defined(IEM_WITHOUT_3DNOW)) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 197 # define IEM_WITH_3DNOW 205 #if !defined(IEM_WITH_3DNOW) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. 
*/ 206 # ifndef IEM_WITHOUT_3DNOW 207 # define IEM_WITH_3DNOW 208 # endif 198 209 #endif 199 210 200 211 /** @def IEM_WITH_THREE_0F_38 201 212 * Includes the three byte opcode map for instrs starting with 0x0f 0x38. */ 202 #if (!defined(IEM_WITH_THREE_0F_38) && !defined(IEM_WITHOUT_THREE_0F_38)) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 203 # define IEM_WITH_THREE_0F_38 213 #if !defined(IEM_WITH_THREE_0F_38) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 214 # ifdef IEM_WITHOUT_THREE_0F_38 215 # define IEM_WITH_THREE_0F_38 216 # endif 204 217 #endif 205 218 206 219 /** @def IEM_WITH_THREE_0F_3A 207 220 * Includes the three byte opcode map for instrs starting with 0x0f 0x38. */ 208 #if (!defined(IEM_WITH_THREE_0F_3A) && !defined(IEM_WITHOUT_THREE_0F_3A)) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 209 # define IEM_WITH_THREE_0F_3A 221 #if !defined(IEM_WITH_THREE_0F_3A) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 222 # ifndef IEM_WITHOUT_THREE_0F_3A 223 # define IEM_WITH_THREE_0F_3A 224 # endif 210 225 #endif 211 226 212 227 /** @def IEM_WITH_VEX 213 228 * Includes the VEX decoding. */ 214 #if (!defined(IEM_WITH_VEX) && !defined(IEM_WITHOUT_VEX)) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 215 # define IEM_WITH_VEX 229 #if !defined(IEM_WITH_VEX) || defined(DOXYGEN_RUNNING) /* For doxygen, set in Config.kmk. */ 230 # ifndef IEM_WITHOUT_VEX 231 # define IEM_WITH_VEX 232 # endif 216 233 #endif 217 234 … … 234 251 /** @def IEM_USE_UNALIGNED_DATA_ACCESS 235 252 * Use unaligned accesses instead of elaborate byte assembly. 
*/ 236 #if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(DOXYGEN_RUNNING) 253 #if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86) || defined(DOXYGEN_RUNNING) /*ASM-NOINC*/ 237 254 # define IEM_USE_UNALIGNED_DATA_ACCESS 238 #endif 255 #endif /*ASM-NOINC*/ 239 256 240 257 //#define IEM_LOG_MEMORY_WRITES … … 242 259 243 260 244 #ifndef RT_IN_ASSEMBLER /* the rest of the file */261 #ifndef RT_IN_ASSEMBLER /* ASM-NOINC-START - the rest of the file */ 245 262 246 263 # if !defined(IN_TSTVMSTRUCT) && !defined(DOXYGEN_RUNNING) … … 6198 6215 DECLASM(DECL_NO_RETURN(void)) iemNativeTbLongJmp(void *pvFramePointer, int rc) RT_NOEXCEPT; 6199 6216 6200 #endif /* !RT_IN_ASSEMBLER */6217 #endif /* !RT_IN_ASSEMBLER - ASM-NOINC-END */ 6201 6218 6202 6219 … … 6205 6222 RT_C_DECLS_END 6206 6223 6224 /* ASM-INC: %include "IEMInternalStruct.mac" */ 6225 6207 6226 #endif /* !VMM_INCLUDED_SRC_include_IEMInternal_h */ 6208 6227 -
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r104367 r104378 55 55 /** @def IEMNATIVE_WITH_EFLAGS_SKIPPING 56 56 * Enables skipping EFLAGS calculations/updating based on liveness info. */ 57 #if (defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) && 1) || defined(DOXYGEN_RUNNING)57 #if defined(IEMNATIVE_WITH_LIVENESS_ANALYSIS) || defined(DOXYGEN_RUNNING) 58 58 # define IEMNATIVE_WITH_EFLAGS_SKIPPING 59 59 #endif … … 63 63 * Enables strict consistency checks around EFLAGS skipping. 64 64 * @note Only defined when IEMNATIVE_WITH_EFLAGS_SKIPPING is also defined. */ 65 #if (defined(VBOX_STRICT) && defined(IEMNATIVE_WITH_EFLAGS_SKIPPING)) || defined(DOXYGEN_RUNNING) 65 #ifdef IEMNATIVE_WITH_EFLAGS_SKIPPING 66 # ifdef VBOX_STRICT 67 # define IEMNATIVE_STRICT_EFLAGS_SKIPPING 68 # endif 69 #elif defined(DOXYGEN_RUNNING) 66 70 # define IEMNATIVE_STRICT_EFLAGS_SKIPPING 67 71 #endif … … 186 190 * Dedicated temporary SIMD register. */ 187 191 #endif 188 #if defined(RT_ARCH_AMD64) && !defined(DOXYGEN_RUNNING)192 #ifdef RT_ARCH_AMD64 189 193 # define IEMNATIVE_REG_FIXED_PVMCPU X86_GREG_xBX 194 # define IEMNATIVE_REG_FIXED_PVMCPU_ASM xBX 190 195 # define IEMNATIVE_REG_FIXED_TMP0 X86_GREG_x11 191 # define IEMNATIVE_REG_FIXED_MASK ( RT_BIT_32(IEMNATIVE_REG_FIXED_PVMCPU) \192 | RT_BIT_32(IEMNATIVE_REG_FIXED_TMP0) \193 | RT_BIT_32(X86_GREG_xSP) \194 | RT_BIT_32(X86_GREG_xBP) )196 # define IEMNATIVE_REG_FIXED_MASK ( RT_BIT_32(IEMNATIVE_REG_FIXED_PVMCPU) \ 197 | RT_BIT_32(IEMNATIVE_REG_FIXED_TMP0) \ 198 | RT_BIT_32(X86_GREG_xSP) \ 199 | RT_BIT_32(X86_GREG_xBP) ) 195 200 196 201 # ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 197 # define IEMNATIVE_SIMD_REG_FIXED_TMP0 5 /* xmm5/ymm5 */ 198 # if defined(IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS) || !defined(_MSC_VER) 199 # define IEMNATIVE_SIMD_REG_FIXED_MASK (RT_BIT_32(IEMNATIVE_SIMD_REG_FIXED_TMP0)) 202 # define IEMNATIVE_SIMD_REG_FIXED_TMP0 5 /* xmm5/ymm5 */ 203 # ifndef IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS 204 # ifndef _MSC_VER /* On Windows xmm6 through xmm15 are marked as callee 
saved. */ 205 # define IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS 206 # endif 207 # endif 208 # ifdef IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS 209 # define IEMNATIVE_SIMD_REG_FIXED_MASK (RT_BIT_32(IEMNATIVE_SIMD_REG_FIXED_TMP0)) 200 210 # else 201 /** On Windows xmm6 through xmm15 are marked as callee saved. */ 202 # define IEMNATIVE_SIMD_REG_FIXED_MASK ( UINT32_C(0xffc0) \ 203 | RT_BIT_32(IEMNATIVE_SIMD_REG_FIXED_TMP0)) 211 # define IEMNATIVE_SIMD_REG_FIXED_MASK ( UINT32_C(0xffc0) \ 212 | RT_BIT_32(IEMNATIVE_SIMD_REG_FIXED_TMP0)) 204 213 # endif 205 214 # endif … … 229 238 230 239 # ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR 231 # define IEMNATIVE_SIMD_REG_FIXED_TMP0 ARMV8_A64_REG_Q30240 # define IEMNATIVE_SIMD_REG_FIXED_TMP0 ARMV8_A64_REG_Q30 232 241 # if defined(IEMNATIVE_WITH_SIMD_REG_ACCESS_ALL_REGISTERS) 233 # define IEMNATIVE_SIMD_REG_FIXED_MASK RT_BIT_32(ARMV8_A64_REG_Q30)242 # define IEMNATIVE_SIMD_REG_FIXED_MASK RT_BIT_32(ARMV8_A64_REG_Q30) 234 243 # else 235 244 /* … … 242 251 * having to save and restore them in the prologue/epilogue. 
243 252 */ 244 # define IEMNATIVE_SIMD_REG_FIXED_MASK ( UINT32_C(0xff00) \245 | RT_BIT_32(ARMV8_A64_REG_Q31) \246 | RT_BIT_32(ARMV8_A64_REG_Q30) \247 | RT_BIT_32(ARMV8_A64_REG_Q29) \248 | RT_BIT_32(ARMV8_A64_REG_Q27) \249 | RT_BIT_32(ARMV8_A64_REG_Q25) \250 | RT_BIT_32(ARMV8_A64_REG_Q23) \251 | RT_BIT_32(ARMV8_A64_REG_Q21) \252 | RT_BIT_32(ARMV8_A64_REG_Q19) \253 | RT_BIT_32(ARMV8_A64_REG_Q17) \254 | RT_BIT_32(ARMV8_A64_REG_Q15) \255 | RT_BIT_32(ARMV8_A64_REG_Q13) \256 | RT_BIT_32(ARMV8_A64_REG_Q11) \257 | RT_BIT_32(ARMV8_A64_REG_Q9) \258 | RT_BIT_32(ARMV8_A64_REG_Q7) \259 | RT_BIT_32(ARMV8_A64_REG_Q5) \260 | RT_BIT_32(ARMV8_A64_REG_Q3) \261 | RT_BIT_32(ARMV8_A64_REG_Q1))253 # define IEMNATIVE_SIMD_REG_FIXED_MASK ( UINT32_C(0xff00) \ 254 | RT_BIT_32(ARMV8_A64_REG_Q31) \ 255 | RT_BIT_32(ARMV8_A64_REG_Q30) \ 256 | RT_BIT_32(ARMV8_A64_REG_Q29) \ 257 | RT_BIT_32(ARMV8_A64_REG_Q27) \ 258 | RT_BIT_32(ARMV8_A64_REG_Q25) \ 259 | RT_BIT_32(ARMV8_A64_REG_Q23) \ 260 | RT_BIT_32(ARMV8_A64_REG_Q21) \ 261 | RT_BIT_32(ARMV8_A64_REG_Q19) \ 262 | RT_BIT_32(ARMV8_A64_REG_Q17) \ 263 | RT_BIT_32(ARMV8_A64_REG_Q15) \ 264 | RT_BIT_32(ARMV8_A64_REG_Q13) \ 265 | RT_BIT_32(ARMV8_A64_REG_Q11) \ 266 | RT_BIT_32(ARMV8_A64_REG_Q9) \ 267 | RT_BIT_32(ARMV8_A64_REG_Q7) \ 268 | RT_BIT_32(ARMV8_A64_REG_Q5) \ 269 | RT_BIT_32(ARMV8_A64_REG_Q3) \ 270 | RT_BIT_32(ARMV8_A64_REG_Q1)) 262 271 # endif 263 272 # endif … … 309 318 # endif 310 319 311 # else 320 # else /* !RT_OS_WINDOWS */ 312 321 # define IEMNATIVE_CALL_ARG_GREG_COUNT 6 313 322 # define IEMNATIVE_CALL_ARG0_GREG X86_GREG_xDI … … 336 345 # define IEMNATIVE_CALL_VOLATILE_SIMD_REG_MASK (UINT32_C(0xffff)) 337 346 # endif 338 # endif 347 # endif /* !RT_OS_WINDOWS */ 339 348 340 349 #elif defined(RT_ARCH_ARM64) … … 384 393 385 394 /** This is the maximum argument count we'll ever be needing. 
*/ 386 #if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) 387 # define IEMNATIVE_CALL_MAX_ARG_COUNT 8 388 #else 389 # define IEMNATIVE_CALL_MAX_ARG_COUNT 7 395 #define IEMNATIVE_CALL_MAX_ARG_COUNT 7 396 #ifdef RT_OS_WINDOWS 397 # ifdef VBOXSTRICTRC_STRICT_ENABLED 398 # undef IEMNATIVE_CALL_MAX_ARG_COUNT 399 # define IEMNATIVE_CALL_MAX_ARG_COUNT 8 400 # endif 390 401 #endif 391 402 /** @} */ … … 427 438 428 439 429 #ifndef RT_IN_ASSEMBLER /* the rest of the file */440 #ifndef RT_IN_ASSEMBLER /* ASM-NOINC-START - the rest of the file */ 430 441 431 442 … … 2503 2514 #endif 2504 2515 2505 #endif /* !RT_IN_ASSEMBLER */2516 #endif /* !RT_IN_ASSEMBLER - ASM-NOINC-END */ 2506 2517 2507 2518 /** @} */
Note:
See TracChangeset
for help on using the changeset viewer.