VirtualBox

Changeset 107200 in vbox


Timestamp: Nov 29, 2024 10:15:46 PM
Author: vboxsync
svn:sync-xref-src-repo-rev: 166204

Message:
VMM/IEM: Deal with hidden pointer to VBOXSTRICTRC return struct on win.arm64. jiraref:VBP-1466

Location: trunk
Files: 6 edited
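
Background for the change: in strict builds VBOXSTRICTRC is a small C++ class rather than a plain int32_t, and on win.arm64 (as already handled for win.amd64) such a class is not allowed to be returned in a register; the caller has to supply a result buffer whose address travels as a hidden first argument, shifting every explicit argument up by one register. The following stand-alone sketch uses a hypothetical stand-in class, not the real VBOXSTRICTRC definition, to illustrate the lowering the recompiler has to mimic in the diffs below:

    /* Hypothetical stand-in for strict-mode VBOXSTRICTRC: the non-trivial copy
       constructor is what forces the hidden-pointer return convention. */
    #include <cstdint>
    #include <cstdio>

    class StrictRcStandIn
    {
    public:
        StrictRcStandIn(int32_t rc) : m_rc(rc) {}
        StrictRcStandIn(StrictRcStandIn const &a_rSrc) : m_rc(a_rSrc.m_rc) {}
        int32_t getValue() const { return m_rc; }
    private:
        int32_t m_rc;
    };

    /* On win.arm64 the compiler effectively lowers this into something like
           void worker(StrictRcStandIn *pHiddenRetBuf, int32_t rc);
       so generated code must reserve a slot for the return value, pass its
       address as the first argument and read the result back after the call. */
    static StrictRcStandIn worker(int32_t rc)
    {
        return StrictRcStandIn(rc);
    }

    int main()
    {
        StrictRcStandIn const rcStrict = worker(0 /* VINF_SUCCESS */);
        std::printf("rcStrict=%d\n", rcStrict.getValue());
        return 0;
    }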

  • trunk/include/VBox/cdefs.h (r106061 → r107200)

     #endif
     
    +/** @def VBOXSTRICTRC_STRICT_ENABLED
    + * Indicates that VBOXSTRICTRC is in strict mode.
    + */
    +#if defined(__cplusplus) \
    + && ARCH_BITS == 64    /* cdecl requires classes and structs as hidden params. */ \
    + && !defined(_MSC_VER) /* trouble similar to 32-bit gcc. */ \
    + &&  (   defined(RT_STRICT) \
    +      || defined(VBOX_STRICT) \
    +      || defined(DEBUG) \
    +      || defined(DOXYGEN_RUNNING) )
    +# define VBOXSTRICTRC_STRICT_ENABLED 1
    +#endif
    +
     
     /*
    …
     #define VBOX_STRICT_GUEST
     #define VBOX_NO_STRICT_GUEST
    +#define VBOXSTRICTRC_STRICT_ENABLED
     #define IN_DBG
     #define IN_DIS
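
Moving the definition into cdefs.h (and listing it in the doxygen macro index above) lets headers that only pull in cdefs.h test the macro. A minimal usage sketch, with a hypothetical helper macro name rather than anything from the tree:

    #include <VBox/cdefs.h>

    /* Hypothetical example: decide once, based on the new macro, whether calls
       returning VBOXSTRICTRC need a hidden result-buffer argument. */
    #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) \
     && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
    /* VBOXSTRICTRC is a class here and Windows returns it via a hidden pointer. */
    # define EXAMPLE_HIDDEN_RET_PTR 1
    #else
    /* VBOXSTRICTRC comes back in the ordinary return register. */
    # define EXAMPLE_HIDDEN_RET_PTR 0
    #endif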
  • trunk/include/VBox/types.h (r106061 → r107200)

         VMSTATE_MAKE_32BIT_HACK = 0x7fffffff
     } VMSTATE;
    -
    -/** @def VBOXSTRICTRC_STRICT_ENABLED
    - * Indicates that VBOXSTRICTRC is in strict mode.
    - */
    -#if defined(__cplusplus) \
    - && ARCH_BITS == 64    /* cdecl requires classes and structs as hidden params. */ \
    - && !defined(_MSC_VER) /* trouble similar to 32-bit gcc. */ \
    - &&  (   defined(RT_STRICT) \
    -      || defined(VBOX_STRICT) \
    -      || defined(DEBUG) \
    -      || defined(DOXYGEN_RUNNING) )
    -# define VBOXSTRICTRC_STRICT_ENABLED 1
    -#endif
     
     /** We need RTERR_STRICT_RC.  */
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veHlpA-arm64.S (r105490 → r107200)

             add     x29, sp, #(IEMNATIVE_FRAME_SAVE_REG_SIZE - 16)
             /* Allocate the variable area from SP. */
    -        sub     sp, sp, #IEMNATIVE_FRAME_VAR_SIZE
    +        sub     sp, sp, #(IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE)
             /* Load the fixed register values from parameters. */
             mov     IEMNATIVE_REG_FIXED_PVMCPU_ASM,   x0
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h (r106465 → r107200)

          * Load the two or three hidden arguments.
          */
    -#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)
    -    off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict */
    +#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
    +    off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */
         off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
         off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, cbInstr);
    …
          */
         off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)pfnCImpl);
    -#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)
    -    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict (see above) */
    +#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
    +    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */
     #endif
         fGstShwFlush = iemNativeCImplFlagsToGuestShadowFlushMask(pReNative->fCImpl, fGstShwFlush | RT_BIT_64(kIemNativeGstReg_Pc));
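
Written out as hand-translated C++ (stand-in types only; the real code is generated through the iemNativeEmit* helpers above), the sequence emitted on win.arm64 with VBOXSTRICTRC_STRICT_ENABLED roughly amounts to this:

    #include <cstdint>

    struct VCpuStandIn { int iDummy; };   /* stand-in for the per-CPU state      */
    typedef int32_t     RcStandIn;        /* stand-in for VBOXSTRICTRC's payload */

    /* The C-impl worker as the compiler lowers it: hidden return buffer first. */
    static void cImplWorkerLowered(RcStandIn *pRcStrict /* arg0 */,
                                   VCpuStandIn *pVCpu   /* arg1 */,
                                   uint8_t cbInstr      /* arg2 */)
    {
        (void)pVCpu; (void)cbInstr;
        *pRcStrict = 0; /* VINF_SUCCESS */
    }

    static uint32_t emittedCallSequence(VCpuStandIn *pVCpu, uint8_t cbInstr)
    {
        RcStandIn rcStrictSlot;                            /* the IEMNATIVE_FP_OFF_VBOXSTRICRC stack slot */
        cImplWorkerLowered(&rcStrictSlot, pVCpu, cbInstr); /* lea arg0; mov arg1; mov arg2; call          */
        return (uint32_t)rcStrictSlot;                     /* iemNativeEmitLoadGprByBpU32 reads it back   */
    }

    int main()
    {
        VCpuStandIn VCpu = { 0 };
        return (int)emittedCallSequence(&VCpu, 2 /* example instruction length */);
    }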
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp (r106724 → r107200)

          * Load the parameters.
          */
    -#if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_ARCH_AMD64)
    +#if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
         /* Special code the hidden VBOXSTRICTRC pointer. */
         off = iemNativeEmitLoadGprFromGpr(  pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    …
             off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, uParam0);
         if (cAddParams > 1)
    +# if IEMNATIVE_CALL_ARG_GREG_COUNT >= 5
    +        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG4_GREG, uParam1);
    +# else
             off = iemNativeEmitStoreImm64ByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG0, uParam1);
    +# endif
         if (cAddParams > 2)
    +# if IEMNATIVE_CALL_ARG_GREG_COUNT >= 6
    +        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG5_GREG, uParam2);
    +# else
             off = iemNativeEmitStoreImm64ByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG1, uParam2);
    -    off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict */
    +# endif
    +    off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */
     
     #else
    …
         off = iemNativeEmitCallImm(pReNative, off, pfnCImpl);
     
    -#if defined(RT_ARCH_AMD64) && defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS)
    -    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict (see above) */
    +#if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
    +    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */
     #endif
     
    …
             off = iemNativeEmitStoreGprByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG0, X86_GREG_x10);
         }
    -    off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict */
    +    off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */
     #  endif /* VBOXSTRICTRC_STRICT_ENABLED */
     # else
    …
     
     # if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS)
    -    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict (see above) */
    +    off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */
     # endif
     
    …
          * ARM64:
          */
    +# if !defined(RT_OS_WINDOWS) || !defined(VBOXSTRICTRC_STRICT_ENABLED)
         off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
         if (cParams > 0)
    …
         if (cParams > 2)
             off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, pCallEntry->auParams[2]);
    +# else
    +    off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
    +    if (cParams > 0)
    +        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, pCallEntry->auParams[0]);
    +    if (cParams > 1)
    +        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, pCallEntry->auParams[1]);
    +    if (cParams > 2)
    +        off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG4_GREG, pCallEntry->auParams[2]);
    +    off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */
    +# endif
     
         off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)g_apfnIemThreadedFunctions[pCallEntry->enmFunction]);
    +
    +# if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS)
    +    off = iemNativeEmitLoadGprByBpU32(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */
    +# endif
     
     #else
    …
         uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10);
     
    -    /* ldp x19, x20, [sp #IEMNATIVE_FRAME_VAR_SIZE]! ; Unallocate the variable space and restore x19+x20. */
    -    AssertCompile(IEMNATIVE_FRAME_VAR_SIZE < 64*8);
    +    /* ldp x19, x20, [sp #(IEMNATIVE_FRAME_VAR_SIZE+IEMNATIVE_FRAME_ALIGN_SIZE)]! ; Unallocate the variable space and restore x19+x20. */
    +    AssertCompile(IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE < 64*8);
         pu32CodeBuf[off++] = Armv8A64MkInstrStLdPair(true /*fLoad*/, 2 /*64-bit*/, kArm64InstrStLdPairType_PreIndex,
                                                      ARMV8_A64_REG_X19, ARMV8_A64_REG_X20, ARMV8_A64_REG_SP,
    -                                                 IEMNATIVE_FRAME_VAR_SIZE / 8);
    +                                                 (IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE) / 8);
         /* Restore x21 thru x28 + BP and LR (ret address) (SP remains unchanged in the kSigned variant). */
         pu32CodeBuf[off++] = Armv8A64MkInstrStLdPair(true /*fLoad*/, 2 /*64-bit*/, kArm64InstrStLdPairType_Signed,
  • trunk/src/VBox/VMM/include/IEMN8veRecompiler.h (r106622 → r107200)

     
     #include <iprt/assertcompile.h> /* for RT_IN_ASSEMBLER mode */
    +#include <VBox/cdefs.h>         /* for VBOXSTRICTRC_STRICT_ENABLED */
     
     /** @def IEMNATIVE_WITH_TB_DEBUG_INFO
    …
     /** Frame pointer (RBP) relative offset of the fourth incoming shadow argument. */
     #  define IEMNATIVE_FP_OFF_IN_SHADOW_ARG3   (40)
    +/** The offset to VBOXSTRICTRC on the stack. */
    +#  define IEMNATIVE_FP_OFF_VBOXSTRICRC      IEMNATIVE_FP_OFF_IN_SHADOW_ARG0
     # endif
     
     #elif RT_ARCH_ARM64
    -/** No alignment padding needed for arm64. */
    -# define IEMNATIVE_FRAME_ALIGN_SIZE         0
    +/** No alignment padding needed for arm64.
    + * @note HACK ALERT! We abuse this for keeping VBOXSTRICTRC on windows, since
    + *       it isn't allowed to be returned by register. */
    +# define IEMNATIVE_FRAME_ALIGN_SIZE        0
    +# ifdef VBOXSTRICTRC_STRICT_ENABLED
    +#  ifdef RT_OS_WINDOWS
    +#   undef  IEMNATIVE_FRAME_ALIGN_SIZE
    +#   define IEMNATIVE_FRAME_ALIGN_SIZE       16
    +/** The offset to VBOXSTRICTRC on the stack. */
    +#   define IEMNATIVE_FP_OFF_VBOXSTRICRC     (IEMNATIVE_FP_OFF_LAST_PUSH - IEMNATIVE_FRAME_ALIGN_SIZE)
    +#  endif
    +# endif
     /** No stack argument slots, got 8 registers for arguments will suffice. */
     # define IEMNATIVE_FRAME_STACK_ARG_COUNT    0
    …
     /** Number of hidden arguments for CIMPL calls.
      * @note We're sufferning from the usual VBOXSTRICTRC fun on Windows. */
    -#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)
    +#if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64))
     # define IEM_CIMPL_HIDDEN_ARGS 3
     #else
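
The IEMN8veRecompiler.h hunk above repurposes IEMNATIVE_FRAME_ALIGN_SIZE as a 16-byte slot below the callee-saved area and places IEMNATIVE_FP_OFF_VBOXSTRICRC just under IEMNATIVE_FP_OFF_LAST_PUSH, which is why the arm64 prologue and epilogue now add IEMNATIVE_FRAME_ALIGN_SIZE to the variable-area size. A self-contained sketch of that arithmetic, using a hypothetical value for the last-push offset:

    #include <cassert>
    #include <cstdint>

    /* Hypothetical BP-relative offset of the last saved register pair; the real
       IEMNATIVE_FP_OFF_LAST_PUSH value comes from IEMN8veRecompiler.h. */
    static const int32_t g_offLastPush     = -80;
    static const int32_t g_cbFrameAlign    = 16;  /* IEMNATIVE_FRAME_ALIGN_SIZE on win.arm64 strict builds */
    static const int32_t g_offVBoxStrictRc = g_offLastPush - g_cbFrameAlign; /* IEMNATIVE_FP_OFF_VBOXSTRICRC */

    int main()
    {
        /* The slot sits directly below the callee-saved area and, being a multiple
           of 16 bytes, keeps SP 16-byte aligned after the prologue subtracts
           IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE. */
        assert(g_cbFrameAlign % 16 == 0);
        assert(g_offVBoxStrictRc == g_offLastPush - 16);
        return 0;
    }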