Changeset 107200 in vbox for trunk/src/VBox/VMM/VMMAll
Timestamp: Nov 29, 2024 10:15:46 PM (4 months ago)
svn:sync-xref-src-repo-rev: 166204
Location: trunk/src/VBox/VMM/VMMAll
Files: 3 edited
Legend: Unmodified, Added, Removed
trunk/src/VBox/VMM/VMMAll/IEMAllN8veHlpA-arm64.S
r105490 r107200 81 81 add x29, sp, #(IEMNATIVE_FRAME_SAVE_REG_SIZE - 16) 82 82 /* Allocate the variable area from SP. */ 83 sub sp, sp, # IEMNATIVE_FRAME_VAR_SIZE83 sub sp, sp, #(IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE) 84 84 /* Load the fixed register values from parameters. */ 85 85 mov IEMNATIVE_REG_FIXED_PVMCPU_ASM, x0 -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
r106465 r107200 4150 4150 * Load the two or three hidden arguments. 4151 4151 */ 4152 #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)4153 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_ IN_SHADOW_ARG0); /* rcStrict */4152 #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)) 4153 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */ 4154 4154 off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU); 4155 4155 off = iemNativeEmitLoadGpr8Imm(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, cbInstr); … … 4168 4168 */ 4169 4169 off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)pfnCImpl); 4170 #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)4171 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_ IN_SHADOW_ARG0); /* rcStrict (see above) */4170 #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)) 4171 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */ 4172 4172 #endif 4173 4173 fGstShwFlush = iemNativeCImplFlagsToGuestShadowFlushMask(pReNative->fCImpl, fGstShwFlush | RT_BIT_64(kIemNativeGstReg_Pc)); -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r106724 r107200 6713 6713 * Load the parameters. 6714 6714 */ 6715 #if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_ARCH_AMD64)6715 #if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)) 6716 6716 /* Special code the hidden VBOXSTRICTRC pointer. */ 6717 6717 off = iemNativeEmitLoadGprFromGpr( pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU); … … 6720 6720 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, uParam0); 6721 6721 if (cAddParams > 1) 6722 # if IEMNATIVE_CALL_ARG_GREG_COUNT >= 5 6723 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG4_GREG, uParam1); 6724 # else 6722 6725 off = iemNativeEmitStoreImm64ByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG0, uParam1); 6726 # endif 6723 6727 if (cAddParams > 2) 6728 # if IEMNATIVE_CALL_ARG_GREG_COUNT >= 6 6729 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG5_GREG, uParam2); 6730 # else 6724 6731 off = iemNativeEmitStoreImm64ByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG1, uParam2); 6725 off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_IN_SHADOW_ARG0); /* rcStrict */ 6732 # endif 6733 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */ 6726 6734 6727 6735 #else … … 6746 6754 off = iemNativeEmitCallImm(pReNative, off, pfnCImpl); 6747 6755 6748 #if defined(RT_ ARCH_AMD64) && defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS)6749 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_ IN_SHADOW_ARG0); /* rcStrict (see above) */6756 #if defined(RT_OS_WINDOWS) && defined(VBOXSTRICTRC_STRICT_ENABLED) && (defined(RT_ARCH_AMD64) || defined(RT_ARCH_ARM64)) 6757 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */ 6750 6758 #endif 6751 6759 … … 6807 6815 
off = iemNativeEmitStoreGprByBp(pReNative, off, IEMNATIVE_FP_OFF_STACK_ARG0, X86_GREG_x10); 6808 6816 } 6809 off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_ IN_SHADOW_ARG0); /* rcStrict */6817 off = iemNativeEmitLeaGprByBp(pReNative, off, X86_GREG_xCX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */ 6810 6818 # endif /* VBOXSTRICTRC_STRICT_ENABLED */ 6811 6819 # else … … 6822 6830 6823 6831 # if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) 6824 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_ IN_SHADOW_ARG0); /* rcStrict (see above) */6832 off = iemNativeEmitLoadGprByBpU32(pReNative, off, X86_GREG_xAX, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */ 6825 6833 # endif 6826 6834 … … 6829 6837 * ARM64: 6830 6838 */ 6839 # if !defined(RT_OS_WINDOWS) || !defined(VBOXSTRICTRC_STRICT_ENABLED) 6831 6840 off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU); 6832 6841 if (cParams > 0) … … 6836 6845 if (cParams > 2) 6837 6846 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, pCallEntry->auParams[2]); 6847 # else 6848 off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG1_GREG, IEMNATIVE_REG_FIXED_PVMCPU); 6849 if (cParams > 0) 6850 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG2_GREG, pCallEntry->auParams[0]); 6851 if (cParams > 1) 6852 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG3_GREG, pCallEntry->auParams[1]); 6853 if (cParams > 2) 6854 off = iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_ARG4_GREG, pCallEntry->auParams[2]); 6855 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict */ 6856 # endif 6838 6857 6839 6858 off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)g_apfnIemThreadedFunctions[pCallEntry->enmFunction]); 6859 6860 # if defined(VBOXSTRICTRC_STRICT_ENABLED) && 
defined(RT_OS_WINDOWS) 6861 off = iemNativeEmitLoadGprByBpU32(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_FP_OFF_VBOXSTRICRC); /* rcStrict (see above) */ 6862 # endif 6840 6863 6841 6864 #else … … 7042 7065 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 10); 7043 7066 7044 /* ldp x19, x20, [sp # IEMNATIVE_FRAME_VAR_SIZE]! ; Unallocate the variable space and restore x19+x20. */7045 AssertCompile(IEMNATIVE_FRAME_VAR_SIZE < 64*8);7067 /* ldp x19, x20, [sp #(IEMNATIVE_FRAME_VAR_SIZE+IEMNATIVE_FRAME_ALIGN_SIZE)]! ; Unallocate the variable space and restore x19+x20. */ 7068 AssertCompile(IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE < 64*8); 7046 7069 pu32CodeBuf[off++] = Armv8A64MkInstrStLdPair(true /*fLoad*/, 2 /*64-bit*/, kArm64InstrStLdPairType_PreIndex, 7047 7070 ARMV8_A64_REG_X19, ARMV8_A64_REG_X20, ARMV8_A64_REG_SP, 7048 IEMNATIVE_FRAME_VAR_SIZE/ 8);7071 (IEMNATIVE_FRAME_VAR_SIZE + IEMNATIVE_FRAME_ALIGN_SIZE) / 8); 7049 7072 /* Restore x21 thru x28 + BP and LR (ret address) (SP remains unchanged in the kSigned variant). */ 7050 7073 pu32CodeBuf[off++] = Armv8A64MkInstrStLdPair(true /*fLoad*/, 2 /*64-bit*/, kArm64InstrStLdPairType_Signed,
Note: See TracChangeset for help on using the changeset viewer.