VirtualBox

source: vbox/trunk/src/VBox/VMM/VMMAll/IEMAllN8veHlpA.asm@ 104348

Last change on this file since 104348 was 104331, checked in by vboxsync, 10 months ago

VMM/IEM: Added some basic function alignment to IEMAllN8veHlpA.asm. [build fix] bugref:10370

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 9.1 KB
; $Id: IEMAllN8veHlpA.asm 104331 2024-04-12 23:38:33Z vboxsync $
;; @file
; IEM - Native Recompiler Assembly Helpers.
;

;
; Copyright (C) 2023 Oracle and/or its affiliates.
;
; This file is part of VirtualBox base platform packages, as
; available from https://www.virtualbox.org.
;
; This program is free software; you can redistribute it and/or
; modify it under the terms of the GNU General Public License
; as published by the Free Software Foundation, in version 3 of the
; License.
;
; This program is distributed in the hope that it will be useful, but
; WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
; General Public License for more details.
;
; You should have received a copy of the GNU General Public License
; along with this program; if not, see <https://www.gnu.org/licenses>.
;
; SPDX-License-Identifier: GPL-3.0-only
;

;*********************************************************************************************************************************
;*  Header Files                                                                                                                  *
;*********************************************************************************************************************************
%define RT_ASM_WITH_SEH64_ALT   ; yasm gets confused by alignment, so we cannot use RT_ASM_WITH_SEH64. sigh.
%include "VBox/asmdefs.mac"

;; @todo r=aeichner The following is copied from IEMInternal.h
;%define VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP - not enabled right now on amd64

;; @todo r=aeichner The following defines are copied from IEMN8veRecompiler.h

; /** @def IEMNATIVE_WITH_RECOMPILER_PROLOGUE_SINGLETON
;  * Enables having only a single prologue for native TBs. */
%define IEMNATIVE_WITH_RECOMPILER_PROLOGUE_SINGLETON

; /** A stack alignment adjustment (between the non-volatile register pushes and
;  * the stack variable area, so the latter is better aligned). */
%define IEMNATIVE_FRAME_ALIGN_SIZE 8

; /** The size of the area for stack variables and spills and stuff.
;  * @note This limit is duplicated in the python script(s).  We add 0x40 for
;  *       alignment padding. */
%define IEMNATIVE_FRAME_VAR_SIZE (0xc0 + 0x40)

; This needs to correspond to IEMNATIVE_REG_FIXED_PVMCPU in IEMN8veRecompiler.h
%define IEMNATIVE_REG_FIXED_PVMCPU_ASM xBX
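; Using xBX works out nicely because RBX is callee-saved in both the Microsoft
; x64 and the SysV AMD64 calling conventions, so the fixed pVCpu pointer
; survives any helper calls made from the recompiled code.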

; /** Number of stack argument slots for calls made from the frame. */
%ifdef RT_OS_WINDOWS
 %define IEMNATIVE_FRAME_STACK_ARG_COUNT 4
%else
 %define IEMNATIVE_FRAME_STACK_ARG_COUNT 2
%endif
; /** Number of any shadow arguments (spill area) for calls we make. */
%ifdef RT_OS_WINDOWS
 %define IEMNATIVE_FRAME_SHADOW_ARG_COUNT 4
%else
 %define IEMNATIVE_FRAME_SHADOW_ARG_COUNT 0
%endif
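; The 4 shadow slots on Windows correspond to the 32-byte register parameter
; home area the Microsoft x64 ABI requires callers to reserve; the SysV AMD64
; ABI has no such area, hence 0 elsewhere.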


BEGINCODE

extern NAME(iemThreadedFunc_BltIn_LogCpuStateWorker)
extern NAME(iemNativeHlpCheckTlbLookup)


%ifdef IEMNATIVE_WITH_RECOMPILER_PROLOGUE_SINGLETON

;;
; This is the common prologue of a TB, saving all volatile registers
; and creating the stack frame for saving temporary values.
;
; @param    pVCpu       (gcc:rdi, msc:rcx)  The cross-context vCPU structure pointer.
; @param    pfnTbStart  (gcc:rsi, msc:rdx)  The TB instruction start pointer.
;
; @note This must match the epilogue code generated by iemNativeEmitEpilog()!
;
;       Any changes here must also be reflected in the unwind code installed by
;       iemExecMemAllocatorInitAndRegisterUnwindInfoForChunk()!
;
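;       The frame built below is meant to match what a per-TB prologue would
;       otherwise set up: the RBP chain, the callee-saved GPRs (RBX, R12-R15,
;       plus RSI/RDI on Windows), followed by MY_STACK_ALLOC bytes of variable
;       and call-argument space.
;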
ALIGNCODE(64)   ; Make sure the whole prolog is within a single cache line.
BEGINPROC iemNativeTbEntry
        push    rbp
        SEH64_PUSH_xBP
        mov     rbp, rsp
        SEH64_SET_FRAME_xBP 0
        push    rbx
        SEH64_PUSH_GREG rbx
%ifdef ASM_CALL64_MSC
        mov     IEMNATIVE_REG_FIXED_PVMCPU_ASM, rcx    ; ASSUMES IEMNATIVE_REG_FIXED_PVMCPU_ASM is rbx
        push    rsi
        SEH64_PUSH_GREG rsi
        push    rdi
        SEH64_PUSH_GREG rdi
%else
        mov     IEMNATIVE_REG_FIXED_PVMCPU_ASM, rdi    ; ASSUMES IEMNATIVE_REG_FIXED_PVMCPU_ASM is rbx
%endif
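        ; RSI and RDI are callee-saved in the Microsoft x64 calling convention
        ; but volatile in the SysV AMD64 one, which is why they are only saved
        ; (and recorded for unwinding) on Windows.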
        push    r12
        SEH64_PUSH_GREG r12
        push    r13
        SEH64_PUSH_GREG r13
        push    r14
        SEH64_PUSH_GREG r14
        push    r15
        SEH64_PUSH_GREG r15
%ifdef VBOX_WITH_IEM_NATIVE_RECOMPILER_LONGJMP
 %error "Port me - need to store RBP in IEMCPU::pvTbFramePointerR3"
%endif
%define MY_STACK_ALLOC (  IEMNATIVE_FRAME_ALIGN_SIZE \
                        + IEMNATIVE_FRAME_VAR_SIZE \
                        + IEMNATIVE_FRAME_STACK_ARG_COUNT * 8 \
                        + IEMNATIVE_FRAME_SHADOW_ARG_COUNT * 8)
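        ; For reference, with the defines above this works out to:
        ;   Windows:  8 + 0x100 + 4*8 + 4*8 = 0x148 bytes
        ;   SysV:     8 + 0x100 + 2*8 + 0*8 = 0x118 bytes
        ; Either value keeps RSP 16-byte aligned after the register pushes
        ; above (8 pushes on Windows, 6 elsewhere, counting RBP).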
        sub     rsp, MY_STACK_ALLOC
        SEH64_ALLOCATE_STACK MY_STACK_ALLOC

SEH64_END_PROLOGUE

        ; Jump to the TB code.
%ifdef ASM_CALL64_MSC
        jmp     rdx
%else
        jmp     rsi
%endif
ENDPROC iemNativeTbEntry
%endif


;;
; This does the epilogue of a TB, given the RBP for the frame and the eax value to return.
;
; @param    pFrame  (gcc:rdi, msc:rcx)  The frame pointer.
; @param    rc      (gcc:esi, msc:edx)  The return value.
;
; @note This doesn't really work for MSC since xmm6 thru xmm15 are non-volatile
;       and we don't save them in the TB prolog, so we'll potentially return
;       with different values if any function on the calling stack uses them,
;       as they're unlikely to restore them till they return.
;
;       For the GCC calling convention all xmm registers are volatile and the
;       only worry would be someone fiddling the control bits of MXCSR or FCW
;       without restoring them.  This is highly unlikely, unless we're doing
;       it ourselves, I think.
;
ALIGNCODE(16)
BEGINPROC iemNativeTbLongJmp
%ifdef ASM_CALL64_MSC
        mov     rbp, rcx
        mov     eax, edx
%else
        mov     rbp, rdi
        mov     eax, esi
%endif
        SEH64_PUSH_xBP      ; non-sense, but whatever.
SEH64_END_PROLOGUE

        ;
        ; This must exactly match what iemNativeEmitEpilog does.
        ;
%ifdef ASM_CALL64_MSC
        lea     rsp, [rbp - 7 * 8]
%else
        lea     rsp, [rbp - 5 * 8]
%endif
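        ; 7 slots on Windows (rbx, rsi, rdi, r12-r15) and 5 elsewhere (rbx,
        ; r12-r15), matching the non-volatile registers the TB prologue pushes
        ; below RBP.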
        pop     r15
        pop     r14
        pop     r13
        pop     r12
%ifdef ASM_CALL64_MSC
        pop     rdi
        pop     rsi
%endif
        pop     rbx
        leave
        ret
ENDPROC iemNativeTbLongJmp


;;
; This is a wrapper function that saves and restores all volatile registers,
; so that inserting LogCpuState has minimal impact on the rest of the TB code.
;
ALIGNCODE(64)
BEGINPROC iemNativeHlpAsmSafeWrapLogCpuState
        push    xBP
        SEH64_PUSH_xBP
        mov     xBP, xSP
        SEH64_SET_FRAME_xBP 0
SEH64_END_PROLOGUE

        ;
        ; Save all volatile registers.
        ;
        push    xAX
        push    xCX
        push    xDX
%ifdef RT_OS_WINDOWS
        push    xSI
        push    xDI
%endif
        push    r8
        push    r9
        push    r10
        push    r11
        sub     rsp, 8+20h
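        ; The 20h is presumably the register spill/home area callees expect on
        ; Windows, with the extra 8 keeping RSP 16-byte aligned for the call
        ; below (assuming the generated TB code calls us with an aligned stack).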

        ;
        ; Call C function to do the actual work.
        ;
%ifdef RT_OS_WINDOWS
        mov     rcx, rbx                ; IEMNATIVE_REG_FIXED_PVMCPU
        mov     rdx, [rbp + 10h]        ; Just in case we decide to put something there.
        xor     r8, r8
        xor     r9, r9
%else
        mov     rdi, rbx                ; IEMNATIVE_REG_FIXED_PVMCPU
        mov     rsi, [rbp + 10h]        ; Just in case we decide to put something there.
        xor     ecx, ecx
        xor     edx, edx
%endif
        call    NAME(iemThreadedFunc_BltIn_LogCpuStateWorker)

        ;
        ; Restore volatile registers and return to the TB code.
        ;
        add     rsp, 8+20h
        pop     r11
        pop     r10
        pop     r9
        pop     r8
%ifdef RT_OS_WINDOWS
        pop     xDI
        pop     xSI
%endif
        pop     xDX
        pop     xCX
        pop     xAX
        leave
        ret
ENDPROC iemNativeHlpAsmSafeWrapLogCpuState


;;
; This is a wrapper function that saves and restores all volatile registers,
; so that inserting CheckTlbLookup has minimal impact on the rest of the TB code.
;
ALIGNCODE(16)
BEGINPROC iemNativeHlpAsmSafeWrapCheckTlbLookup
        push    xBP
        SEH64_PUSH_xBP
        mov     xBP, xSP
        SEH64_SET_FRAME_xBP 0
SEH64_END_PROLOGUE

        ;
        ; Save all volatile registers.
        ;
        push    xAX
        push    xCX
        push    xDX
%ifdef RT_OS_WINDOWS
        push    xSI
        push    xDI
%endif
        push    r8
        push    r9
        push    r10
        push    r11
        sub     rsp, 8+20h

        ;
        ; Call C function to do the actual work.
        ;
%ifdef RT_OS_WINDOWS
        mov     rcx, [rbp + 10h]
        mov     rdx, [rbp + 18h]
        mov     r8,  [rbp + 20h]
        mov     r9,  [rbp + 28h]
%else
        mov     rdi, [rbp + 10h]
        mov     rsi, [rbp + 18h]
        mov     rdx, [rbp + 20h]
        mov     rcx, [rbp + 28h]
%endif
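        ; The four qwords at [rbp + 10h..28h] are the arguments the generated
        ; TB code pushed immediately before calling this wrapper; the
        ; 'ret 20h' below pops them again on the way back.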
        call    NAME(iemNativeHlpCheckTlbLookup)

        ;
        ; Restore volatile registers and return to the TB code.
        ;
        add     rsp, 8+20h
        pop     r11
        pop     r10
        pop     r9
        pop     r8
%ifdef RT_OS_WINDOWS
        pop     xDI
        pop     xSI
%endif
        pop     xDX
        pop     xCX
        pop     xAX
        leave
        ret     20h
ENDPROC iemNativeHlpAsmSafeWrapCheckTlbLookup
