- Timestamp: Jun 26, 2009 6:20:27 PM
- Location: trunk
- Files: 5 edited
trunk/include/VBox/vm.h (r20838 → r20992)

         struct VMMCPU       s;
 #endif
-        char                padding[256];      /* multiple of 64 */
+        char                padding[384];      /* multiple of 64 */
     } vmm;
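The padding bump follows from the changes below: on Windows the ring-0 jump buffer gains ten 16-byte XMM slots (160 bytes), which pushes the private VMMCPU state past the old 256-byte reservation, so the opaque space in the public structure grows to the next adequate multiple of 64 (a cache line). A minimal C11 sketch of the padded-union pattern, using hypothetical stand-in names rather than VirtualBox's actual definitions:

    #include <assert.h>
    #include <stdint.h>

    /* Hypothetical stand-in for the private per-CPU VMM state (VMMCPU). */
    typedef struct VMMCPUSKETCH
    {
        uint64_t u64CallRing3Arg;
        uint8_t  abJmpBuf[272];         /* grew past the old 256-byte budget */
    } VMMCPUSKETCH;

    /* The public structure reserves opaque, fixed-size space for it, so
     * external code never depends on the private layout. */
    typedef struct VMCPUSKETCH
    {
        union
        {
            VMMCPUSKETCH s;
            char         padding[384];  /* multiple of 64 */
        } vmm;
    } VMCPUSKETCH;

    /* The compile-time invariants the padding bump preserves. */
    static_assert(sizeof(VMMCPUSKETCH) <= 384, "padding too small");
    static_assert(384 % 64 == 0, "padding not a multiple of 64");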
trunk/src/VBox/VMM/VMMInternal.h (r20875 → r20992)

         uint64_t            rsp;
         uint64_t            rip;
+# ifdef RT_OS_WINDOWS
+        uint128_t           xmm6;
+        uint128_t           xmm7;
+        uint128_t           xmm8;
+        uint128_t           xmm9;
+        uint128_t           xmm10;
+        uint128_t           xmm11;
+        uint128_t           xmm12;
+        uint128_t           xmm13;
+        uint128_t           xmm14;
+        uint128_t           xmm15;
+# endif
 #endif
     /** @} */
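The new fields are Windows-only because the Microsoft x64 calling convention marks xmm6..xmm15 as nonvolatile (callee-saved), whereas the System V AMD64 ABI used on the other host platforms treats every XMM register as volatile, so a setjmp-style buffer has nothing to preserve there. Each uint128_t (IPRT's 128-bit type) holds one register and must sit on a 16-byte boundary so the assembly can use movdqa. A hedged sketch of the idea, with hypothetical names and _WIN64 standing in for RT_OS_WINDOWS:

    #include <stdalign.h>
    #include <stdint.h>

    /* Hypothetical 128-bit slot standing in for IPRT's uint128_t: one XMM
     * register's worth of bytes, pinned to 16-byte alignment for movdqa. */
    typedef struct XMMSLOT { alignas(16) uint64_t au64[2]; } XMMSLOT;

    /* Sketch of the tail the jump buffer grows in this changeset. */
    typedef struct JMPBUFTAIL
    {
        uint64_t rsp;
        uint64_t rip;
    #ifdef _WIN64
        XMMSLOT  axmm[10];              /* xmm6..xmm15, 160 bytes total */
    #endif
    } JMPBUFTAIL;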
trunk/src/VBox/VMM/VMMInternal.mac (r20545 → r20992)

     .rsp                    resq 1
     .rip                    resq 1
+%ifdef RT_OS_WINDOWS
+    .xmm6                   resq 2
+    .xmm7                   resq 2
+    .xmm8                   resq 2
+    .xmm9                   resq 2
+    .xmm10                  resq 2
+    .xmm11                  resq 2
+    .xmm12                  resq 2
+    .xmm13                  resq 2
+    .xmm14                  resq 2
+    .xmm15                  resq 2
+%endif

     ; additional state and stack info.
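The assembler include hand-mirrors the C structure, so each uint128_t becomes resq 2 (two quadwords, 16 bytes) and the field order must match VMMInternal.h exactly; drift between the two is what the tstVMStructSize changes below guard against. A compile-time sketch of that kind of pinning, using hypothetical names and offsets rather than the real VMMR0JMPBUF layout:

    #include <stddef.h>
    #include <stdint.h>

    /* Hypothetical fragment of the mirrored structure. */
    typedef struct JMPBUFSKETCH
    {
        uint64_t rsp;               /* .rsp  resq 1 */
        uint64_t rip;               /* .rip  resq 1 */
        uint64_t xmm6[2];           /* .xmm6 resq 2  (likewise xmm7..xmm15) */
    } JMPBUFSKETCH;

    /* Breaks the build if a C offset drifts from what the .mac assumes. */
    #define ASSERT_OFF(member, off) \
        typedef char assert_off_##member[(offsetof(JMPBUFSKETCH, member) == (off)) ? 1 : -1]

    ASSERT_OFF(rsp,  0);
    ASSERT_OFF(rip,  8);
    ASSERT_OFF(xmm6, 16);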
trunk/src/VBox/VMM/VMMR0/VMMR0JmpA-amd64.asm (r20875 → r20992)

         lea     r10, [rbp + 10h]            ; (used in resume)
         mov     [xDX + VMMR0JMPBUF.rsp], r10
+%ifdef RT_OS_WINDOWS
+        movdqa  [xDX + VMMR0JMPBUF.xmm6], xmm6
+        movdqa  [xDX + VMMR0JMPBUF.xmm7], xmm7
+        movdqa  [xDX + VMMR0JMPBUF.xmm8], xmm8
+        movdqa  [xDX + VMMR0JMPBUF.xmm9], xmm9
+        movdqa  [xDX + VMMR0JMPBUF.xmm10], xmm10
+        movdqa  [xDX + VMMR0JMPBUF.xmm11], xmm11
+        movdqa  [xDX + VMMR0JMPBUF.xmm12], xmm12
+        movdqa  [xDX + VMMR0JMPBUF.xmm13], xmm13
+        movdqa  [xDX + VMMR0JMPBUF.xmm14], xmm14
+        movdqa  [xDX + VMMR0JMPBUF.xmm15], xmm15
+%endif
…
         ;
 .proper_return:
+%ifdef RT_OS_WINDOWS
+        movdqa  xmm6, [xDX + VMMR0JMPBUF.xmm6 ]
+        movdqa  xmm7, [xDX + VMMR0JMPBUF.xmm7 ]
+        movdqa  xmm8, [xDX + VMMR0JMPBUF.xmm8 ]
+        movdqa  xmm9, [xDX + VMMR0JMPBUF.xmm9 ]
+        movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
+        movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
+        movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
+        movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
+        movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
+        movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
+%endif
         mov     rbx, [xDX + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
         mov     rsi, [xDX + VMMR0JMPBUF.rsi]
         mov     rdi, [xDX + VMMR0JMPBUF.rdi]
 %endif
         mov     r12, [xDX + VMMR0JMPBUF.r12]
         mov     r13, [xDX + VMMR0JMPBUF.r13]
…
         ;
         ; Aborting resume.
+        ; Note! No need to restore XMM registers here since we haven't touched them yet.
         ;
 .bad:
…
 .magic_ok:
 %endif
+%ifdef RT_OS_WINDOWS
+        movdqa  xmm6,  [rsp + 000h]
+        movdqa  xmm7,  [rsp + 010h]
+        movdqa  xmm8,  [rsp + 020h]
+        movdqa  xmm9,  [rsp + 030h]
+        movdqa  xmm10, [rsp + 040h]
+        movdqa  xmm11, [rsp + 050h]
+        movdqa  xmm12, [rsp + 060h]
+        movdqa  xmm13, [rsp + 070h]
+        movdqa  xmm14, [rsp + 080h]
+        movdqa  xmm15, [rsp + 090h]
+        add     rsp, 0a0h
+%endif
         popf
         pop     rbx
 %ifdef ASM_CALL64_MSC
         pop     rsi
         pop     rdi
 %endif
         pop     r12
         pop     r13
…
         push    r13
         push    r12
 %ifdef ASM_CALL64_MSC
         push    rdi
         push    rsi
 %endif
         push    rbx
         pushf
+%ifdef RT_OS_WINDOWS
+        sub     rsp, 0a0h
+        movdqa  [rsp + 000h], xmm6
+        movdqa  [rsp + 010h], xmm7
+        movdqa  [rsp + 020h], xmm8
+        movdqa  [rsp + 030h], xmm9
+        movdqa  [rsp + 040h], xmm10
+        movdqa  [rsp + 050h], xmm11
+        movdqa  [rsp + 060h], xmm12
+        movdqa  [rsp + 070h], xmm13
+        movdqa  [rsp + 080h], xmm14
+        movdqa  [rsp + 090h], xmm15
+%endif
 %ifdef VBOX_STRICT
         push    RESUME_MAGIC
…
         ;
         ; Normalize the parameters.
         ;
 %ifdef ASM_CALL64_MSC
         mov     eax, edx                    ; rc
         mov     rdx, rcx                    ; pJmpBuf
 %else
         mov     rdx, rdi                    ; pJmpBuf
         mov     eax, esi                    ; rc
 %endif
…
         ;
         ; Do the long jump.
         ;
+%ifdef RT_OS_WINDOWS
+        movdqa  xmm6, [xDX + VMMR0JMPBUF.xmm6 ]
+        movdqa  xmm7, [xDX + VMMR0JMPBUF.xmm7 ]
+        movdqa  xmm8, [xDX + VMMR0JMPBUF.xmm8 ]
+        movdqa  xmm9, [xDX + VMMR0JMPBUF.xmm9 ]
+        movdqa  xmm10, [xDX + VMMR0JMPBUF.xmm10]
+        movdqa  xmm11, [xDX + VMMR0JMPBUF.xmm11]
+        movdqa  xmm12, [xDX + VMMR0JMPBUF.xmm12]
+        movdqa  xmm13, [xDX + VMMR0JMPBUF.xmm13]
+        movdqa  xmm14, [xDX + VMMR0JMPBUF.xmm14]
+        movdqa  xmm15, [xDX + VMMR0JMPBUF.xmm15]
+%endif
         mov     rbx, [xDX + VMMR0JMPBUF.rbx]
 %ifdef ASM_CALL64_MSC
         mov     rsi, [xDX + VMMR0JMPBUF.rsi]
         mov     rdi, [xDX + VMMR0JMPBUF.rdi]
 %endif
         mov     r12, [xDX + VMMR0JMPBUF.r12]
         mov     r13, [xDX + VMMR0JMPBUF.r13]
…
         mov     rcx, [xDX + VMMR0JMPBUF.rip]
         mov     rsp, [xDX + VMMR0JMPBUF.rsp]
+        ;; @todo flags????
         jmp     rcx
…
 %endif
         mov     eax, VERR_INTERNAL_ERROR_4
+%ifdef RT_OS_WINDOWS
+        add     rsp, 0a0h                   ; skip XMM registers since they are unmodified.
+%endif
         popf
         pop     rbx
 %ifdef ASM_CALL64_MSC
         pop     rsi
         pop     rdi
 %endif
         pop     r12
         pop     r13
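Behaviourally this file implements a ring-0 setjmp/longjmp pair: the set-jump side records the nonvolatile register state, now including xmm6..xmm15 on Windows both in the jump buffer and in the stack frame it builds, and the long-jump side restores that state before resuming at the saved rip. A minimal user-mode C analogue of the control flow, using the standard library rather than the VMM entry points:

    #include <setjmp.h>
    #include <stdio.h>

    /* MSVC's x64 jmp_buf likewise stores xmm6..xmm15, which is exactly
     * the state this changeset adds to VMMR0JMPBUF. */
    static jmp_buf g_JmpBuf;

    static void worker(void)
    {
        puts("worker: need ring-3 service, long-jumping back");
        longjmp(g_JmpBuf, 42);      /* ~ the long-jump side of this file */
    }

    int main(void)
    {
        int rc = setjmp(g_JmpBuf);  /* ~ the set-jump side of this file */
        if (rc == 0)
            worker();               /* runs with the buffer armed */
        else
            printf("resumed with rc=%d\n", rc);
        return 0;
    }

Without the XMM slots, a Windows build could land back at the set-jump point with xmm6..xmm15 holding whatever the aborted call path left behind, corrupting values the compiler was entitled to assume preserved.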
trunk/src/VBox/VMM/testcase/tstVMStructSize.cpp (r20875 → r20992)

     CHECK_MEMBER_ALIGNMENT(VMCPU, vmm.s.u64CallRing3Arg, 8);
-    CHECK_MEMBER_ALIGNMENT(VMCPU, vmm.s.CallRing3JmpBufR0, 8);
+    CHECK_MEMBER_ALIGNMENT(VMCPU, vmm.s.CallRing3JmpBufR0, 16);
+#ifdef RT_OS_WINDOWS
+    CHECK_MEMBER_ALIGNMENT(VMCPU, vmm.s.CallRing3JmpBufR0.xmm6, 16);
+#endif
     CHECK_MEMBER_ALIGNMENT(VM, vmm.s.u64LastYield, 8);
     CHECK_MEMBER_ALIGNMENT(VM, vmm.s.StatRunRC, 8);
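movdqa faults on operands that are not 16-byte aligned, so once the jump buffer carries XMM slots, both the buffer itself and its first XMM member must be checked for 16-byte alignment, which is what the tightened testcase does. A sketch of how such a check can work; this hypothetical expansion only mimics the real CHECK_MEMBER_ALIGNMENT in VirtualBox's test harness:

    #include <stddef.h>
    #include <stdio.h>

    /* Hypothetical expansion: report any member whose offset breaks the
     * required alignment (the real macro also counts the errors). */
    #define CHECK_MEMBER_ALIGNMENT(type, member, align) \
        do { \
            if (offsetof(type, member) % (align) != 0) \
                printf("misaligned: %s::%s at offset %zu, want %u\n", \
                       #type, #member, offsetof(type, member), (unsigned)(align)); \
        } while (0)

    struct Demo { char c; long long ll; };

    int main(void)
    {
        CHECK_MEMBER_ALIGNMENT(struct Demo, ll, 8);   /* offset 8: silent  */
        CHECK_MEMBER_ALIGNMENT(struct Demo, ll, 16);  /* offset 8: reports */
        return 0;
    }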