Changeset 18849 in vbox for trunk/src/VBox
- Timestamp:
- Apr 8, 2009 4:34:39 PM (16 years ago)
- File:
- 1 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMR0/VMMR0A.asm
r14721 r18849 113 113 114 114 ; 115 ; Return like in the long jump. 116 ; (It is vital that we restore all registers since they might've changed 117 ; by a long jump.) 115 ; Return like in the long jump but clear eip, no short cuts here. 118 116 ; 119 117 .proper_return: … … 128 126 129 127 .entry_error: 130 mov eax, VERR_INTERNAL_ERROR 128 mov eax, VERR_INTERNAL_ERROR_2 131 129 jmp .proper_return 132 130 … … 147 145 mov esi, [edx + VMMR0JMPBUF.esi] 148 146 mov ebx, [edx + VMMR0JMPBUF.ebx] 149 mov eax, VERR_INTERNAL_ERROR ; todo better return code!147 mov eax, VERR_INTERNAL_ERROR_3 ; todo better return code! 150 148 ret 151 149 … … 232 230 jnz .resume 233 231 234 mov [rbp - 8], rdx ; Save it and fix stack alignment (16). 232 %ifdef VMM_R0_SWITCH_STACK 233 mov r15, [rdx + VMMR0JMPBUF.pvSavedStack] 234 test r15, r15 235 jz .entry_error 236 %ifdef VBOX_STRICT 237 mov rdi, r15 238 mov rcx, 1024 239 mov rax, 00eeeeeeeffeeeeeeeh 240 repne stosq 241 mov [rdi - 10h], rbx 242 %endif 243 lea r15, [r15 + 8192 - 40h] 244 mov rsp, r15 ; Switch stack! 245 %endif ; VMM_R0_SWITCH_STACK 246 247 mov r12, rdx ; Save pJmpBuf. 235 248 %ifdef ASM_CALL64_MSC 236 249 mov rcx, r8 ; pvUser -> arg0 … … 241 254 %endif 242 255 call r11 243 mov rdx, [rbp - 8] ; pJmpBuf 244 245 ; restore the registers that we're not allowed to modify 246 ; otherwise a resume might restore the wrong values (from the previous run) 256 mov rdx, r12 ; Restore pJmpBuf 257 258 ; 259 ; Return like in the long jump but clear eip, no short cuts here. 260 ; 261 .proper_return: 247 262 mov rbx, [rdx + VMMR0JMPBUF.rbx] 248 263 %ifdef ASM_CALL64_MSC … … 254 269 mov r14, [rdx + VMMR0JMPBUF.r14] 255 270 mov r15, [rdx + VMMR0JMPBUF.r15] 256 271 mov rbp, [rdx + VMMR0JMPBUF.rbp] 272 mov rcx, [rdx + VMMR0JMPBUF.rip] 257 273 and qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check. 
258 leave 259 ret 274 mov rsp, [rdx + VMMR0JMPBUF.rsp] 275 jmp rcx 276 277 .entry_error: 278 mov eax, VERR_INTERNAL_ERROR_2 279 jmp .proper_return 260 280 261 281 ; … … 263 283 ; 264 284 .resume: 285 %ifdef VMM_R0_SWITCH_STACK 286 ; Switch stack. 287 mov rsp, [rdx + VMMR0JMPBUF.SpResume] 288 %else ; !VMM_R0_SWITCH_STACK 265 289 ; Sanity checks. 266 290 cmp r10, [rdx + VMMR0JMPBUF.SpCheck] … … 269 293 and qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check. 270 294 mov rbx, [rdx + VMMR0JMPBUF.rbx] 271 %ifdef ASM_CALL64_MSC295 %ifdef ASM_CALL64_MSC 272 296 mov rsi, [rdx + VMMR0JMPBUF.rsi] 273 297 mov rdi, [rdx + VMMR0JMPBUF.rdi] 274 %endif298 %endif 275 299 mov r12, [rdx + VMMR0JMPBUF.r12] 276 300 mov r13, [rdx + VMMR0JMPBUF.r13] 277 301 mov r14, [rdx + VMMR0JMPBUF.r14] 278 302 mov r15, [rdx + VMMR0JMPBUF.r15] 279 mov eax, VERR_INTERNAL_ERROR ; todo better return code!303 mov eax, VERR_INTERNAL_ERROR_2 280 304 leave 281 305 ret … … 295 319 ; Restore the stack. 296 320 ; 297 mov byte [rdx + VMMR0JMPBUF.fInRing3Call], 0298 321 mov ecx, [rdx + VMMR0JMPBUF.cbSavedStack] 299 322 shr ecx, 3 … … 302 325 mov rsp, rdi 303 326 rep movsq 327 %endif ; !VMM_R0_SWITCH_STACK 328 mov byte [rdx + VMMR0JMPBUF.fInRing3Call], 0 304 329 305 330 ; … … 415 440 pop esi 416 441 pop edi 417 mov eax, VERR_INTERNAL_ERROR 442 mov eax, VERR_INTERNAL_ERROR_4 418 443 leave 419 444 ret … … 455 480 456 481 ; 457 ; Sa ve the stack.482 ; Sanity checks. 458 483 ; 459 484 mov rdi, [rdx + VMMR0JMPBUF.pvSavedStack] … … 461 486 jz .nok 462 487 mov [rdx + VMMR0JMPBUF.SpResume], rsp 488 %ifndef VMM_R0_SWITCH_STACK 463 489 mov rsi, rsp 464 490 mov rcx, [rdx + VMMR0JMPBUF.rsp] … … 467 493 ; two sanity checks on the size. 468 494 cmp rcx, 8192 ; check max size. 
469 jbe .ok 470 .nok: 471 mov eax, VERR_INTERNAL_ERROR 472 popf 473 pop rbx 474 %ifdef ASM_CALL64_MSC 475 pop rsi 476 pop rdi 477 %endif 478 pop r12 479 pop r13 480 pop r14 481 pop r15 482 leave 483 ret 484 485 .ok: 495 jnbe .nok 496 497 ; 498 ; Copy the stack 499 ; 486 500 test ecx, 7 ; check alignment 487 501 jnz .nok … … 489 503 shr ecx, 3 490 504 rep movsq 505 506 %endif ; !VMM_R0_SWITCH_STACK 491 507 492 508 ; store the last pieces of info. … … 511 527 mov rsp, [rdx + VMMR0JMPBUF.rsp] 512 528 jmp rcx 529 530 ; 531 ; Failure 532 ; 533 .nok: 534 mov eax, VERR_INTERNAL_ERROR_4 535 popf 536 pop rbx 537 %ifdef ASM_CALL64_MSC 538 pop rsi 539 pop rdi 540 %endif 541 pop r12 542 pop r13 543 pop r14 544 pop r15 545 leave 546 ret 547 513 548 %endif 514 549 ENDPROC vmmR0CallHostLongJmp
Note: See TracChangeset for help on using the changeset viewer.