Changeset 726 in vbox

Timestamp:                   Feb 6, 2007 7:21:48 PM
svn:sync-xref-src-repo-rev:  18368
Location:                    trunk/src/VBox/VMM
Files:                       2 edited
Legend: unchanged context lines are shown unprefixed; added lines are prefixed with '+', removed lines with '-'.
trunk/src/VBox/VMM/VMMR0/VMMR0A.asm (diff from r671 to r726)

 ;
 ; @returns VINF_SUCCESS on success or whatever is passed to vmmR0CallHostLongJmp.
-; @param   pJmpBuf  Our jmp_buf.
-; @param   pfn      The function to be called when not resuming.
-; @param   pVM      The argument of that function.
+; @param   pJmpBuf  msc:rcx gcc:rdi x86:[esp+04h]  Our jmp_buf.
+; @param   pfn      msc:rdx gcc:rsi x86:[esp+08h]  The function to be called when not resuming.
+; @param   pVM      msc:r8  gcc:rdx x86:[esp+0ch]  The argument of that function.
 ;
 BEGINPROC vmmR0CallHostSetJmp
...
 %ifdef __AMD64__
-    int3                                ; implement me!
+    ;
+    ; Save the registers.
+    ;
+%ifdef ASM_CALL64_MSC
+    xchg    rdx, rcx                    ; rdx=pJmpBuf, rcx=pfn
+%else
+    mov     rcx, rdx                    ; pVM (the third SysV argument arrives in rdx)
+    mov     rdx, rdi                    ; pJmpBuf
+%endif
+    mov     [rdx + VMMR0JMPBUF.rbx], rbx
+%ifdef ASM_CALL64_MSC
+    mov     [rdx + VMMR0JMPBUF.rsi], rsi
+    mov     [rdx + VMMR0JMPBUF.rdi], rdi
+%endif
+    mov     [rdx + VMMR0JMPBUF.rbp], rbp
+    mov     [rdx + VMMR0JMPBUF.r12], r12
+    mov     [rdx + VMMR0JMPBUF.r13], r13
+    mov     [rdx + VMMR0JMPBUF.r14], r14
+    mov     [rdx + VMMR0JMPBUF.r15], r15
+    mov     rax, [rsp]
+    mov     [rdx + VMMR0JMPBUF.rip], rax
+    lea     r10, [rsp + 8]              ; (used in resume)
+    mov     [rdx + VMMR0JMPBUF.rsp], r10
+
+    ;
+    ; If we're not in a ring-3 call, call pfn and return.
+    ;
+    test    byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
+    jnz     .resume
+
+    push    rdx                         ; Save it and fix stack alignment (16).
+%ifdef ASM_CALL64_MSC
+    mov     rax, rcx                    ; pfn
+    mov     rcx, r8                     ; pVM -> arg0
+%else
+    mov     rax, rsi                    ; pfn
+    mov     rdi, rcx                    ; pVM -> arg0
+%endif
+    call    rax
+    pop     rdx                         ; pJmpBuf
+    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
+    ret
+
+    ;
+    ; Resume the VMMR0CallHost call.
+    ;
+.resume:
+    ; Sanity checks.
+    cmp     r10, [rdx + VMMR0JMPBUF.SpCheck]
+    je      .rspCheck_ok
+.bad:
+    and     qword [rdx + VMMR0JMPBUF.rip], byte 0 ; used for valid check.
+    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
+%ifdef ASM_CALL64_MSC
+    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
+    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
+%endif
+    mov     r12, [rdx + VMMR0JMPBUF.r12]
+    mov     r13, [rdx + VMMR0JMPBUF.r13]
+    mov     r14, [rdx + VMMR0JMPBUF.r14]
+    mov     r15, [rdx + VMMR0JMPBUF.r15]
+    mov     eax, VERR_INTERNAL_ERROR    ; todo: better return code!
+    ret
+
+.rspCheck_ok:
+    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
+    cmp     rcx, 8192
+    ja      .bad
+    test    rcx, 3
+    jnz     .bad
+    mov     rdi, [rdx + VMMR0JMPBUF.rsp]
+    sub     rdi, [rdx + VMMR0JMPBUF.SpResume]
+    cmp     rcx, rdi
+    jne     .bad
+
+    ;
+    ; Restore the stack.
+    ;
+    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 0
+    mov     ecx, [rdx + VMMR0JMPBUF.cbSavedStack]
+    shr     ecx, 3
+    mov     rsi, [rdx + VMMR0JMPBUF.pvSavedStack]
+    mov     rdi, [rdx + VMMR0JMPBUF.SpResume]
+    mov     rsp, rdi
+    rep movsq
+
+    ;
+    ; Continue where we left off.
+    ;
+    popf
+    pop     rbx
+%ifdef ASM_CALL64_MSC
+    pop     rsi
+    pop     rdi
+%endif
+    pop     r12
+    pop     r13
+    pop     r14
+    pop     r15
+    pop     rbp
+    xor     eax, eax                    ; VINF_SUCCESS
+    ret
 %endif
 ENDPROC vmmR0CallHostSetJmp
...
 ; This will save the stack and registers.
 ;
-; @param   pJmpBuf
-; @param   rc
+; @param   pJmpBuf  msc:rcx gcc:rdi x86:[ebp+08h]  Pointer to the jump buffer.
+; @param   rc       msc:rdx gcc:rsi x86:[ebp+0ch]  The return code.
 ;
 BEGINPROC vmmR0CallHostLongJmp
...
 %ifdef __AMD64__
-    int3                                ; implement me!
+    ;
+    ; Save the registers on the stack.
+    ;
+    push    rbp
+    mov     rbp, rsp
+    push    r15
+    push    r14
+    push    r13
+    push    r12
+%ifdef ASM_CALL64_MSC
+    push    rdi
+    push    rsi
+%endif
+    push    rbx
+    pushf
+
+    ;
+    ; Normalize the parameters.
+    ;
+%ifdef ASM_CALL64_MSC
+    mov     eax, edx                    ; rc
+    mov     rdx, rcx                    ; pJmpBuf
+%else
+    mov     rdx, rdi                    ; pJmpBuf
+    mov     eax, esi                    ; rc
+%endif
+
+    ;
+    ; Is the jump buffer armed?
+    ;
+    cmp     qword [rdx + VMMR0JMPBUF.rip], byte 0
+    je      .nok
+
+    ;
+    ; Save the stack.
+    ;
+    mov     rdi, [rdx + VMMR0JMPBUF.pvSavedStack]
+    mov     [rdx + VMMR0JMPBUF.SpResume], rsp
+    mov     rsi, rsp
+    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
+    sub     rcx, rsi
+
+    ; two sanity checks on the size.
+    cmp     rcx, 8192                   ; check max size.
+    jbe     .ok
+.nok:
+    mov     eax, VERR_INTERNAL_ERROR
+    popf
+    pop     rbx
+%ifdef ASM_CALL64_MSC
+    pop     rsi
+    pop     rdi
+%endif
+    pop     r12
+    pop     r13
+    pop     r14
+    pop     r15
+    leave
+    ret
+
+.ok:
+    test    ecx, 7                      ; check alignment.
+    jnz     .nok
+    mov     [rdx + VMMR0JMPBUF.cbSavedStack], ecx
+    shr     ecx, 3
+    rep movsq
+
+    ; store the last pieces of info.
+    mov     rcx, [rdx + VMMR0JMPBUF.rsp]
+    mov     [rdx + VMMR0JMPBUF.SpCheck], rcx
+    mov     byte [rdx + VMMR0JMPBUF.fInRing3Call], 1
+
+    ;
+    ; Do the long jump.
+    ;
+    mov     rbx, [rdx + VMMR0JMPBUF.rbx]
+%ifdef ASM_CALL64_MSC
+    mov     rsi, [rdx + VMMR0JMPBUF.rsi]
+    mov     rdi, [rdx + VMMR0JMPBUF.rdi]
+%endif
+    mov     r12, [rdx + VMMR0JMPBUF.r12]
+    mov     r13, [rdx + VMMR0JMPBUF.r13]
+    mov     r14, [rdx + VMMR0JMPBUF.r14]
+    mov     r15, [rdx + VMMR0JMPBUF.r15]
+    mov     rbp, [rdx + VMMR0JMPBUF.rbp]
+    mov     rcx, [rdx + VMMR0JMPBUF.rip]
+    mov     rsp, [rdx + VMMR0JMPBUF.rsp]
+    jmp     rcx
 %endif
 ENDPROC vmmR0CallHostLongJmp
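For orientation, the VMMR0JMPBUF fields the new code reads and writes can be collected into a C-level sketch. This is inferred purely from the assembly above rather than taken from VBox's internal VMM headers, so the field order, exact types, and padding are assumptions; only the field names and the 8 KB saved-stack limit come from the diff itself.

    /* Hedged reconstruction of VMMR0JMPBUF, inferred from the fields the
     * assembly above references. The real definition lives in VBox's
     * internal VMM headers; order, types and padding here are guesses. */
    #include <stdint.h>

    typedef struct VMMR0JMPBUF
    {
        /* Callee-saved register state captured by vmmR0CallHostSetJmp. */
        uint64_t    rbx;
        uint64_t    rsi;            /* saved/restored for the MSC ABI only */
        uint64_t    rdi;            /* likewise MSC only                   */
        uint64_t    rbp;
        uint64_t    r12, r13, r14, r15;
        uint64_t    rip;            /* return address; 0 = buffer disarmed */
        uint64_t    rsp;            /* caller's stack pointer when armed   */
        /* State written by vmmR0CallHostLongJmp for the later resume. */
        uint64_t    SpCheck;        /* rsp expected on resume (sanity)     */
        uint64_t    SpResume;       /* rsp at the point of the long jump   */
        uint32_t    cbSavedStack;   /* bytes copied out, at most 8192      */
        uint8_t     fInRing3Call;   /* set while the ring-3 call pends     */
        uint8_t     abPadding[3];
        void       *pvSavedStack;   /* buffer receiving the stack copy     */
    } VMMR0JMPBUF;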
trunk/src/VBox/VMM/testcase/tstVMMR0CallHost-1.cpp (diff from r161 to r726)

 DECLCALLBACK(int) tst2(intptr_t i)
 {
+    if (i < 0 || i > 8192)
+    {
+        RTPrintf("tstVMMR0CallHost-1: FAILURE - i=%d is out of range [0..8192]\n", i);
+        return 1;
+    }
     int iExpect = (i % 7) == 0 ? i + 10000 : i;
     int rc = foo(i, 0, -1);
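The round trip the testcase drives (arm the buffer, run a callback, long-jump back with a status code) can be mimicked with the standard C setjmp/longjmp. Below is a minimal sketch; demoSetJmp, demoLongJmp and demoWorker are hypothetical stand-ins, and plain longjmp cannot reproduce the stack copy that lets the real vmmR0CallHostSetJmp later resume the interrupted function.

    // Minimal sketch of the setjmp/longjmp round trip that
    // tstVMMR0CallHost-1 exercises, using the standard library in place of
    // the hand-rolled VMM versions. demoSetJmp, demoLongJmp and demoWorker
    // are hypothetical names; unlike the real code, plain longjmp cannot
    // save the stack and later resume the interrupted worker.
    #include <csetjmp>
    #include <cstdio>

    static std::jmp_buf g_JmpBuf;           // plays the role of VMMR0JMPBUF

    static void demoLongJmp(int rc)         // analogue of vmmR0CallHostLongJmp
    {
        std::longjmp(g_JmpBuf, rc);         // jump back to the arming point
    }

    static int demoWorker(int iArg)         // the 'pfn' callback
    {
        std::printf("worker(%d) calling out\n", iArg);
        demoLongJmp(42);                    // "call the host" and unwind
        return 0;                           // not reached in this sketch
    }

    static int demoSetJmp(int (*pfn)(int), int iArg) // analogue of vmmR0CallHostSetJmp
    {
        int rc = setjmp(g_JmpBuf);          // arm the buffer; returns 0 first
        if (rc == 0)
            return pfn(iArg);               // first pass: run the function
        return rc;                          // a longjmp landed here with rc
    }

    int main()
    {
        std::printf("rc=%d\n", demoSetJmp(demoWorker, 1)); // prints rc=42
        return 0;
    }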