- Timestamp: Jun 14, 2013 9:42:21 AM
- Location: trunk
- Files: 24 edited
Legend:
- lines prefixed "-" were removed
- lines prefixed "+" were added
- other lines are unmodified context
trunk/include/iprt/asmdefs.mac
(r44528 -> r46548)
-;; @def xS
+;; @def xCB
 ; The stack unit size / The register unit size.
...
 %ifdef RT_ARCH_AMD64
- %define xS      8
+ %define xCB     8
  %define xSP     rsp
  %define xBP     rbp
...
  %define xWrtRIP wrt rip
 %else
- %define xS      4
+ %define xCB     4
  %define xSP     esp
  %define xBP     ebp
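For context, a minimal sketch (not part of this changeset) of what the renamed macro is for: xCB is the size of one stack/register unit, so the same source assembles to correct offsets for both RT_ARCH_AMD64 (xCB = 8) and RT_ARCH_X86 (xCB = 4). The routine name below is hypothetical.

    %include "iprt/asmdefs.mac"

    BEGINCODE
    BEGINPROC SketchUseTwoSlots         ; hypothetical routine, illustration only
            push    xBP
            mov     xBP, xSP
            sub     xSP, xCB*2          ; reserve two register-sized slots (16 or 8 bytes)
            mov     [xSP + xCB*0], xAX  ; slot 0
            mov     [xSP + xCB*1], xDX  ; slot 1
            mov     xAX, [xSP + xCB*1]  ; read them back swapped
            mov     xDX, [xSP + xCB*0]
            add     xSP, xCB*2          ; release the slots
            pop     xBP
            ret
    ENDPROC SketchUseTwoSlots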
trunk/src/VBox/Disassembler/testcase/tstAsm.mac
(r44529 -> r46548)
 %if TEST_BITS == 64
- %define xS  8
+ %define xCB 8
  %define xSP rsp
  %define xBP rbp
...
 %endif
 %if TEST_BITS == 32
- %define xS  4
+ %define xCB 4
  %define xSP esp
  %define xBP ebp
...
 %endif
 %if TEST_BITS == 16
- %define xS  1
+ %define xCB 2
  %define xSP sp
  %define xBP bp
...
  %define xSI si
 %endif
-%ifndef xS
+%ifndef xCB
 %error "TEST_BITS is missing or wrong."
 %endif

Note that the 16-bit case changes value as well as name: the stack unit there is 2 bytes, so xCB becomes 2 where xS had been 1.
trunk/src/VBox/HostDrivers/Support/SUPLibTracerA.asm
(r41147 -> r46548)
Every stack offset built from xS now uses xCB. The 64-bit frame capture:

 pop xAX
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.rflags], xAX
-mov xAX, [xBP + xS]
+mov xAX, [xBP + xCB]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.rip], xAX
 mov xAX, [xBP]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.rbp], xAX
-lea xAX, [xBP + xS*2]
+lea xAX, [xBP + xCB*2]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.rsp], xAX

In both the ASM_CALL64_MSC and GCC branches, the aArgs stores (xS*0 through xS*9) and the stack-argument loads ([xBP + xS*2 + 0x20 + xS*n] for MSC, [xBP + xS*2 + xS*n] for GCC) become the corresponding xCB expressions, for example:

-mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.aArgs + xS*0], rdx
+mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX64.u.Amd64.aArgs + xCB*0], rdx
-mov xAX, [xBP + xS*2 + 0x20 + xS*0]
+mov xAX, [xBP + xCB*2 + 0x20 + xCB*0]
 mov eax, [xCX + 4]          ; VTGPROBELOC::idProbe.

The 32-bit path changes the same way:

 pop xAX
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.eflags], xAX
-mov xAX, [xBP + xS]
+mov xAX, [xBP + xCB]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.eip], xAX
-lea xAX, [xBP + xS*2]
+lea xAX, [xBP + xCB*2]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.esp], xAX

-mov xCX, [xBP + xS*2 + xS*0]
+mov xCX, [xBP + xCB*2 + xCB*0]
 mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.uVtgProbeLoc], xCX ; keep, used below.
...
 .more:
 dec edx
-mov xAX, [xBP + xS*2 + xS*xDX]
-mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.aArgs + xS*xDX], xAX
+mov xAX, [xBP + xCB*2 + xCB*xDX]
+mov [xSP + SUPTRACERUMODFIREPROBE.In + SUPDRVTRACERUSRCTX32.u.X86.aArgs + xCB*xDX], xAX
 jnz .more
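The argument capture above leans on [xBP + xCB*2] being the first stack slot past the saved xBP and the return address (with an extra 0x20 of shadow space in the ASM_CALL64_MSC case). A hedged sketch of that layout, with a hypothetical helper name and without the SUPDRV structures:

    %include "iprt/asmdefs.mac"

    BEGINCODE
    BEGINPROC SketchReadStackArgs       ; hypothetical, illustration only
            push    xBP
            mov     xBP, xSP
            ; [xBP]         = saved xBP
            ; [xBP + xCB]   = return address
            ; [xBP + xCB*2] = first slot the caller placed on the stack
            mov     xAX, [xBP + xCB*2 + xCB*0] ; caller's stack slot 0
            mov     xDX, [xBP + xCB*2 + xCB*1] ; caller's stack slot 1
            pop     xBP
            ret
    ENDPROC SketchReadStackArgs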
trunk/src/VBox/Runtime/common/math/ceill.asm
(r44528 -> r46548)
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]

 ; Make it round up by modifying the fpu control word.
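ceill.asm and the other long double routines below all fetch their argument the same way; a hedged sketch of just that load (hypothetical name, argument assumed on the stack as in these files):

    %include "iprt/asmdefs.mac"

    BEGINCODE
    ;;
    ; Returns the long double argument unchanged in st(0).
    ; @param    lrd     [xBP + xCB*2]
    BEGINPROC SketchReturnLrd           ; hypothetical, illustration only
            push    xBP
            mov     xBP, xSP
            fld     tword [xBP + xCB*2] ; same slot on 32-bit and 64-bit builds
            leave
            ret
    ENDPROC SketchReturnLrd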
trunk/src/VBox/Runtime/common/math/cosl.asm
(r44528 -> r46548)
 ; compute the cosine of ldr, measured in radians.
 ; @returns st(0)
-; @param lrd [rbp + xS*2]
+; @param lrd [rbp + xCB*2]
 BEGINPROC RT_NOCRT(cosl)
 push xBP
...
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fcos
 fnstsw ax
trunk/src/VBox/Runtime/common/math/fabs.asm
(r44528 -> r46548)
 %else
-fld qword [xBP + xS*2]
+fld qword [xBP + xCB*2]
 fabs
 %endif
trunk/src/VBox/Runtime/common/math/fabsf.asm
(r44528 -> r46548)
 %else
-fld dword [xBP + xS*2]
+fld dword [xBP + xCB*2]
 fabs
 %endif
trunk/src/VBox/Runtime/common/math/fabsl.asm
(r44528 -> r46548)
 ; Compute the absolute value of lrd (|lrd|).
 ; @returns st(0)
-; @param lrd [xSP + xS*2]
+; @param lrd [xSP + xCB*2]
 BEGINPROC RT_NOCRT(fabsl)
 push xBP
 mov xBP, xSP

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fabs
trunk/src/VBox/Runtime/common/math/floor.asm
(r44528 -> r46548)
 fld qword [xSP]
 %else
-fld qword [xBP + xS*2]
+fld qword [xBP + xCB*2]
 %endif
trunk/src/VBox/Runtime/common/math/floorf.asm
(r44528 -> r46548)
 fld dword [xSP]
 %else
-fld dword [xBP + xS*2]
+fld dword [xBP + xCB*2]
 %endif
trunk/src/VBox/Runtime/common/math/floorl.asm
(r44528 -> r46548)
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]

 ; Make it round down by modifying the fpu control word.
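ceill.asm, floorl.asm and truncl.asm all implement the "modify the fpu control word" comment seen above; a hedged sketch of that rounding-control pattern (round toward minus infinity here, hypothetical name, not the exact VirtualBox code):

    %include "iprt/asmdefs.mac"

    BEGINCODE
    BEGINPROC SketchFloorl              ; hypothetical, illustration only
            push    xBP
            mov     xBP, xSP
            sub     xSP, 10h
            fld     tword [xBP + xCB*2]
            fnstcw  [xBP - 10h]         ; save the current control word
            mov     eax, [xBP - 10h]
            or      eax, 00400h         ; RC = 01b: round down
            and     eax, 0f7ffh
            mov     [xBP - 08h], eax
            fldcw   [xBP - 08h]         ; switch rounding mode
            frndint                     ; round st(0) with the new mode
            fldcw   [xBP - 10h]         ; restore the original control word
            leave
            ret
    ENDPROC SketchFloorl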
trunk/src/VBox/Runtime/common/math/ldexpl.asm
(r44528 -> r46548)
 ; Computes lrd * 2^exp
 ; @returns st(0)
-; @param lrd [rbp + xS*2]
+; @param lrd [rbp + xCB*2]
 ; @param exp [ebp + 14h] GCC:edi MSC:ecx
 BEGINPROC RT_NOCRT(ldexpl)
...
 fild dword [rsp]
 %else
-fild dword [ebp + xS*2 + RTLRD_CB]
+fild dword [ebp + xCB*2 + RTLRD_CB]
 %endif
-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fscale
 fstp st1
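The fild/fld/fscale/fstp sequence in ldexpl.asm is the standard x87 way to compute lrd * 2^exp; a hedged sketch with a simplified, hypothetical argument layout (both arguments on the stack, unlike the real exp parameter, which arrives in edi/ecx):

    %include "iprt/asmdefs.mac"

    BEGINCODE
    ;;
    ; Computes value * 2^exp.
    ; @param    value   qword [xBP + xCB*2]        (hypothetical layout)
    ; @param    exp     dword [xBP + xCB*2 + 8]    (hypothetical layout)
    BEGINPROC SketchScaleByPow2         ; hypothetical, illustration only
            push    xBP
            mov     xBP, xSP
            fild    dword [xBP + xCB*2 + 8] ; st0 = exp
            fld     qword [xBP + xCB*2]     ; st0 = value, st1 = exp
            fscale                          ; st0 = value * 2^exp, st1 = exp
            fstp    st1                     ; drop the exponent, keep the result
            leave
            ret
    ENDPROC SketchScaleByPow2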
trunk/src/VBox/Runtime/common/math/llrintl.asm
(r44528 -> r46548)
 ; Round rd to the nearest integer value, rounding according to the current rounding direction.
 ; @returns 32-bit: edx:eax 64-bit: rax
-; @param lrd [rbp + xS*2]
+; @param lrd [rbp + xCB*2]
 BEGINPROC RT_NOCRT(llrintl)
 push xBP
...
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fistp qword [xSP]
 fwait
trunk/src/VBox/Runtime/common/math/logl.asm
(r44528 -> r46548)
 ; compute the natural logarithm of lrd
 ; @returns st(0)
-; @param lrd [rbp + xS*2]
+; @param lrd [rbp + xCB*2]
 BEGINPROC RT_NOCRT(logl)
 push xBP
...
 fldln2                      ; st0=log(2)
-fld tword [xBP + xS*2]      ; st1=log(2) st0=lrd
+fld tword [xBP + xCB*2]     ; st1=log(2) st0=lrd
 fld st0                     ; st1=log(2) st0=lrd st0=lrd
 fsub qword [.one xWrtRIP]   ; st2=log(2) st1=lrd st0=lrd-1.0
trunk/src/VBox/Runtime/common/math/lrintl.asm
(r44528 -> r46548)
 ; Round rd to the nearest integer value, rounding according to the current rounding direction.
 ; @returns 32-bit: eax 64-bit: rax
-; @param lrd [rbp + xS*2]
+; @param lrd [rbp + xCB*2]
 BEGINPROC RT_NOCRT(lrintl)
 push xBP
...
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 %ifdef RT_ARCH_AMD64
 fistp qword [xSP]
trunk/src/VBox/Runtime/common/math/sinl.asm
(r44528 -> r46548)
 ; Compute the sine of lrd
 ; @returns st(0)
-; @param lrd [xSP + xS*2]
+; @param lrd [xSP + xCB*2]
 BEGINPROC RT_NOCRT(sinl)
 push xBP
...
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fsin
 fnstsw ax
trunk/src/VBox/Runtime/common/math/tanl.asm
(r44528 -> r46548)
 ; Compute the sine of lrd
 ; @returns st(0)
-; @param lrd [xSP + xS*2]
+; @param lrd [xSP + xCB*2]
 BEGINPROC RT_NOCRT(tanl)
 push xBP
...
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]
 fptan
 fnstsw ax
trunk/src/VBox/Runtime/common/math/trunc.asm
(r44528 -> r46548)
 fld qword [xSP]
 %else
-fld qword [xBP + xS*2]
+fld qword [xBP + xCB*2]
 %endif
trunk/src/VBox/Runtime/common/math/truncf.asm
(r44528 -> r46548)
 fld dword [xSP]
 %else
-fld dword [xBP + xS*2]
+fld dword [xBP + xCB*2]
 %endif
trunk/src/VBox/Runtime/common/math/truncl.asm
(r44528 -> r46548)
 sub xSP, 10h

-fld tword [xBP + xS*2]
+fld tword [xBP + xCB*2]

 ; Make it truncate up by modifying the fpu control word.
trunk/src/VBox/VMM/VMMR0/HMR0A.asm
(r46312 -> r46548)
 %undef ASM_CALL64_MSC
 %define ASM_CALL64_GCC
-%define xS  8
+%define xCB 8
 %define xSP rsp
 %define xBP rbp
trunk/src/VBox/VMM/VMMR0/HMR0Mixed.mac
(r46267 -> r46548)
Every stack adjustment and stack-relative access built from xS now uses xCB. Representative hunks:

 ; VT-x only saves the base of the GDTR & IDTR and resets the limit to 0xffff; we must restore the limit correctly!
 %ifdef VMX_SKIP_GDTR_IDTR
-sub xSP, xS*2
+sub xSP, xCB*2
 sgdt [xSP]

-sub xSP, xS*2
+sub xSP, xCB*2
 sidt [xSP]
 %endif
...
 %ifdef VMX_SKIP_GDTR_IDTR
 lidt [xSP]
-add xSP, xS*2
+add xSP, xCB*2
 lgdt [xSP]
-add xSP, xS*2
+add xSP, xCB*2
 %endif

 push xDI
-mov xDI, [xSP + xS * 3]     ; pCtx (*3 to skip the saved LDTR + TR).
+mov xDI, [xSP + xCB * 3]    ; pCtx (*3 to skip the saved LDTR + TR).
...
 ; @todo get rid of sgdt
 pop xBX                     ; Saved TR
-sub xSP, xS*2
+sub xSP, xCB*2
 sgdt [xSP]
 mov xAX, xBX
...
 and dword [ss:xAX + 4], ~0200h ; Clear busy flag (2nd type2 bit).
 ltr bx
-add xSP, xS*2
+add xSP, xCB*2

 pop xAX                     ; Saved LDTR
 lldt ax

-add xSP, xS                 ; pCtx
+add xSP, xCB                ; pCtx
...
 %ifdef VMX_USE_CACHED_VMCS_ACCESSES
-add xSP, xS*2               ; pCtx + pCache
+add xSP, xCB*2              ; pCtx + pCache
 %else
-add xSP, xS                 ; pCtx
+add xSP, xCB                ; pCtx
 %endif
...
 ; Save the Guest CPU context pointer.
-mov xSI, [xBP + xS*2 + RTHCPHYS_CB*2]   ; pCtx
+mov xSI, [xBP + xCB*2 + RTHCPHYS_CB*2]  ; pCtx
 push xSI                    ; push for saving the state at the end

 ; Save host fs, gs, sysenter msr etc.
-mov xAX, [xBP + xS*2]       ; pVMCBHostPhys (64 bits physical address; x86: take low dword only)
+mov xAX, [xBP + xCB*2]      ; pVMCBHostPhys (64 bits physical address; x86: take low dword only)
 push xAX                    ; save for the vmload after vmrun
 vmsave

 ; Setup eax for VMLOAD.
-mov xAX, [xBP + xS*2 + RTHCPHYS_CB]     ; pVMCBPhys (64 bits physical address; take low dword only)
+mov xAX, [xBP + xCB*2 + RTHCPHYS_CB]    ; pVMCBPhys (64 bits physical address; take low dword only)
...
 pop xBP
 %ifdef RT_ARCH_AMD64
-add xSP, 4*xS
+add xSP, 4*xCB
 %endif
 ret

The same substitution is applied in the remaining VM-exit paths (the cached-VMCS and 64-bit guest variants, including the 'add xSP, xCB ; pCache' cleanups) and in the 64-bit SVMR0VMRun64 body ([rbp + xCB*2 ...] loads and 'add rsp, 4*xCB').
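The repeated GDTR/IDTR juggling above reduces to a simple save/restore shape; a hedged sketch without any VT-x specifics (hypothetical routine, xCB*2 is enough room for the 6- or 10-byte descriptor-table register images):

    %include "iprt/asmdefs.mac"

    BEGINCODE
    BEGINPROC SketchSaveRestoreGdtIdt   ; hypothetical, illustration only
            sub     xSP, xCB*2
            sgdt    [xSP]               ; save GDTR
            sub     xSP, xCB*2
            sidt    [xSP]               ; save IDTR

            ; ... code that may trash the GDTR/IDTR limits (e.g. a VM entry/exit) ...

            lidt    [xSP]               ; restore in reverse order
            add     xSP, xCB*2
            lgdt    [xSP]
            add     xSP, xCB*2
            ret
    ENDPROC SketchSaveRestoreGdtIdt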
trunk/src/VBox/VMM/testcase/Instructions/env-common.mac
(r46543 -> r46548)
 %else
 mov sBX, [VBINSTST_NAME(g_uVBInsTstSubTestIndicator)]
-mov sCX, [xBP + 4 + xS]      ; expected
-mov sAX, [xBP + 4 + xS + 4]  ; actual
-mov sDX, [xBP + 4 + xS + 8]  ; reg#
+mov sCX, [xBP + 4 + xCB]     ; expected
+mov sAX, [xBP + 4 + xCB + 4] ; actual
+mov sDX, [xBP + 4 + xCB + 8] ; reg#
 push sBX
 push sCX
trunk/src/VBox/VMM/testcase/tstX86-1A.asm
(r40248 -> r46548)
All xS-based stack offsets become xCB. Representative hunks:

 .failed3:
-add xSP, 20h + xS
+add xSP, 20h + xCB
 jmp .return
...
 push xDI

-lea xDI, [xSP + xS * 5]
+lea xDI, [xSP + xCB * 5]
 mov xCX, 512 / 4
 mov eax, 0cccccccch
...
 ; Save the FPU state.
-mov dword [xSP + xS + X86FXSTATE.FPUIP], 0
-mov dword [xSP + xS + X86FXSTATE.FPUCS], 0
-mov dword [xSP + xS + X86FXSTATE.FPUDP], 0
-mov dword [xSP + xS + X86FXSTATE.FPUDS], 0
-arch_fxsave [xSP + xS]
+mov dword [xSP + xCB + X86FXSTATE.FPUIP], 0
+mov dword [xSP + xCB + X86FXSTATE.FPUCS], 0
+mov dword [xSP + xCB + X86FXSTATE.FPUDP], 0
+mov dword [xSP + xCB + X86FXSTATE.FPUDS], 0
+arch_fxsave [xSP + xCB]

 ; Save GRegs (80h bytes).
 %ifdef RT_ARCH_AMD64
-mov [xSP + 512 + xS + 000h], xAX
+mov [xSP + 512 + xCB + 000h], xAX
(the whole register-save block, [xSP + 512 + xS + 000h] through [xSP + 512 + xS + 07ch] in both the AMD64 and x86 variants, changes the same way, ending with)
-mov eax, [xSP + 512 + xS + 000h]
+mov eax, [xSP + 512 + xCB + 000h]
 %endif
 ret
...
 CompareFPUAndGRegsOnStack:
-lea xSP, [xSP - (1024 - xS)]
+lea xSP, [xSP - (1024 - xCB)]
 call SaveFPUAndGRegsToStack
...
 mov xCX, 640
-lea xSI, [xSP + xS*3]
+lea xSI, [xSP + xCB*3]
 lea xDI, [xSI + 1024]
...
-lea xAX, [xSP + xS*3]
+lea xAX, [xSP + xCB*3]
 xchg xAX, xSI
 sub xAX, xSI
...
 pop xDI
 pop xSI
-lea xSP, [xSP + (1024 - xS)]
+lea xSP, [xSP + (1024 - xCB)]
 or eax, eax
 ret
...
 ; @returns ZF=1 on success, ZF=0 on failure.
-; @param xSP + xS   fxsave image followed by fnstenv.
+; @param xSP + xCB  fxsave image followed by fnstenv.
 ; @param xCX Opcode address (no prefixes).
...
 ; Check the IP.
 %ifdef RT_ARCH_AMD64
-cmp rcx, [xBP + xS*2 + X86FXSTATE.FPUIP]
+cmp rcx, [xBP + xCB*2 + X86FXSTATE.FPUIP]
 %else
-cmp ecx, [xBP + xS*2 + X86FXSTATE.FPUIP]
+cmp ecx, [xBP + xCB*2 + X86FXSTATE.FPUIP]
 %endif
 jne .failure1

 .check_fpucs:
 mov ax, cs
-cmp ax, [xBP + xS*2 + 512 + X86FSTENV32P.FPUCS]
+cmp ax, [xBP + xCB*2 + 512 + X86FSTENV32P.FPUCS]
 jne .failure2

The same [xBP + xS*2 + ...] -> [xBP + xCB*2 + ...] change is applied to the remaining FOP/FPUIP/FPUDP/FPUDS checks in this routine, to CompareFPUAndGRegsOnStackIgnoreOpAndIp (its lea xSP/xSI/xAX offsets), and to the memory-operand checker at the end of the file, whose doc line likewise becomes '@param xSP + xCB fxsave image followed by fnstenv.'
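The [xSP + xCB] and [xSP + xCB*3] offsets in the test helpers above all come from the same convention: the caller reserves a buffer on its own stack and the callee skips the return address (and any registers it pushed) to reach it. A hedged sketch of that convention with hypothetical names:

    %include "iprt/asmdefs.mac"

    BEGINCODE
    BEGINPROC SketchFillCallerBuffer    ; hypothetical, illustration only; clobbers xAX/xCX/xDI
            lea     xDI, [xSP + xCB]    ; first byte of the caller's buffer, just past our return address
            mov     xCX, 512 / 4
            mov     eax, 0cccccccch
            cld
            rep stosd                   ; fill it with a recognizable pattern
            ret
    ENDPROC SketchFillCallerBuffer

    BEGINPROC SketchCaller              ; hypothetical, illustration only
            sub     xSP, 512            ; reserve the buffer
            call    SketchFillCallerBuffer
            mov     eax, [xSP]          ; now 0cccccccch
            add     xSP, 512
            ret
    ENDPROC SketchCaller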