VirtualBox

Changeset 41933 in vbox for trunk/src/VBox/VMM/VMMSwitcher


Ignore:
Timestamp:
Jun 27, 2012 6:37:33 PM (13 years ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
78801
Message:

VMMSwitcher: Drop the unused assembly switcher functions taking guest or host contexts as arguments.

Location:
trunk/src/VBox/VMM/VMMSwitcher
Files:
11 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/VMMSwitcher/32BitTo32Bit.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/32BitToAMD64.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/32BitToPAE.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/AMD64To32Bit.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/AMD64ToPAE.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/AMD64andLegacy.mac

    r41906 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
     
    691691
    692692;;
    693 ; VMMGCGuestToHostAsmGuestCtx
    694 ;
    695 ; Switches from Guest Context to Host Context.
    696 ; Of course it's only called from within the GC.
     693; VMMGCGuestToHostAsm
     694;
     695; This is an alternative entry point which we'll be using
     696; when we have saved the guest state already or we haven't
     697; been messing with the guest at all.
    697698;
    698699; @param    eax     Return code.
    699 ; @param    esp + 4 Pointer to CPUMCTXCORE.
    700 ;
    701 ; @remark   ASSUMES interrupts disabled.
    702 ;
    703 ALIGNCODE(16)
    704 BEGINPROC VMMGCGuestToHostAsmGuestCtx
    705     DEBUG_CHAR('~')
     700; @uses     eax, edx, ecx (or it may use them in the future)
     701;
     702ALIGNCODE(16)
     703BEGINPROC VMMGCGuestToHostAsm
     704    DEBUG_CHAR('%')
    706705
    707706%ifdef VBOX_WITH_STATISTICS
     
    720719
    721720    ;
    722     ; Load the CPUMCPU pointer.
     721    ; Load the CPUM pointer.
    723722    ;
    724723    FIXUP FIX_GC_CPUMCPU_OFF, 1, 0
    725724    mov     edx, 0ffffffffh
    726725
    727     ; Skip return address (assumes called!)
    728     lea     esp, [esp + 4]
    729 
    730     ;
    731     ; Guest Context (assumes esp now points to CPUMCTXCORE structure).
    732     ;
    733     ; general purpose registers
    734     push    eax                         ; save return code.
    735     mov     eax, [esp + 4 + CPUMCTXCORE.edi]
    736     mov     [edx + CPUMCPU.Guest.edi], eax
    737     mov     eax, [esp + 4 + CPUMCTXCORE.esi]
    738     mov     [edx + CPUMCPU.Guest.esi], eax
    739     mov     eax, [esp + 4 + CPUMCTXCORE.ebp]
    740     mov     [edx + CPUMCPU.Guest.ebp], eax
    741     mov     eax, [esp + 4 + CPUMCTXCORE.eax]
    742     mov     [edx + CPUMCPU.Guest.eax], eax
    743     mov     eax, [esp + 4 + CPUMCTXCORE.ebx]
    744     mov     [edx + CPUMCPU.Guest.ebx], eax
    745     mov     eax, [esp + 4 + CPUMCTXCORE.edx]
    746     mov     [edx + CPUMCPU.Guest.edx], eax
    747     mov     eax, [esp + 4 + CPUMCTXCORE.ecx]
    748     mov     [edx + CPUMCPU.Guest.ecx], eax
    749     mov     eax, [esp + 4 + CPUMCTXCORE.esp]
    750     mov     [edx + CPUMCPU.Guest.esp], eax
    751     ; selectors
    752     mov     eax, [esp + 4 + CPUMCTXCORE.ss.Sel]
    753     mov     [edx + CPUMCPU.Guest.ss.Sel], eax
    754     mov     eax, [esp + 4 + CPUMCTXCORE.gs.Sel]
    755     mov     [edx + CPUMCPU.Guest.gs.Sel], eax
    756     mov     eax, [esp + 4 + CPUMCTXCORE.fs.Sel]
    757     mov     [edx + CPUMCPU.Guest.fs.Sel], eax
    758     mov     eax, [esp + 4 + CPUMCTXCORE.es.Sel]
    759     mov     [edx + CPUMCPU.Guest.es.Sel], eax
    760     mov     eax, [esp + 4 + CPUMCTXCORE.ds.Sel]
    761     mov     [edx + CPUMCPU.Guest.ds.Sel], eax
    762     mov     eax, [esp + 4 + CPUMCTXCORE.cs.Sel]
    763     mov     [edx + CPUMCPU.Guest.cs.Sel], eax
    764     ; flags
    765     mov     eax, [esp + 4 + CPUMCTXCORE.eflags]
    766     mov     [edx + CPUMCPU.Guest.eflags], eax
    767     ; eip
    768     mov     eax, [esp + 4 + CPUMCTXCORE.eip]
    769     mov     [edx + CPUMCPU.Guest.eip], eax
    770     ; jump to common worker code.
    771     pop     eax                         ; restore return code.
    772 
    773     add     esp, CPUMCTXCORE_size      ; skip CPUMCTXCORE structure
    774 
    775     jmp     vmmGCGuestToHostAsm_EIPDone
    776 ENDPROC VMMGCGuestToHostAsmGuestCtx
    777 
    778 
    779 ;;
    780 ; VMMGCGuestToHostAsmHyperCtx
    781 ;
    782 ; This is an alternative entry point which we'll be using
    783 ; when we have the hypervisor context and need to save
    784 ; that before going to the host.
    785 ;
    786 ; This is typically useful when abandoning the hypervisor
    787 ; because of a trap and you want the trap state to be saved.
    788 ;
    789 ; @param    eax     Return code.
    790 ; @param    ecx     Points to CPUMCTXCORE.
    791 ; @uses     eax,edx,ecx
    792 ALIGNCODE(16)
    793 BEGINPROC VMMGCGuestToHostAsmHyperCtx
    794     DEBUG_CHAR('#')
    795 
    796 %ifdef VBOX_WITH_STATISTICS
    797     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalInGC
    798     mov     edx, 0ffffffffh
    799     STAM32_PROFILE_ADV_STOP edx
    800 
    801     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalGCToQemu
    802     mov     edx, 0ffffffffh
    803     STAM32_PROFILE_ADV_START edx
    804 
    805     FIXUP FIX_GC_VM_OFF, 1, VM.StatSwitcherToHC
    806     mov     edx, 0ffffffffh
    807     STAM32_PROFILE_ADV_START edx
    808 %endif
    809 
    810     ;
    811     ; Load the CPUM pointer.
    812     ;
    813     FIXUP FIX_GC_CPUMCPU_OFF, 1, 0
    814     mov     edx, 0ffffffffh
    815 
    816     push    eax                         ; save return code.
    817     ; general purpose registers
    818     mov     eax, [ecx + CPUMCTXCORE.edi]
    819     mov     [edx + CPUMCPU.Hyper.edi], eax
    820     mov     eax, [ecx + CPUMCTXCORE.esi]
    821     mov     [edx + CPUMCPU.Hyper.esi], eax
    822     mov     eax, [ecx + CPUMCTXCORE.ebp]
    823     mov     [edx + CPUMCPU.Hyper.ebp], eax
    824     mov     eax, [ecx + CPUMCTXCORE.eax]
    825     mov     [edx + CPUMCPU.Hyper.eax], eax
    826     mov     eax, [ecx + CPUMCTXCORE.ebx]
    827     mov     [edx + CPUMCPU.Hyper.ebx], eax
    828     mov     eax, [ecx + CPUMCTXCORE.edx]
    829     mov     [edx + CPUMCPU.Hyper.edx], eax
    830     mov     eax, [ecx + CPUMCTXCORE.ecx]
    831     mov     [edx + CPUMCPU.Hyper.ecx], eax
    832     mov     eax, [ecx + CPUMCTXCORE.esp]
    833     mov     [edx + CPUMCPU.Hyper.esp], eax
    834     ; selectors
    835     mov     eax, [ecx + CPUMCTXCORE.ss.Sel]
    836     mov     [edx + CPUMCPU.Hyper.ss.Sel], eax
    837     mov     eax, [ecx + CPUMCTXCORE.gs.Sel]
    838     mov     [edx + CPUMCPU.Hyper.gs.Sel], eax
    839     mov     eax, [ecx + CPUMCTXCORE.fs.Sel]
    840     mov     [edx + CPUMCPU.Hyper.fs.Sel], eax
    841     mov     eax, [ecx + CPUMCTXCORE.es.Sel]
    842     mov     [edx + CPUMCPU.Hyper.es.Sel], eax
    843     mov     eax, [ecx + CPUMCTXCORE.ds.Sel]
    844     mov     [edx + CPUMCPU.Hyper.ds.Sel], eax
    845     mov     eax, [ecx + CPUMCTXCORE.cs.Sel]
    846     mov     [edx + CPUMCPU.Hyper.cs.Sel], eax
    847     ; flags
    848     mov     eax, [ecx + CPUMCTXCORE.eflags]
    849     mov     [edx + CPUMCPU.Hyper.eflags], eax
    850     ; eip
    851     mov     eax, [ecx + CPUMCTXCORE.eip]
    852     mov     [edx + CPUMCPU.Hyper.eip], eax
    853     ; jump to common worker code.
    854     pop     eax                         ; restore return code.
    855     jmp     vmmGCGuestToHostAsm_SkipHyperRegs
    856 
    857 ENDPROC VMMGCGuestToHostAsmHyperCtx
    858 
    859 
    860 ;;
    861 ; VMMGCGuestToHostAsm
    862 ;
    863 ; This is an alternative entry point which we'll be using
    864 ; when we have saved the guest state already or we haven't
    865 ; been messing with the guest at all.
    866 ;
    867 ; @param    eax     Return code.
    868 ; @uses     eax, edx, ecx (or it may use them in the future)
    869 ;
    870 ALIGNCODE(16)
    871 BEGINPROC VMMGCGuestToHostAsm
    872     DEBUG_CHAR('%')
    873 
    874 %ifdef VBOX_WITH_STATISTICS
    875     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalInGC
    876     mov     edx, 0ffffffffh
    877     STAM32_PROFILE_ADV_STOP edx
    878 
    879     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalGCToQemu
    880     mov     edx, 0ffffffffh
    881     STAM32_PROFILE_ADV_START edx
    882 
    883     FIXUP FIX_GC_VM_OFF, 1, VM.StatSwitcherToHC
    884     mov     edx, 0ffffffffh
    885     STAM32_PROFILE_ADV_START edx
    886 %endif
    887 
    888     ;
    889     ; Load the CPUM pointer.
    890     ;
    891     FIXUP FIX_GC_CPUMCPU_OFF, 1, 0
    892     mov     edx, 0ffffffffh
    893 
    894726    pop     dword [edx + CPUMCPU.Hyper.eip] ; call return from stack
    895     jmp short vmmGCGuestToHostAsm_EIPDone
    896 
    897 ALIGNCODE(16)
    898 vmmGCGuestToHostAsm_EIPDone:
     727
    899728    ; general registers which we care about.
    900729    mov     dword [edx + CPUMCPU.Hyper.ebx], ebx
     
    905734
    906735    ; special registers which may change.
    907 vmmGCGuestToHostAsm_SkipHyperRegs:
    908736%ifdef STRICT_IF
    909737    pushf
     
    12321060        at VMMSWITCHERDEF.offGCCallTrampoline,          dd NAME(vmmGCCallTrampoline)        - NAME(Start)
    12331061        at VMMSWITCHERDEF.offGCGuestToHostAsm,          dd NAME(VMMGCGuestToHostAsm)        - NAME(Start)
    1234         at VMMSWITCHERDEF.offGCGuestToHostAsmHyperCtx,  dd NAME(VMMGCGuestToHostAsmHyperCtx)- NAME(Start)
    1235         at VMMSWITCHERDEF.offGCGuestToHostAsmGuestCtx,  dd NAME(VMMGCGuestToHostAsmGuestCtx)- NAME(Start)
    12361062        ; disasm help
    12371063        at VMMSWITCHERDEF.offHCCode0,                   dd 0
  • trunk/src/VBox/VMM/VMMSwitcher/LegacyandAMD64.mac

    r41907 r41933  
    33
    44;
    5 ; Copyright (C) 2006-2007 Oracle Corporation
     5; Copyright (C) 2006-2012 Oracle Corporation
    66;
    77; This file is part of VirtualBox Open Source Edition (OSE), as
     
    660660ENDPROC VMMGCGuestToHostAsm
    661661
    662 ;;
    663 ; VMMGCGuestToHostAsmHyperCtx
    664 ;
    665 ; This is an alternative entry point which we'll be using
    666 ; when we have the hypervisor context and need to save
    667 ; that before going to the host.
    668 ;
    669 ; This is typically useful when abandoning the hypervisor
    670 ; because of a trap and you want the trap state to be saved.
    671 ;
    672 ; @param    eax     Return code.
    673 ; @param    ecx     Points to CPUMCTXCORE.
    674 ; @uses     eax,edx,ecx
    675 ALIGNCODE(16)
    676 BEGINPROC VMMGCGuestToHostAsmHyperCtx
    677      int3
    678 
    679 ;;
    680 ; VMMGCGuestToHostAsmGuestCtx
    681 ;
    682 ; Switches from Guest Context to Host Context.
    683 ; Of course it's only called from within the GC.
    684 ;
    685 ; @param    eax     Return code.
    686 ; @param    esp + 4 Pointer to CPUMCTXCORE.
    687 ;
    688 ; @remark   ASSUMES interrupts disabled.
    689 ;
    690 ALIGNCODE(16)
    691 BEGINPROC VMMGCGuestToHostAsmGuestCtx
    692       int3
    693 
    694 GLOBALNAME End
     662
    695663;
    696664; The description string (in the text section).
     
    724692        at VMMSWITCHERDEF.offGCCallTrampoline,          dd NAME(vmmGCCallTrampoline)        - NAME(Start)
    725693        at VMMSWITCHERDEF.offGCGuestToHostAsm,          dd NAME(VMMGCGuestToHostAsm)        - NAME(Start)
    726         at VMMSWITCHERDEF.offGCGuestToHostAsmHyperCtx,  dd NAME(VMMGCGuestToHostAsmHyperCtx)- NAME(Start)
    727         at VMMSWITCHERDEF.offGCGuestToHostAsmGuestCtx,  dd NAME(VMMGCGuestToHostAsmGuestCtx)- NAME(Start)
    728694        ; disasm help
    729695        at VMMSWITCHERDEF.offHCCode0,                   dd 0
  • trunk/src/VBox/VMM/VMMSwitcher/PAETo32Bit.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/PAEToAMD64.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/PAEToPAE.asm

    r28800 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
  • trunk/src/VBox/VMM/VMMSwitcher/PAEand32Bit.mac

    r41907 r41933  
    55
    66;
    7 ; Copyright (C) 2006-2007 Oracle Corporation
     7; Copyright (C) 2006-2012 Oracle Corporation
    88;
    99; This file is part of VirtualBox Open Source Edition (OSE), as
     
    595595
    596596;;
    597 ; VMMGCGuestToHostAsmGuestCtx
    598 ;
    599 ; Switches from Guest Context to Host Context.
    600 ; Of course it's only called from within the GC.
    601 ;
    602 ; @param    eax     Return code.
    603 ; @param    esp + 4 Pointer to CPUMCTXCORE.
    604 ;
    605 ; @remark   ASSUMES interrupts disabled.
    606 ;
    607 ALIGNCODE(16)
    608 BEGINPROC VMMGCGuestToHostAsmGuestCtx
    609     DEBUG_CHAR('~')
    610 
    611 %ifdef VBOX_WITH_STATISTICS
    612     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalInGC
    613     mov     edx, 0ffffffffh
    614     STAM_PROFILE_ADV_STOP edx
    615 
    616     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalGCToQemu
    617     mov     edx, 0ffffffffh
    618     STAM_PROFILE_ADV_START edx
    619 
    620     FIXUP FIX_GC_VM_OFF, 1, VM.StatSwitcherToHC
    621     mov     edx, 0ffffffffh
    622     STAM_PROFILE_ADV_START edx
    623 %endif
    624 
    625     ;
    626     ; Load the CPUMCPU pointer.
    627     ;
    628     FIXUP FIX_GC_CPUMCPU_OFF, 1, 0
    629     mov     edx, 0ffffffffh
    630 
    631     ; Skip return address (assumes called!)
    632     lea     esp, [esp + 4]
    633 
    634     ;
    635     ; Guest Context (assumes esp now points to CPUMCTXCORE structure).
    636     ;
    637     ; general purpose registers.
    638     push    eax
    639 
    640     mov     eax, [esp + 4 + CPUMCTXCORE.eax]
    641     mov     [edx + CPUMCPU.Guest.eax], eax
    642     mov     eax, [esp + 4 + CPUMCTXCORE.ecx]
    643     mov     [edx + CPUMCPU.Guest.ecx], eax
    644     mov     eax, [esp + 4 + CPUMCTXCORE.edx]
    645     mov     [edx + CPUMCPU.Guest.edx], eax
    646     mov     eax, [esp + 4 + CPUMCTXCORE.ebx]
    647     mov     [edx + CPUMCPU.Guest.ebx], eax
    648     mov     eax, [esp + 4 + CPUMCTXCORE.esp]
    649     mov     [edx + CPUMCPU.Guest.esp], eax
    650     mov     eax, [esp + 4 + CPUMCTXCORE.ebp]
    651     mov     [edx + CPUMCPU.Guest.ebp], eax
    652     mov     eax, [esp + 4 + CPUMCTXCORE.esi]
    653     mov     [edx + CPUMCPU.Guest.esi], eax
    654     mov     eax, [esp + 4 + CPUMCTXCORE.edi]
    655     mov     [edx + CPUMCPU.Guest.edi], eax
    656     mov     eax, dword [esp + 4 + CPUMCTXCORE.es.Sel]
    657     mov     dword [edx + CPUMCPU.Guest.es.Sel], eax
    658     mov     eax, dword [esp + 4 + CPUMCTXCORE.cs.Sel]
    659     mov     dword [edx + CPUMCPU.Guest.cs.Sel], eax
    660     mov     eax, dword [esp + 4 + CPUMCTXCORE.ss.Sel]
    661     mov     dword [edx + CPUMCPU.Guest.ss.Sel], eax
    662     mov     eax, dword [esp + 4 + CPUMCTXCORE.ds.Sel]
    663     mov     dword [edx + CPUMCPU.Guest.ds.Sel], eax
    664     mov     eax, dword [esp + 4 + CPUMCTXCORE.fs.Sel]
    665     mov     dword [edx + CPUMCPU.Guest.fs.Sel], eax
    666     mov     eax, dword [esp + 4 + CPUMCTXCORE.gs.Sel]
    667     mov     dword [edx + CPUMCPU.Guest.gs.Sel], eax
    668     mov     eax, [esp + 4 + CPUMCTXCORE.eflags]
    669     mov     dword [edx + CPUMCPU.Guest.eflags], eax
    670     mov     eax, [esp + 4 + CPUMCTXCORE.eip]
    671     mov     dword [edx + CPUMCPU.Guest.eip], eax
    672     pop     eax
    673 
    674     add     esp, CPUMCTXCORE_size      ; skip CPUMCTXCORE structure
    675 
    676     jmp     vmmGCGuestToHostAsm_EIPDone
    677 ENDPROC VMMGCGuestToHostAsmGuestCtx
    678 
    679 
    680 ;;
    681 ; VMMGCGuestToHostAsmHyperCtx
    682 ;
    683 ; This is an alternative entry point which we'll be using
    684 ; when we have the hypervisor context and need to save
    685 ; that before going to the host.
    686 ;
    687 ; This is typically useful when abandoning the hypervisor
    688 ; because of a trap and you want the trap state to be saved.
    689 ;
    690 ; @param    eax     Return code.
    691 ; @param    ecx     Points to CPUMCTXCORE.
    692 ; @uses     eax,edx,ecx
    693 ALIGNCODE(16)
    694 BEGINPROC VMMGCGuestToHostAsmHyperCtx
    695     DEBUG_CHAR('#')
    696 
    697 %ifdef VBOX_WITH_STATISTICS
    698     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalInGC
    699     mov     edx, 0ffffffffh
    700     STAM_PROFILE_ADV_STOP edx
    701 
    702     FIXUP FIX_GC_VM_OFF, 1, VM.StatTotalGCToQemu
    703     mov     edx, 0ffffffffh
    704     STAM_PROFILE_ADV_START edx
    705 
    706     FIXUP FIX_GC_VM_OFF, 1, VM.StatSwitcherToHC
    707     mov     edx, 0ffffffffh
    708     STAM_PROFILE_ADV_START edx
    709 %endif
    710 
    711     ;
    712     ; Load the CPUM pointer.
    713     ;
    714     FIXUP FIX_GC_CPUMCPU_OFF, 1, 0
    715     mov     edx, 0ffffffffh
    716 
    717     push    eax                         ; save return code.
    718     ; general purpose registers
    719     mov     eax, [ecx + CPUMCTXCORE.edi]
    720     mov     [edx + CPUMCPU.Hyper.edi], eax
    721     mov     eax, [ecx + CPUMCTXCORE.esi]
    722     mov     [edx + CPUMCPU.Hyper.esi], eax
    723     mov     eax, [ecx + CPUMCTXCORE.ebp]
    724     mov     [edx + CPUMCPU.Hyper.ebp], eax
    725     mov     eax, [ecx + CPUMCTXCORE.eax]
    726     mov     [edx + CPUMCPU.Hyper.eax], eax
    727     mov     eax, [ecx + CPUMCTXCORE.ebx]
    728     mov     [edx + CPUMCPU.Hyper.ebx], eax
    729     mov     eax, [ecx + CPUMCTXCORE.edx]
    730     mov     [edx + CPUMCPU.Hyper.edx], eax
    731     mov     eax, [ecx + CPUMCTXCORE.ecx]
    732     mov     [edx + CPUMCPU.Hyper.ecx], eax
    733     mov     eax, [ecx + CPUMCTXCORE.esp]
    734     mov     [edx + CPUMCPU.Hyper.esp], eax
    735     ; selectors
    736     mov     eax, [ecx + CPUMCTXCORE.ss.Sel]
    737     mov     [edx + CPUMCPU.Hyper.ss.Sel], eax
    738     mov     eax, [ecx + CPUMCTXCORE.gs.Sel]
    739     mov     [edx + CPUMCPU.Hyper.gs.Sel], eax
    740     mov     eax, [ecx + CPUMCTXCORE.fs.Sel]
    741     mov     [edx + CPUMCPU.Hyper.fs.Sel], eax
    742     mov     eax, [ecx + CPUMCTXCORE.es.Sel]
    743     mov     [edx + CPUMCPU.Hyper.es.Sel], eax
    744     mov     eax, [ecx + CPUMCTXCORE.ds.Sel]
    745     mov     [edx + CPUMCPU.Hyper.ds.Sel], eax
    746     mov     eax, [ecx + CPUMCTXCORE.cs.Sel]
    747     mov     [edx + CPUMCPU.Hyper.cs.Sel], eax
    748     ; flags
    749     mov     eax, [ecx + CPUMCTXCORE.eflags]
    750     mov     [edx + CPUMCPU.Hyper.eflags], eax
    751     ; eip
    752     mov     eax, [ecx + CPUMCTXCORE.eip]
    753     mov     [edx + CPUMCPU.Hyper.eip], eax
    754     ; jump to common worker code.
    755     pop     eax                         ; restore return code.
    756     jmp     vmmGCGuestToHostAsm_SkipHyperRegs
    757 
    758 ENDPROC VMMGCGuestToHostAsmHyperCtx
    759 
    760 
    761 ;;
    762597; VMMGCGuestToHostAsm
    763598;
     
    794629
    795630    pop     dword [edx + CPUMCPU.Hyper.eip] ; call return from stack
    796     jmp short vmmGCGuestToHostAsm_EIPDone
    797 
    798 ALIGNCODE(16)
    799 vmmGCGuestToHostAsm_EIPDone:
     631
    800632    ; general registers which we care about.
    801633    mov     dword [edx + CPUMCPU.Hyper.ebx], ebx
     
    806638
    807639    ; special registers which may change.
    808 vmmGCGuestToHostAsm_SkipHyperRegs:
    809640    ; str     [edx + CPUMCPU.Hyper.tr] - double fault only, and it won't be right then either.
    810641    sldt    [edx + CPUMCPU.Hyper.ldtr.Sel]
     
    1092923        at VMMSWITCHERDEF.offGCCallTrampoline,          dd NAME(vmmGCCallTrampoline)        - NAME(Start)
    1093924        at VMMSWITCHERDEF.offGCGuestToHostAsm,          dd NAME(VMMGCGuestToHostAsm)        - NAME(Start)
    1094         at VMMSWITCHERDEF.offGCGuestToHostAsmHyperCtx,  dd NAME(VMMGCGuestToHostAsmHyperCtx)- NAME(Start)
    1095         at VMMSWITCHERDEF.offGCGuestToHostAsmGuestCtx,  dd NAME(VMMGCGuestToHostAsmGuestCtx)- NAME(Start)
    1096925        ; disasm help
    1097926        at VMMSWITCHERDEF.offHCCode0,                   dd 0
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette