VirtualBox: Changeset r106465

Timestamp:
Oct 18, 2024 12:27:52 AM
Author:
vboxsync
Message:

VMM/IEM: Added iemNativeEmitLoadGprWithGstReg[Ex]T and iemNativeEmitStoreGprToGstReg[Ex]T as a better way of explicitly loading & storing standard guest registers. bugref:10720
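
At a call site the change looks like this (taken from the EFLAGS hunk below), trading a hand-written load width and VCpu offset for a template argument:

    /* Before: load width and VCpu offset spelled out by hand. */
    off = iemNativeEmitLoadGprFromVCpuU32Ex(pCodeBuf, off, idxRegEfl, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));
    /* After: the templated helper picks the width and offset for the register. */
    off = iemNativeEmitLoadGprWithGstRegExT<kIemNativeGstReg_EFlags>(pCodeBuf, off, idxRegEfl);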

File:
1 edited

  • trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h

r106453 → r106465

 
 /*********************************************************************************************************************************
+*   Guest Register Load & Store Helpers                                                                                          *
+*********************************************************************************************************************************/
+
+
+/**
+ * Alternative to iemNativeEmitLoadGprWithGstShadowRegEx() and
+ * iemNativeEmitLoadGprWithGstShadowReg() which should be more efficient as it
+ * lets the compiler do the equivalent of the g_aGstShadowInfo lookup.
+ *
+ * @note This does not mark @a idxHstReg as having a shadow copy of @a a_enmGstReg;
+ *       that is something the caller needs to do if applicable.
+ */
+template<IEMNATIVEGSTREG const a_enmGstReg>
+DECL_INLINE_THROW(uint32_t) iemNativeEmitLoadGprWithGstRegExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxHstReg)
+{
+    /* 64-bit registers: */
+    if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Pc)
+        return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Rsp)
+        return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp));
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_CsBase)
+        return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cs.u64Base));
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr0)
+    //    return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr0));
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr4)
+    //    return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr4));
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Xcr0)
+    //    return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.aXcr[0]));
+
+    /* 32-bit registers: */
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_EFlags)
+        return iemNativeEmitLoadGprFromVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_MxCsr)
+        return iemNativeEmitLoadGprFromVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.MXCSR));
+
+    /* 16-bit registers */
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFcw)
+        return iemNativeEmitLoadGprFromVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FCW));
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFsw)
+        return iemNativeEmitLoadGprFromVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FSW));
+#if RT_CPLUSPLUS_PREREQ(201700) && !defined(__clang_major__)
+    else
+    {
+        AssertCompile(false);
+        return off;
+    }
+#endif
+}
+
+
+/** See iemNativeEmitLoadGprWithGstRegExT(). */
+template<IEMNATIVEGSTREG const a_enmGstReg>
+DECL_INLINE_THROW(uint32_t) iemNativeEmitLoadGprWithGstRegT(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg)
+{
+#ifdef RT_ARCH_AMD64
+    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 16);
+#elif defined(RT_ARCH_ARM64)
+    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
+#else
+# error "port me"
+#endif
+    off = iemNativeEmitLoadGprWithGstRegExT<a_enmGstReg>(pCodeBuf, off, idxHstReg);
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
+
+/**
+ * Store companion to iemNativeEmitLoadGprWithGstRegExT().
+ */
+template<IEMNATIVEGSTREG const a_enmGstReg>
+DECL_INLINE_THROW(uint32_t) iemNativeEmitStoreGprToGstRegExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxHstReg,
+                                                             uint8_t idxTmpReg = IEMNATIVE_REG_FIXED_TMP0)
+{
+    /* 64-bit registers: */
+    if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Pc)
+        return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip), idxTmpReg);
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Rsp)
+        return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp), idxTmpReg);
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr0)
+    //    return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr0), idxTmpReg);
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr4)
+    //    return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr4), idxTmpReg);
+    //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Xcr0)
+    //    return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.aXcr[0]), idxTmpReg);
+    /* 32-bit registers: */
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_EFlags)
+        return iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags), idxTmpReg);
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_MxCsr)
+        return iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.MXCSR), idxTmpReg);
+    /* 16-bit registers */
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFcw)
+        return iemNativeEmitStoreGprToVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FCW), idxTmpReg);
+    else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFsw)
+        return iemNativeEmitStoreGprToVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FSW), idxTmpReg);
+#if RT_CPLUSPLUS_PREREQ(201700) && !defined(__clang_major__)
+    else
+    {
+        AssertCompile(false);
+        return off;
+    }
+#endif
+}
+
+
+/** See iemNativeEmitLoadGprWithGstRegExT(). */
+template<IEMNATIVEGSTREG const a_enmGstReg>
+DECL_INLINE_THROW(uint32_t) iemNativeEmitStoreGprToGstRegT(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg)
+{
+#ifdef RT_ARCH_AMD64
+    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 16);
+#elif defined(RT_ARCH_ARM64)
+    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
+#else
+# error "port me"
+#endif
+    off = iemNativeEmitStoreGprToGstRegExT<a_enmGstReg>(pCodeBuf, off, idxHstReg);
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
+
+
+/*********************************************************************************************************************************
 *   EFLAGS                                                                                                                       *
 *********************************************************************************************************************************/
     
...

         idxRegEfl = ASMBitFirstSetU32(bmAvailableRegs) - 1;
         bmAvailableRegs &= ~RT_BIT_32(idxRegTmp);
-        off = iemNativeEmitLoadGprFromVCpuU32Ex(pCodeBuf, off, idxRegEfl, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));
+        off = iemNativeEmitLoadGprWithGstRegExT<kIemNativeGstReg_EFlags>(pCodeBuf, off, idxRegEfl);
     }
     Assert(bmAvailableRegs != 0);
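
The doc comment above says the template "lets the compiler do the equivalent of the g_aGstShadowInfo lookup". A minimal standalone sketch of that pattern, with hypothetical stand-in types (GuestCtx, GstReg, LoadGstReg are illustrations, not the VirtualBox definitions): keying an if-constexpr chain on a template argument makes every instantiation collapse to a single fixed-width access at a fixed offset, whereas a runtime enum parameter would require a descriptor-table lookup.

    #include <cstdint>

    /* Hypothetical stand-ins for the guest context and register enum. */
    struct GuestCtx { uint64_t rip; uint32_t eflags; uint16_t fcw; };
    enum class GstReg { Pc, EFlags, FpuFcw };

    /* Each instantiation compiles down to one read of the right width at the
       right offset; the dead branches are discarded at compile time. */
    template<GstReg a_enmGstReg>
    uint64_t LoadGstReg(GuestCtx const &Ctx)
    {
        if constexpr (a_enmGstReg == GstReg::Pc)          /* 64-bit */
            return Ctx.rip;
        else if constexpr (a_enmGstReg == GstReg::EFlags) /* 32-bit */
            return Ctx.eflags;
        else if constexpr (a_enmGstReg == GstReg::FpuFcw) /* 16-bit */
            return Ctx.fcw;
        else
        {
            /* Dependent-false condition, so the assertion only fires when this
               branch is actually instantiated; this mirrors the intent of the
               AssertCompile(false) fallback, which the commit guards behind
               C++17 and non-clang, presumably for the same portability reason. */
            static_assert(a_enmGstReg != a_enmGstReg, "unsupported guest register");
            return 0;
        }
    }

    int main()
    {
        GuestCtx Ctx = { /*rip=*/0x1000, /*eflags=*/0x202, /*fcw=*/0x37f };
        return LoadGstReg<GstReg::EFlags>(Ctx) == 0x202 ? 0 : 1;
    }

The non-template wrappers in the changeset then only have to reserve worst-case instruction-buffer space (16 bytes on AMD64, 6 instructions on ARM64) before delegating to the templated Ex variant.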