VirtualBox

Changeset 102424 in vbox for trunk/src/VBox/VMM/include


Ignore:
Timestamp:
Dec 1, 2023 10:43:39 PM (17 months ago)
Author:
vboxsync
svn:sync-xref-src-repo-rev:
160567
Message:

VMM/IEM: Continue refactoring IEM_MC_MEM_MAP into type specific MCs using bUnmapInfo. bugref:10371

Location:
trunk/src/VBox/VMM/include
Files:
3 edited

Legend:

Unmodified
Added
Removed
  • trunk/src/VBox/VMM/include/IEMInline.h

    r101387 r102424  
    37443744}
    37453745
     3746DECLINLINE(void) iemMemRollbackAndUnmapWo(PVMCPUCC pVCpu, const void *pvMem, uint8_t bMapInfo) RT_NOEXCEPT
     3747{
     3748# if defined(IEM_WITH_DATA_TLB) && defined(IN_RING3)
     3749    if (RT_LIKELY(bMapInfo == 0))
     3750        return;
     3751# endif
     3752    iemMemRollbackAndUnmapWoSafe(pVCpu, pvMem, bMapInfo);
     3753}
     3754
    37463755#endif /* IEM_WITH_SETJMP */
    37473756
     
    38193828#include "../VMMAll/IEMAllMemRWTmplInline.cpp.h"
    38203829
     3830#undef TMPL_MEM_NO_STORE
     3831#undef TMPL_MEM_NO_MAPPING
     3832
     3833#define TMPL_MEM_TYPE       RTFLOAT80U
     3834#define TMPL_MEM_TYPE_ALIGN 7
     3835#define TMPL_MEM_TYPE_SIZE  10
     3836#define TMPL_MEM_FN_SUFF    R80
     3837#define TMPL_MEM_FMT_TYPE   "%.10Rhxs"
     3838#define TMPL_MEM_FMT_DESC   "tword"
     3839#include "../VMMAll/IEMAllMemRWTmplInline.cpp.h"
     3840
    38213841#undef TMPL_MEM_CHECK_UNALIGNED_WITHIN_PAGE_OK
    38223842
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r102394 r102424  
    50455045VBOXSTRICTRC    iemMemCommitAndUnmapPostponeTroubleToR3(PVMCPUCC pVCpu, void *pvMem, uint32_t fAccess) RT_NOEXCEPT;
    50465046#endif
     5047void            iemMemRollbackAndUnmap(PVMCPUCC pVCpu, void *pvMem, uint32_t fAccess) RT_NOEXCEPT;
    50475048void            iemMemRollback(PVMCPUCC pVCpu) RT_NOEXCEPT;
    50485049VBOXSTRICTRC    iemMemApplySegment(PVMCPUCC pVCpu, uint32_t fAccess, uint8_t iSegReg, size_t cbMem, PRTGCPTR pGCPtrMem) RT_NOEXCEPT;
     
    51105111uint64_t        iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    51115112uint64_t        iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     5113void            iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PRTFLOAT80U pr80Dst) IEM_NOEXCEPT_MAY_LONGJMP;
    51125114# endif
    5113 void            iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    51145115void            iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    51155116void            iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     
    51435144void            iemMemStoreDataU256SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
    51445145void            iemMemStoreDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
     5146void            iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
    51455147#if 0
    51465148void            iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;
     
    51685170uint64_t       *iemMemMapDataU64WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    51695171uint64_t const *iemMemMapDataU64RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     5172PRTFLOAT80U     iemMemMapDataR80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     5173PRTFLOAT80U     iemMemMapDataR80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     5174PCRTFLOAT80U    iemMemMapDataR80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    51705175
    51715176void            iemMemCommitAndUnmapRwSafeJmp(PVMCPUCC pVCpu, void *pvMem, uint8_t bMapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
    51725177void            iemMemCommitAndUnmapWoSafeJmp(PVMCPUCC pVCpu, void *pvMem, uint8_t bMapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
    51735178void            iemMemCommitAndUnmapRoSafeJmp(PVMCPUCC pVCpu, const void *pvMem, uint8_t bMapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
     5179void            iemMemRollbackAndUnmapWoSafe(PVMCPUCC pVCpu, const void *pvMem, uint8_t bMapInfo) RT_NOEXCEPT;
    51745180#endif
    51755181
  • trunk/src/VBox/VMM/include/IEMMc.h

    r102349 r102424  
    981981    ((a_r64Dst).u = iemMemFlatFetchDataU64Jmp(pVCpu, (a_GCPtrMem)))
    982982# define IEM_MC_FETCH_MEM_FLAT_R80(a_r80Dst, a_GCPtrMem) \
    983     iemMemFetchDataR80Jmp(pVCpu, &(a_r80Dst), UINT8_MAX, (a_GCPtrMem))
     983    iemMemFlatFetchDataR80Jmp(pVCpu, &(a_r80Dst), (a_GCPtrMem))
    984984# define IEM_MC_FETCH_MEM_FLAT_D80(a_d80Dst, a_GCPtrMem) \
    985985    iemMemFetchDataD80Jmp(pVCpu, &(a_d80Dst), UINT8_MAX, (a_GCPtrMem))
     
    19321932#endif
    19331933
     1934/** int32_t alias. */
     1935#ifndef IEM_WITH_SETJMP
     1936# define IEM_MC_MEM_MAP_I32_WO(a_pi32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
     1937         IEM_MC_MEM_MAP_U32_WO(a_pi32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem)
     1938#else
     1939# define IEM_MC_MEM_MAP_I32_WO(a_pi32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
     1940    (a_pi32Mem) = (int32_t *)iemMemMapDataU32WoJmp(pVCpu, &(a_bUnmapInfo), (a_iSeg), (a_GCPtrMem))
     1941#endif
     1942
     1943/** Flat int32_t alias. */
     1944#ifndef IEM_WITH_SETJMP
     1945# define IEM_MC_MEM_FLAT_MAP_I32_WO(a_pi32Mem, a_bUnmapInfo, a_GCPtrMem) \
     1946         IEM_MC_MEM_FLAT_MAP_U32_WO(a_pi32Mem, a_bUnmapInfo, a_GCPtrMem)
     1947#else
     1948# define IEM_MC_MEM_FLAT_MAP_I32_WO(a_pi32Mem, a_bUnmapInfo, a_GCPtrMem) \
     1949    (a_pi32Mem) = (int32_t *)iemMemFlatMapDataU32WoJmp(pVCpu, &(a_bUnmapInfo), (a_GCPtrMem))
     1950#endif
     1951
     1952/** RTFLOAT32U alias. */
     1953#ifndef IEM_WITH_SETJMP
     1954# define IEM_MC_MEM_MAP_R32_WO(a_pr32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
     1955         IEM_MC_MEM_MAP_U32_WO(a_pr32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem)
     1956#else
     1957# define IEM_MC_MEM_MAP_R32_WO(a_pr32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
     1958    (a_pr32Mem) = (PRTFLOAT32U)iemMemMapDataU32WoJmp(pVCpu, &(a_bUnmapInfo), (a_iSeg), (a_GCPtrMem))
     1959#endif
     1960
     1961/** Flat RTFLOAT32U alias. */
     1962#ifndef IEM_WITH_SETJMP
     1963# define IEM_MC_MEM_FLAT_MAP_R32_WO(a_pr32Mem, a_bUnmapInfo, a_GCPtrMem) \
     1964         IEM_MC_MEM_FLAT_MAP_U32_WO(a_pr32Mem, a_bUnmapInfo, a_GCPtrMem)
     1965#else
     1966# define IEM_MC_MEM_FLAT_MAP_R32_WO(a_pr32Mem, a_bUnmapInfo, a_GCPtrMem) \
     1967    (a_pr32Mem) = (PRTFLOAT32U)iemMemFlatMapDataU32WoJmp(pVCpu, &(a_bUnmapInfo), (a_GCPtrMem))
     1968#endif
     1969
    19341970
    19351971/* 64-bit */
     
    20622098
    20632099
     2100/* misc */
     2101
     2102/**
     2103 * Maps guest memory for 80-bit float writeonly direct (or bounce) buffer access.
     2104 *
     2105 * @param[out] a_pr80Mem    Where to return the pointer to the mapping.
     2106 * @param[out] a_bUnmapInfo Where to return unmapping instructions. uint8_t.
     2107 * @param[in]  a_iSeg       The segment register to access via. No UINT8_MAX!
     2108 * @param[in]  a_GCPtrMem   The memory address.
     2109 * @remarks Will return/long jump on errors.
     2110 * @see     IEM_MC_MEM_COMMIT_AND_UNMAP_WO
     2111 */
     2112#ifndef IEM_WITH_SETJMP
     2113# define IEM_MC_MEM_MAP_R80_WO(a_pr80Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) do { \
     2114        IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pr80Mem), sizeof(RTFLOAT80U), (a_iSeg), \
     2115                                           (a_GCPtrMem), IEM_ACCESS_DATA_W, sizeof(uint64_t) - 1)); \
     2116        a_bUnmapInfo = 1 | (IEM_ACCESS_TYPE_WRITE << 4); \
     2117    } while (0)
     2118#else
     2119# define IEM_MC_MEM_MAP_R80_WO(a_pr80Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
     2120    (a_pr80Mem) = iemMemMapDataR80WoJmp(pVCpu, &(a_bUnmapInfo), (a_iSeg), (a_GCPtrMem))
     2121#endif
     2122
     2123/**
     2124 * Maps guest memory for 80-bit float writeonly direct (or bounce) buffer access.
     2125 *
     2126 * @param[out] a_pr80Mem    Where to return the pointer to the mapping.
     2127 * @param[out] a_bUnmapInfo Where to return unmapping instructions. uint8_t.
     2128 * @param[in]  a_GCPtrMem   The memory address.
     2129 * @remarks Will return/long jump on errors.
     2130 * @see     IEM_MC_MEM_COMMIT_AND_UNMAP_WO
     2131 */
     2132#ifndef IEM_WITH_SETJMP
     2133# define IEM_MC_MEM_FLAT_MAP_R80_WO(a_pr80Mem, a_bUnmapInfo, a_GCPtrMem) do { \
     2134        IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pr80Mem), sizeof(RTFLOAT80U), UINT8_MAX, \
     2135                                           (a_GCPtrMem), IEM_ACCESS_DATA_W, sizeof(uint64_t) - 1)); \
     2136        a_bUnmapInfo = 1 | (IEM_ACCESS_TYPE_WRITE << 4); \
     2137    } while (0)
     2138#else
     2139# define IEM_MC_MEM_FLAT_MAP_R80_WO(a_pr80Mem, a_bUnmapInfo, a_GCPtrMem) \
     2140    (a_pr80Mem) = iemMemFlatMapDataR80WoJmp(pVCpu, &(a_bUnmapInfo), (a_GCPtrMem))
     2141#endif
     2142
     2143
    20642144/* commit + unmap */
    20652145
     
    21122192 *
    21132193 * @remarks     May in theory return - for now.
     2194 *
     2195 * @deprecated
    21142196 */
    21152197#define IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(a_pvMem, a_fAccess, a_u16FSW) \
     
    21202202            IEM_MC_RETURN_ON_FAILURE(iemMemCommitAndUnmap(pVCpu, (a_pvMem), (a_fAccess))); \
    21212203    } while (0)
     2204
     2205
     2206/** Commits the memory and unmaps the guest memory unless the FPU status word
     2207 * indicates (@a a_u16FSW) and FPU control word indicates a pending exception
     2208 * that would cause FLD not to store.
     2209 *
     2210 * The current understanding is that \#O, \#U, \#IA and \#IS will prevent a
     2211 * store, while \#P will not.
     2212 *
     2213 * @remarks     May in theory return - for now.
     2214 */
     2215#ifndef IEM_WITH_SETJMP
     2216# define IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(a_pvMem, a_bMapInfo, a_u16FSW) do { \
     2217        RT_NOREF_PV(a_bMapInfo); Assert(a_bMapInfo == (1 | (IEM_ACCESS_TYPE_WRITE << 4)) ); \
     2218        if (   !(a_u16FSW & X86_FSW_ES) \
     2219            || !(  (a_u16FSW & (X86_FSW_UE | X86_FSW_OE | X86_FSW_IE)) \
     2220                 & ~(pVCpu->cpum.GstCtx.XState.x87.FCW & X86_FCW_MASK_ALL) ) ) \
     2221            IEM_MC_RETURN_ON_FAILURE(iemMemCommitAndUnmap(pVCpu, (a_pvMem), IEM_ACCESS_DATA_W)); \
     2222        else \
     2223            iemMemRollbackAndUnmap(pVCpu, (a_pvMem), IEM_ACCESS_DATA_W); \
     2224    } while (0)
     2225#else
     2226# define IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(a_pvMem, a_bMapInfo, a_u16FSW) do { \
     2227        if (   !(a_u16FSW & X86_FSW_ES) \
     2228            || !(  (a_u16FSW & (X86_FSW_UE | X86_FSW_OE | X86_FSW_IE)) \
     2229                 & ~(pVCpu->cpum.GstCtx.XState.x87.FCW & X86_FCW_MASK_ALL) ) ) \
     2230            iemMemCommitAndUnmapWoJmp(pVCpu, (a_pvMem), a_bMapInfo); \
     2231        else \
     2232            iemMemRollbackAndUnmapWo(pVCpu, (a_pvMem), a_bMapInfo); \
     2233    } while (0)
     2234#endif
     2235
     2236/** Rolls back (conceptually only, assumes no writes) and unmaps the guest memory. */
     2237#ifndef IEM_WITH_SETJMP
     2238# define IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(a_pvMem, a_bMapInfo) do { \
     2239        RT_NOREF_PV(a_bMapInfo); Assert(a_bMapInfo == (1 | (IEM_ACCESS_TYPE_WRITE << 4)) ); \
     2240        iemMemRollbackAndUnmap(pVCpu, (a_pvMem), IEM_ACCESS_DATA_W); \
     2241    } while (0)
     2242#else
     2243# define IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(a_pvMem, a_bMapInfo) \
     2244        iemMemRollbackAndUnmapWo(pVCpu, (a_pvMem), a_bMapInfo)
     2245#endif
    21222246
    21232247
Note: See TracChangeset for help on using the changeset viewer.

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette