VirtualBox

Changeset 102427 in vbox


Timestamp:  Dec 1, 2023 11:21:47 PM
Author:     vboxsync
Message:    VMM/IEM: Continue refactoring IEM_MC_MEM_MAP into type specific MCs using bUnmapInfo. bugref:10371
Location:   trunk/src/VBox/VMM
Files:      10 edited

Legend:

    +   added line
    -   removed line
        unchanged (context) line
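
The refactoring this changeset continues replaces the generic IEM_MC_MEM_MAP / IEM_MC_MEM_MAP_EX microcode statements with type- and access-specific variants that carry their unmap state in a uint8_t bUnmapInfo local, instead of re-passing the IEM_ACCESS_* constant at unmap time. Condensed from the fbstp hunk below, the pattern change looks roughly like this (fragment only, not a complete MC block):

    /* Before: generic map MC; unmapping is keyed on the access constant again. */
    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
    /* ... */
    IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);

    /* After: type- and access-specific map MC; bUnmapInfo carries the unmap state. */
    IEM_MC_LOCAL(uint8_t, bUnmapInfo);
    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    /* ... */
    IEM_MC_MEM_COMMIT_AND_UNMAP_WO(pd80Dst, bUnmapInfo);      /* success path */
    IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(pd80Dst, bUnmapInfo);    /* failure path, e.g. FCW.IM clear */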
  • trunk/src/VBox/VMM/VMMAll/IEMAll.cpp

    r102424 r102427  
    @@ -7103 +7103 @@
     #include "IEMAllMemRWTmpl.cpp.h"
     
    +#define TMPL_MEM_TYPE       RTPBCD80U
    +#define TMPL_MEM_TYPE_ALIGN (sizeof(uint64_t) - 1) /** @todo testcase: 80-bit BCD alignment */
    +#define TMPL_MEM_FN_SUFF    D80
    +#define TMPL_MEM_FMT_TYPE   "%.10Rhxs"
    +#define TMPL_MEM_FMT_DESC   "tword"
    +#include "IEMAllMemRWTmpl.cpp.h"
    +
     
     /**

    @@ -7158 +7165 @@
     #endif
         return rc;
    -}
    -#endif
    -
    -
    -/**
    - * Fetches a data decimal tword.
    - *
    - * @returns Strict VBox status code.
    - * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
    - * @param   pd80Dst             Where to return the tword.
    - * @param   iSegReg             The index of the segment register to use for
    - *                              this access.  The base and limits are checked.
    - * @param   GCPtrMem            The address of the guest memory.
    - */
    -VBOXSTRICTRC iemMemFetchDataD80(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) RT_NOEXCEPT
    -{
    -    /* The lazy approach for now... */
    -    PCRTPBCD80U pd80Src;
    -    VBOXSTRICTRC rc = iemMemMap(pVCpu, (void **)&pd80Src, sizeof(*pd80Src), iSegReg, GCPtrMem,
    -                                IEM_ACCESS_DATA_R, 7 /** @todo FBLD alignment check */);
    -    if (rc == VINF_SUCCESS)
    -    {
    -        *pd80Dst = *pd80Src;
    -        rc = iemMemCommitAndUnmap(pVCpu, (void *)pd80Src, IEM_ACCESS_DATA_R);
    -        Log(("IEM RD tword %d|%RGv: %.10Rhxs\n", iSegReg, GCPtrMem, pd80Dst));
    -    }
    -    return rc;
    -}
    -
    -
    -#ifdef IEM_WITH_SETJMP
    -/**
    - * Fetches a data decimal tword, longjmp on error.
    - *
    - * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
    - * @param   pd80Dst             Where to return the tword.
    - * @param   iSegReg             The index of the segment register to use for
    - *                              this access.  The base and limits are checked.
    - * @param   GCPtrMem            The address of the guest memory.
    - */
    -void iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP
    -{
    -    /* The lazy approach for now... */
    -    PCRTPBCD80U pd80Src = (PCRTPBCD80U)iemMemMapJmp(pVCpu, sizeof(*pd80Src), iSegReg, GCPtrMem,
    -                                                    IEM_ACCESS_DATA_R, 7 /** @todo FBSTP alignment check */);
    -    *pd80Dst = *pd80Src;
    -    iemMemCommitAndUnmapJmp(pVCpu, (void *)pd80Src, IEM_ACCESS_DATA_R);
    -    Log(("IEM RD tword %d|%RGv: %.10Rhxs\n", iSegReg, GCPtrMem, pd80Dst));
     }
     #endif
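
The hand-written iemMemFetchDataD80 / iemMemFetchDataD80Jmp bodies removed above are now generated by another inclusion of IEMAllMemRWTmpl.cpp.h, driven by the TMPL_MEM_* defines added before it. The actual template is part of the VirtualBox sources and is not reproduced here; the following is only a minimal, self-contained sketch of the multiple-inclusion, token-pasting technique such a template header is built on (the names mem_fetch_template.h and memFetchData* are made up for the illustration):

    /* mem_fetch_template.h -- illustrative sketch, NOT IEMAllMemRWTmpl.cpp.h.
     * Each inclusion instantiates one fetch helper for the type named by the
     * TMPL_MEM_* macros the includer defines just before the #include.       */
    #ifndef TMPL_MEM_TYPE
    # error "Define TMPL_MEM_TYPE and TMPL_MEM_FN_SUFF before including this header."
    #endif
    #define TMPL_CONCAT_(a, b)  a##b
    #define TMPL_CONCAT(a, b)   TMPL_CONCAT_(a, b)

    /* Expands to e.g. memFetchDataU64 when TMPL_MEM_FN_SUFF is U64. */
    static int TMPL_CONCAT(memFetchData, TMPL_MEM_FN_SUFF)(const void *pvSrc, TMPL_MEM_TYPE *pDst)
    {
        /* Real code would map and validate guest memory first; the sketch just copies. */
        memcpy(pDst, pvSrc, sizeof(*pDst));
        return 0;
    }

    #undef TMPL_CONCAT
    #undef TMPL_CONCAT_
    #undef TMPL_MEM_TYPE
    #undef TMPL_MEM_FN_SUFF

    /* Includer side (separate .c/.h file): one block of defines plus an #include
     * per instantiated type, exactly like the hunks above and in IEMInline.h.   */
    #include <stdint.h>
    #include <string.h>
    #define TMPL_MEM_TYPE    uint64_t
    #define TMPL_MEM_FN_SUFF U64
    #include "mem_fetch_template.h"
    #define TMPL_MEM_TYPE    uint32_t
    #define TMPL_MEM_FN_SUFF U32
    #include "mem_fetch_template.h"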
  • trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h

    r102426 r102427  
    @@ -12331 +12331 @@
     {
         IEMOP_MNEMONIC(fbstp_m80d, "fbstp m80d");
    -    IEM_MC_BEGIN(3, 2, 0, 0);
    +    IEM_MC_BEGIN(3, 3, 0, 0);
         IEM_MC_LOCAL(RTGCPTR,               GCPtrEffDst);
    -    IEM_MC_LOCAL(uint16_t,              u16Fsw);
    -    IEM_MC_ARG_LOCAL_REF(uint16_t *,    pu16Fsw,    u16Fsw, 0);
    -    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,            1);
    -    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);
    -
         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0);
    +
         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
         IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE();
         IEM_MC_MAYBE_RAISE_FPU_XCPT();
    -
    -    IEM_MC_MEM_MAP_EX(pd80Dst, IEM_ACCESS_DATA_W, sizeof(*pd80Dst), pVCpu->iem.s.iEffSeg, GCPtrEffDst, 7 /*cbAlign*/, 1 /*arg*/);
         IEM_MC_PREPARE_FPU_USAGE();
    +
    +    IEM_MC_LOCAL(uint8_t,               bUnmapInfo);
    +    IEM_MC_ARG(PRTPBCD80U,              pd80Dst,            1);
    +    IEM_MC_MEM_MAP_D80_WO(pd80Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
    +
    +    IEM_MC_ARG(PCRTFLOAT80U,            pr80Value,          2);
         IEM_MC_IF_FPUREG_NOT_EMPTY_REF_R80(pr80Value, 0) {
    +        IEM_MC_LOCAL(uint16_t,          u16Fsw);
    +        IEM_MC_ARG_LOCAL_REF(uint16_t *,pu16Fsw,    u16Fsw, 0);
             IEM_MC_CALL_FPU_AIMPL_3(iemAImpl_fst_r80_to_d80, pu16Fsw, pd80Dst, pr80Value);
    -        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE(pd80Dst, IEM_ACCESS_DATA_W, u16Fsw);
    +        IEM_MC_MEM_COMMIT_AND_UNMAP_FOR_FPU_STORE_WO(pd80Dst, bUnmapInfo, u16Fsw);
             IEM_MC_UPDATE_FSW_WITH_MEM_OP_THEN_POP(u16Fsw, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
         } IEM_MC_ELSE() {
             IEM_MC_IF_FCW_IM() {
                 IEM_MC_STORE_MEM_INDEF_D80_BY_REF(pd80Dst);
    -            IEM_MC_MEM_COMMIT_AND_UNMAP(pd80Dst, IEM_ACCESS_DATA_W);
    +            IEM_MC_MEM_COMMIT_AND_UNMAP_WO(pd80Dst, bUnmapInfo);
    +        } IEM_MC_ELSE() {
    +            IEM_MC_MEM_ROLLBACK_AND_UNMAP_WO(pd80Dst, bUnmapInfo);
             } IEM_MC_ENDIF();
             IEM_MC_FPU_STACK_UNDERFLOW_MEM_OP_THEN_POP(UINT8_MAX, pVCpu->iem.s.iEffSeg, GCPtrEffDst, pVCpu->iem.s.uFpuOpcode);
  • trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py

    r102426 r102427  
    @@ -3028 +3028 @@
         'IEM_MC_MEM_MAP':                                            (McBlock.parseMcGeneric,           True,  False, ),
         'IEM_MC_MEM_MAP_EX':                                         (McBlock.parseMcGeneric,           True,  False, ),
    +    'IEM_MC_MEM_MAP_D80_WO':                                     (McBlock.parseMcGeneric,           True,  False, ),
         'IEM_MC_MEM_MAP_I16_WO':                                     (McBlock.parseMcGeneric,           True,  False, ),
         'IEM_MC_MEM_MAP_I32_WO':                                     (McBlock.parseMcGeneric,           True,  False, ),
  • trunk/src/VBox/VMM/VMMAll/IEMAllMemRWTmpl.cpp.h

    r102424 r102427  
    @@ -500 +500 @@
     #undef TMPL_MEM_FMT_TYPE
     #undef TMPL_MEM_FMT_DESC
    -#undef TMPL_MEM_BY_REF
     #undef TMPL_WITH_PUSH_SREG
     
  • trunk/src/VBox/VMM/VMMAll/IEMAllN8vePython.py

    r102426 r102427  
    @@ -146 +146 @@
         'IEM_MC_MEM_FLAT_MAP_EX':                                    (None, True,  False, ),
         'IEM_MC_MEM_FLAT_MAP':                                       (None, True,  False, ),
    +    'IEM_MC_MEM_FLAT_MAP_D80_WO':                                (None, True,  False, ),
         'IEM_MC_MEM_FLAT_MAP_I16_WO':                                (None, True,  False, ),
         'IEM_MC_MEM_FLAT_MAP_I32_WO':                                (None, True,  False, ),
  • trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py

    r102426 r102427  
    @@ -705 +705 @@
             'IEM_MC_STORE_MEM_U256_ALIGN_AVX':        (  0, 'IEM_MC_STORE_MEM_FLAT_U256_ALIGN_AVX' ),
             'IEM_MC_MEM_MAP':                         (  2, 'IEM_MC_MEM_FLAT_MAP' ),
    +        'IEM_MC_MEM_MAP_D80_WO':                  (  2, 'IEM_MC_MEM_FLAT_MAP_D80_WO' ),
             'IEM_MC_MEM_MAP_I16_WO':                  (  2, 'IEM_MC_MEM_FLAT_MAP_I16_WO' ),
             'IEM_MC_MEM_MAP_I32_WO':                  (  2, 'IEM_MC_MEM_FLAT_MAP_I32_WO' ),
  • trunk/src/VBox/VMM/include/IEMInline.h

    r102424 r102427  
    @@ -3839 +3839 @@
     #include "../VMMAll/IEMAllMemRWTmplInline.cpp.h"
     
    +#define TMPL_MEM_TYPE       RTPBCD80U
    +#define TMPL_MEM_TYPE_ALIGN 7           /** @todo RTPBCD80U alignment testcase */
    +#define TMPL_MEM_TYPE_SIZE  10
    +#define TMPL_MEM_FN_SUFF    D80
    +#define TMPL_MEM_FMT_TYPE   "%.10Rhxs"
    +#define TMPL_MEM_FMT_DESC   "tword"
    +#include "../VMMAll/IEMAllMemRWTmplInline.cpp.h"
    +
     #undef TMPL_MEM_CHECK_UNALIGNED_WITHIN_PAGE_OK
     
  • trunk/src/VBox/VMM/include/IEMInternal.h

    r102424 r102427  
    @@ -5111 +5111 @@
     uint64_t        iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     uint64_t        iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    -void            iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PRTFLOAT80U pr80Dst) IEM_NOEXCEPT_MAY_LONGJMP;
    +void            iemMemFetchDataR80Jmp(PVMCPUCC pVCpu, PRTFLOAT80U pr80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    +void            iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     # endif
    -void            iemMemFetchDataD80Jmp(PVMCPUCC pVCpu, PRTPBCD80U pd80Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     void            iemMemFetchDataU128Jmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     void            iemMemFetchDataU128AlignedSseJmp(PVMCPUCC pVCpu, PRTUINT128U pu128Dst, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;

    @@ -5145 +5145 @@
     void            iemMemStoreDataU256AlignedAvxSafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTUINT256U pu256Value) IEM_NOEXCEPT_MAY_LONGJMP;
     void            iemMemStoreDataR80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTFLOAT80U pr80Value) IEM_NOEXCEPT_MAY_LONGJMP;
    +void            iemMemStoreDataD80SafeJmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, PCRTPBCD80U pd80Value) IEM_NOEXCEPT_MAY_LONGJMP;
     #if 0
     void            iemMemStoreDataU8Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem, uint8_t u8Value) IEM_NOEXCEPT_MAY_LONGJMP;

    @@ -5173 +5174 @@
     PRTFLOAT80U     iemMemMapDataR80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     PCRTFLOAT80U    iemMemMapDataR80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    +PRTPBCD80U      iemMemMapDataD80RwSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    +PRTPBCD80U      iemMemMapDataD80WoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
    +PCRTPBCD80U     iemMemMapDataD80RoSafeJmp(PVMCPUCC pVCpu, uint8_t *pbUnmapInfo, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
     
     void            iemMemCommitAndUnmapRwSafeJmp(PVMCPUCC pVCpu, void *pvMem, uint8_t bMapInfo) IEM_NOEXCEPT_MAY_LONGJMP;
  • trunk/src/VBox/VMM/include/IEMMc.h

    r102426 r102427  
    @@ -2194 +2194 @@
         (a_pr80Mem) = iemMemFlatMapDataR80WoJmp(pVCpu, &(a_bUnmapInfo), (a_GCPtrMem))
     #endif
    +
    +
    +/**
    + * Maps guest memory for 80-bit BCD writeonly direct (or bounce) buffer acccess.
    + *
    + * @param[out] a_pd80Mem    Where to return the pointer to the mapping.
    + * @param[out] a_bUnmapInfo Where to return umapping instructions. uint8_t.
    + * @param[in]  a_iSeg       The segment register to access via. No UINT8_MAX!
    + * @param[in]  a_GCPtrMem   The memory address.
    + * @remarks Will return/long jump on errors.
    + * @see     IEM_MC_MEM_COMMIT_AND_UNMAP_WO
    + */
    +#ifndef IEM_WITH_SETJMP
    +# define IEM_MC_MEM_MAP_D80_WO(a_pd80Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) do { \
    +        IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pd80Mem), sizeof(RTFLOAT80U), (a_iSeg), \
    +                                           (a_GCPtrMem), IEM_ACCESS_DATA_W, sizeof(uint64_t) - 1)); \
    +        a_bUnmapInfo = 1 | (IEM_ACCESS_TYPE_WRITE << 4); \
    +    } while (0)
    +#else
    +# define IEM_MC_MEM_MAP_D80_WO(a_pd80Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) \
    +    (a_pd80Mem) = iemMemMapDataD80WoJmp(pVCpu, &(a_bUnmapInfo), (a_iSeg), (a_GCPtrMem))
    +#endif
    +
    +/**
    + * Maps guest memory for 80-bit BCD writeonly direct (or bounce) buffer acccess.
    + *
    + * @param[out] a_pd80Mem    Where to return the pointer to the mapping.
    + * @param[out] a_bUnmapInfo Where to return umapping instructions. uint8_t.
    + * @param[in]  a_GCPtrMem   The memory address.
    + * @remarks Will return/long jump on errors.
    + * @see     IEM_MC_MEM_COMMIT_AND_UNMAP_WO
    + */
    +#ifndef IEM_WITH_SETJMP
    +# define IEM_MC_MEM_FLAT_MAP_D80_WO(a_pd80Mem, a_bUnmapInfo, a_GCPtrMem) do { \
    +        IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pd80Mem), sizeof(RTFLOAT80U), UINT8_MAX, \
    +                                           (a_GCPtrMem), IEM_ACCESS_DATA_W, sizeof(uint64_t) - 1)); \
    +        a_bUnmapInfo = 1 | (IEM_ACCESS_TYPE_WRITE << 4); \
    +    } while (0)
    +#else
    +# define IEM_MC_MEM_FLAT_MAP_D80_WO(a_pd80Mem, a_bUnmapInfo, a_GCPtrMem) \
    +    (a_pd80Mem) = iemMemFlatMapDataD80WoJmp(pVCpu, &(a_bUnmapInfo), (a_GCPtrMem))
    +#endif
    +
     
     
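
As a reading aid for the two macro definitions above: without IEM_WITH_SETJMP the MC expands to a status-code-returning iemMemMap() call plus manual bUnmapInfo bookkeeping, while the setjmp build collapses to a single longjmp-on-error helper call. The hypothetical wrapper below (exampleMapD80ForWrite is an invented name, not IEM code) restates the non-setjmp expansion as a plain function:

    /* Hypothetical restatement of the non-setjmp IEM_MC_MEM_MAP_D80_WO expansion above;
     * illustration only, assuming the usual IEM types and the iemMemMap() signature.    */
    static VBOXSTRICTRC exampleMapD80ForWrite(PVMCPUCC pVCpu, PRTPBCD80U *ppd80Dst,
                                              uint8_t *pbUnmapInfo, uint8_t iSeg, RTGCPTR GCPtrMem)
    {
        /* Map 10 bytes writable, with the same qword alignment request the macro uses. */
        VBOXSTRICTRC rcStrict = iemMemMap(pVCpu, (void **)ppd80Dst, sizeof(RTFLOAT80U), iSeg,
                                          GCPtrMem, IEM_ACCESS_DATA_W, sizeof(uint64_t) - 1);
        if (rcStrict != VINF_SUCCESS)
            return rcStrict;                               /* the IEM_MC_RETURN_ON_FAILURE path */
        *pbUnmapInfo = 1 | (IEM_ACCESS_TYPE_WRITE << 4);   /* consumed later by the unmap MCs    */
        return VINF_SUCCESS;
    }
    /* The IEM_WITH_SETJMP build instead reduces to:
     *     pd80Dst = iemMemMapDataD80WoJmp(pVCpu, &bUnmapInfo, iSeg, GCPtrMem);          */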
  • trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp

    r102426 r102427  
    @@ -930 +930 @@
     #define IEM_MC_POP_U64(a_pu64Value)                                     do { CHK_VAR(a_pu64Value); (void)fMcBegin; } while (0)
     
    +#define IEM_MC_MEM_MAP_D80_WO(a_pd80Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) do { CHK_VAR(a_pd80Mem); (a_pd80Mem) = NULL; CHK_PTYPE(RTPBCD80U *,      a_pd80Mem); CHK_VAR(a_bUnmapInfo); CHK_TYPE(uint8_t, a_bUnmapInfo); a_bUnmapInfo = 1; CHK_GCPTR(a_GCPtrMem); CHK_VAR(a_GCPtrMem); CHK_SEG_IDX(a_iSeg); (void)fMcBegin; } while (0)
     #define IEM_MC_MEM_MAP_I16_WO(a_pi16Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) do { CHK_VAR(a_pi16Mem); (a_pi16Mem) = NULL; CHK_PTYPE(int16_t *,        a_pi16Mem); CHK_VAR(a_bUnmapInfo); CHK_TYPE(uint8_t, a_bUnmapInfo); a_bUnmapInfo = 1; CHK_GCPTR(a_GCPtrMem); CHK_VAR(a_GCPtrMem); CHK_SEG_IDX(a_iSeg); (void)fMcBegin; } while (0)
     #define IEM_MC_MEM_MAP_I32_WO(a_pi32Mem, a_bUnmapInfo, a_iSeg, a_GCPtrMem) do { CHK_VAR(a_pi32Mem); (a_pi32Mem) = NULL; CHK_PTYPE(int32_t *,        a_pi32Mem); CHK_VAR(a_bUnmapInfo); CHK_TYPE(uint8_t, a_bUnmapInfo); a_bUnmapInfo = 1; CHK_GCPTR(a_GCPtrMem); CHK_VAR(a_GCPtrMem); CHK_SEG_IDX(a_iSeg); (void)fMcBegin; } while (0)