VirtualBox

Changeset 100811 in vbox


Timestamp:
Aug 6, 2023 1:54:38 AM
Author:
vboxsync
Message:

VMM/IEM: Working on implementing the FLAT mode (64-bit mode and 32-bit FLAT) optimizations. Introduced a special 64-bit FS+GS(+CS) variant so we can deal with it the same way as the flat 32-bit variant; this means lumping CS-prefixed accesses (unlikely in practice) in with FS and GS. We call the FLAT variant for DS, ES and SS accesses, and the FS+GS variant for memory accesses via FS, GS and CS. bugref:10369
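
For context, a minimal illustrative sketch (not part of the changeset; variable names are hypothetical) of the two call forms this change lets the threaded code choose between:

    /* Segmented access: the helper applies segment base/limit/attribute checks. */
    uint32_t u32Seg  = iemMemFetchDataU32Jmp(pVCpu, pVCpu->iem.s.iEffSeg, GCPtrMem);

    /* FLAT access (64-bit mode, or flat 32-bit, via DS/ES/SS): the effective
       address already is the linear address, so segmentation is skipped. */
    uint32_t u32Flat = iemMemFlatFetchDataU32Jmp(pVCpu, GCPtrMem);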

Location:
trunk/src/VBox/VMM
Files:
5 edited

Legend:

  ' '  unmodified
  '+'  added
  '-'  removed
  • trunk/src/VBox/VMM/VMMAll/IEMAll.cpp

r100777 → r100811

@@ -7155 +7155 @@
 # endif
 }
-#endif
+
+/**
+ * Fetches a data dword from a FLAT address, longjmp on error.
+ *
+ * @returns The dword
+ * @param   pVCpu               The cross context virtual CPU structure of the calling thread.
+ * @param   GCPtrMem            The address of the guest memory.
+ */
+uint32_t iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP
+{
+# if defined(IEM_WITH_DATA_TLB) && defined(IN_RING3)
+    /*
+     * Convert from segmented to flat address and check that it doesn't cross a page boundary.
+     */
+    RTGCPTR GCPtrEff = GCPtrMem;
+    if (RT_LIKELY((GCPtrEff & GUEST_PAGE_OFFSET_MASK) <= GUEST_PAGE_SIZE - sizeof(uint32_t)))
+    {
+        /*
+         * TLB lookup.
+         */
+        uint64_t const uTag  = IEMTLB_CALC_TAG(    &pVCpu->iem.s.DataTlb, GCPtrEff);
+        PIEMTLBENTRY   pTlbe = IEMTLB_TAG_TO_ENTRY(&pVCpu->iem.s.DataTlb, uTag);
+        if (pTlbe->uTag == uTag)
+        {
+            /*
+             * Check TLB page table level access flags.
+             */
+            uint64_t const fNoUser = IEM_GET_CPL(pVCpu) == 3 ? IEMTLBE_F_PT_NO_USER : 0;
+            if (   (pTlbe->fFlagsAndPhysRev & (  IEMTLBE_F_PHYS_REV       | IEMTLBE_F_PG_UNASSIGNED | IEMTLBE_F_PG_NO_READ
+                                               | IEMTLBE_F_PT_NO_ACCESSED | IEMTLBE_F_NO_MAPPINGR3  | fNoUser))
+                == pVCpu->iem.s.DataTlb.uTlbPhysRev)
+            {
+                STAM_STATS({pVCpu->iem.s.DataTlb.cTlbHits++;});
+
+                /*
+                 * Alignment check:
+                 */
+                /** @todo check priority \#AC vs \#PF */
+                if (   !(GCPtrEff & (sizeof(uint32_t) - 1))
+                    || !(pVCpu->cpum.GstCtx.cr0 & X86_CR0_AM)
+                    || !pVCpu->cpum.GstCtx.eflags.Bits.u1AC
+                    || IEM_GET_CPL(pVCpu) != 3)
+                {
+                    /*
+                     * Fetch and return the dword.
+                     */
+                    Assert(pTlbe->pbMappingR3); /* (Only ever cleared by the owning EMT.) */
+                    Assert(!((uintptr_t)pTlbe->pbMappingR3 & GUEST_PAGE_OFFSET_MASK));
+                    uint32_t const u32Ret = *(uint32_t const *)&pTlbe->pbMappingR3[GCPtrEff & GUEST_PAGE_OFFSET_MASK];
+                    Log9(("IEM RD dword %RGv: %#010x\n", GCPtrMem, u32Ret));
+                    return u32Ret;
+                }
+                Log10(("iemMemFlatFetchDataU32Jmp: Raising #AC for %RGv\n", GCPtrEff));
+                iemRaiseAlignmentCheckExceptionJmp(pVCpu);
+            }
+        }
+    }
+
+    /* Fall back on the slow careful approach in case of TLB miss, MMIO, exception,
+       outdated page pointer, or other troubles. */
+    Log10(("iemMemFlatFetchDataU32Jmp: %RGv fallback\n", GCPtrMem));
+    return iemMemFetchDataU32SafeJmp(pVCpu, UINT8_MAX, GCPtrMem);
+
+# else
+    uint32_t const *pu32Src = (uint32_t const *)iemMemMapJmp(pVCpu, sizeof(*pu32Src), UINT8_MAX, GCPtrMem,
+                                                             IEM_ACCESS_DATA_R, sizeof(*pu32Src) - 1);
+    uint32_t const  u32Ret  = *pu32Src;
+    iemMemCommitAndUnmapJmp(pVCpu, (void *)pu32Src, IEM_ACCESS_DATA_R);
+    Log9(("IEM RD dword %RGv: %#010x\n", GCPtrMem, u32Ret));
+    return u32Ret;
+# endif
+}
+
+#endif /* IEM_WITH_SETJMP */
 
 
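The RT_LIKELY page-crossing guard in the function above deserves a worked example; a sketch assuming 4 KiB guest pages (GUEST_PAGE_SIZE = 0x1000, GUEST_PAGE_OFFSET_MASK = 0xfff):

    /* A dword fits within its page for offsets 0x000..0xffc (0xffc <= 0x1000 - 4);
       offsets 0xffd..0xfff would straddle two pages and must take the
       iemMemFetchDataU32SafeJmp fallback instead of the TLB fast path. */
    RTGCPTR const offPage = GCPtrMem & GUEST_PAGE_OFFSET_MASK;
    bool const    fInPage = offPage <= GUEST_PAGE_SIZE - sizeof(uint32_t);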
  • trunk/src/VBox/VMM/VMMAll/IEMAllThrdFuncs.cpp

r100732 → r100811

@@ -179 +179 @@
 /** Variant of IEM_MC_CALC_RM_EFF_ADDR with additional parameters. */
 #define IEM_MC_CALC_RM_EFF_ADDR_THREADED_64(a_GCPtrEff, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm) \
+    (a_GCPtrEff) = iemOpHlpCalcRmEffAddrThreadedAddr64(pVCpu, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm)
+
+/** Variant of IEM_MC_CALC_RM_EFF_ADDR with additional parameters. */
+#define IEM_MC_CALC_RM_EFF_ADDR_THREADED_64_FSGS(a_GCPtrEff, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm) \
     (a_GCPtrEff) = iemOpHlpCalcRmEffAddrThreadedAddr64(pVCpu, a_bRmEx, a_uSibAndRspOffset, a_u32Disp, a_cbImm)
 
  • trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py

r100806 → r100811

@@ -143 +143 @@
     ksVariation_32_Addr16   = '_32_Addr16';     ##< 32-bit mode code (386+), address size prefixed to 16-bit addressing.
     ksVariation_64          = '_64';            ##< 64-bit mode code.
+    ksVariation_64_FsGs     = '_64_FsGs';       ##< 64-bit mode code, with memory accesses via FS or GS.
     ksVariation_64_Addr32   = '_64_Addr32';     ##< 64-bit mode code, address size prefixed to 32-bit addressing.
     kasVariations           = (
     
@@ -153 +154 @@
         ksVariation_32_Addr16,
         ksVariation_64,
+        ksVariation_64_FsGs,
         ksVariation_64_Addr32,
     );
     
@@ -169 +171 @@
         ksVariation_32_Addr16,
         ksVariation_64,
+        ksVariation_64_FsGs,
         ksVariation_64_Addr32,
     );
     
@@ -174 +177 @@
         ksVariation_Default,
         ksVariation_64,
+        ksVariation_64_FsGs,
         ksVariation_32_Flat,
         ksVariation_32,
     
@@ -191 +195 @@
         ksVariation_32_Addr16:  '32-bit w/ address prefix (Addr16)',
         ksVariation_64:         '64-bit',
+        ksVariation_64_FsGs:    '64-bit with memory accessed via FS or GS',
         ksVariation_64_Addr32:  '64-bit w/ address prefix (Addr32)',
 
     
@@ -411 +416 @@
 
 
+    ## Maps memory related MCs to info for FLAT conversion.
+    ## This is used in 64-bit and flat 32-bit variants to skip the unnecessary
+    ## segmentation checking for every memory access.  Only applied to accesses
+    ## via ES, DS and SS.  FS, GS and CS get the full segmentation treatment;
+    ## the latter (CS) is just to keep things simple (we could safely fetch via
+    ## it, but only in 64-bit mode could we safely write via it, IIRC).
+    kdMemMcToFlatInfo = {
+        'IEM_MC_FETCH_MEM_U8':                    (  1, 'IEM_MC_FETCH_MEM_FLAT_U8' ),
+        'IEM_MC_FETCH_MEM16_U8':                  (  1, 'IEM_MC_FETCH_MEM16_FLAT_U8' ),
+        'IEM_MC_FETCH_MEM32_U8':                  (  1, 'IEM_MC_FETCH_MEM32_FLAT_U8' ),
+        'IEM_MC_FETCH_MEM_U16':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_U16' ),
+        'IEM_MC_FETCH_MEM_U16_DISP':              (  1, 'IEM_MC_FETCH_MEM_FLAT_U16_DISP' ),
+        'IEM_MC_FETCH_MEM_I16':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_I16' ),
+        'IEM_MC_FETCH_MEM_U32':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_U32' ),
+        'IEM_MC_FETCH_MEM_U32_DISP':              (  1, 'IEM_MC_FETCH_MEM_FLAT_U32_DISP' ),
+        'IEM_MC_FETCH_MEM_I32':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_I32' ),
+        'IEM_MC_FETCH_MEM_U64':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_U64' ),
+        'IEM_MC_FETCH_MEM_U64_DISP':              (  1, 'IEM_MC_FETCH_MEM_FLAT_U64_DISP' ),
+        'IEM_MC_FETCH_MEM_U64_ALIGN_U128':        (  1, 'IEM_MC_FETCH_MEM_FLAT_U64_ALIGN_U128' ),
+        'IEM_MC_FETCH_MEM_I64':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_I64' ),
+        'IEM_MC_FETCH_MEM_R32':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_R32' ),
+        'IEM_MC_FETCH_MEM_R64':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_R64' ),
+        'IEM_MC_FETCH_MEM_R80':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_R80' ),
+        'IEM_MC_FETCH_MEM_D80':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_D80' ),
+        'IEM_MC_FETCH_MEM_U128':                  (  1, 'IEM_MC_FETCH_MEM_FLAT_U128' ),
+        'IEM_MC_FETCH_MEM_U128_NO_AC':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U128_NO_AC' ),
+        'IEM_MC_FETCH_MEM_U128_ALIGN_SSE':        (  1, 'IEM_MC_FETCH_MEM_FLAT_U128_ALIGN_SSE' ),
+        'IEM_MC_FETCH_MEM_XMM':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_XMM' ),
+        'IEM_MC_FETCH_MEM_XMM_NO_AC':             (  1, 'IEM_MC_FETCH_MEM_FLAT_XMM_NO_AC' ),
+        'IEM_MC_FETCH_MEM_XMM_ALIGN_SSE':         (  1, 'IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE' ),
+        'IEM_MC_FETCH_MEM_XMM_U32':               (  2, 'IEM_MC_FETCH_MEM_FLAT_XMM_U32' ),
+        'IEM_MC_FETCH_MEM_XMM_U64':               (  2, 'IEM_MC_FETCH_MEM_FLAT_XMM_U64' ),
+        'IEM_MC_FETCH_MEM_U256':                  (  1, 'IEM_MC_FETCH_MEM_FLAT_U256' ),
+        'IEM_MC_FETCH_MEM_U256_NO_AC':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U256_NO_AC' ),
+        'IEM_MC_FETCH_MEM_U256_ALIGN_AVX':        (  1, 'IEM_MC_FETCH_MEM_FLAT_U256_ALIGN_AVX' ),
+        'IEM_MC_FETCH_MEM_YMM':                   (  1, 'IEM_MC_FETCH_MEM_FLAT_YMM' ),
+        'IEM_MC_FETCH_MEM_YMM_NO_AC':             (  1, 'IEM_MC_FETCH_MEM_FLAT_YMM_NO_AC' ),
+        'IEM_MC_FETCH_MEM_YMM_ALIGN_AVX':         (  1, 'IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX' ),
+        'IEM_MC_FETCH_MEM_U8_ZX_U16':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_ZX_U16' ),
+        'IEM_MC_FETCH_MEM_U8_ZX_U32':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_ZX_U32' ),
+        'IEM_MC_FETCH_MEM_U8_ZX_U64':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_ZX_U64' ),
+        'IEM_MC_FETCH_MEM_U16_ZX_U32':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U16_ZX_U32' ),
+        'IEM_MC_FETCH_MEM_U16_ZX_U64':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U16_ZX_U64' ),
+        'IEM_MC_FETCH_MEM_U32_ZX_U64':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U32_ZX_U64' ),
+        'IEM_MC_FETCH_MEM_U8_SX_U16':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_SX_U16' ),
+        'IEM_MC_FETCH_MEM_U8_SX_U32':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_SX_U32' ),
+        'IEM_MC_FETCH_MEM_U8_SX_U64':             (  1, 'IEM_MC_FETCH_MEM_FLAT_U8_SX_U64' ),
+        'IEM_MC_FETCH_MEM_U16_SX_U32':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U16_SX_U32' ),
+        'IEM_MC_FETCH_MEM_U16_SX_U64':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U16_SX_U64' ),
+        'IEM_MC_FETCH_MEM_U32_SX_U64':            (  1, 'IEM_MC_FETCH_MEM_FLAT_U32_SX_U64' ),
+        'IEM_MC_STORE_MEM_U8':                    (  0, 'IEM_MC_STORE_MEM_FLAT_U8' ),
+        'IEM_MC_STORE_MEM_U16':                   (  0, 'IEM_MC_STORE_MEM_FLAT_U16' ),
+        'IEM_MC_STORE_MEM_U32':                   (  0, 'IEM_MC_STORE_MEM_FLAT_U32' ),
+        'IEM_MC_STORE_MEM_U64':                   (  0, 'IEM_MC_STORE_MEM_FLAT_U64' ),
+        'IEM_MC_STORE_MEM_U8_CONST':              (  0, 'IEM_MC_STORE_MEM_FLAT_U8_CONST' ),
+        'IEM_MC_STORE_MEM_U16_CONST':             (  0, 'IEM_MC_STORE_MEM_FLAT_U16_CONST' ),
+        'IEM_MC_STORE_MEM_U32_CONST':             (  0, 'IEM_MC_STORE_MEM_FLAT_U32_CONST' ),
+        'IEM_MC_STORE_MEM_U64_CONST':             (  0, 'IEM_MC_STORE_MEM_FLAT_U64_CONST' ),
+        'IEM_MC_STORE_MEM_U128':                  (  0, 'IEM_MC_STORE_MEM_FLAT_U128' ),
+        'IEM_MC_STORE_MEM_U128_ALIGN_SSE':        (  0, 'IEM_MC_STORE_MEM_FLAT_U128_ALIGN_SSE' ),
+        'IEM_MC_STORE_MEM_U256':                  (  0, 'IEM_MC_STORE_MEM_FLAT_U256' ),
+        'IEM_MC_STORE_MEM_U256_ALIGN_AVX':        (  0, 'IEM_MC_STORE_MEM_FLAT_U256_ALIGN_AVX' ),
+        'IEM_MC_PUSH_U16':                        ( -1, 'IEM_MC_FLAT_PUSH_U16' ),
+        'IEM_MC_PUSH_U32':                        ( -1, 'IEM_MC_FLAT_PUSH_U32' ),
+        'IEM_MC_PUSH_U32_SREG':                   ( -1, 'IEM_MC_FLAT_PUSH_U32_SREG' ),
+        'IEM_MC_PUSH_U64':                        ( -1, 'IEM_MC_FLAT_PUSH_U64' ),
+        'IEM_MC_POP_U16':                         ( -1, 'IEM_MC_FLAT_POP_U16' ),
+        'IEM_MC_POP_U32':                         ( -1, 'IEM_MC_FLAT_POP_U32' ),
+        'IEM_MC_POP_U64':                         ( -1, 'IEM_MC_FLAT_POP_U64' ),
+        'IEM_MC_POP_EX_U16':                      ( -1, 'IEM_MC_FLAT_POP_EX_U16' ),
+        'IEM_MC_POP_EX_U32':                      ( -1, 'IEM_MC_FLAT_POP_EX_U32' ),
+        'IEM_MC_POP_EX_U64':                      ( -1, 'IEM_MC_FLAT_POP_EX_U64' ),
+        'IEM_MC_MEM_MAP':                         (  2, 'IEM_MC_MEM_FLAT_MAP' ),
+        'IEM_MC_MEM_MAP_EX':                      (  3, 'IEM_MC_MEM_FLAT_MAP_EX' ),
+    };
+
     def analyzeMorphStmtForThreaded(self, aoStmts, iParamRef = 0):
         """
     
@@ -489 +570 @@
                         oNewStmt.asParams.append(self.dParamRefs['pVCpu->iem.s.enmEffOpSize'][0].sNewName);
                     oNewStmt.sName += '_THREADED';
-                    if self.sVariation in (self.ksVariation_64, self.ksVariation_64_Addr32):
+                    if self.sVariation in (self.ksVariation_64, self.ksVariation_64_FsGs, self.ksVariation_64_Addr32):
                         oNewStmt.sName += '_PC64';
                     elif self.sVariation == self.ksVariation_16_Pre386:
     
@@ -506 +587 @@
                     oNewStmt.sName += '_THREADED';
                     oNewStmt.asParams.insert(0, self.dParamRefs['cbInstr'][0].sNewName);
+
+                # ... and in FLAT modes we must morph memory access into FLAT accesses ...
+                elif (    self.sVariation in (self.ksVariation_64, self.ksVariation_32_Flat,)
+                      and (   oNewStmt.sName.startswith('IEM_MC_FETCH_MEM')
+                           or (oNewStmt.sName.startswith('IEM_MC_STORE_MEM_') and oNewStmt.sName.find('_BY_REF') < 0)
+                           or oNewStmt.sName.startswith('IEM_MC_MEM_MAP')
+                           or (oNewStmt.sName.startswith('IEM_MC_PUSH') and oNewStmt.sName.find('_FPU') < 0)
+                           or oNewStmt.sName.startswith('IEM_MC_POP') )):
+                    idxEffSeg = self.kdMemMcToFlatInfo[oNewStmt.sName][0];
+                    if idxEffSeg != -1:
+                        if (    oNewStmt.asParams[idxEffSeg].find('iEffSeg') < 0
+                            and oNewStmt.asParams[idxEffSeg] not in ('X86_SREG_ES', ) ):
+                            self.raiseProblem('Expected iEffSeg as param #%d to %s: %s'
+                                              % (idxEffSeg + 1, oNewStmt.sName, oNewStmt.asParams[idxEffSeg],));
+                        oNewStmt.asParams.pop(idxEffSeg);
+                    oNewStmt.sName = self.kdMemMcToFlatInfo[oNewStmt.sName][1];
 
                 # Process branches of conditionals recursively.
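To illustrate what this morphing does to a single recompiled statement (operand names here are hypothetical): kdMemMcToFlatInfo records that the segment parameter of IEM_MC_FETCH_MEM_U32 sits at index 1 and names its FLAT replacement, so the generator rewrites

    /* Decoder template (segmented form): */
    IEM_MC_FETCH_MEM_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
    /* Morphed form emitted for the _64 and _32_Flat variants: */
    IEM_MC_FETCH_MEM_FLAT_U32(u32Value, GCPtrEffSrc);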
     
@@ -682 +779 @@
                                                              'uint32_t', oStmt, sStdRef = 'u32Disp'));
                 else:
-                    assert self.sVariation in (self.ksVariation_64, self.ksVariation_64_Addr32);
+                    assert self.sVariation in (self.ksVariation_64, self.ksVariation_64_FsGs, self.ksVariation_64_Addr32);
                     self.aoParamRefs.append(ThreadedParamRef('IEM_GET_MODRM_EX(pVCpu, bRm)',
                                                              'uint8_t',  oStmt, sStdRef = 'bRmEx'));
     
@@ -698 +795 @@
                 self.aoParamRefs.append(ThreadedParamRef(sOrgRef, 'uint16_t', oStmt, idxReg, sStdRef = sStdRef));
                 aiSkipParams[idxReg] = True; # Skip the parameter below.
+
+            # If in flat mode variation, ignore the effective segment parameter to memory MCs.
+            if (    self.sVariation in (self.ksVariation_64, self.ksVariation_32_Flat,)
+                and oStmt.sName in self.kdMemMcToFlatInfo
+                and self.kdMemMcToFlatInfo[oStmt.sName][0] != -1):
+                aiSkipParams[self.kdMemMcToFlatInfo[oStmt.sName][0]] = True;
 
             # Inspect the target of calls to see if we need to pass down a
     
@@ -1113 +1216 @@
         sSwitchValue = 'pVCpu->iem.s.fExec & IEM_F_MODE_CPUMODE_MASK';
         if (   ThrdFnVar.ksVariation_64_Addr32 in dByVari
+            or ThrdFnVar.ksVariation_64_FsGs   in dByVari
             or ThrdFnVar.ksVariation_32_Addr16 in dByVari
             or ThrdFnVar.ksVariation_32_Flat   in dByVari
     
@@ -1118 +1222 @@
             sSwitchValue  = '(pVCpu->iem.s.fExec & (IEM_F_MODE_CPUMODE_MASK | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK))';
             sSwitchValue += ' | (pVCpu->iem.s.enmEffAddrMode == (pVCpu->iem.s.fExec & IEM_F_MODE_CPUMODE_MASK) ? 0 : 8)';
-            fSimple       = False;
+            # Accesses via FS, GS and CS go thru non-FLAT functions. (CS
+            # is not writable in 32-bit mode (at least), thus the penalty mode
+            # for any accesses via it (simpler this way).)
+            sSwitchValue += ' | (pVCpu->iem.s.iEffSeg < X86_SREG_FS && pVCpu->iem.s.iEffSeg != X86_SREG_CS ? 0 : 16)';
+            fSimple       = False;                                              # threaded functions.
 
         #
     
@@ -1128 +1236 @@
             assert not fSimple;
             aoCases.extend([
-                Case('IEMMODE_64BIT',     ThrdFnVar.ksVariation_64),
-                Case('IEMMODE_64BIT | 8', ThrdFnVar.ksVariation_64_Addr32),
+                Case('IEMMODE_64BIT',       ThrdFnVar.ksVariation_64),
+                Case('IEMMODE_64BIT | 16',  ThrdFnVar.ksVariation_64_FsGs),
+                Case('IEMMODE_64BIT | 8 | 16', None), # fall thru
+                Case('IEMMODE_64BIT | 8',   ThrdFnVar.ksVariation_64_Addr32),
             ]);
         elif ThrdFnVar.ksVariation_64 in dByVari:
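The strings assembled above end up in generated C dispatch code; a simplified sketch of the resulting switch for the 64-bit cases (the iemThreadedFunc_Xxx_* names are placeholders, not real symbols):

    switch (  (pVCpu->iem.s.fExec & (IEM_F_MODE_CPUMODE_MASK | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK))
            | (pVCpu->iem.s.enmEffAddrMode == (pVCpu->iem.s.fExec & IEM_F_MODE_CPUMODE_MASK) ? 0 : 8)
            | (pVCpu->iem.s.iEffSeg < X86_SREG_FS && pVCpu->iem.s.iEffSeg != X86_SREG_CS ? 0 : 16))
    {
        case IEMMODE_64BIT:             /* DS/ES/SS access: FLAT helpers */
            return iemThreadedFunc_Xxx_64(pVCpu);
        case IEMMODE_64BIT | 16:        /* FS/GS/CS access: segmented helpers */
            return iemThreadedFunc_Xxx_64_FsGs(pVCpu);
        case IEMMODE_64BIT | 8 | 16:    /* fall thru */
        case IEMMODE_64BIT | 8:         /* 32-bit address size prefix */
            return iemThreadedFunc_Xxx_64_Addr32(pVCpu);
    }

Since the X86_SREG values order ES, CS, SS, DS before FS and GS, the last term adds 16 exactly when the effective segment is FS, GS or CS, matching the Case entries above.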
     
@@ -1138 +1248 @@
             assert not fSimple;
             aoCases.extend([
-                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK', ThrdFnVar.ksVariation_32_Flat),
-                Case('IEMMODE_32BIT', ThrdFnVar.ksVariation_32),
-                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK | 8', None), # fall thru
-                Case('IEMMODE_32BIT | 8', ThrdFnVar.ksVariation_32_Addr16),
+                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK',         ThrdFnVar.ksVariation_32_Flat),
+                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK | 16',    None), # fall thru
+                Case('IEMMODE_32BIT | 16',                                          None), # fall thru
+                Case('IEMMODE_32BIT',                                               ThrdFnVar.ksVariation_32),
+                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK | 8',     None), # fall thru
+                Case('IEMMODE_32BIT | IEM_F_MODE_X86_FLAT_OR_PRE_386_MASK | 8 | 16',None), # fall thru
+                Case('IEMMODE_32BIT                                       | 8 | 16',None), # fall thru
+                Case('IEMMODE_32BIT                                       | 8',     ThrdFnVar.ksVariation_32_Addr16),
             ]);
         elif ThrdFnVar.ksVariation_32 in dByVari:
     
@@ -1150 +1264 @@
             assert not fSimple;
             aoCases.extend([
-                Case('IEMMODE_16BIT',     ThrdFnVar.ksVariation_16),
-                Case('IEMMODE_16BIT | 8', ThrdFnVar.ksVariation_16_Addr32),
+                Case('IEMMODE_16BIT | 16',      None), # fall thru
+                Case('IEMMODE_16BIT',           ThrdFnVar.ksVariation_16),
+                Case('IEMMODE_16BIT | 8 | 16',  None), # fall thru
+                Case('IEMMODE_16BIT | 8',       ThrdFnVar.ksVariation_16_Addr32),
             ]);
         elif ThrdFnVar.ksVariation_16 in dByVari:
  • trunk/src/VBox/VMM/include/IEMInternal.h

r100803 → r100811

@@ -4588 +4588 @@
 uint16_t        iemMemFetchDataU16Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
 uint32_t        iemMemFetchDataU32Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
+uint32_t        iemMemFlatFetchDataU32Jmp(PVMCPUCC pVCpu, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
 uint64_t        iemMemFetchDataU64Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
 uint64_t        iemMemFetchDataU64AlignedU128Jmp(PVMCPUCC pVCpu, uint8_t iSegReg, RTGCPTR GCPtrMem) IEM_NOEXCEPT_MAY_LONGJMP;
  • trunk/src/VBox/VMM/include/IEMMc.h

r100804 → r100811

@@ -814 +814 @@
 # define IEM_MC_FETCH_MEM32_U8(a_u8Dst, a_iSeg, a_GCPtrMem32) \
     ((a_u8Dst) = iemMemFetchDataU8Jmp(pVCpu, (a_iSeg), (a_GCPtrMem32)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U8(a_u8Dst, a_GCPtrMem) \
+    ((a_u8Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM16_FLAT_U8(a_u8Dst, a_GCPtrMem16) \
+    ((a_u8Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem16)))
+# define IEM_MC_FETCH_MEM32_FLAT_U8(a_u8Dst, a_GCPtrMem32) \
+    ((a_u8Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem32)))
 #endif
 
     
@@ -830 +837 @@
 # define IEM_MC_FETCH_MEM_I16(a_i16Dst, a_iSeg, a_GCPtrMem) \
     ((a_i16Dst) = (int16_t)iemMemFetchDataU16Jmp(pVCpu, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U16(a_u16Dst, a_GCPtrMem) \
+    ((a_u16Dst) = iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U16_DISP(a_u16Dst, a_GCPtrMem, a_offDisp) \
+    ((a_u16Dst) = iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem) + (a_offDisp)))
+# define IEM_MC_FETCH_MEM_FLAT_I16(a_i16Dst, a_GCPtrMem) \
+    ((a_i16Dst) = (int16_t)iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
 #endif
 
     
@@ -846 +860 @@
 # define IEM_MC_FETCH_MEM_I32(a_i32Dst, a_iSeg, a_GCPtrMem) \
     ((a_i32Dst) = (int32_t)iemMemFetchDataU32Jmp(pVCpu, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U32(a_u32Dst, a_GCPtrMem) \
+    ((a_u32Dst) = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U32_DISP(a_u32Dst, a_GCPtrMem, a_offDisp) \
+    ((a_u32Dst) = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem) + (a_offDisp)))
+# define IEM_MC_FETCH_MEM_FLAT_I32(a_i32Dst, a_GCPtrMem) \
+    ((a_i32Dst) = (int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem)))
 #endif
 
     
@@ -871 +892 @@
 # define IEM_MC_FETCH_MEM_I64(a_i64Dst, a_iSeg, a_GCPtrMem) \
     ((a_i64Dst) = (int64_t)iemMemFetchDataU64Jmp(pVCpu, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U64_DISP(a_u64Dst, a_GCPtrMem, a_offDisp) \
+    ((a_u64Dst) = iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem) + (a_offDisp)))
+# define IEM_MC_FETCH_MEM_FLAT_U64_ALIGN_U128(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = iemMemFetchDataU64AlignedU128Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_I64(a_i64Dst, a_GCPtrMem) \
+    ((a_i64Dst) = (int64_t)iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
 #endif
 
     
@@ -891 +921 @@
 # define IEM_MC_FETCH_MEM_D80(a_d80Dst, a_iSeg, a_GCPtrMem) \
     iemMemFetchDataD80Jmp(pVCpu, &(a_d80Dst), (a_iSeg), (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_R32(a_r32Dst, a_GCPtrMem) \
+    ((a_r32Dst).u = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_R64(a_r64Dst, a_GCPtrMem) \
+    ((a_r64Dst).u = iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_R80(a_r80Dst, a_GCPtrMem) \
+    iemMemFetchDataR80Jmp(pVCpu, &(a_r80Dst), UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_D80(a_d80Dst, a_GCPtrMem) \
+    iemMemFetchDataD80Jmp(pVCpu, &(a_d80Dst), UINT8_MAX, (a_GCPtrMem))
 #endif
 
     
@@ -929 +968 @@
 # define IEM_MC_FETCH_MEM_XMM_U64(a_XmmDst, a_iQWord, a_iSeg, a_GCPtrMem) \
     (a_XmmDst).au64[(a_iQWord)] = iemMemFetchDataU64Jmp(pVCpu, (a_iSeg), (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_U128(a_u128Dst, a_GCPtrMem) \
+    iemMemFetchDataU128Jmp(pVCpu, &(a_u128Dst), UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_U128_NO_AC(a_u128Dst, a_GCPtrMem) \
+    iemMemFetchDataU128Jmp(pVCpu, &(a_u128Dst), UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_U128_ALIGN_SSE(a_u128Dst, a_GCPtrMem) \
+    iemMemFetchDataU128AlignedSseJmp(pVCpu, &(a_u128Dst), UINT8_MAX, (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_XMM(a_XmmDst, a_GCPtrMem) \
+    iemMemFetchDataU128Jmp(pVCpu, &(a_XmmDst).uXmm, UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_XMM_NO_AC(a_XmmDst, a_GCPtrMem) \
+    iemMemFetchDataU128Jmp(pVCpu, &(a_XmmDst).uXmm, UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE(a_XmmDst, a_GCPtrMem) \
+    iemMemFetchDataU128AlignedSseJmp(pVCpu, &(a_XmmDst).uXmm, UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_XMM_U32(a_XmmDst, a_iDWord, a_GCPtrMem) \
+    (a_XmmDst).au32[(a_iDWord)] = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_XMM_U64(a_XmmDst, a_iQWord, a_GCPtrMem) \
+    (a_XmmDst).au64[(a_iQWord)] = iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem))
 #endif
 
     
@@ -959 +1016 @@
 # define IEM_MC_FETCH_MEM_YMM_ALIGN_AVX(a_YmmDst, a_iSeg, a_GCPtrMem) \
     iemMemFetchDataU256AlignedSseJmp(pVCpu, &(a_YmmDst).ymm, (a_iSeg), (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_U256(a_u256Dst, a_GCPtrMem) \
+    iemMemFetchDataU256Jmp(pVCpu, &(a_u256Dst), UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_U256_NO_AC(a_u256Dst, a_GCPtrMem) \
+    iemMemFetchDataU256Jmp(pVCpu, &(a_u256Dst), UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_U256_ALIGN_AVX(a_u256Dst, a_GCPtrMem) \
+    iemMemFetchDataU256AlignedSseJmp(pVCpu, &(a_u256Dst), UINT8_MAX, (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_FLAT_YMM(a_YmmDst, a_GCPtrMem) \
+    iemMemFetchDataU256Jmp(pVCpu, &(a_YmmDst).ymm, UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_YMM_NO_AC(a_YmmDst, a_GCPtrMem) \
+    iemMemFetchDataU256Jmp(pVCpu, &(a_YmmDst).ymm, UINT8_MAX, (a_GCPtrMem))
+# define IEM_MC_FETCH_MEM_FLAT_YMM_ALIGN_AVX(a_YmmDst, a_GCPtrMem) \
+    iemMemFetchDataU256AlignedSseJmp(pVCpu, &(a_YmmDst).ymm, UINT8_MAX, (a_GCPtrMem))
 #endif
 
     
@@ -1013 +1084 @@
 # define IEM_MC_FETCH_MEM_U32_ZX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
     ((a_u64Dst) = iemMemFetchDataU32Jmp(pVCpu, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U16(a_u16Dst, a_GCPtrMem) \
+    ((a_u16Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U32(a_u32Dst, a_GCPtrMem) \
+    ((a_u32Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U8_ZX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U16_ZX_U32(a_u32Dst, a_GCPtrMem) \
+    ((a_u32Dst) = iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U16_ZX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U32_ZX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem)))
 #endif /* IEM_WITH_SETJMP */
 
     
@@ -1065 +1149 @@
 # define IEM_MC_FETCH_MEM_U32_SX_U64(a_u64Dst, a_iSeg, a_GCPtrMem) \
     ((a_u64Dst) = (int32_t)iemMemFetchDataU32Jmp(pVCpu, (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_FLAT_U8_SX_U16(a_u16Dst, a_GCPtrMem) \
+    ((a_u16Dst) = (int8_t)iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U8_SX_U32(a_u32Dst, a_GCPtrMem) \
+    ((a_u32Dst) = (int8_t)iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U8_SX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = (int8_t)iemMemFetchDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U16_SX_U32(a_u32Dst, a_GCPtrMem) \
+    ((a_u32Dst) = (int16_t)iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U16_SX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = (int16_t)iemMemFetchDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem)))
+# define IEM_MC_FETCH_MEM_FLAT_U32_SX_U64(a_u64Dst, a_GCPtrMem) \
+    ((a_u64Dst) = (int32_t)iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem)))
 #endif /* IEM_WITH_SETJMP */
 
     
@@ -1085 +1182 @@
 # define IEM_MC_STORE_MEM_U64(a_iSeg, a_GCPtrMem, a_u64Value) \
     iemMemStoreDataU64Jmp(pVCpu, (a_iSeg), (a_GCPtrMem), (a_u64Value))
+
+# define IEM_MC_STORE_MEM_FLAT_U8(a_GCPtrMem, a_u8Value) \
+    iemMemStoreDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u8Value))
+# define IEM_MC_STORE_MEM_FLAT_U16(a_GCPtrMem, a_u16Value) \
+    iemMemStoreDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u16Value))
+# define IEM_MC_STORE_MEM_FLAT_U32(a_GCPtrMem, a_u32Value) \
+    iemMemStoreDataU32Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u32Value))
+# define IEM_MC_STORE_MEM_FLAT_U64(a_GCPtrMem, a_u64Value) \
+    iemMemStoreDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u64Value))
 #endif
 
     
@@ -1105 +1211 @@
 # define IEM_MC_STORE_MEM_U64_CONST(a_iSeg, a_GCPtrMem, a_u64C) \
     iemMemStoreDataU64Jmp(pVCpu, (a_iSeg), (a_GCPtrMem), (a_u64C))
+
+# define IEM_MC_STORE_MEM_FLAT_U8_CONST(a_GCPtrMem, a_u8C) \
+    iemMemStoreDataU8Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u8C))
+# define IEM_MC_STORE_MEM_FLAT_U16_CONST(a_GCPtrMem, a_u16C) \
+    iemMemStoreDataU16Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u16C))
+# define IEM_MC_STORE_MEM_FLAT_U32_CONST(a_GCPtrMem, a_u32C) \
+    iemMemStoreDataU32Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u32C))
+# define IEM_MC_STORE_MEM_FLAT_U64_CONST(a_GCPtrMem, a_u64C) \
+    iemMemStoreDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u64C))
 #endif
 
     
@@ -1134 +1249 @@
 # define IEM_MC_STORE_MEM_U128_ALIGN_SSE(a_iSeg, a_GCPtrMem, a_u128Value) \
     iemMemStoreDataU128AlignedSseJmp(pVCpu, (a_iSeg), (a_GCPtrMem), (a_u128Value))
+
+# define IEM_MC_STORE_MEM_FLAT_U128(a_GCPtrMem, a_u128Value) \
+    iemMemStoreDataU128Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u128Value))
+# define IEM_MC_STORE_MEM_FLAT_U128_ALIGN_SSE(a_GCPtrMem, a_u128Value) \
+    iemMemStoreDataU128AlignedSseJmp(pVCpu, UINT8_MAX, (a_GCPtrMem), (a_u128Value))
 #endif
 
     
@@ -1146 +1266 @@
 # define IEM_MC_STORE_MEM_U256_ALIGN_AVX(a_iSeg, a_GCPtrMem, a_u256Value) \
     iemMemStoreDataU256AlignedAvxJmp(pVCpu, (a_iSeg), (a_GCPtrMem), &(a_u256Value))
+
+# define IEM_MC_STORE_MEM_FLAT_U256(a_GCPtrMem, a_u256Value) \
+    iemMemStoreDataU256Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem), &(a_u256Value))
+# define IEM_MC_STORE_MEM_FLAT_U256_ALIGN_AVX(a_GCPtrMem, a_u256Value) \
+    iemMemStoreDataU256AlignedAvxJmp(pVCpu, UINT8_MAX, (a_GCPtrMem), &(a_u256Value))
 #endif
 
-
+/* Regular stack push and pop: */
 #define IEM_MC_PUSH_U16(a_u16Value) \
     IEM_MC_RETURN_ON_FAILURE(iemMemStackPushU16(pVCpu, (a_u16Value)))
     
@@ -1172 +1297 @@
     IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU64(pVCpu, (a_pu64Value)))
 
+/* Flat stack push and pop: */
+#define IEM_MC_FLAT_PUSH_U16(a_u16Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPushU16(pVCpu, (a_u16Value)))
+#define IEM_MC_FLAT_PUSH_U32(a_u32Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPushU32(pVCpu, (a_u32Value)))
+#define IEM_MC_FLAT_PUSH_U32_SREG(a_u32Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPushU32SReg(pVCpu, (a_u32Value)))
+#define IEM_MC_FLAT_PUSH_U64(a_u64Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPushU64(pVCpu, (a_u64Value)))
+
+#define IEM_MC_FLAT_POP_U16(a_pu16Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU16(pVCpu, (a_pu16Value)))
+#define IEM_MC_FLAT_POP_U32(a_pu32Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU32(pVCpu, (a_pu32Value)))
+#define IEM_MC_FLAT_POP_U64(a_pu64Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU64(pVCpu, (a_pu64Value)))
+
+#define IEM_MC_FLAT_POP_EX_U16(a_pu16Value, a_pNewRsp) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU16Ex(pVCpu, (a_pu16Value), (a_pNewRsp)))
+#define IEM_MC_FLAT_POP_EX_U32(a_pu32Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU32(pVCpu, (a_pu32Value)))
+#define IEM_MC_FLAT_POP_EX_U64(a_pu64Value) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemStackPopU64(pVCpu, (a_pu64Value)))
+
+
 /** Maps guest memory for direct or bounce buffered access.
  * The purpose is to pass it to an operand implementation, thus the a_iArg.
     
@@ -1180 +1330 @@
                                        (a_GCPtrMem), (a_fAccess), sizeof(*(a_pMem)) - 1))
 
+/** Flat variant of IEM_MC_MEM_MAP. */
+#define IEM_MC_MEM_FLAT_MAP(a_pMem, a_fAccess, a_GCPtrMem, a_iArg) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pMem), sizeof(*(a_pMem)), UINT8_MAX, \
+                                       (a_GCPtrMem), (a_fAccess), sizeof(*(a_pMem)) - 1))
+
 /** Maps guest memory for direct or bounce buffered access.
  * The purpose is to pass it to an operand implementation, thus the a_iArg.
     
@@ -1186 +1341 @@
 #define IEM_MC_MEM_MAP_EX(a_pvMem, a_fAccess, a_cbMem, a_iSeg, a_GCPtrMem, a_cbAlign, a_iArg) \
     IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pvMem), (a_cbMem), (a_iSeg), \
+                                       (a_GCPtrMem), (a_fAccess), (a_cbAlign)))
+
+/** Flat variant of IEM_MC_MEM_MAP_EX. */
+#define IEM_MC_MEM_FLAT_MAP_EX(a_pvMem, a_fAccess, a_cbMem, a_GCPtrMem, a_cbAlign, a_iArg) \
+    IEM_MC_RETURN_ON_FAILURE(iemMemMap(pVCpu, (void **)&(a_pvMem), (a_cbMem), UINT8_MAX, \
                                        (a_GCPtrMem), (a_fAccess), (a_cbAlign)))
 
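As a usage sketch for the new flat mapping macro (hypothetical operand, mirroring how the segmented IEM_MC_MEM_MAP is passed an operand pointer and argument index):

    uint32_t const *pu32Src; /* read-only mapping of a dword operand */
    IEM_MC_MEM_FLAT_MAP(pu32Src, IEM_ACCESS_DATA_R, GCPtrEffSrc, 1 /*a_iArg*/);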