Changeset 102737 in vbox
- Timestamp: Dec 31, 2023 4:05:24 PM
- Location: trunk/src/VBox/VMM
- Files: 3 edited
Legend:
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
trunk/src/VBox/VMM/VMMAll/IEMAllN8veHlpA.asm
r102663 → r102737

  extern NAME(iemThreadedFunc_BltIn_LogCpuStateWorker)
+ extern NAME(iemNativeHlpCheckTlbLookup)
…
  ENDPROC iemNativeHlpAsmSafeWrapLogCpuState

+
+ ;;
+ ; This is a wrapper function that saves and restores all volatile registers
+ ; so the impact of inserting CheckTlbLookup is minimal to the other TB code.
+ ;
+ BEGINPROC iemNativeHlpAsmSafeWrapCheckTlbLookup
+         push    xBP
+         SEH64_PUSH_xBP
+         mov     xBP, xSP
+         SEH64_SET_FRAME_xBP 0
+         SEH64_END_PROLOGUE
+
+         ;
+         ; Save all volatile registers.
+         ;
+         push    xAX
+         push    xCX
+         push    xDX
+ %ifdef RT_OS_WINDOWS
+         push    xSI
+         push    xDI
+ %endif
+         push    r8
+         push    r9
+         push    r10
+         push    r11
+         sub     rsp, 8+20h
+
+         ;
+         ; Call C function to do the actual work.
+         ;
+ %ifdef RT_OS_WINDOWS
+         mov     rcx, [rbp + 10h]        ; pVCpu
+         mov     rdx, [rbp + 18h]        ; uResult
+         mov     r8,  [rbp + 20h]        ; GCPtr
+         mov     r9,  [rbp + 28h]        ; uSegAndSizeAndAccess
+ %else
+         mov     rdi, [rbp + 10h]        ; pVCpu
+         mov     rsi, [rbp + 18h]        ; uResult
+         mov     rdx, [rbp + 20h]        ; GCPtr (64-bit, 3rd SysV argument)
+         mov     ecx, [rbp + 28h]        ; uSegAndSizeAndAccess (4th SysV argument)
+ %endif
+         call    NAME(iemNativeHlpCheckTlbLookup)
+
+         ;
+         ; Restore volatile registers and return to the TB code.
+         ;
+         add     rsp, 8+20h
+         pop     r11
+         pop     r10
+         pop     r9
+         pop     r8
+ %ifdef RT_OS_WINDOWS
+         pop     xDI
+         pop     xSI
+ %endif
+         pop     xDX
+         pop     xCX
+         pop     xAX
+         leave
+         ret     20h
+ ENDPROC iemNativeHlpAsmSafeWrapCheckTlbLookup
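A note on the calling protocol used here: the TB code passes all four arguments on the stack (pushed in reverse order, pVCpu last) instead of in registers, and the trailing "ret 20h" makes the wrapper itself drop them. Below is a minimal sketch of the frame the wrapper sees right after its prologue; the offsets follow from the push sequence emitted in IEMAllN8veRecompiler.cpp further down, and the printed table is illustrative only.

    #include <cstdio>

    int main()
    {
        /* After "push xBP; mov xBP, xSP" each of the four stack-passed
           arguments sits at a fixed offset from RBP, matching the
           "mov rcx/rdi, [rbp + 10h]" etc. loads in the wrapper above. */
        struct { const char *pszWhat; unsigned offBp; } const aFrame[] =
        {
            { "saved RBP",                        0x00 },
            { "return address",                   0x08 },
            { "pVCpu              (1st C arg)",   0x10 },
            { "uResult            (2nd C arg)",   0x18 },
            { "GCPtr              (3rd C arg)",   0x20 },
            { "seg|cbMem|fAccess  (4th C arg)",   0x28 },
        };
        for (auto const &entry : aFrame)
            printf("[rbp+%02xh] %s\n", entry.offBp, entry.pszWhat);

        /* "ret 20h" pops the return address and then discards the four
           argument qwords: 4 * 8 = 0x20 bytes, so the TB code needs no
           stack cleanup of its own. */
        printf("ret imm16 stack cleanup: %#x bytes\n", 4U * 8U);
        return 0;
    }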
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r102736 → r102737

              AssertFailed();
          }
+         else
+         {
+             /*
+              * Allocate a temporary stack slot and spill the register to it.
+              */
+             unsigned const idxStackSlot = ASMBitLastSetU32(~pReNative->Core.bmStack) - 1;
+             AssertStmt(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS,
+                        IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS));
+             pReNative->Core.bmStack |= RT_BIT_32(idxStackSlot);
+             pReNative->Core.aHstRegs[idxHstReg].idxStackSlot = (uint8_t)idxStackSlot;
+             Log12(("iemNativeVarSaveVolatileRegsPreHlpCall: spilling idxReg=%d onto the stack (slot %#x bp+%d, off=%#x)\n",
+                    idxHstReg, idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off));
+             off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxHstReg);
+         }
      } while (fHstRegs);
…
              AssertFailed();
          }
+         else
+         {
+             /*
+              * Restore from temporary stack slot.
+              */
+             uint8_t const idxStackSlot = pReNative->Core.aHstRegs[idxHstReg].idxStackSlot;
+             AssertContinue(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS && (pReNative->Core.bmStack & RT_BIT_32(idxStackSlot)));
+             pReNative->Core.bmStack &= ~RT_BIT_32(idxStackSlot);
+             pReNative->Core.aHstRegs[idxHstReg].idxStackSlot = UINT8_MAX;
+
+             off = iemNativeEmitLoadGprByBp(pReNative, off, idxHstReg, iemNativeStackCalcBpDisp(idxStackSlot));
+         }
      } while (fHstRegs);
…
+ /**
+  * This is called via iemNativeHlpAsmSafeWrapCheckTlbLookup.
+  */
+ DECLASM(void) iemNativeHlpCheckTlbLookup(PVMCPU pVCpu, uintptr_t uResult, uint64_t GCPtr, uint32_t uSegAndSizeAndAccess)
+ {
+     uint8_t const  iSegReg = RT_BYTE1(uSegAndSizeAndAccess);
+     uint8_t const  cbMem   = RT_BYTE2(uSegAndSizeAndAccess);
+     uint32_t const fAccess = uSegAndSizeAndAccess >> 16;
+     Log(("iemNativeHlpCheckTlbLookup: %x:%#RX64 LB %#x fAccess=%#x -> %#RX64\n", iSegReg, GCPtr, cbMem, fAccess, uResult));
+
+     /* Do the lookup manually. */
+     RTGCPTR const      GCPtrFlat = iSegReg == UINT8_MAX ? GCPtr : GCPtr + pVCpu->cpum.GstCtx.aSRegs[iSegReg].u64Base;
+     uint64_t const     uTag      = IEMTLB_CALC_TAG(    &pVCpu->iem.s.DataTlb, GCPtrFlat);
+     PIEMTLBENTRY const pTlbe     = IEMTLB_TAG_TO_ENTRY(&pVCpu->iem.s.DataTlb, uTag);
+     if (RT_LIKELY(pTlbe->uTag == uTag))
+     {
+         /*
+          * Check TLB page table level access flags.
+          */
+         AssertCompile(IEMTLBE_F_PT_NO_USER == 4);
+         uint64_t const fNoUser          = (IEM_GET_CPL(pVCpu) + 1) & IEMTLBE_F_PT_NO_USER;
+         uint64_t const fNoWriteNoDirty  = !(fAccess & IEM_ACCESS_TYPE_WRITE) ? 0
+                                         : IEMTLBE_F_PT_NO_WRITE | IEMTLBE_F_PT_NO_DIRTY | IEMTLBE_F_PG_NO_WRITE;
+         uint64_t const fFlagsAndPhysRev = pTlbe->fFlagsAndPhysRev & (  IEMTLBE_F_PHYS_REV | IEMTLBE_F_NO_MAPPINGR3
+                                                                      | IEMTLBE_F_PG_UNASSIGNED
+                                                                      | IEMTLBE_F_PT_NO_ACCESSED
+                                                                      | fNoWriteNoDirty | fNoUser);
+         uint64_t const uTlbPhysRev      = pVCpu->iem.s.DataTlb.uTlbPhysRev;
+         if (RT_LIKELY(fFlagsAndPhysRev == uTlbPhysRev))
+         {
+             /*
+              * Return the address.
+              */
+             uint8_t const * const pbAddr = &pTlbe->pbMappingR3[GCPtrFlat & GUEST_PAGE_OFFSET_MASK];
+             if ((uintptr_t)pbAddr == uResult)
+                 return;
+             AssertFailed();
+         }
+         else
+             AssertMsgFailed(("fFlagsAndPhysRev=%#RX64 vs uTlbPhysRev=%#RX64: %#RX64\n",
+                              fFlagsAndPhysRev, uTlbPhysRev, fFlagsAndPhysRev ^ uTlbPhysRev));
+     }
+     else
+         AssertFailed();
+     __debugbreak();
+ }
+ DECLASM(void) iemNativeHlpAsmSafeWrapCheckTlbLookup(void);
…
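The spill path's slot allocation relies on ASMBitLastSetU32 returning the 1-based index of the most significant set bit (0 when no bit is set), so inverting bmStack and taking the last set bit hands out free slots from the top of the variable area downwards. A self-contained sketch of the idiom, with bitLastSetU32 as a hypothetical portable stand-in and 32 slots assumed for IEMNATIVE_FRAME_VAR_SLOTS:

    #include <cstdint>
    #include <cassert>

    /* Hypothetical stand-in for IPRT's ASMBitLastSetU32: returns the 1-based
       index of the most significant set bit, or 0 if no bit is set. */
    static unsigned bitLastSetU32(uint32_t u32)
    {
        unsigned iBit = 0;
        while (u32)
        {
            iBit++;
            u32 >>= 1;
        }
        return iBit;
    }

    int main()
    {
        uint32_t bmStack = 0;                   /* no stack slots allocated yet */

        /* Same idiom as the spill path: invert the bitmap so free slots become
           set bits, then take the highest one. */
        unsigned idxStackSlot = bitLastSetU32(~bmStack) - 1;
        assert(idxStackSlot == 31);             /* highest of the assumed 32 slots */
        bmStack |= UINT32_C(1) << idxStackSlot; /* mark it allocated */

        /* The next request finds slot 30, and so on downwards. */
        assert(bitLastSetU32(~bmStack) - 1 == 30);
        return 0;
    }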
  #ifdef IEMNATIVE_WITH_TLB_LOOKUP
  DECL_INLINE_THROW(uint32_t)
…
      AssertCompile(IEMTLBE_F_PT_NO_USER == 4);
      uint64_t const fNoUser = (((pReNative->fExec >> IEM_F_X86_CPL_SHIFT) & IEM_F_X86_CPL_SMASK) + 1) & IEMTLBE_F_PT_NO_USER;
-     off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, pTlbState->idxReg1,
-                                       IEMTLBE_F_PHYS_REV | IEMTLBE_F_NO_MAPPINGR3
-                                     | IEMTLBE_F_PG_UNASSIGNED | IEMTLBE_F_PG_NO_READ
-                                     | IEMTLBE_F_PT_NO_ACCESSED | fNoUser);
+     uint64_t fTlbe = IEMTLBE_F_PHYS_REV | IEMTLBE_F_NO_MAPPINGR3 | IEMTLBE_F_PG_UNASSIGNED | IEMTLBE_F_PT_NO_ACCESSED
+                    | fNoUser;
+     if (fAccess & IEM_ACCESS_TYPE_READ)
+         fTlbe |= IEMTLBE_F_PG_NO_READ;
+     if (fAccess & IEM_ACCESS_TYPE_WRITE)
+         fTlbe |= IEMTLBE_F_PT_NO_WRITE | IEMTLBE_F_PG_NO_WRITE | IEMTLBE_F_PT_NO_DIRTY;
+     off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, pTlbState->idxReg1, fTlbe);
  # if defined(RT_ARCH_AMD64)
      /* and reg1, [reg2->fFlagsAndPhysRev] */
…
      off = iemNativeEmitLoadGprByGprEx(pCodeBuf, off, pTlbState->idxReg1, pTlbState->idxReg2,
                                        RT_UOFFSETOF(IEMTLBENTRY, pbMappingR3));
-     /* if (!reg1) jmp tlbmiss */
+     /* if (!reg1) goto tlbmiss; */
      /** @todo eliminate the need for this test? */
      off = iemNativeEmitTestIfGprIsZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, pTlbState->idxReg1,
…
      /* add result, reg1 */
      off = iemNativeEmitAddTwoGprsEx(pCodeBuf, off, idxRegMemResult, pTlbState->idxReg1);
+
+ # if 0
+     /*
+      * To verify the result we call a helper function.
+      *
+      * It's like the state logging, so parameters are passed on the stack.
+      * iemNativeHlpAsmSafeWrapCheckTlbLookup(pVCpu, result, addr, seg | (cbMem << 8) | (fAccess << 16))
+      */
+ #  ifdef RT_ARCH_AMD64
+     /* push     seg | (cbMem << 8) | (fAccess << 16) */
+     pCodeBuf[off++] = 0x68;
+     pCodeBuf[off++] = iSegReg;
+     pCodeBuf[off++] = cbMem;
+     pCodeBuf[off++] = RT_BYTE1(fAccess);
+     pCodeBuf[off++] = RT_BYTE2(fAccess);
+     /* push     pTlbState->idxRegPtr / immediate address. */
+     if (pTlbState->idxRegPtr != UINT8_MAX)
+     {
+         if (pTlbState->idxRegPtr >= 8)
+             pCodeBuf[off++] = X86_OP_REX_B;
+         pCodeBuf[off++] = 0x50 + (pTlbState->idxRegPtr & 7);
+     }
+     else
+     {
+         off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, pTlbState->idxReg1, pTlbState->uAbsPtr);
+         if (pTlbState->idxReg1 >= 8)
+             pCodeBuf[off++] = X86_OP_REX_B;
+         pCodeBuf[off++] = 0x50 + (pTlbState->idxReg1 & 7);
+     }
+     /* push     idxRegMemResult */
+     if (idxRegMemResult >= 8)
+         pCodeBuf[off++] = X86_OP_REX_B;
+     pCodeBuf[off++] = 0x50 + (idxRegMemResult & 7);
+     /* push     pVCpu */
+     pCodeBuf[off++] = 0x50 + IEMNATIVE_REG_FIXED_PVMCPU;
+     /* mov      reg1, helper */
+     off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, pTlbState->idxReg1, (uintptr_t)iemNativeHlpAsmSafeWrapCheckTlbLookup);
+     /* call     [reg1] */
+     pCodeBuf[off++] = X86_OP_REX_W | (pTlbState->idxReg1 < 8 ? 0 : X86_OP_REX_B);
+     pCodeBuf[off++] = 0xff;
+     pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 2, pTlbState->idxReg1 & 7);
+     /* The stack is cleaned up by helper function. */
+ #  else
+ #   error "Port me"
+ #  endif
+ # endif

      IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
…
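The fourth helper argument packs three values into one immediate, seg | (cbMem << 8) | (fAccess << 16), which the emitter writes byte by byte as the payload of a push imm32 (opcode 0x68, little-endian, sign-extended to 64 bits on x86-64). A quick sketch checking that this byte-wise encoding and the RT_BYTE1/RT_BYTE2-style unpacking in iemNativeHlpCheckTlbLookup agree; the example values are made up:

    #include <cstdint>
    #include <cassert>

    /* Byte-extraction helpers written out in place of RT_BYTE1/RT_BYTE2. */
    static uint8_t byte1(uint32_t u) { return (uint8_t)u;        }
    static uint8_t byte2(uint32_t u) { return (uint8_t)(u >> 8); }

    int main()
    {
        /* Example values, made up for illustration. */
        uint8_t const  iSegReg = 3;             /* e.g. DS */
        uint8_t const  cbMem   = 4;             /* dword access */
        uint32_t const fAccess = 0x2;           /* some access-type bit */

        /* Packing as done by the emitter. */
        uint32_t const uPacked = iSegReg | ((uint32_t)cbMem << 8) | (fAccess << 16);

        /* The push imm32 payload is written in little-endian byte order:
           iSegReg, cbMem, low and high byte of fAccess. Reassembling those
           four bytes must give back the packed value. */
        uint8_t const abImm[4] = { iSegReg, cbMem, byte1(fAccess), byte2(fAccess) };
        uint32_t uFromBytes = 0;
        for (int i = 3; i >= 0; i--)
            uFromBytes = (uFromBytes << 8) | abImm[i];
        assert(uFromBytes == uPacked);

        /* Unpacking as done by iemNativeHlpCheckTlbLookup. */
        assert(byte1(uPacked) == iSegReg);
        assert(byte2(uPacked) == cbMem);
        assert(uPacked >> 16  == fAccess);
        return 0;
    }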
      off = iemNativeEmitTlbLookup(pReNative, off, &TlbState, iSegReg, cbMem, fAlignMask, fAccess,
                                   idxLabelTlbLookup, idxLabelTlbMiss, idxRegMemResult);
-     TlbState.freeRegsAndReleaseVars(pReNative, idxVarGCPtrMem);

      /*
…
       */
      iemNativeLabelDefine(pReNative, idxLabelTlbDone, off);
+
+     TlbState.freeRegsAndReleaseVars(pReNative, idxVarGCPtrMem);

  # ifndef IEMNATIVE_WITH_FREE_AND_FLUSH_VOLATILE_REGS_AT_TLB_LOOKUP
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r102733 → r102737

      /** Variable index if holding a variable, otherwise UINT8_MAX. */
      uint8_t             idxVar;
+     /** Stack slot assigned by iemNativeVarSaveVolatileRegsPreHlpCall and freed
+      *  by iemNativeVarRestoreVolatileRegsPostHlpCall.  This is not valid outside
+      *  that scope. */
+     uint8_t             idxStackSlot;
      /** Alignment padding. */
-     uint8_t             abAlign[6];
+     uint8_t             abAlign[5];
  } IEMNATIVEHSTREG;
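Shrinking abAlign from 6 to 5 bytes exactly absorbs the new idxStackSlot member, so IEMNATIVEHSTREG keeps its size and array stride. A hypothetical mirror of the layout with a static_assert; the fGstRegShadows and enmWhat members are assumptions about the surrounding structure, not copied from this hunk:

    #include <cstdint>

    /* Hypothetical mirror of the IEMNATIVEHSTREG layout after this change. */
    struct HstRegSketch
    {
        uint64_t fGstRegShadows;    /* assumed leading 64-bit member */
        uint8_t  enmWhat;           /* assumed one-byte kind field */
        uint8_t  idxVar;            /* variable index or UINT8_MAX */
        uint8_t  idxStackSlot;      /* the member added by this changeset */
        uint8_t  abAlign[5];        /* was abAlign[6] before idxStackSlot */
    };

    /* One new byte member plus one byte less padding: the structure size and
       hence the stride of the host register array stay unchanged. */
    static_assert(sizeof(HstRegSketch) == 16, "layout preserved");

    int main() { return 0; }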