Changeset 104506 in vbox
- Timestamp: May 3, 2024 1:08:49 PM (7 months ago)
- Location: trunk/src/VBox/VMM
- Files: 3 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp
r104468 r104506 31 31 *********************************************************************************************************************************/ 32 32 #define LOG_GROUP LOG_GROUP_IEM_RE_NATIVE 33 //#define IEM_WITH_OPAQUE_DECODER_STATE - need offCurInstrStart access for iemNativeHlpMemCodeNewPageTlbMiss and friends. 33 #define IEM_WITH_OPAQUE_DECODER_STATE 34 34 #define VMCPU_INCL_CPUM_GST_CTX 35 35 #define VMM_INCLUDED_SRC_include_IEMMc_h /* block IEMMc.h inclusion. */ -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r104468 r104506 177 177 { 178 178 # ifdef VBOX_STRICT 179 uint32_t fAssertFlags = (pVCpu->iem.s.fExec & IEMTB_F_IEM_F_MASK ) | IEMTB_F_TYPE_NATIVE;179 uint32_t fAssertFlags = (pVCpu->iem.s.fExec & IEMTB_F_IEM_F_MASK & IEMTB_F_KEY_MASK) | IEMTB_F_TYPE_NATIVE; 180 180 if (pVCpu->cpum.GstCtx.rflags.uBoth & CPUMCTX_INHIBIT_SHADOW) 181 181 fAssertFlags |= IEMTB_F_INHIBIT_SHADOW; … … 188 188 fAssertFlags |= IEMTB_F_CS_LIM_CHECKS; 189 189 } 190 Assert(!(fFlags & ~(IEMTB_F_ IEM_F_MASK | IEMTB_F_TYPE_MASK)));190 Assert(!(fFlags & ~(IEMTB_F_KEY_MASK | IEMTB_F_TYPE_MASK))); 191 191 AssertMsg(fFlags == fAssertFlags, ("fFlags=%#RX32 fAssertFlags=%#RX32 cs:rip=%04x:%#010RX64\n", 192 192 fFlags, fAssertFlags, pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip)); … … 196 196 * Check them + type. 197 197 */ 198 if ((pNewTb->fFlags & (IEMTB_F_ IEM_F_MASK | IEMTB_F_TYPE_MASK)) == fFlags)198 if ((pNewTb->fFlags & (IEMTB_F_KEY_MASK | IEMTB_F_TYPE_MASK)) == fFlags) 199 199 { 200 200 /* 201 201 * Check for interrupts and stuff. 202 202 */ 203 /** @todo We duplicate code here that's also in iemNativeHlpReturnBreakViaLookupWithTlb. 204 * The main problem are the statistics and to some degree the logging. :/ */ 203 205 if (!a_fWithIrqCheck || !iemNativeHlpReturnBreakViaLookupIsIrqOrForceFlagPending(pVCpu) ) 204 206 { … … 208 210 || !TMTimerPollBoolWith32BitMilliTS(pVCpu->CTX_SUFF(pVM), pVCpu, &pVCpu->iem.s.msRecompilerPollNow) ) 209 211 { 212 /* 213 * Success. Update statistics and switch to the next TB. 
214 */ 210 215 pVCpu->iem.s.cTbExecNative = cTbExecNative + 1; 211 216 if (a_fWithIrqCheck) … … 255 260 */ 256 261 template <bool const a_fWithIrqCheck> 257 static IEM_DECL_NATIVE_HLP_DEF(uintptr_t, iemNativeHlpReturnBreakViaLookupWithTlb,(PVMCPUCC pVCpu, uint8_t idxTbLookup, 258 uint32_t fFlags)) 262 static IEM_DECL_NATIVE_HLP_DEF(uintptr_t, iemNativeHlpReturnBreakViaLookupWithTlb,(PVMCPUCC pVCpu, uint8_t idxTbLookup)) 259 263 { 260 264 PIEMTB const pTb = pVCpu->iem.s.pCurTbR3; 261 265 Assert(idxTbLookup < pTb->cTbLookupEntries); 262 266 PIEMTB * const ppNewTb = IEMTB_GET_TB_LOOKUP_TAB_ENTRY(pTb, idxTbLookup); 263 #if 0 /** @todo Do TLB lookup */ 267 #if 1 268 PIEMTB const pNewTb = *ppNewTb; 269 if (pNewTb) 270 { 271 /* 272 * Calculate the flags for the next TB and check if they match. 273 */ 274 uint32_t fFlags = (pVCpu->iem.s.fExec & IEMTB_F_IEM_F_MASK & IEMTB_F_KEY_MASK) | IEMTB_F_TYPE_NATIVE; 275 if (!(pVCpu->cpum.GstCtx.rflags.uBoth & (CPUMCTX_INHIBIT_SHADOW | CPUMCTX_INHIBIT_NMI))) 276 { /* likely */ } 277 else 278 { 279 if (pVCpu->cpum.GstCtx.rflags.uBoth & CPUMCTX_INHIBIT_SHADOW) 280 fFlags |= IEMTB_F_INHIBIT_SHADOW; 281 if (pVCpu->cpum.GstCtx.rflags.uBoth & CPUMCTX_INHIBIT_NMI) 282 fFlags |= IEMTB_F_INHIBIT_NMI; 283 } 284 if (!IEM_F_MODE_X86_IS_FLAT(fFlags)) 285 { 286 int64_t const offFromLim = (int64_t)pVCpu->cpum.GstCtx.cs.u32Limit - (int64_t)pVCpu->cpum.GstCtx.eip; 287 if (offFromLim >= X86_PAGE_SIZE + 16 - (int32_t)(pVCpu->cpum.GstCtx.cs.u64Base & GUEST_PAGE_OFFSET_MASK)) 288 { /* likely */ } 289 else 290 fFlags |= IEMTB_F_CS_LIM_CHECKS; 291 } 292 Assert(!(fFlags & ~(IEMTB_F_KEY_MASK | IEMTB_F_TYPE_MASK))); 293 294 if ((pNewTb->fFlags & (IEMTB_F_KEY_MASK | IEMTB_F_TYPE_MASK)) == fFlags) 295 { 296 /* 297 * Do the TLB lookup for flat RIP and compare the result with the next TB. 298 * 299 * Note! This replicates iemGetPcWithPhysAndCode and iemGetPcWithPhysAndCodeMissed. 300 */ 301 /* Calc the effective PC. 
*/ 302 uint64_t uPc = pVCpu->cpum.GstCtx.rip; 303 Assert(pVCpu->cpum.GstCtx.cs.u64Base == 0 || !IEM_IS_64BIT_CODE(pVCpu)); 304 uPc += pVCpu->cpum.GstCtx.cs.u64Base; 305 306 /* Advance within the current buffer (PAGE) when possible. */ 307 RTGCPHYS GCPhysPc; 308 uint64_t off; 309 if ( pVCpu->iem.s.pbInstrBuf 310 && (off = uPc - pVCpu->iem.s.uInstrBufPc) < pVCpu->iem.s.cbInstrBufTotal) /*ugly*/ 311 { 312 pVCpu->iem.s.offInstrNextByte = (uint32_t)off; 313 pVCpu->iem.s.offCurInstrStart = (uint16_t)off; 314 if ((uint16_t)off + 15 <= pVCpu->iem.s.cbInstrBufTotal) 315 pVCpu->iem.s.cbInstrBuf = (uint16_t)off + 15; 316 else 317 pVCpu->iem.s.cbInstrBuf = pVCpu->iem.s.cbInstrBufTotal; 318 GCPhysPc = pVCpu->iem.s.GCPhysInstrBuf + off; 319 } 320 else 321 { 322 pVCpu->iem.s.pbInstrBuf = NULL; 323 pVCpu->iem.s.offCurInstrStart = 0; 324 pVCpu->iem.s.offInstrNextByte = 0; 325 iemOpcodeFetchBytesJmp(pVCpu, 0, NULL); 326 GCPhysPc = pVCpu->iem.s.pbInstrBuf ? pVCpu->iem.s.GCPhysInstrBuf + pVCpu->iem.s.offCurInstrStart : NIL_RTGCPHYS; 327 } 328 329 if (pNewTb->GCPhysPc == GCPhysPc) 330 { 331 /* 332 * Check for interrupts and stuff. 333 */ 334 /** @todo We duplicate code here that's also in iemNativeHlpReturnBreakViaLookupWithPc. 335 * The main problem are the statistics and to some degree the logging. :/ */ 336 if (!a_fWithIrqCheck || !iemNativeHlpReturnBreakViaLookupIsIrqOrForceFlagPending(pVCpu) ) 337 { 338 /* Do polling. */ 339 uint64_t const cTbExecNative = pVCpu->iem.s.cTbExecNative; 340 if ( RT_LIKELY(cTbExecNative & 511) 341 || !TMTimerPollBoolWith32BitMilliTS(pVCpu->CTX_SUFF(pVM), pVCpu, &pVCpu->iem.s.msRecompilerPollNow) ) 342 { 343 /* 344 * Success. Update statistics and switch to the next TB. 
345 */ 346 pVCpu->iem.s.cTbExecNative = cTbExecNative + 1; 347 if (a_fWithIrqCheck) 348 STAM_REL_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2Irq); 349 else 350 STAM_REL_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2NoIrq); 351 352 pNewTb->cUsed += 1; 353 pNewTb->msLastUsed = pVCpu->iem.s.msRecompilerPollNow; 354 pVCpu->iem.s.pCurTbR3 = pNewTb; 355 pVCpu->iem.s.ppTbLookupEntryR3 = IEMTB_GET_TB_LOOKUP_TAB_ENTRY(pNewTb, 0); 356 Log10(("iemNativeHlpReturnBreakViaLookupWithTlb: match at %04x:%08RX64 (%RGp): pTb=%p[%#x]-> %p\n", 357 pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, GCPhysPc, pTb, idxTbLookup, pNewTb)); 358 return (uintptr_t)pNewTb->Native.paInstructions; 359 } 360 } 361 Log10(("iemNativeHlpReturnBreakViaLookupWithTlb: IRQ or FF pending\n")); 362 STAM_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2PendingIrq); 363 } 364 else 365 { 366 Log10(("iemNativeHlpReturnBreakViaLookupWithTlb: GCPhysPc mismatch at %04x:%08RX64: %RGp vs %RGp (pTb=%p[%#x]-> %p)\n", 367 pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, GCPhysPc, pNewTb->GCPhysPc, pTb, idxTbLookup, pNewTb)); 368 STAM_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2MismatchGCPhysPc); 369 } 370 } 371 else 372 { 373 Log10(("iemNativeHlpReturnBreakViaLookupWithTlb: fFlags mismatch at %04x:%08RX64: %#x vs %#x (pTb=%p[%#x]-> %p)\n", 374 pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, fFlags, pNewTb->fFlags, pTb, idxTbLookup, pNewTb)); 375 STAM_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2MismatchFlags); 376 } 377 } 378 else 379 STAM_COUNTER_INC(&pVCpu->iem.s.StatNativeTbExitDirectLinking2NoTb); 264 380 #else 265 381 NOREF(fFlags); -
trunk/src/VBox/VMM/include/IEMInternal.h
r104468 r104506 1596 1596 * @note Set to zero when the code TLB is flushed to trigger TLB reload. */ 1597 1597 uint16_t cbInstrBufTotal; /* 0x28 */ 1598 # ifndef IEM_WITH_OPAQUE_DECODER_STATE1599 1598 /** Offset into pbInstrBuf of the first byte of the current instruction. 1600 1599 * Can be negative to efficiently handle cross page instructions. */ 1601 1600 int16_t offCurInstrStart; /* 0x2a */ 1602 1601 1602 # ifndef IEM_WITH_OPAQUE_DECODER_STATE 1603 1603 /** The prefix mask (IEM_OP_PRF_XXX). */ 1604 1604 uint32_t fPrefixes; /* 0x2c */ … … 1624 1624 # endif 1625 1625 # else /* IEM_WITH_OPAQUE_DECODER_STATE */ 1626 uint8_t abOpaqueDecoderPart1[0x36 - 0x2 a];1626 uint8_t abOpaqueDecoderPart1[0x36 - 0x2c]; 1627 1627 # endif /* IEM_WITH_OPAQUE_DECODER_STATE */ 1628 1628
Note: See TracChangeset for help on using the changeset viewer.