- Timestamp: Mar 25, 2024, 10:07:26 AM
- svn:sync-xref-src-repo-rev: 162423
- Location: trunk/src/VBox/VMM
- Files: 4 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
(diff r104021 → r104034)

The guest-register reference helper now flushes a pending dirty GPR write before handing out a reference to it (new lines 114-127):

```diff
 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
     /* If for whatever reason it is possible to reference the PC register at some point we need to do the writeback here first. */
+#endif
+
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+#if 0 /** @todo r=aeichner EFLAGS writeback delay. */
+    if (   enmClass == kIemNativeGstRegRef_EFlags
+        && pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(kIemNativeGstReg_EFlags))
+        off = iemNativeRegFlushPendingWrite(pReNative, off, kIemNativeGstReg_EFlags);
+#else
+    Assert(!(pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(kIemNativeGstReg_EFlags)));
+#endif
+
+    if (   enmClass == kIemNativeGstRegRef_Gpr
+        && pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(idxReg))
+        off = iemNativeRegFlushPendingWrite(pReNative, off, IEMNATIVEGSTREG_GPR(idxReg));
 #endif
```

The IF/ELSE/ENDIF emitters write back all dirty shadow registers before the two code paths merge, and assert that nothing is left dirty when the register states are reconciled (new lines 1307-1314, 1348-1354, 1366-1370, 1389-1393 and 1401-1404):

```diff
     Assert(!pEntry->fInElse);

+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    /* Writeback any dirty shadow registers. */
+    /** @todo r=aeichner Possible optimization is to only writeback guest registers which became dirty
+     *        in one of the branches and leave guest registers already dirty before the start of the if
+     *        block alone. */
+    off = iemNativeRegFlushDirtyGuest(pReNative, off);
+#endif
+
     /* Jump to the endif */
     off = iemNativeEmitJmpToLabel(pReNative, off, pEntry->idxLabelEndIf);
 …
 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
     Assert(pReNative->Core.offPc == 0);
+#endif
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    /* Writeback any dirty shadow registers (else branch). */
+    /** @todo r=aeichner Possible optimization is to only writeback guest registers which became dirty
+     *        in one of the branches and leave guest registers already dirty before the start of the if
+     *        block alone. */
+    off = iemNativeRegFlushDirtyGuest(pReNative, off);
 #endif
 …
     PCIEMNATIVECORESTATE const pOther = pEntry->fInElse ? &pEntry->IfFinalState : &pEntry->InitialState;
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    Assert(   pOther->bmGstRegShadowDirty == 0
+           && pReNative->Core.bmGstRegShadowDirty == 0);
+#endif
+
     if (memcmp(&pReNative->Core, pOther, sizeof(*pOther)) != 0)
     {
 …
                 Log12(("iemNativeEmitEndIf: dropping gst %s from hst %s\n",
                        g_aGstShadowInfo[idxGstReg].pszName, g_apszIemNativeHstRegNames[idxHstReg]));
+
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+                /* Writeback any dirty shadow registers we are about to unshadow. */
+                off = iemNativeRegFlushDirtyGuestByHostRegShadow(pReNative, off, idxHstReg);
+#endif
                 iemNativeRegClearGstRegShadowing(pReNative, idxHstReg, off);
             }
 …
     else
+    {
         Assert(pReNative->Core.bmHstRegsWithGstShadow == 0);
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+        Assert(pReNative->Core.bmGstRegShadowDirty == 0);
+#endif
+    }

     /* Check variables next. For now we must require them to be identical …
```

The remainder of the change wraps the previously unconditional guest-context stores in `#if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)`, so that with delayed writeback the value only lives in the (now dirty) host shadow register. The pattern is identical at every site:

```diff
     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);

+#if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
     off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxGstTmpReg, RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.aGRegs[iGRegEx & 15]));
+#endif

     iemNativeRegFreeTmp(pReNative, idxGstTmpReg);
```

It is applied to the stores into cpum.GstCtx.aGRegs[iGRegEx & 15] (new lines 2868/2870 and 2966/2968), into cpum.GstCtx.aGRegs[iGReg] (3006/3008, 3076/3078, 3095/3097, 3132/3134, 3154/3156, 3212/3214, 3267/3269, 3325/3327, 3378/3380, 3436/3438, 3494/3496 and 3548/3550), into cpum.GstCtx.rsp (6273/6275 and 6625/6627) and into cpum.GstCtx.aGRegs[idxGReg] for the 16-bit pop merge case (6590/6592). One site (new lines 3191-3195) gains an #else branch with RT_NOREF(idxVarReg) because the acquired register would otherwise be unused, and the path that pops into a full GPR now marks the register dirty instead of storing it (new lines 6576-6582):

```diff
     iemNativeRegClearAndMarkAsGstRegShadow(pReNative, idxRegMemResult, IEMNATIVEGSTREG_GPR(idxGReg), off);
+#if defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
+    pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(idxGReg);
+#endif
+#if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
     off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxRegMemResult,
                                          RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.aGRegs[idxGReg]));
+#endif
 }
 else
```
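Taken together, these changes replace the eager store that used to follow every guest-register update with a dirty mark that is flushed at control-flow joins and before references escape. The following stand-alone sketch is only a toy model of that idea (none of these names exist in the VirtualBox sources; the real bookkeeping lives in pReNative->Core.bmGstRegShadowDirty and the iemNativeRegFlush* helpers):

```cpp
// Toy model of delayed guest-register writeback: values are updated in a
// "host register" and only marked dirty; the guest context is written when
// the dirty set is flushed, e.g. before two emitted code paths merge.
#include <cstdint>
#include <cstdio>

static uint64_t g_aGstCtx[16];   // stand-in for cpum.GstCtx.aGRegs[]
static uint64_t g_aHstRegs[16];  // stand-in for the host shadow registers
static uint64_t g_bmDirty;       // stand-in for Core.bmGstRegShadowDirty

static void writeGReg(unsigned idxGst, uint64_t uValue)
{
    g_aHstRegs[idxGst] = uValue;          // value lives only in the shadow register
    g_bmDirty |= UINT64_C(1) << idxGst;   // remember that it still needs writing back
}

static void flushDirty(uint64_t fMask)    // roughly what iemNativeRegFlushDirtyGuest() emits code for
{
    uint64_t bmDirty = g_bmDirty & fMask;
    for (unsigned idx = 0; bmDirty != 0; idx++, bmDirty >>= 1)
        if (bmDirty & 1)
        {
            g_aGstCtx[idx] = g_aHstRegs[idx];      // the deferred store
            g_bmDirty &= ~(UINT64_C(1) << idx);
        }
}

int main()
{
    writeGReg(3, 0x1234);     // e.g. an emitted instruction modified GPR 3
    flushDirty(UINT64_MAX);   // flush everything before the code paths merge
    printf("gst[3]=%#llx dirty=%#llx\n",
           (unsigned long long)g_aGstCtx[3], (unsigned long long)g_bmDirty);
    return 0;
}
```

Flushing before the jump to the endif label is what keeps both arms of an emitted IF in the same shape, so the memcmp()/assertions that reconcile the two register states above never see a register that is dirty in only one branch.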
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
(diff r104033 → r104034)

The dirty bitmap is cleared when the recompiler core state is (re)initialised (new lines 3259-3261):

```diff
     pReNative->Core.bmHstRegsWithGstShadow = 0;
     pReNative->Core.bmGstRegShadows        = 0;
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    pReNative->Core.bmGstRegShadowDirty    = 0;
+#endif
     pReNative->Core.bmVars                 = 0;
     pReNative->Core.bmStack                = 0;
```

The GuestRegShadowing debug-info entry records whether the register was dirty at the time, with a sanity assertion that a register being unshadowed cannot be dirty (new lines 3797-3799 and 3804-3807):

```diff
     pEntry->GuestRegShadowing.uType         = kIemTbDbgEntryType_GuestRegShadowing;
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    pEntry->GuestRegShadowing.fDirty        = (pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(enmGstReg)) ? 1 : 0;
+#endif
     pEntry->GuestRegShadowing.uUnused       = 0;
     pEntry->GuestRegShadowing.idxGstReg     = enmGstReg;
     pEntry->GuestRegShadowing.idxHstReg     = idxHstReg;
     pEntry->GuestRegShadowing.idxHstRegPrev = idxHstRegPrev;
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    Assert(   idxHstReg != UINT8_MAX
+           || !(pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(enmGstReg)));
+#endif
```

The bulk of the change adds the writeback helpers (new lines 4034-4158):

```cpp
#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
/**
 * Stores the host reg @a idxHstReg into guest shadow register @a enmGstReg.
 *
 * @returns New code buffer offset on success, UINT32_MAX on failure.
 * @param   pReNative   .
 * @param   off         The current code buffer position.
 * @param   enmGstReg   The guest register to store to.
 * @param   idxHstReg   The host register to store from.
 */
DECL_FORCE_INLINE_THROW(uint32_t)
iemNativeEmitStoreGprWithGstShadowReg(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVEGSTREG enmGstReg, uint8_t idxHstReg)
{
    Assert((unsigned)enmGstReg < (unsigned)kIemNativeGstReg_End);
    Assert(g_aGstShadowInfo[enmGstReg].cb != 0);

    switch (g_aGstShadowInfo[enmGstReg].cb)
    {
        case sizeof(uint64_t):
            return iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
        case sizeof(uint32_t):
            return iemNativeEmitStoreGprToVCpuU32(pReNative, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
        case sizeof(uint16_t):
            return iemNativeEmitStoreGprToVCpuU16(pReNative, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
#if 0 /* not present in the table. */
        case sizeof(uint8_t):
            return iemNativeEmitStoreGprToVCpuU8(pReNative, off, idxHstReg, g_aGstShadowInfo[enmGstReg].off);
#endif
        default:
            AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IPE_NOT_REACHED_DEFAULT_CASE));
    }
}


/**
 * Emits code to flush a pending write of the given guest register if any.
 *
 * @returns New code buffer offset.
 * @param   pReNative   The native recompile state.
 * @param   off         Current code buffer position.
 * @param   enmGstReg   The guest register to flush.
 */
DECL_HIDDEN_THROW(uint32_t)
iemNativeRegFlushPendingWrite(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVEGSTREG enmGstReg)
{
    uint8_t const idxHstReg = pReNative->Core.aidxGstRegShadows[enmGstReg];

    Assert(enmGstReg >= kIemNativeGstReg_GprFirst && enmGstReg <= kIemNativeGstReg_GprLast);
    Assert(   idxHstReg != UINT8_MAX
           && pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(enmGstReg));
    Log12(("iemNativeRegFlushPendingWrite: Clearing guest register %s shadowed by host %s\n",
           g_aGstShadowInfo[enmGstReg].pszName, g_apszIemNativeHstRegNames[idxHstReg]));

    off = iemNativeEmitStoreGprWithGstShadowReg(pReNative, off, enmGstReg, idxHstReg);

    pReNative->Core.bmGstRegShadowDirty &= ~RT_BIT_64(enmGstReg);
    return off;
}


/**
 * Flush the given set of guest registers if marked as dirty.
 *
 * @returns New code buffer offset.
 * @param   pReNative       The native recompile state.
 * @param   off             Current code buffer position.
 * @param   fFlushGstReg    The guest register set to flush (default is flush everything).
 */
DECL_HIDDEN_THROW(uint32_t)
iemNativeRegFlushDirtyGuest(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint64_t fFlushGstReg /*= UINT64_MAX*/)
{
    if (pReNative->Core.bmGstRegShadowDirty & fFlushGstReg)
    {
        uint64_t bmGstRegShadowDirty = pReNative->Core.bmGstRegShadowDirty & fFlushGstReg;
        uint32_t idxGstReg = 0;

        do
        {
            if (bmGstRegShadowDirty & 0x1)
            {
                off = iemNativeRegFlushPendingWrite(pReNative, off, (IEMNATIVEGSTREG)idxGstReg);
                Assert(!(pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(idxGstReg)));
            }
            idxGstReg++;
            bmGstRegShadowDirty >>= 1;
        } while (bmGstRegShadowDirty);
    }

    return off;
}


/**
 * Flush all shadowed guest registers marked as dirty for the given host register.
 *
 * @returns New code buffer offset.
 * @param   pReNative   The native recompile state.
 * @param   off         Current code buffer position.
 * @param   idxHstReg   The host register.
 *
 * @note This doesn't do any unshadowing of guest registers from the host register.
 */
DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushDirtyGuestByHostRegShadow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg)
{
    /* We need to flush any pending guest register writes this host register shadows. */
    uint64_t fGstRegShadows = pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows;
    if (pReNative->Core.bmGstRegShadowDirty & fGstRegShadows)
    {
        uint64_t bmGstRegShadowDirty = pReNative->Core.bmGstRegShadowDirty & fGstRegShadows;
        uint32_t idxGstReg = 0;
        do
        {
            if (bmGstRegShadowDirty & 0x1)
            {
                off = iemNativeRegFlushPendingWrite(pReNative, off, (IEMNATIVEGSTREG)idxGstReg);
                Assert(!(pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(idxGstReg)));
            }
            idxGstReg++;
            bmGstRegShadowDirty >>= 1;
        } while (bmGstRegShadowDirty);
    }

    return off;
}
#endif
```

The register-allocation code that frees up or steals shadowed host registers ("Locate a register, possibly freeing one up") flushes the dirty ones first (new lines 4228-4232 and 4263-4267):

```diff
     if (pReNative->Core.bmGstRegShadows & fToFreeMask)
     {
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+        /* Writeback any dirty shadow registers we are about to unshadow. */
+        *poff = iemNativeRegFlushDirtyGuest(pReNative, *poff, fToFreeMask);
+#endif
+
         STAM_COUNTER_INC(&pReNative->pVCpu->iem.s.StatNativeRegFindFreeLivenessUnshadowed);
         iemNativeRegFlushGuestShadows(pReNative, fToFreeMask);
 …
         Assert(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxReg));

+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+        /* We need to flush any pending guest register writes this host register shadows. */
+        *poff = iemNativeRegFlushDirtyGuestByHostRegShadow(pReNative, *poff, idxReg);
+#endif
+
         pReNative->Core.bmHstRegsWithGstShadow &= ~RT_BIT_32(idxReg);
         pReNative->Core.bmGstRegShadows        &= ~pReNative->Core.aHstRegs[idxReg].fGstRegShadows;
```

Allocating a guest GPR for a full write or update now marks it dirty (new lines 4637-4646):

```diff
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    /** @todo r=aeichner Implement for registers other than GPR as well. */
+    if (   (   enmIntendedUse == kIemNativeGstRegUse_ForFullWrite
+            || enmIntendedUse == kIemNativeGstRegUse_ForUpdate)
+        && enmGstReg >= kIemNativeGstReg_GprFirst
+        && enmGstReg <= kIemNativeGstReg_GprLast
+       )
+        pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(enmGstReg);
+#endif
```

Assertions that the affected shadow copies are not dirty are added at the places where shadowing is moved, cleared or flushed without emitting a writeback (new lines 4306-4308, 4356-4358, 4417-4419, 4931-4933, 5040-5042, 5235-5237, 5280-5282, 5304-5306, 5352-5354, 5373-5375 and 5418-5420), all of the form:

```cpp
#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
    Assert(!(pReNative->Core.bmGstRegShadowDirty & /* the affected shadow mask */));
#endif
```

The pending-writes flush used around calls and TB exits now writes back everything outside fGstShwExcept and optionally drops the shadowing as well (new lines 6226-6246):

```diff
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    off = iemNativeRegFlushDirtyGuest(pReNative, off, ~fGstShwExcept);
+    if (   fFlushShadows
+        && (pReNative->Core.bmGstRegShadows & ~fGstShwExcept))
+    {
+        uint64_t bmGstRegShadows = pReNative->Core.bmGstRegShadows & ~fGstShwExcept;
+        uint8_t  idxGstReg = 0;
+        do
+        {
+            if (bmGstRegShadows & 0x1)
+            {
+                uint8_t const idxHstReg = pReNative->Core.aidxGstRegShadows[idxGstReg];
+
+                iemNativeRegClearGstRegShadowing(pReNative, idxHstReg, off);
+                iemNativeRegFlushGuestShadows(pReNative, RT_BIT_64(idxGstReg));
+            }
+            idxGstReg++;
+            bmGstRegShadows >>= 1;
+        } while (bmGstRegShadows);
+    }
+#endif
```

The strict-build value check is skipped for dirty registers, since CPUMCTX is stale by design in that case (new lines 6462-6466):

```diff
 iemNativeEmitGuestRegValueCheck(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxReg, IEMNATIVEGSTREG enmGstReg)
 {
+#if defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
+    /* We can't check the value against whats in CPUMCTX if the register is already marked as dirty, so skip the check. */
+    if (pReNative->Core.bmGstRegShadowDirty & RT_BIT_64(enmGstReg))
+        return off;
+#endif
```

Variable register acquisition flushes before unshadowing the call register, and the path that associates a variable's host register with a guest GPR marks that GPR dirty (new lines 7840-7845 and 8091-8095):

```diff
         idxReg = g_aidxIemNativeCallRegs[uArgNo];
+
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+        /* Writeback any dirty shadow registers we are about to unshadow. */
+        *poff = iemNativeRegFlushDirtyGuestByHostRegShadow(pReNative, *poff, idxReg);
+#endif
+
         iemNativeRegClearGstRegShadowing(pReNative, idxReg, *poff);
         Log11(("iemNativeVarRegisterAcquire: idxVar=%#x idxReg=%u (matching arg %u)\n", idxVar, idxReg, uArgNo));
 …
     if (idxReg < RT_ELEMENTS(pReNative->Core.aHstRegs))
     {
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+        if (enmGstReg >= kIemNativeGstReg_GprFirst && enmGstReg <= kIemNativeGstReg_GprLast)
+            pReNative->Core.bmGstRegShadowDirty |= RT_BIT_64(enmGstReg);
+#endif
+
         if (pReNative->Core.bmGstRegShadows & RT_BIT_64(enmGstReg))
         {
```

Finally, the debug-info dumper prints the new dirty flag:

```diff
             if (pEntry->GuestRegShadowing.idxHstReg == UINT8_MAX)
-                pHlp->pfnPrintf(pHlp, "  Guest register %s != host register %s\n", pszGstReg,
-                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstRegPrev]);
+                pHlp->pfnPrintf(pHlp, "  Guest register %s != host register %s (Dirty: %RTbool)\n", pszGstReg,
+                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstRegPrev],
+                                RT_BOOL(pEntry->GuestRegShadowing.fDirty));
             else if (pEntry->GuestRegShadowing.idxHstRegPrev == UINT8_MAX)
-                pHlp->pfnPrintf(pHlp, "  Guest register %s == host register %s\n", pszGstReg,
-                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstReg]);
+                pHlp->pfnPrintf(pHlp, "  Guest register %s == host register %s (Dirty: %RTbool)\n", pszGstReg,
+                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstReg],
+                                RT_BOOL(pEntry->GuestRegShadowing.fDirty));
             else
-                pHlp->pfnPrintf(pHlp, "  Guest register %s == host register %s (previously in %s)\n", pszGstReg,
-                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstReg],
-                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstRegPrev]);
+                pHlp->pfnPrintf(pHlp, "  Guest register %s == host register %s (previously in %s, Dirty: %RTbool)\n", pszGstReg,
+                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstReg],
+                                g_apszIemNativeHstRegNames[pEntry->GuestRegShadowing.idxHstRegPrev],
+                                RT_BOOL(pEntry->GuestRegShadowing.fDirty));
             continue;
```
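The size dispatch in iemNativeEmitStoreGprWithGstShadowReg() picks the store width from g_aGstShadowInfo[enmGstReg].cb. Below is a stand-alone sketch of just that dispatch, with a made-up three-entry shadow-info table and a plain byte buffer standing in for VMCPU (the table contents and offsets are illustrative only, not the real layout):

```cpp
// Minimal sketch of a size-dispatched shadow-register writeback.
#include <cstdint>
#include <cstring>
#include <cstdio>

struct ShadowInfo { uint32_t off; uint8_t cb; };              // like g_aGstShadowInfo[] entries
static const ShadowInfo g_aInfo[] = { { 0, 8 }, { 8, 4 }, { 12, 2 } };  // made-up table
static uint8_t g_abVCpu[64];                                  // stand-in for the VMCPU structure

static void storeShadow(unsigned idxGstReg, uint64_t uHstVal)
{
    const ShadowInfo &rInfo = g_aInfo[idxGstReg];
    switch (rInfo.cb)                                         // 64/32/16-bit guest registers
    {
        case 8: memcpy(&g_abVCpu[rInfo.off], &uHstVal, 8); break;
        case 4: { uint32_t u32 = (uint32_t)uHstVal; memcpy(&g_abVCpu[rInfo.off], &u32, 4); break; }
        case 2: { uint16_t u16 = (uint16_t)uHstVal; memcpy(&g_abVCpu[rInfo.off], &u16, 2); break; }
        default: break;   // no 1-byte entries, mirroring the #if 0 case in the helper above
    }
}

int main()
{
    storeShadow(1, 0x11223344aabbccddULL);   // only the low 32 bits reach offset 8
    uint32_t u32;
    memcpy(&u32, &g_abVCpu[8], 4);
    printf("%#x\n", u32);                    // prints 0xaabbccdd
    return 0;
}
```

Like the real table, the sketch has no 1-byte entries, which is why the sizeof(uint8_t) case in the new helper is compiled out with #if 0.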
trunk/src/VBox/VMM/include/IEMInternal.h
(diff r104019 → r104034)

The kIemTbDbgEntryType_GuestRegShadowing debug entry donates one bit of its unused field to the new dirty flag:

```diff
             /* kIemTbDbgEntryType_GuestRegShadowing. */
             uint32_t uType         : 4;
-            uint32_t uUnused       : 4;
+            /** Flag whether the register is marked as dirty. */
+            uint32_t fDirty        : 1;
+            uint32_t uUnused       : 3;
             /** The guest register being shadowed (IEMNATIVEGSTREG). */
             uint32_t idxGstReg     : 8;
```
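Because fDirty is carved out of the previously 4-bit uUnused field, the entry stays a single 32-bit word. A quick stand-alone check of that packing (sketch only; the 8-bit widths of the two host-register index fields are assumed from the rest of the changeset and are not shown in the hunk above):

```cpp
// Packing check for the GuestRegShadowing debug entry after the change.
#include <cstdint>
#include <cstdio>

struct GuestRegShadowing
{
    uint32_t uType         : 4;   // kIemTbDbgEntryType_GuestRegShadowing
    uint32_t fDirty        : 1;   // new: register marked as dirty
    uint32_t uUnused       : 3;   // was 4 bits before this change
    uint32_t idxGstReg     : 8;   // guest register (IEMNATIVEGSTREG)
    uint32_t idxHstReg     : 8;   // assumed width
    uint32_t idxHstRegPrev : 8;   // assumed width
};

int main()
{
    static_assert(sizeof(GuestRegShadowing) == sizeof(uint32_t), "entry must stay one dword");
    printf("bits used: %u\n", 4 + 1 + 3 + 8 + 8 + 8);   // 32
    return 0;
}
```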
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
(diff r104021 → r104034)

```diff
 #endif

+/** @def IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+ * Delay the writeback or dirty registers as long as possible. */
+#ifdef DEBUG_aeichner
+# define IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+#endif

 /** @name Stack Frame Layout
 …
     /** Bitmap marking valid entries in aidxGstRegShadows. */
     uint64_t                    bmGstRegShadows;
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    /** Bitmap marking the shadowed guest register as dirty and needing writeback when flushing. */
+    uint64_t                    bmGstRegShadowDirty;
+#endif

 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
 …
 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING
 DECL_HIDDEN_THROW(uint32_t) iemNativeEmitPcWritebackSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off);
+#endif
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingWrite(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVEGSTREG enmGstReg);
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushDirtyGuest(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint64_t fFlushGstReg = UINT64_MAX);
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushDirtyGuestByHostRegShadow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg);
 #endif
 …
     Assert(   RT_BOOL(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxHstReg))
            == RT_BOOL(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows));
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    Assert(!(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows & pReNative->Core.bmGstRegShadowDirty));
+#endif

 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
 …
     Assert(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows & RT_BIT_64(enmGstReg));
     Assert(pReNative->Core.bmHstRegsWithGstShadow & RT_BIT_32(idxHstReg));
+#ifdef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK
+    Assert(!(pReNative->Core.aHstRegs[idxHstReg].fGstRegShadows & pReNative->Core.bmGstRegShadowDirty));
+#endif

 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
 …
-#ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
+#if defined(IEMNATIVE_WITH_SIMD_REG_ALLOCATOR) || defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK)
     /** @todo r=bird: There must be a quicker way to check if anything needs doing here! */
     /** @todo This doesn't mix well with fGstShwExcept but we ignore this for now and just flush everything. */
```