Changeset 105673 in vbox for trunk/src/VBox/VMM/include
- Timestamp: Aug 14, 2024 1:57:57 PM
- Location: trunk/src/VBox/VMM/include
- Files: 3 edited
trunk/src/VBox/VMM/include/IEMInternal.h
--- IEMInternal.h (r105664)
+++ IEMInternal.h (r105673)
@@ -92 +92 @@
 # define IEM_WITH_THROW_CATCH
 #endif /*ASM-NOINC-END*/
+
+/** @def IEM_WITH_INTRA_TB_JUMPS
+ * Enables loop-jumps within a TB (currently only to the first call).
+ */
+#if defined(DOXYGEN_RUNNING) || 1
+# define IEM_WITH_INTRA_TB_JUMPS
+#endif
 
 /** @def IEMNATIVE_WITH_DELAYED_PC_UPDATING
@@ -1245 +1252 @@
     uint8_t     uTbLookup;
 
-    /** Unused atm. */
-    uint8_t     uUnused0;
+    /** Flags - IEMTHREADEDCALLENTRY_F_XXX. */
+    uint8_t     fFlags;
 
     /** Generic parameters. */
@@ -1270 +1277 @@
 /** Make a IEMTHRDEDCALLENTRY::uTbLookup value. */
 #define IEM_TB_LOOKUP_TAB_MAKE(a_idxTable, a_fLarge) ((a_idxTable) | ((a_fLarge) ? 0x80 : 0))
+
+
+/** The call entry is a jump target. */
+#define IEMTHREADEDCALLENTRY_F_JUMP_TARGET      UINT8_C(0x01)
+
 
 /**
@@ -2129 +2141 @@
     uint8_t     abAlignment9[42];
 
-    /** @name Recompilation
+
+    /** @name Recompiled Exection
      * @{ */
     /** Pointer to the current translation block.
@@ -2156 +2169 @@
     uint64_t    u64Unused;
 #endif
-    /** Fixed TB used for threaded recompilation.
-     * This is allocated once with maxed-out sizes and re-used afterwards. */
-    R3PTRTYPE(PIEMTB)       pThrdCompileTbR3;
     /** Pointer to the ring-3 TB cache for this EMT. */
     R3PTRTYPE(PIEMTBCACHE)  pTbCacheR3;
@@ -2165 +2175 @@
      * entry, thus it can always safely be used w/o NULL checking. */
     R3PTRTYPE(PIEMTB *) ppTbLookupEntryR3;
+#if 0 /* unused */
     /** The PC (RIP) at the start of pCurTbR3/pCurTbR0.
      * The TBs are based on physical addresses, so this is needed to correleated
      * RIP to opcode bytes stored in the TB (AMD-V / VT-x). */
     uint64_t    uCurTbStartPc;
+#endif
+
     /** Number of threaded TBs executed. */
     uint64_t    cTbExecThreaded;
     /** Number of native TBs executed. */
     uint64_t    cTbExecNative;
+
+    /** The number of IRQ/FF checks till the next timer poll call. */
+    uint32_t    cIrqChecksTillNextPoll;
+    /** The virtual sync time at the last timer poll call in milliseconds. */
+    uint32_t    msRecompilerPollNow;
+    /** The virtual sync time at the last timer poll call in nanoseconds. */
+    uint64_t    nsRecompilerPollNow;
+    /** The previous cIrqChecksTillNextPoll value. */
+    uint32_t    cIrqChecksTillNextPollPrev;
+    /** The ideal nanosecond interval between two timer polls.
+     * @todo make this adaptive? */
+    uint32_t    cNsIdealPollInterval;
+
+    /** The current instruction number in a native TB.
+     * This is set by code that may trigger an unexpected TB exit (throw/longjmp)
+     * and will be picked up by the TB execution loop. Only used when
+     * IEMNATIVE_WITH_INSTRUCTION_COUNTING is defined. */
+    uint8_t     idxTbCurInstr;
+    /** @} */
+
+    /** @name Recompilation
+     * @{ */
     /** Whether we need to check the opcode bytes for the current instruction.
      * This is set by a previous instruction if it modified memory or similar. */
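The fFlags byte and IEMTHREADEDCALLENTRY_F_JUMP_TARGET above reuse the previously unused padding byte of the threaded call entry to mark which entries may legally be jumped to from elsewhere in the same TB (per IEM_WITH_INTRA_TB_JUMPS, currently only the first call). Below is a minimal sketch of the kind of check a dispatcher could make before taking such a jump; the struct and helper are simplified stand-ins for the real IEMTHRDEDCALLENTRY handling, not VirtualBox code:

    #include <stdint.h>

    /* Hypothetical, simplified call entry; stands in for IEMTHRDEDCALLENTRY
       and elides the function index, TB-lookup byte and parameters. */
    typedef struct TBCALLENTRY
    {
        uint8_t fFlags;                     /* TBCALLENTRY_F_XXX */
        /* ... */
    } TBCALLENTRY;
    #define TBCALLENTRY_F_JUMP_TARGET UINT8_C(0x01)

    /* Returns the entry index to continue at, or UINT32_MAX to reject the jump. */
    static uint32_t tbValidateIntraTbJump(TBCALLENTRY const *paCalls, uint32_t cCalls, uint32_t idxTarget)
    {
        if (   idxTarget < cCalls
            && (paCalls[idxTarget].fFlags & TBCALLENTRY_F_JUMP_TARGET))
            return idxTarget;               /* only explicitly flagged entries are valid targets */
        return UINT32_MAX;                  /* anything else: end the TB instead of looping */
    }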
@@ -2182 +2217 @@
     /** Whether to end the current TB. */
     bool        fEndTb;
+    /** Indicates that the current instruction is an STI.  This is set by the
+     * iemCImpl_sti code and subsequently cleared by the recompiler. */
+    bool        fTbCurInstrIsSti;
+    /** Spaced reserved for recompiler data / alignment. */
+    bool        afRecompilerStuff1[1];
     /** Number of instructions before we need emit an IRQ check call again.
      * This helps making sure we don't execute too long w/o checking for
@@ -2189 +2229 @@
      * fTbCurInstrIsSti. */
     uint8_t     cInstrTillIrqCheck;
-    /** Indicates that the current instruction is an STI.  This is set by the
-     * iemCImpl_sti code and subsequently cleared by the recompiler. */
-    bool        fTbCurInstrIsSti;
+    /** The index of the last CheckIrq call during threaded recompilation. */
+    uint16_t    idxLastCheckIrqCallNo;
     /** The size of the IEMTB::pabOpcodes allocation in pThrdCompileTbR3. */
     uint16_t    cbOpcodesAllocated;
-    /** The current instruction number in a native TB.
-     * This is set by code that may trigger an unexpected TB exit (throw/longjmp)
-     * and will be picked up by the TB execution loop. Only used when
-     * IEMNATIVE_WITH_INSTRUCTION_COUNTING is defined. */
-    uint8_t     idxTbCurInstr;
-    /** Spaced reserved for recompiler data / alignment. */
-    bool        afRecompilerStuff1[3];
-    /** The virtual sync time at the last timer poll call. */
-    uint32_t    msRecompilerPollNow;
     /** The IEMTB::cUsed value when to attempt native recompilation of a TB. */
     uint32_t    uTbNativeRecompileAtUsedCount;
@@ -2212 +2242 @@
      * currently not up to date in EFLAGS. */
     uint32_t    fSkippingEFlags;
+    /** Spaced reserved for recompiler data / alignment. */
+    uint32_t    u32RecompilerStuff2;
+#if 0 /* unused */
     /** Previous GCPhysInstrBuf value - only valid if fTbCrossedPage is set. */
     RTGCPHYS    GCPhysInstrBufPrev;
+#endif
+
+    /** Fixed TB used for threaded recompilation.
+     * This is allocated once with maxed-out sizes and re-used afterwards. */
+    R3PTRTYPE(PIEMTB)   pThrdCompileTbR3;
     /** Pointer to the ring-3 TB allocator for this EMT. */
     R3PTRTYPE(PIEMTBALLOCATOR) pTbAllocatorR3;
@@ -2222 +2260 @@
     /** Dummy entry for ppTbLookupEntryR3. */
     R3PTRTYPE(PIEMTB)   pTbLookupEntryDummyR3;
+    /** @} */
 
     /** Dummy TLB entry used for accesses to pages with databreakpoints. */
@@ -2230 +2269 @@
     /** Statistics: Times BltIn_CheckIrq breaks out of the TB. */
     STAMCOUNTER StatCheckIrqBreaks;
+    /** Statistics: Times BltIn_CheckTimers breaks direct linking TBs. */
+    STAMCOUNTER StatCheckTimersBreaks;
     /** Statistics: Times BltIn_CheckMode breaks out of the TB. */
     STAMCOUNTER StatCheckModeBreaks;
@@ -2240 +2281 @@
     /** Statistics: Times a jump or page crossing required a TB with CS.LIM checking. */
     STAMCOUNTER StatCheckNeedCsLimChecking;
-    /** Statistics: Times a loop was detected within a TB. .*/
+    /** Statistics: Times a loop was detected within a TB. */
     STAMCOUNTER StatTbLoopInTbDetected;
+    /** Statistics: Times a loop back to the start of the TB was detected. */
+    STAMCOUNTER StatTbLoopFullTbDetected;
     /** Exec memory allocator statistics: Number of times allocaintg executable memory failed. */
     STAMCOUNTER StatNativeExecMemInstrBufAllocFailed;
@@ -2421 +2464 @@
     STAMCOUNTER StatNativeTbExitObsoleteTb;
 
+    /** Native recompiler: Number of full TB loops (jumps from end to start). */
+    STAMCOUNTER StatNativeTbExitLoopFullTb;
+
     /** Native recompiler: Failure situations with direct linking scenario \#1.
      * Counter with StatNativeTbExitReturnBreak. Not in release builds.
@@ -2448 +2494 @@
 
 #ifdef IEM_WITH_TLB_TRACE
-    uint64_t    au64Padding[2];
+    uint64_t    au64Padding[6];
 #else
-    uint64_t    au64Padding[4];
-#endif
-    /** @} */
+    //uint64_t  au64Padding[1];
+#endif
 
 #ifdef IEM_WITH_TLB_TRACE
@@ -2491 +2536 @@
 AssertCompileMemberAlignment(IEMCPU, aMemMappingLocks, 16);
 AssertCompileMemberAlignment(IEMCPU, aBounceBuffers, 64);
+AssertCompileMemberAlignment(IEMCPU, pCurTbR3, 64);
 AssertCompileMemberAlignment(IEMCPU, DataTlb, 64);
 AssertCompileMemberAlignment(IEMCPU, CodeTlb, 64);
@@ -6742 +6788 @@
 extern const PFNIEMOP g_apfnIemThreadedRecompilerVecMap3[1024];
 
+DECLHIDDEN(int)     iemPollTimers(PVMCC pVM, PVMCPUCC pVCpu) RT_NOEXCEPT;
+
 DECLCALLBACK(int)   iemTbInit(PVMCC pVM, uint32_t cInitialTbs, uint32_t cMaxTbs,
                               uint64_t cbInitialExec, uint64_t cbMaxExec, uint32_t cbChunkExec);
@@ -6777 +6825 @@
 
 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckIrq);
+IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimers);
+IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckTimersAndIrq);
 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckMode);
 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckHwInstrBps);
@@ -6806 +6856 @@
 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlb);
 IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_CheckOpcodesOnNewPageLoadingTlbConsiderCsLim);
+
+IEM_DECL_IEMTHREADEDFUNC_PROTO(iemThreadedFunc_BltIn_Jump);
 
 bool iemThreadedCompileEmitIrqCheckBefore(PVMCPUCC pVCpu, PIEMTB pTb);
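Taken together, iemPollTimers() and the new cIrqChecksTillNextPoll / cIrqChecksTillNextPollPrev / cNsIdealPollInterval fields point at a countdown scheme: the cheap IRQ/FF checks decrement a counter and only invoke the comparatively expensive timer poll when it reaches zero, rescaling the count so roughly one ideal interval elapses between polls. A hedged sketch of such bookkeeping follows; it is illustrative only, not the actual iemPollTimers() implementation:

    #include <stdint.h>

    /* Returns nonzero when the caller should poll the timers now.  The four
       state pointers correspond loosely to the new IEMCPU fields. */
    static int tbMaybePollTimers(uint64_t nsNow, uint32_t *pcChecksLeft, uint32_t *pcChecksPrev,
                                 uint64_t *pnsLastPoll, uint32_t cNsIdealInterval)
    {
        if (--*pcChecksLeft > 0)
            return 0;                           /* not time to poll yet */

        uint64_t const cNsElapsed = nsNow - *pnsLastPoll;
        *pnsLastPoll = nsNow;

        /* Rescale so roughly cNsIdealInterval elapses between two polls. */
        uint32_t cChecks = *pcChecksPrev;
        if (cNsElapsed > cNsIdealInterval && cChecks > 1)
            cChecks /= 2;                       /* polled too late: check more often */
        else if (cNsElapsed < cNsIdealInterval / 2 && cChecks < UINT32_MAX / 2)
            cChecks *= 2;                       /* polled too early: back off */
        *pcChecksPrev = cChecks;
        *pcChecksLeft = cChecks;
        return 1;
    }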
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
--- IEMN8veRecompiler.h (r105490)
+++ IEMN8veRecompiler.h (r105673)
@@ -491 +491 @@
     kIemNativeLabelType_LastTbExit = kIemNativeLabelType_Return,
 
+    /** Loop-jump target. */
+    kIemNativeLabelType_LoopJumpTarget,
+
     /*
      * Labels with data, potentially multiple instances per TB:
@@ -1449 +1452 @@
     /** Condition sequence number (for generating unique labels). */
     uint16_t    uCondSeqNo;
-    /** Check IRQ seq eunce number (for generating unique labels). */
+    /** Check IRQ sequence number (for generating unique labels). */
     uint16_t    uCheckIrqSeqNo;
     /** TLB load sequence number (for generating unique labels). */
@@ -1632 +1635 @@
                                                uint32_t offWhere = UINT32_MAX, uint16_t uData = 0);
 DECL_HIDDEN_THROW(void)     iemNativeLabelDefine(PIEMRECOMPILERSTATE pReNative, uint32_t idxLabel, uint32_t offWhere);
+DECLHIDDEN(uint32_t)        iemNativeLabelFind(PIEMRECOMPILERSTATE pReNative, IEMNATIVELABELTYPE enmType,
+                                               uint32_t offWhere = UINT32_MAX, uint16_t uData = 0) RT_NOEXCEPT;
 DECL_HIDDEN_THROW(void)     iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
                                               IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0);
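The non-throwing iemNativeLabelFind() complements the existing iemNativeLabelCreate(): when emitting the backward branch for a detected loop, the recompiler can look up an already-defined kIemNativeLabelType_LoopJumpTarget label instead of creating a new one. A hedged usage sketch; the UINT32_MAX not-found convention and the iemNativeEmitJmpToLabel() emitter are assumptions based on the surrounding API, not confirmed by this diff:

    /* Jump back to the loop target if one was defined earlier in this TB. */
    uint32_t const idxLoopLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_LoopJumpTarget);
    if (idxLoopLabel != UINT32_MAX)     /* assumed not-found sentinel */
        off = iemNativeEmitJmpToLabel(pReNative, off, idxLoopLabel);
    /* else: no loop target in this TB -- emit a regular TB exit instead. */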
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
--- IEMN8veRecompilerEmit.h (r105491)
+++ IEMN8veRecompilerEmit.h (r105673)
@@ -503 +503 @@
 }
 
+/**
+ * Special variant of iemNativeEmitGprByVCpuDisp for accessing the VM structure.
+ */
+DECL_FORCE_INLINE(uint32_t)
+iemNativeEmitGprByVCpuSignedDisp(uint8_t *pbCodeBuf, uint32_t off, uint8_t iGprReg, int32_t offVCpu)
+{
+    Assert(offVCpu < 0);
+    if (offVCpu < 128 && offVCpu >= -128)
+    {
+        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_MEM1, iGprReg & 7, IEMNATIVE_REG_FIXED_PVMCPU);
+        pbCodeBuf[off++] = (uint8_t)(int8_t)offVCpu;
+    }
+    else
+    {
+        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_MEM4, iGprReg & 7, IEMNATIVE_REG_FIXED_PVMCPU);
+        pbCodeBuf[off++] = RT_BYTE1((uint32_t)offVCpu);
+        pbCodeBuf[off++] = RT_BYTE2((uint32_t)offVCpu);
+        pbCodeBuf[off++] = RT_BYTE3((uint32_t)offVCpu);
+        pbCodeBuf[off++] = RT_BYTE4((uint32_t)offVCpu);
+    }
+    return off;
+}
+
 #elif defined(RT_ARCH_ARM64)
 
@@ -585 +608 @@
 }
 
+
+/**
+ * Special variant of iemNativeEmitGprByVCpuLdStEx for accessing the VM
+ * structure.
+ *
+ * @note Loads can use @a iGprReg for large offsets, stores requires a temporary
+ *       registers (@a iGprTmp).
+ * @note DON'T try this with prefetch.
+ */
+DECL_FORCE_INLINE_THROW(uint32_t)
+iemNativeEmitGprBySignedVCpuLdStEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprReg, int32_t offVCpu,
+                                   ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData, uint8_t iGprTmp = UINT8_MAX)
+{
+    Assert(offVCpu < 0);
+    Assert((uint32_t)-offVCpu < RT_BIT_32(28)); /* we should be way out of range for problematic sign extending issues. */
+    Assert(!((uint32_t)-offVCpu & (cbData - 1)));
+
+    /*
+     * For negative offsets we need to use put the displacement in a register
+     * as the two variants with signed immediates will either post or pre
+     * increment the base address register.
+     */
+    if (!ARMV8A64INSTRLDSTTYPE_IS_STORE(enmOperation) || iGprTmp != UINT8_MAX)
+    {
+        uint8_t const idxIndexReg = !ARMV8A64INSTRLDSTTYPE_IS_STORE(enmOperation) ? iGprReg : IEMNATIVE_REG_FIXED_TMP0;
+        off = iemNativeEmitLoadGpr32ImmEx(pCodeBuf, off, idxIndexReg, offVCpu / (int32_t)cbData);
+        pCodeBuf[off++] = Armv8A64MkInstrStLdRegIdx(enmOperation, iGprReg, IEMNATIVE_REG_FIXED_PVMCPU, idxIndexReg,
+                                                    kArmv8A64InstrLdStExtend_Sxtw, cbData > 1 /*fShifted*/);
+    }
+    else
+# ifdef IEM_WITH_THROW_CATCH
+        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IEM_IPE_9));
+# else
+        AssertReleaseFailedStmt(off = UINT32_MAX);
+# endif
+
+    return off;
+}
+
+/**
+ * Special variant of iemNativeEmitGprByVCpuLdSt for accessing the VM structure.
+ */
+DECL_FORCE_INLINE_THROW(uint32_t)
+iemNativeEmitGprBySignedVCpuLdSt(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprReg,
+                                 int32_t offVCpu, ARMV8A64INSTRLDSTTYPE enmOperation, unsigned cbData)
+{
+    off = iemNativeEmitGprBySignedVCpuLdStEx(iemNativeInstrBufEnsure(pReNative, off, 2 + 1), off, iGprReg,
+                                             offVCpu, enmOperation, cbData, IEMNATIVE_REG_FIXED_TMP0);
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
 #endif /* RT_ARCH_ARM64 */
 
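iemNativeEmitGprByVCpuSignedDisp mirrors the unsigned helper: mod=01 with a single signed displacement byte when the offset fits in [-128,127], otherwise mod=10 with a 32-bit little-endian displacement. A standalone example of the resulting ModRM/displacement bytes; register 0 (RAX) and base register 3 are assumed purely for the arithmetic (the real IEMNATIVE_REG_FIXED_PVMCPU value is defined elsewhere):

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        int32_t offVCpu = -64;                      /* fits in int8_t -> mod=01, one disp byte */
        uint8_t modrm8  = (uint8_t)(0x40 | (0 << 3) | 3);
        printf("disp8:  %02x %02x\n", modrm8, (uint8_t)(int8_t)offVCpu);        /* 43 c0 */

        offVCpu = -4096;                            /* needs mod=10 and 4 LE disp bytes */
        uint8_t modrm32 = (uint8_t)(0x80 | (0 << 3) | 3);
        printf("disp32: %02x %02x %02x %02x %02x\n", modrm32, (uint8_t)offVCpu,
               (uint8_t)(offVCpu >> 8), (uint8_t)(offVCpu >> 16), (uint8_t)(offVCpu >> 24));
        /* -> 83 00 f0 ff ff */
        return 0;
    }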
@@ -601 +676 @@
     pCodeBuf[off++] = X86_OP_REX_W | X86_OP_REX_R;
     pCodeBuf[off++] = 0x8b;
-    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, iGpr, offVCpu);
+    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, iGpr, offVCpu);
 
 #elif defined(RT_ARCH_ARM64)
@@ -773 +848 @@
 #endif
     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+}
+
+
+/**
+ * Emits a store of a GPR value to a 32-bit VCpu field.
+ *
+ * @note Limited range on ARM64.
+ */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitStoreGprToVCpuU32Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr, uint32_t offVCpu)
+{
+#ifdef RT_ARCH_AMD64
+    /* mov mem32, reg32 */
+    if (iGpr >= 8)
+        pCodeBuf[off++] = X86_OP_REX_R;
+    pCodeBuf[off++] = 0x89;
+    off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, iGpr, offVCpu);
+
+#elif defined(RT_ARCH_ARM64)
+    off = iemNativeEmitGprByVCpuLdStEx(pCodeBuf, off, iGpr, offVCpu, kArmv8A64InstrLdStType_St_Word, sizeof(uint32_t));
+
+#else
+# error "port me"
+#endif
     return off;
 }
@@ -5436 +5536 @@
 {
 #if defined(RT_ARCH_AMD64)
-    /* and Ev, imm */
+    /* xor Ev, imm */
     if (iGprDst >= 8)
         pCodeBuf[off++] = X86_OP_REX_B;
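The new iemNativeEmitStoreGprToVCpuU32Ex follows the header's Ex-variant convention: the caller pre-sizes the instruction buffer, emits, then flushes. A sketch of the assumed usage pattern, storing into one of the new 32-bit poll fields; the field choice is illustrative only:

    /* 7 bytes covers the AMD64 worst case: REX + opcode + ModRM + disp32. */
    PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7);
    off = iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxRegValue,
                                           RT_UOFFSETOF(VMCPUCC, iem.s.msRecompilerPollNow));
    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);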
@@ -6130 +6230 @@
  */
 DECL_INLINE_THROW(uint32_t)
+iemNativeEmitCmpGprWithImmEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprLeft,
+                             uint64_t uImm, uint8_t idxTmpReg = UINT8_MAX)
+{
+#ifdef RT_ARCH_AMD64
+    if ((int8_t)uImm == (int64_t)uImm)
+    {
+        /* cmp Ev, Ib */
+        pCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
+        pCodeBuf[off++] = 0x83;
+        pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 7, iGprLeft & 7);
+        pCodeBuf[off++] = (uint8_t)uImm;
+        return off;
+    }
+    if ((int32_t)uImm == (int64_t)uImm)
+    {
+        /* cmp Ev, imm */
+        pCodeBuf[off++] = X86_OP_REX_W | (iGprLeft >= 8 ? X86_OP_REX_B : 0);
+        pCodeBuf[off++] = 0x81;
+        pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 7, iGprLeft & 7);
+        pCodeBuf[off++] = RT_BYTE1(uImm);
+        pCodeBuf[off++] = RT_BYTE2(uImm);
+        pCodeBuf[off++] = RT_BYTE3(uImm);
+        pCodeBuf[off++] = RT_BYTE4(uImm);
+        return off;
+    }
+
+#elif defined(RT_ARCH_ARM64)
+    if (uImm < _4K)
+    {
+        pCodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm,
+                                                      true /*64Bit*/, true /*fSetFlags*/);
+        return off;
+    }
+    if ((uImm & ~(uint64_t)0xfff000) == 0)
+    {
+        pCodeBuf[off++] = Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iGprLeft, (uint32_t)uImm >> 12,
+                                                      true /*64Bit*/, true /*fSetFlags*/, true /*fShift12*/);
+        return off;
+    }
+
+#else
+# error "Port me!"
+#endif
+
+    if (idxTmpReg != UINT8_MAX)
+    {
+        /* Use temporary register for the immediate. */
+        off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, idxTmpReg, uImm);
+        off = iemNativeEmitCmpGprWithGprEx(pCodeBuf, off, iGprLeft, idxTmpReg);
+    }
+    else
+# ifdef IEM_WITH_THROW_CATCH
+        AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IEM_IPE_9));
+# else
+        AssertReleaseFailedStmt(off = UINT32_MAX);
+# endif
+
+    return off;
+}
+
+
+/**
+ * Emits a compare of a 64-bit GPR with a constant value, settings status
+ * flags/whatever for use with conditional instruction.
+ */
+DECL_INLINE_THROW(uint32_t)
 iemNativeEmitCmpGprWithImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint64_t uImm)
 {
 #ifdef RT_ARCH_AMD64
-    if (uImm <= UINT32_C(0xff))
+    if ((int8_t)uImm == (int64_t)uImm)
     {
         /* cmp Ev, Ib */
@@ -6142 +6308 @@
         pbCodeBuf[off++] = (uint8_t)uImm;
     }
-    else if ((int64_t)uImm == (int32_t)uImm)
+    else if ((int32_t)uImm == (int64_t)uImm)
     {
         /* cmp Ev, imm */
@@ -6974 +7140 @@
 
 #elif defined(RT_ARCH_ARM64)
+    int32_t const offDisp = offTarget - offFixup;
     uint32_t * const pu32CodeBuf = pReNative->pInstrBuf;
     if ((pu32CodeBuf[offFixup] & UINT32_C(0xff000000)) == UINT32_C(0x54000000))
     {
         /* B.COND + BC.COND */
-        int32_t const offDisp = offTarget - offFixup;
         Assert(offDisp >= -262144 && offDisp < 262144);
         pu32CodeBuf[offFixup] = (pu32CodeBuf[offFixup] & UINT32_C(0xff00001f))
                               | (((uint32_t)offDisp & UINT32_C(0x0007ffff)) << 5);
     }
-    else
+    else if ((pu32CodeBuf[offFixup] & UINT32_C(0xfc000000)) == UINT32_C(0x14000000))
     {
         /* B imm26 */
-        Assert((pu32CodeBuf[offFixup] & UINT32_C(0xfc000000)) == UINT32_C(0x14000000));
-        int32_t const offDisp = offTarget - offFixup;
         Assert(offDisp >= -33554432 && offDisp < 33554432);
         pu32CodeBuf[offFixup] = (pu32CodeBuf[offFixup] & UINT32_C(0xfc000000))
                               | ((uint32_t)offDisp & UINT32_C(0x03ffffff));
+    }
+    else
+    {
+        /* CBZ / CBNZ reg, imm19 */
+        Assert((pu32CodeBuf[offFixup] & UINT32_C(0x7e000000)) == UINT32_C(0x34000000));
+        Assert(offDisp >= -1048576 && offDisp < 1048576);
+        pu32CodeBuf[offFixup] = (pu32CodeBuf[offFixup] & UINT32_C(0xff00001f))
+                              | (((uint32_t)offDisp << 5) & UINT32_C(0x00ffffe0));
+
     }
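The rewrite of iemNativeEmitCmpGprWithImm's first test is a correctness fix, not just style: "cmp Ev, Ib" (0x83 /7) sign-extends its immediate, so the old uImm <= 0xff condition would have encoded values 0x80..0xff as negative 64-bit constants; the new (int8_t)uImm == (int64_t)uImm test takes the short form only when sign-extension reproduces the value. A small program mirroring the AMD64 form selection (illustrative, not the emitter itself):

    #include <stdint.h>
    #include <stdio.h>

    static const char *cmpImmForm(uint64_t uImm, int fHaveTmpReg)
    {
        if ((int8_t)uImm == (int64_t)uImm)
            return "cmp Ev, Ib    (0x83 /7, sign-extended imm8)";
        if ((int32_t)uImm == (int64_t)uImm)
            return "cmp Ev, imm32 (0x81 /7, sign-extended imm32)";
        return fHaveTmpReg ? "mov tmp, imm64; cmp Ev, tmp" : "no encoding -> assert/longjmp";
    }

    int main(void)
    {
        printf("%s\n", cmpImmForm(0x7f, 1));                  /* short form */
        printf("%s\n", cmpImmForm(0xff, 1));                  /* imm32: 0xff would sign-extend to -1 */
        printf("%s\n", cmpImmForm(0x123456789abcdef0ULL, 1)); /* needs the temporary register */
        return 0;
    }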
@@ -8451 +8624 @@
 
 
+/**
+ * Emits code that exits the current TB with @a enmExitReason if @a iGprSrc is zero.
+ *
+ * The operand size is given by @a f64Bit.
+ */
+DECL_FORCE_INLINE_THROW(uint32_t)
+iemNativeEmitTestIfGprIsZeroAndTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
+                                        uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
+{
+    Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
+#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+    /* test reg32,reg32 / test reg64,reg64 */
+    if (f64Bit)
+        pCodeBuf[off++] = X86_OP_REX_W | (iGprSrc < 8 ? 0 : X86_OP_REX_R | X86_OP_REX_B);
+    else if (iGprSrc >= 8)
+        pCodeBuf[off++] = X86_OP_REX_R | X86_OP_REX_B;
+    pCodeBuf[off++] = 0x85;
+    pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, iGprSrc & 7, iGprSrc & 7);
+
+    /* jnz idxLabel */
+    return iemNativeEmitJccTbExitEx(pReNative, pCodeBuf, off, enmExitReason, kIemNativeInstrCond_e);
+
+#else
+    /* ARM64 doesn't have the necessary jump range, so we jump via local label
+       just like when we keep everything local. */
+    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+    return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, iGprSrc,
+                                                                f64Bit, false /*fJmpIfNotZero*/, idxLabel);
+#endif
+}
+
+
+/**
+ * Emits code to exit the current TB with the given reason @a enmExitReason if @a iGprSrc is zero.
+ *
+ * The operand size is given by @a f64Bit.
+ */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitTestIfGprIsZeroAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                      uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
+{
+#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+    off = iemNativeEmitTestIfGprIsZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
+                                                  off, iGprSrc, f64Bit, enmExitReason);
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
+#else
+    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+    return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, false /*fJmpIfNotZero*/, idxLabel);
+#endif
+}
+
+
 #ifdef IEMNATIVE_WITH_SIMD_REG_ALLOCATOR
 /*********************************************************************************************************************************
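For reference, the REX/ModRM arithmetic in iemNativeEmitTestIfGprIsZeroAndTbExitEx yields the usual "test reg,reg" encodings. A standalone example reproducing the byte sequences for a low and a high register; the output can be checked with any disassembler:

    #include <stdint.h>
    #include <stdio.h>

    /* Re-derives the test reg,reg bytes the emitter above writes; illustrative only. */
    static void emitTestRegReg(uint8_t iGprSrc, int f64Bit)
    {
        uint8_t ab[3]; unsigned cb = 0;
        if (f64Bit)
            ab[cb++] = 0x48 | (iGprSrc < 8 ? 0 : 0x04 | 0x01);  /* REX.W [+ REX.R + REX.B] */
        else if (iGprSrc >= 8)
            ab[cb++] = 0x40 | 0x04 | 0x01;                      /* REX.R + REX.B */
        ab[cb++] = 0x85;                                        /* TEST Ev,Gv */
        ab[cb++] = (uint8_t)(0xC0 | ((iGprSrc & 7) << 3) | (iGprSrc & 7));
        for (unsigned i = 0; i < cb; i++)
            printf("%02x ", ab[i]);
        printf("\n");
    }

    int main(void)
    {
        emitTestRegReg(1 /*RCX*/, 1);   /* -> 48 85 c9 */
        emitTestRegReg(9 /*R9*/,  0);   /* -> 45 85 c9 */
        return 0;
    }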