Changeset 106125 in vbox for trunk/src/VBox/VMM
- Timestamp: Sep 23, 2024 10:42:27 PM (4 months ago)
- Location: trunk/src/VBox/VMM
- Files: 6 edited
trunk/src/VBox/VMM/VMMAll/IEMAllN8veExecMem.cpp
r106061 → r106125

     /** Pointer to the readable/executable view of the memory chunk. */
     void *pvChunkRx;
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     /** Pointer to the context structure detailing the per chunk common code. */
     PCIEMNATIVEPERCHUNKCTX pCtx;
-#endif
 #ifdef IN_RING3
     /**
…

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
 /**
  * For getting the per-chunk context detailing common code for a TB.
…
     return NULL;
 }
-#endif

…
     void *pvChunkRx = (void *)AddrRemapped;
 #else
-# if defined(IN_RING3) || defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE)
     int rc = VINF_SUCCESS;
-# endif
     void *pvChunkRx = pvChunk;
 #endif
…

     /* If there is a chunk context init callback call it. */
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-    pExecMemAllocator->aChunks[idxChunk].pCtx = iemNativeRecompileAttachExecMemChunkCtx(pVCpu, idxChunk);
-    if (pExecMemAllocator->aChunks[idxChunk].pCtx)
-#endif
-    {
+    rc = iemNativeRecompileAttachExecMemChunkCtx(pVCpu, idxChunk, &pExecMemAllocator->aChunks[idxChunk].pCtx);
 #ifdef IN_RING3
-        /*
-         * Initialize the unwind information (this cannot really fail atm).
-         * (This sets pvUnwindInfo.)
-         */
+    /*
+     * Initialize the unwind information (this cannot really fail atm).
+     * (This sets pvUnwindInfo.)
+     */
+    if (RT_SUCCESS(rc))
         rc = iemExecMemAllocatorInitAndRegisterUnwindInfoForChunk(pVCpu, pExecMemAllocator, pvChunkRx, idxChunk);
 #endif
-    }
-#if defined(IN_RING3) || defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE)
     if (RT_SUCCESS(rc))
     { /* likely */ }
…
         return rc;
     }
-#endif

     return VINF_SUCCESS;
…
         }
     }
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# if defined(RT_OS_AMD64)
+#if defined(RT_OS_AMD64)
     Assert(cbChunk <= _2G);
-# elif defined(RT_OS_ARM64)
+#elif defined(RT_OS_ARM64)
     if (cbChunk > _128M)
         cbChunk = _128M; /* Max relative branch distance is +/-2^(25+2) = +/-0x8000000 (134 217 728). */
-# endif
 #endif
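The ARM64 chunk-size cap above follows directly from the A64 branch encoding. Below is a small standalone C illustration (not VBox code; all names are invented for the example) of the arithmetic behind the 128 MiB limit mentioned in the comment:

    #include <stdint.h>
    #include <stdio.h>

    int main(void)
    {
        /* An A64 B instruction carries a signed 26-bit instruction offset,
           i.e. +/-2^25 instructions of 4 bytes each = +/-2^(25+2) bytes. */
        int64_t const cbBranchReach = (int64_t)1 << (25 + 2);   /* 0x8000000 = 134 217 728 */
        int64_t const cbChunkMax    = 128 * 1024 * 1024;        /* the _128M cap from the diff */

        printf("reach=+/-%lld bytes, chunk=%lld bytes, always reachable: %s\n",
               (long long)cbBranchReach, (long long)cbChunkMax,
               cbChunkMax <= cbBranchReach ? "yes" : "no");
        return 0;
    }

Since the chunk never exceeds the branch reach, any TB inside the chunk can jump to the per-chunk tail code with a single direct branch.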
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp
r106113 → r106125

     } while (0)

-# ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# define NEAR_JMP_SIZE 5
-# else
-# define NEAR_JMP_SIZE 6
-# endif
+# define NEAR_JMP_SIZE 5

 # define CHECK_OPCODES_CMP_JMP() /* cost: 7 bytes first time, then 2 bytes */ do { \
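With the per-chunk tail code now unconditional, the near jump budgeted by NEAR_JMP_SIZE is the plain 5-byte jmp rel32 (opcode 0xE9 plus a 32-bit displacement). A standalone sketch (not the VBox emitter; helper name invented) of that encoding, to show where the 5 comes from:

    #include <stdint.h>
    #include <string.h>

    /* Emit "jmp rel32" at pb, jumping to pbTarget; returns the instruction size (5 bytes). */
    static unsigned emitJmpRel32(uint8_t *pb, uint8_t const *pbTarget)
    {
        int32_t const offDisp = (int32_t)(pbTarget - (pb + 5)); /* rel32 counts from the end of the instruction */
        pb[0] = 0xe9;
        memcpy(&pb[1], &offDisp, sizeof(offDisp));
        return 5;
    }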
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r106123 → r106125

     pReNative->bmLabelTypes = 0;
     pReNative->cFixups = 0;
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     pReNative->cTbExitFixups = 0;
-#endif
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     pReNative->pDbgInfo->cEntries = 0;
…
     RTMemFree(pReNative->paLabels);
     RTMemFree(pReNative->paFixups);
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     RTMemFree(pReNative->paTbExitFixups);
-#endif
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     RTMemFree(pReNative->pDbgInfo);
…
     pReNative->paLabels = (PIEMNATIVELABEL)RTMemAllocZ(sizeof(IEMNATIVELABEL) * _8K / cFactor);
     pReNative->paFixups = (PIEMNATIVEFIXUP)RTMemAllocZ(sizeof(IEMNATIVEFIXUP) * _16K / cFactor);
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     pReNative->paTbExitFixups = (PIEMNATIVEEXITFIXUP)RTMemAllocZ(sizeof(IEMNATIVEEXITFIXUP) * _8K / cFactor);
-#endif
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     pReNative->pDbgInfo = (PIEMTBDBG)RTMemAllocZ(RT_UOFFSETOF_DYN(IEMTBDBG, aEntries[_16K / cFactor]));
…
     if (RT_LIKELY(   pReNative->pInstrBuf
                   && pReNative->paLabels
-                  && pReNative->paFixups)
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-                  && pReNative->paTbExitFixups
-#endif
+                  && pReNative->paFixups
+                  && pReNative->paTbExitFixups)
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
                   && pReNative->pDbgInfo
…
     pReNative->cLabelsAlloc = _8K / cFactor;
     pReNative->cFixupsAlloc = _16K / cFactor;
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     pReNative->cTbExitFixupsAlloc = _8K / cFactor;
-#endif
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     pReNative->cDbgInfoAlloc = _16K / cFactor;
…
 {
     Assert(uData == 0 || enmType >= kIemNativeLabelType_FirstWithMultipleInstances);
-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     Assert(enmType >= kIemNativeLabelType_LoopJumpTarget);
 #endif
…

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
 /**
  * Adds a fixup to the per chunk tail code.
…
     pReNative->cTbExitFixups = cTbExitFixups + 1;
 }
-#endif

…
 }

-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-
-/**
- * Worker for iemNativeEmitReturnBreakViaLookup.
- */
-static uint32_t iemNativeEmitViaLookupDoOne(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t offReturnBreak,
-                                            IEMNATIVELABELTYPE enmLabel, uintptr_t pfnHelper)
-{
-    uint32_t const idxLabel = iemNativeLabelFind(pReNative, enmLabel);
-    if (idxLabel != UINT32_MAX)
-    {
-        iemNativeLabelDefine(pReNative, idxLabel, off);
-        off = iemNativeEmitCoreViaLookupDoOne(pReNative, off, offReturnBreak, pfnHelper);
-    }
-    return off;
-}
-
-
-/**
- * Emits the code at the ReturnBreakViaLookup, ReturnBreakViaLookupWithIrq,
- * ReturnBreakViaLookupWithTlb and ReturnBreakViaLookupWithTlbAndIrq labels
- * (returns VINF_IEM_REEXEC_FINISH_WITH_FLAGS or jumps to the next TB).
- */
-static uint32_t iemNativeEmitReturnBreakViaLookup(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnBreakLabel)
-{
-    uint32_t const offReturnBreak = pReNative->paLabels[idxReturnBreakLabel].off;
-    Assert(offReturnBreak < off);
-
-    /*
-     * The lookup table index is in IEMNATIVE_CALL_ARG1_GREG for all.
-     * The GCPhysPc is in IEMNATIVE_CALL_ARG2_GREG for ReturnBreakViaLookupWithPc.
-     */
-    off = iemNativeEmitViaLookupDoOne(pReNative, off, offReturnBreak, kIemNativeLabelType_ReturnBreakViaLookup,
-                                      (uintptr_t)iemNativeHlpReturnBreakViaLookup<false /*a_fWithIrqCheck*/>);
-    off = iemNativeEmitViaLookupDoOne(pReNative, off, offReturnBreak, kIemNativeLabelType_ReturnBreakViaLookupWithIrq,
-                                      (uintptr_t)iemNativeHlpReturnBreakViaLookup<true /*a_fWithIrqCheck*/>);
-    off = iemNativeEmitViaLookupDoOne(pReNative, off, offReturnBreak, kIemNativeLabelType_ReturnBreakViaLookupWithTlb,
-                                      (uintptr_t)iemNativeHlpReturnBreakViaLookupWithTlb<false /*a_fWithIrqCheck*/>);
-    off = iemNativeEmitViaLookupDoOne(pReNative, off, offReturnBreak, kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq,
-                                      (uintptr_t)iemNativeHlpReturnBreakViaLookupWithTlb<true /*a_fWithIrqCheck*/>);
-    return off;
-}
-
-#endif /* !IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE */

 /**
…

-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-/**
- * Emits the code at the ReturnWithFlags label (returns VINF_IEM_REEXEC_FINISH_WITH_FLAGS).
- */
-static uint32_t iemNativeEmitReturnWithFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnLabel)
-{
-    uint32_t const idxLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_ReturnWithFlags);
-    if (idxLabel != UINT32_MAX)
-    {
-        iemNativeLabelDefine(pReNative, idxLabel, off);
-        /* set the return status */
-        off = iemNativeEmitCoreReturnWithFlags(pReNative, off);
-        /* jump back to the return sequence. */
-        off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
-    }
-    return off;
-}
-#endif
-
-
 /**
  * Emits the code at the ReturnBreakFF label (returns VINF_IEM_REEXEC_BREAK_FF).
…

-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-/**
- * Emits the code at the ReturnBreakFF label (returns VINF_IEM_REEXEC_BREAK_FF).
- */
-static uint32_t iemNativeEmitReturnBreakFF(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnLabel)
-{
-    uint32_t const idxLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_ReturnBreakFF);
-    if (idxLabel != UINT32_MAX)
-    {
-        iemNativeLabelDefine(pReNative, idxLabel, off);
-        /* set the return status */
-        off = iemNativeEmitCoreReturnBreakFF(pReNative, off);
-        /* jump back to the return sequence. */
-        off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
-    }
-    return off;
-}
-#endif
-
-
 /**
  * Emits the code at the ReturnBreak label (returns VINF_IEM_REEXEC_BREAK).
…
     return iemNativeEmitLoadGprImm64(pReNative, off, IEMNATIVE_CALL_RET_GREG, VINF_IEM_REEXEC_BREAK);
 }
-
-
-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-/**
- * Emits the code at the ReturnBreak label (returns VINF_IEM_REEXEC_BREAK).
- */
-static uint32_t iemNativeEmitReturnBreak(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnLabel)
-{
-    uint32_t const idxLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_ReturnBreak);
-    if (idxLabel != UINT32_MAX)
-    {
-        iemNativeLabelDefine(pReNative, idxLabel, off);
-        /* set the return status */
-        off = iemNativeEmitCoreReturnBreak(pReNative, off);
-        /* jump back to the return sequence. */
-        off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
-    }
-    return off;
-}
-#endif

…

-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-/**
- * Emits the RC fiddling code for handling non-zero return code or rcPassUp.
- */
-static uint32_t iemNativeEmitRcFiddling(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t idxReturnLabel)
-{
-    /*
-     * Generate the rc + rcPassUp fiddling code if needed.
-     */
-    uint32_t const idxLabel = iemNativeLabelFind(pReNative, kIemNativeLabelType_NonZeroRetOrPassUp);
-    if (idxLabel != UINT32_MAX)
-    {
-        iemNativeLabelDefine(pReNative, idxLabel, off);
-        off = iemNativeEmitCoreRcFiddling(pReNative, off);
-        off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
-    }
-    return off;
-}
-#endif
-
-
 /**
  * Emits a standard epilog.
…
     return off;
 }
-
-
-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-/**
- * Emits a standard epilog.
- */
-static uint32_t iemNativeEmitEpilog(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t *pidxReturnLabel)
-{
-    /*
-     * Define label for common return point.
-     */
-    *pidxReturnLabel = UINT32_MAX;
-    uint32_t const idxReturn = iemNativeLabelCreate(pReNative, kIemNativeLabelType_Return, off);
-    *pidxReturnLabel = idxReturn;
-
-    /*
-     * Emit the code.
-     */
-    return iemNativeEmitCoreEpilog(pReNative, off);
-}
-#endif

…
     STR_CASE_CMN(NeedCsLimChecking);
     STR_CASE_CMN(CheckBranchMiss);
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     STR_CASE_CMN(ReturnSuccess);
-#else
-    STR_CASE_CMN(Return);
-#endif
     STR_CASE_CMN(ReturnBreak);
     STR_CASE_CMN(ReturnBreakFF);
…
     PVMCPU pVCpu;
     PCIEMTB pTb;
-# ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     PCIEMNATIVEPERCHUNKCTX pCtx;
-# endif
 # ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     PCIEMTBDBG pDbgInfo;
…
 static const char *iemNativeDisasmGetSymbol(PIEMNATIVDISASMSYMCTX pSymCtx, uintptr_t uAddress, char *pszBuf, size_t cbBuf)
 {
-#if defined(IEMNATIVE_WITH_TB_DEBUG_INFO) || defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE)
     PCIEMTB const pTb = pSymCtx->pTb;
     uintptr_t const offNative = (uAddress - (uintptr_t)pTb->Native.paInstructions) / sizeof(IEMNATIVEINSTR);
     if (offNative <= pTb->Native.cInstructions)
     {
-# ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
         /*
          * Scan debug info for a matching label.
…
             }
         }
-# endif
-    }
-# ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
+#endif
+    }
     else
     {
…
             return iemNativeGetLabelName((IEMNATIVELABELTYPE)i, true /*fCommonCode*/);
     }
-# endif
-#endif
-    RT_NOREF(pSymCtx, uAddress, pszBuf, cbBuf);
+    RT_NOREF(pszBuf, cbBuf);
     return NULL;
 }
…
                 : (pTb->fFlags & IEM_F_MODE_CPUMODE_MASK) == IEMMODE_32BIT ? DISCPUMODE_32BIT
                 : DISCPUMODE_64BIT;
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
     IEMNATIVDISASMSYMCTX SymCtx = { pVCpu, pTb, iemExecMemGetTbChunkCtx(pVCpu, pTb), pDbgInfo };
-# else
+#else
     IEMNATIVDISASMSYMCTX SymCtx = { pVCpu, pTb, iemExecMemGetTbChunkCtx(pVCpu, pTb) };
-# endif
-#elif defined(IEMNATIVE_WITH_TB_DEBUG_INFO)
-    IEMNATIVDISASMSYMCTX SymCtx = { pVCpu, pTb, pDbgInfo };
-#else
-    IEMNATIVDISASMSYMCTX SymCtx = { pVCpu, pTb };
 #endif
 #if defined(RT_ARCH_AMD64) && !defined(VBOX_WITH_IEM_USING_CAPSTONE_DISASSEMBLER)
…

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-
 /** Emit alignment padding between labels / functions. */
 DECL_INLINE_THROW(uint32_t)
…
  * common code there.
  *
- * @returns Pointer to the chunk context start.
+ * @returns VBox status code.
  * @param   pVCpu       The cross context virtual CPU structure of the calling
  *                      thread.
  * @param   idxChunk    The index of the chunk being added and requiring a
  *                      common code context.
- */
-DECLHIDDEN(PCIEMNATIVEPERCHUNKCTX) iemNativeRecompileAttachExecMemChunkCtx(PVMCPU pVCpu, uint32_t idxChunk)
-{
+ * @param   ppCtx       Where to return the pointer to the chunk context start.
+ */
+DECLHIDDEN(int) iemNativeRecompileAttachExecMemChunkCtx(PVMCPU pVCpu, uint32_t idxChunk, PCIEMNATIVEPERCHUNKCTX *ppCtx)
+{
+    *ppCtx = NULL;
+
     /*
      * Allocate a new recompiler state (since we're likely to be called while
…
      */
     PIEMRECOMPILERSTATE pReNative = iemNativeInit(pVCpu, NULL);
-    AssertReturn(pReNative, NULL);
+    AssertReturn(pReNative, VERR_NO_MEMORY);

 # if defined(RT_ARCH_AMD64)
…
         Log(("iemNativeRecompileAttachExecMemChunkCtx: Caught %Rrc while recompiling!\n", rc));
         iemNativeTerm(pReNative);
-        return NULL;
+        return rc;
     }
     IEMNATIVE_CATCH_LONGJMP_END(pReNative);
…

     iemNativeTerm(pReNative);
-    return pCtx;
-}
-
-#endif /* IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE */
+    *ppCtx = pCtx;
+    return VINF_SUCCESS;
+}
+

 /**
…
     off = iemNativeRegFlushPendingWrites(pReNative, off);

-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-    /*
-     * Successful return, so clear the return register (eax, w0).
-     */
-    off = iemNativeEmitGprZero(pReNative, off, IEMNATIVE_CALL_RET_GREG);
-
-    /*
-     * Emit the epilog code.
-     */
-    uint32_t idxReturnLabel;
-    off = iemNativeEmitEpilog(pReNative, off, &idxReturnLabel);
-#else
     /*
      * Jump to the common per-chunk epilog code.
…
     //off = iemNativeEmitBrk(pReNative, off, 0x1227);
     off = iemNativeEmitTbExit(pReNative, off, kIemNativeLabelType_ReturnSuccess);
-#endif
-
-#ifndef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
+
     /*
-     * Generate special jump labels.
+     * Generate tail labels with jumps to the common per-chunk code on non-x86 hosts.
      */
-    off = iemNativeEmitRcFiddling(pReNative, off, idxReturnLabel);
-
-    bool const fReturnBreakViaLookup = RT_BOOL(   pReNative->bmLabelTypes
-                                               & (  RT_BIT_64(kIemNativeLabelType_ReturnBreakViaLookup)
-                                                  | RT_BIT_64(kIemNativeLabelType_ReturnBreakViaLookupWithIrq)
-                                                  | RT_BIT_64(kIemNativeLabelType_ReturnBreakViaLookupWithTlb)
-                                                  | RT_BIT_64(kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq)));
-    if (fReturnBreakViaLookup)
-    {
-        uint32_t const idxReturnBreakLabel = iemNativeLabelCreate(pReNative, kIemNativeLabelType_ReturnBreak);
-        off = iemNativeEmitReturnBreak(pReNative, off, idxReturnLabel);
-        off = iemNativeEmitReturnBreakViaLookup(pReNative, off, idxReturnBreakLabel);
-    }
-    else if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ReturnBreak))
-        off = iemNativeEmitReturnBreak(pReNative, off, idxReturnLabel);
-
-    if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ReturnBreakFF))
-        off = iemNativeEmitReturnBreakFF(pReNative, off, idxReturnLabel);
-
-    if (pReNative->bmLabelTypes & RT_BIT_64(kIemNativeLabelType_ReturnWithFlags))
-        off = iemNativeEmitReturnWithFlags(pReNative, off, idxReturnLabel);
-
-    /*
-     * Generate simple TB tail labels that just calls a help with a pVCpu
-     * arg and either return or longjmps/throws a non-zero status.
-     *
-     * The array entries must be ordered by enmLabel value so we can index
-     * using fTailLabels bit numbers.
-     */
-    typedef IEM_DECL_NATIVE_HLP_PTR(int, PFNIEMNATIVESIMPLETAILLABELCALL,(PVMCPUCC pVCpu));
-    static struct
-    {
-        IEMNATIVELABELTYPE enmLabel;
-        PFNIEMNATIVESIMPLETAILLABELCALL pfnCallback;
-    } const g_aSimpleTailLabels[] =
-    {
-        { kIemNativeLabelType_Invalid, NULL },
-        { kIemNativeLabelType_RaiseDe, iemNativeHlpExecRaiseDe },
-        { kIemNativeLabelType_RaiseUd, iemNativeHlpExecRaiseUd },
-        { kIemNativeLabelType_RaiseSseRelated, iemNativeHlpExecRaiseSseRelated },
-        { kIemNativeLabelType_RaiseAvxRelated, iemNativeHlpExecRaiseAvxRelated },
-        { kIemNativeLabelType_RaiseSseAvxFpRelated, iemNativeHlpExecRaiseSseAvxFpRelated },
-        { kIemNativeLabelType_RaiseNm, iemNativeHlpExecRaiseNm },
-        { kIemNativeLabelType_RaiseGp0, iemNativeHlpExecRaiseGp0 },
-        { kIemNativeLabelType_RaiseMf, iemNativeHlpExecRaiseMf },
-        { kIemNativeLabelType_RaiseXf, iemNativeHlpExecRaiseXf },
-        { kIemNativeLabelType_ObsoleteTb, iemNativeHlpObsoleteTb },
-        { kIemNativeLabelType_NeedCsLimChecking, iemNativeHlpNeedCsLimChecking },
-        { kIemNativeLabelType_CheckBranchMiss, iemNativeHlpCheckBranchMiss },
-    };
-
-    AssertCompile(RT_ELEMENTS(g_aSimpleTailLabels) == (unsigned)kIemNativeLabelType_LastSimple + 1U);
-    AssertCompile(kIemNativeLabelType_Invalid == 0);
-    uint64_t fTailLabels = pReNative->bmLabelTypes & (RT_BIT_64(kIemNativeLabelType_LastSimple + 1U) - 2U);
-    if (fTailLabels)
-    {
-        do
-        {
-            IEMNATIVELABELTYPE const enmLabel = (IEMNATIVELABELTYPE)(ASMBitFirstSetU64(fTailLabels) - 1U);
-            fTailLabels &= ~RT_BIT_64(enmLabel);
-            Assert(g_aSimpleTailLabels[enmLabel].enmLabel == enmLabel);
-
-            uint32_t const idxLabel = iemNativeLabelFind(pReNative, enmLabel);
-            Assert(idxLabel != UINT32_MAX);
-            if (idxLabel != UINT32_MAX)
-            {
-                iemNativeLabelDefine(pReNative, idxLabel, off);
-
-                /* int pfnCallback(PVMCPUCC pVCpu) */
-                off = iemNativeEmitLoadGprFromGpr(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, IEMNATIVE_REG_FIXED_PVMCPU);
-                off = iemNativeEmitCallImm(pReNative, off, (uintptr_t)g_aSimpleTailLabels[enmLabel].pfnCallback);
-
-                /* jump back to the return sequence. */
-                off = iemNativeEmitJmpToLabel(pReNative, off, idxReturnLabel);
-            }
-
-        } while (fTailLabels);
-    }
-
-#else /* IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE */
-    /*
-     * Generate tail labels with jumps to the common per-chunk code.
-     */
-# ifndef RT_ARCH_AMD64
+#ifndef RT_ARCH_AMD64
     Assert(!(pReNative->bmLabelTypes & (  RT_BIT_64(kIemNativeLabelType_ReturnSuccess)
                                         | RT_BIT_64(kIemNativeLabelType_Invalid) )));
…
         } while (fTailLabels);
     }
-# else
+#else
     Assert(!(pReNative->bmLabelTypes & (RT_BIT_64(kIemNativeLabelType_LastTbExit + 1) - 1U))); /* Should not be used! */
-# endif
-#endif /* IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE */
+#endif
 }
 IEMNATIVE_CATCH_LONGJMP_BEGIN(pReNative, rc);
…
      * Allocate executable memory, copy over the code we've generated.
      */
     PIEMTBALLOCATOR const pTbAllocator = pVCpu->iem.s.pTbAllocatorR3;
     if (pTbAllocator->pDelayedFreeHead)
         iemTbAllocatorProcessDelayedFrees(pVCpu, pVCpu->iem.s.pTbAllocatorR3);

-    PIEMNATIVEINSTR paFinalInstrBufRx = NULL;
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-    PCIEMNATIVEPERCHUNKCTX pCtx = NULL;
-    PIEMNATIVEINSTR const paFinalInstrBuf = iemExecMemAllocatorAlloc(pVCpu, off * sizeof(IEMNATIVEINSTR), pTb,
-                                                                     &paFinalInstrBufRx, &pCtx);
-
-#else
-    PIEMNATIVEINSTR const paFinalInstrBuf = iemExecMemAllocatorAlloc(pVCpu, off * sizeof(IEMNATIVEINSTR), pTb,
-                                                                     &paFinalInstrBufRx, NULL);
-#endif
+    PIEMNATIVEINSTR paFinalInstrBufRx = NULL;
+    PCIEMNATIVEPERCHUNKCTX pCtx = NULL;
+    PIEMNATIVEINSTR const paFinalInstrBuf = iemExecMemAllocatorAlloc(pVCpu, off * sizeof(IEMNATIVEINSTR), pTb,
+                                                                     &paFinalInstrBufRx, &pCtx);
+
     AssertReturn(paFinalInstrBuf, pTb);
     memcpy(paFinalInstrBuf, pReNative->pInstrBuf, off * sizeof(paFinalInstrBuf[0]));
…
     }

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     /*
      * Apply TB exit fixups.
…
         RTPTRUNION const Ptr = { &paFinalInstrBuf[paTbExitFixups[i].off] };

-# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
+#if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
         Assert(paTbExitFixups[i].off + 4 <= off);
         intptr_t const offDisp = pCtx->apExitLabels[paTbExitFixups[i].enmExitReason] - &paFinalInstrBufRx[paTbExitFixups[i].off + 4];
         *Ptr.pi32 = (int32_t)offDisp;

-# elif defined(RT_ARCH_ARM64)
+#elif defined(RT_ARCH_ARM64)
         intptr_t const offDisp = pCtx->apExitLabels[paTbExitFixups[i].enmExitReason] - &paFinalInstrBufRx[paTbExitFixups[i].off];
         Assert(offDisp >= -33554432 && offDisp < 33554432);
         *Ptr.pu32 = (*Ptr.pu32 & UINT32_C(0xfc000000)) | ((uint32_t)offDisp & UINT32_C(0x03ffffff));

-# else
-#  error "Port me!"
-# endif
-    }
-#endif
+#else
+# error "Port me!"
+#endif
+    }

     iemExecMemAllocatorReadyForUse(pVCpu, paFinalInstrBufRx, off * sizeof(IEMNATIVEINSTR));
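The TB-exit fixup loop at the end of this file's diff patches each recorded branch once the final read/execute addresses are known. Below is a simplified standalone C sketch of the same displacement arithmetic (buffer and parameter names are invented; the real code works on IEMNATIVEEXITFIXUP records and pCtx->apExitLabels):

    #include <assert.h>
    #include <stdint.h>
    #include <string.h>

    /* AMD64: the four bytes at the fixup offset hold a rel32 displacement measured
       from the end of the jump instruction (fixup offset + 4). */
    static void patchAmd64Rel32(uint8_t *pbCodeRw, uint8_t const *pbCodeRx, uint32_t offFixup, uint8_t const *pbExitLabelRx)
    {
        intptr_t const offDisp = pbExitLabelRx - &pbCodeRx[offFixup + 4];
        assert(offDisp >= INT32_MIN && offDisp <= INT32_MAX);
        int32_t const i32Disp = (int32_t)offDisp;
        memcpy(&pbCodeRw[offFixup], &i32Disp, sizeof(i32Disp));
    }

    /* ARM64: a B instruction keeps a signed 26-bit offset in units of 4-byte
       instructions, so the displacement must stay within +/-2^25 instructions
       (+/-128 MiB); pointer arithmetic on uint32_t* already yields instruction units. */
    static void patchArm64B(uint32_t *pu32CodeRw, uint32_t const *pu32CodeRx, uint32_t idxFixup, uint32_t const *pu32ExitLabelRx)
    {
        intptr_t const offDisp = pu32ExitLabelRx - &pu32CodeRx[idxFixup];
        assert(offDisp >= -33554432 && offDisp < 33554432);
        pu32CodeRw[idxFixup] = (pu32CodeRw[idxFixup] & UINT32_C(0xfc000000)) | ((uint32_t)offDisp & UINT32_C(0x03ffffff));
    }

Note the patch is written through the read/write mapping while the displacement is computed against the read/execute addresses, mirroring the paFinalInstrBuf / paFinalInstrBufRx split in the diff.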
trunk/src/VBox/VMM/include/IEMInternal.h
r106123 → r106125

 DECLASM(DECL_NO_RETURN(void)) iemNativeTbLongJmp(void *pvFramePointer, int rc) RT_NOEXCEPT;
 DECLHIDDEN(struct IEMNATIVEPERCHUNKCTX const *) iemExecMemGetTbChunkCtx(PVMCPU pVCpu, PCIEMTB pTb);
-DECLHIDDEN(struct IEMNATIVEPERCHUNKCTX const *) iemNativeRecompileAttachExecMemChunkCtx(PVMCPU pVCpu, uint32_t idxChunk);
+DECLHIDDEN(int) iemNativeRecompileAttachExecMemChunkCtx(PVMCPU pVCpu, uint32_t idxChunk, struct IEMNATIVEPERCHUNKCTX const **ppCtx);

 /** Packed 32-bit argument for iemCImpl_vpgather_worker_xx. */
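The prototype change above swaps a pointer return (NULL on failure) for a status-code return with an out parameter. A minimal standalone sketch of the new calling pattern (names simplified and invented; plain int stands in for the IPRT status codes):

    typedef struct CHUNKCTX CHUNKCTX;

    /* stand-in declaration for the real attach routine */
    int attachChunkCtx(void *pVCpu, unsigned idxChunk, CHUNKCTX const **ppCtx);

    int addChunk(void *pVCpu, unsigned idxChunk, CHUNKCTX const **ppChunkCtx)
    {
        *ppChunkCtx = (CHUNKCTX const *)0;
        int const rc = attachChunkCtx(pVCpu, idxChunk, ppChunkCtx);
        if (rc != 0)
            return rc;   /* the concrete failure reason is preserved for the caller */
        return 0;        /* success: *ppChunkCtx points at the per-chunk common code context */
    }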
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r106124 → r106125

 #if 1 || defined(DOXYGEN_RUNNING)
 # define IEMNATIVE_WITH_RECOMPILER_PROLOGUE_SINGLETON
-#endif
-
-/** @def IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
- * Enable this to use common epilogue and tail code for all TBs in a chunk. */
-#if 1 || defined(DOXYGEN_RUNNING)
-# define IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
 #endif

…
     kIemNativeLabelType_ReturnWithFlags,
     kIemNativeLabelType_NonZeroRetOrPassUp,
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     kIemNativeLabelType_ReturnSuccess, /**< Sets eax/w0 to zero and returns. */
-#else
-    kIemNativeLabelType_Return,
-#endif
     /** The last fixup for branches that can span almost the whole TB length.
      * @note Whether kIemNativeLabelType_Return needs to be one of these is
      *       a bit questionable, since nobody jumps to it except other tail code. */
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_ReturnSuccess,
-#else
-    kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_Return,
-#endif
     /** The last fixup for branches that exits the TB. */
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     kIemNativeLabelType_LastTbExit = kIemNativeLabelType_ReturnSuccess,
-#else
-    kIemNativeLabelType_LastTbExit = kIemNativeLabelType_Return,
-#endif

     /** Loop-jump target. */
…

+
 /** Native code generator fixup types. */
 typedef enum
…
 typedef IEMNATIVEFIXUP *PIEMNATIVEFIXUP;

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
+

 /** Native code generator fixup to per chunk TB tail code. */
…
 typedef const IEMNATIVEPERCHUNKCTX *PCIEMNATIVEPERCHUNKCTX;

-#endif /* IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE */

…
     PIEMNATIVEFIXUP paFixups;

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
     /** Actual number of fixups in paTbExitFixups. */
     uint32_t cTbExitFixups;
…
     /** Buffer used by the recompiler for recording fixups when generating code. */
     PIEMNATIVEEXITFIXUP paTbExitFixups;
-#endif

…
 DECL_HIDDEN_THROW(void) iemNativeAddFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, uint32_t idxLabel,
                                           IEMNATIVEFIXUPTYPE enmType, int8_t offAddend = 0);
-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-DECL_HIDDEN_THROW(void) iemNativeAddTbExitFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere, IEMNATIVELABELTYPE enmExitReason);
-#endif
+DECL_HIDDEN_THROW(void) iemNativeAddTbExitFixup(PIEMRECOMPILERSTATE pReNative, uint32_t offWhere,
+                                                IEMNATIVELABELTYPE enmExitReason);
 DECL_HIDDEN_THROW(PIEMNATIVEINSTR) iemNativeInstrBufEnsureSlow(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint32_t cInstrReq);
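The IEMNATIVEEXITFIXUP and IEMNATIVEPERCHUNKCTX declarations this header now exposes unconditionally boil down to a "remember where the branch is and which exit it wants" record. A simplified standalone sketch of that bookkeeping (types and names invented for illustration only):

    #include <stdint.h>

    typedef enum EXITREASON
    {
        EXITREASON_ReturnSuccess,
        EXITREASON_ReturnBreak,
        EXITREASON_RaiseGp0
        /* ... one entry per common tail-code label in the chunk ... */
    } EXITREASON;

    /* One record per branch that must land in the chunk's common tail code. */
    typedef struct EXITFIXUP
    {
        uint32_t   off;             /* instruction offset of the branch inside the TB */
        EXITREASON enmExitReason;   /* which per-chunk exit label it has to target */
    } EXITFIXUP;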
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
r106123 → r106125

     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));

-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     /* jcc rel32 */
     pCodeBuf[off++] = 0x0f;
…
     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# ifdef RT_ARCH_AMD64
+#ifdef RT_ARCH_AMD64
     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 6), off, enmExitReason, enmCond);
-# elif defined(RT_ARCH_ARM64)
+#elif defined(RT_ARCH_ARM64)
     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 2), off, enmExitReason, enmCond);
-# else
-#  error "Port me!"
-# endif
-    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-    return off;
-#else
-    return iemNativeEmitJccToNewLabel(pReNative, off, enmExitReason, 0 /*uData*/, enmCond);
-#endif
+#else
+# error "Port me!"
+#endif
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
 }
…
     iemNativeMarkCurCondBranchAsExiting(pReNative);

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# ifdef RT_ARCH_AMD64
+#ifdef RT_ARCH_AMD64
     /* jmp rel32 */
     pCodeBuf[off++] = 0xe9;
…
     pCodeBuf[off++] = 0xff;

-# elif defined(RT_ARCH_ARM64)
+#elif defined(RT_ARCH_ARM64)
     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
     pCodeBuf[off++] = Armv8A64MkInstrB(-1);

-# else
-#  error "Port me!"
-# endif
-    return off;
-
-#else
-    uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
-    return iemNativeEmitJmpToLabelEx(pReNative, pCodeBuf, off, idxLabel);
-#endif
+#else
+# error "Port me!"
+#endif
+    return off;
 }
…
     iemNativeMarkCurCondBranchAsExiting(pReNative);

-#ifdef IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE
-# ifdef RT_ARCH_AMD64
+#ifdef RT_ARCH_AMD64
     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
…
     pCodeBuf[off++] = 0xff;

-# elif defined(RT_ARCH_ARM64)
+#elif defined(RT_ARCH_ARM64)
     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
     pCodeBuf[off++] = Armv8A64MkInstrB(-1);

-# else
-#  error "Port me!"
-# endif
-    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-    return off;
-
-#else
-    return iemNativeEmitJmpToNewLabel(pReNative, off, enmExitReason);
-#endif
+#else
+# error "Port me!"
+#endif
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    return off;
 }
…
     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));

-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     Assert(iBitNo < 64);
     uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 5);
…
     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));

-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     /* test reg32,reg32 / test reg64,reg64 */
     if (f64Bit)
…
                                uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
 {
-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     off = iemNativeEmitTestIfGprIsNotZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
                                                      off, iGprSrc, f64Bit, enmExitReason);
…
 {
     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     /* test reg32,reg32 / test reg64,reg64 */
     if (f64Bit)
…
                             uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
 {
-#if defined(IEMNATIVE_WITH_RECOMPILER_PER_CHUNK_TAIL_CODE) && defined(RT_ARCH_AMD64)
+#if defined(RT_ARCH_AMD64)
     off = iemNativeEmitTestIfGprIsZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
                                                   off, iGprSrc, f64Bit, enmExitReason);
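The emitters in this header now always write a direct branch with a dummy displacement and record a TB-exit fixup; the displacement is filled in when the TB is copied into its chunk (see the fixup loop in IEMAllN8veRecompiler.cpp above). A standalone sketch of that pattern for the AMD64 case (helper name and buffer handling invented for the example):

    #include <stdint.h>

    void recordTbExitFixup(uint32_t offFixup, int enmExitReason);   /* stand-in for iemNativeAddTbExitFixup */

    /* Emit "jmp rel32" to a not-yet-known per-chunk exit label and record a fixup
       at the offset of the 32-bit displacement so it can be patched later. */
    static uint32_t emitJmpToExit(uint8_t *pbCodeBuf, uint32_t off, int enmExitReason)
    {
        pbCodeBuf[off++] = 0xe9;            /* jmp rel32 */
        recordTbExitFixup(off, enmExitReason);
        pbCodeBuf[off++] = 0xff;            /* placeholder displacement, patched at fixup time */
        pbCodeBuf[off++] = 0xff;
        pbCodeBuf[off++] = 0xff;
        pbCodeBuf[off++] = 0xff;
        return off;
    }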