Changeset 106180 in vbox for trunk/src/VBox/VMM/include
- Timestamp: Sep 30, 2024 1:51:48 PM
- svn:sync-xref-src-repo-rev: 164964
- Location: trunk/src/VBox/VMM/include
- Files: 2 edited
Legend:
- Unmodified: no prefix
- Added: '+' prefix
- Removed: '-' prefix
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r106127 → r106180

  # if 0 || defined(DOXYGEN_RUNNING)
  #  define IEMNATIVE_WITH_EFLAGS_POSTPONING
+ # endif
+ #endif
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+ # ifndef IEMNATIVE_WITH_EFLAGS_SKIPPING
+ #  error "IEMNATIVE_WITH_EFLAGS_POSTPONING requires IEMNATIVE_WITH_EFLAGS_SKIPPING at present"
  # endif
  #endif
…
   * Mask of registers the callee will not save and may trash. */
  #ifdef RT_ARCH_AMD64
- # define IEMNATIVE_CALL_RET_GREG
+ # define IEMNATIVE_CALL_RET_GREG        X86_GREG_xAX

  # ifdef RT_OS_WINDOWS
…
  # define IEMNATIVE_CALL_VOLATILE_NOTMP_GREG_MASK    IEMNATIVE_CALL_VOLATILE_GREG_MASK
  #endif
+
+ /** @def IEMNATIVE_CALL_NONVOLATILE_GREG_MASK
+  * The allocatable non-volatile general purpose register set. */
+ #define IEMNATIVE_CALL_NONVOLATILE_GREG_MASK \
+     (~IEMNATIVE_CALL_VOLATILE_GREG_MASK & ~IEMNATIVE_REG_FIXED_MASK & IEMNATIVE_HST_GREG_MASK)
  /** @} */
…
  {
      kIemNativeLabelType_Invalid = 0,
-     /*
-      * Labels w/o data, only one instance per TB - aka exit reasons.
+     /** @name Exit reasons - Labels w/o data, only one instance per TB.
       *
-      * Note! Jumps to these require instructions that are capable of spanning
-      *       the max TB length.
+      * The labels requiring register inputs are documented.
+      *
+      * @note Jumps to these require instructions that are capable of spanning the
+      *       max TB length.
+      * @{
       */
      /* Simple labels come first for indexing reasons. RaiseXx is ordered by the exception's numerical value(s). */
…
      kIemNativeLabelType_RaiseMf,        /**< Raise (throw) X86_XCPT_MF (10h). */
      kIemNativeLabelType_RaiseXf,        /**< Raise (throw) X86_XCPT_XF (13h). */
-     kIemNativeLabelType_ObsoleteTb,
-     kIemNativeLabelType_NeedCsLimChecking,
-     kIemNativeLabelType_CheckBranchMiss,
+     kIemNativeLabelType_ObsoleteTb,         /**< Calls iemNativeHlpObsoleteTb (no inputs). */
+     kIemNativeLabelType_NeedCsLimChecking,  /**< Calls iemNativeHlpNeedCsLimChecking (no inputs). */
+     kIemNativeLabelType_CheckBranchMiss,    /**< Calls iemNativeHlpCheckBranchMiss (no inputs). */
      kIemNativeLabelType_LastSimple = kIemNativeLabelType_CheckBranchMiss,
-     /* Manually defined labels. */
+
+     /* Manually defined labels: */
+     /** Returns with VINF_SUCCESS, no inputs. */
+     kIemNativeLabelType_ReturnSuccess,
+     /** Returns with VINF_IEM_REEXEC_FINISH_WITH_FLAGS, no inputs. */
+     kIemNativeLabelType_ReturnWithFlags,
+     /** Returns with VINF_IEM_REEXEC_BREAK, no inputs. */
      kIemNativeLabelType_ReturnBreak,
+     /** Returns with VINF_IEM_REEXEC_BREAK_FF, no inputs. */
      kIemNativeLabelType_ReturnBreakFF,
+     /** The last TB exit label that doesn't have any input registers. */
+     kIemNativeLabelType_LastTbExitWithoutInputs = kIemNativeLabelType_ReturnBreakFF,
+
+     /** Argument registers 1, 2 & 3 are set up. */
      kIemNativeLabelType_ReturnBreakViaLookup,
+     /** Argument registers 1, 2 & 3 are set up. */
      kIemNativeLabelType_ReturnBreakViaLookupWithIrq,
+     /** Argument registers 1 & 2 are set up. */
      kIemNativeLabelType_ReturnBreakViaLookupWithTlb,
+     /** Argument registers 1 & 2 are set up. */
      kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq,
-     kIemNativeLabelType_ReturnWithFlags,
+     /** Return register holds the RC and the instruction number is in CL/RCX
+      * on amd64 and in the 2nd argument register elsewhere. */
      kIemNativeLabelType_NonZeroRetOrPassUp,
-     kIemNativeLabelType_ReturnSuccess,  /**< Sets eax/w0 to zero and returns. */
+
      /** The last fixup for branches that can span almost the whole TB length.
       * @note Whether kIemNativeLabelType_Return needs to be one of these is
       *       a bit questionable, since nobody jumps to it except other tail code. */
-     kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_ReturnSuccess,
+     kIemNativeLabelType_LastWholeTbBranch = kIemNativeLabelType_NonZeroRetOrPassUp,
      /** The last fixup for branches that exits the TB. */
-     kIemNativeLabelType_LastTbExit = kIemNativeLabelType_ReturnSuccess,
+     kIemNativeLabelType_LastTbExit = kIemNativeLabelType_NonZeroRetOrPassUp,
+     /** @} */

      /** Loop-jump target. */
…
  #define IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmLabel) \
      ((a_enmLabel) <= kIemNativeLabelType_LastTbExit && (a_enmLabel) > kIemNativeLabelType_Invalid)
+
+ #define IEMNATIVELABELTYPE_IS_EXIT_WITHOUT_INPUTS(a_enmLabel) \
+     ((a_enmLabel) <= kIemNativeLabelType_LastTbExitWithoutInputs && (a_enmLabel) > kIemNativeLabelType_Invalid)
+
+ /**
+  * Get the mask of input registers for a TB exit label.
+  * This will return zero for any non-exit label.
+  */
+ #ifdef RT_ARCH_AMD64
+ # define IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmLabel) \
+     (      (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookup \
+         || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithIrq \
+       ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG) \
+       :    (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlb \
+         || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq \
+       ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
+       : (a_enmLabel) == kIemNativeLabelType_NonZeroRetOrPassUp \
+       ? RT_BIT_32(IEMNATIVE_CALL_RET_GREG) | RT_BIT_32(X86_GREG_xCX) /* <-- the difference */ \
+       : 0)
+ #else
+ # define IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmLabel) \
+     (      (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookup \
+         || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithIrq \
+       ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG3_GREG) \
+       :    (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlb \
+         || (a_enmLabel) == kIemNativeLabelType_ReturnBreakViaLookupWithTlbAndIrq \
+       ? RT_BIT_32(IEMNATIVE_CALL_ARG1_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
+       : (a_enmLabel) == kIemNativeLabelType_NonZeroRetOrPassUp \
+       ? RT_BIT_32(IEMNATIVE_CALL_RET_GREG) | RT_BIT_32(IEMNATIVE_CALL_ARG2_GREG) \
+       : 0)
+ #endif
…
  #define IEMLIVENESSBIT_IDX_EFL_SF       ((unsigned)kIemNativeGstReg_EFlags + 5)
  #define IEMLIVENESSBIT_IDX_EFL_OF       ((unsigned)kIemNativeGstReg_EFlags + 6)
+ #define IEMLIVENESSBIT_IDX_EFL_COUNT    7
…
  # define IEMLIVENESS_STATE_ARE_STATUS_EFL_TO_BE_CLOBBERED(a_pCurEntry) \
      ( (((a_pCurEntry)->Bit0.bm64 | (a_pCurEntry)->Bit1.bm64) & IEMLIVENESSBIT_STATUS_EFL_MASK) == 0 )
+
+ /**
+  * Construct a mask of what will be clobbered and never used.
+  *
+  * This is mainly used with IEMLIVENESSBIT_STATUS_EFL_MASK to avoid
+  * unnecessary EFLAGS calculations.
+  *
+  * @param a_pCurEntry   The current liveness entry.
+  * @note Used by actual code.
+  */
+ # define IEMLIVENESS_STATE_GET_WILL_BE_CLOBBERED_SET(a_pCurEntry) \
+     ( ~((a_pCurEntry)->Bit0.bm64 | (a_pCurEntry)->Bit1.bm64) & IEMLIVENESSBIT_MASK )

  /** Construct a mask of the guest registers in the UNUSED and XCPT_OR_CALL
…
   * @note Used by actual code. */
  # define IEMLIVENESS_STATE_GET_CAN_BE_FREED_SET(a_pCurEntry) \
-     ( ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
-     & ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
-     & IEMLIVENESSBIT_MASK )
-
+     (   ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
+       & ~(a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
+       & IEMLIVENESSBIT_MASK )
+
+ /**
+  * Construct a mask of what will be clobbered and never used.
+  *
+  * This is mainly used with IEMLIVENESSBIT_STATUS_EFL_MASK to avoid
+  * unnecessary EFLAGS calculations.
+  *
+  * @param a_pCurEntry   The current liveness entry.
+  * @note Used by actual code.
+  */
+ # define IEMLIVENESS_STATE_GET_WILL_BE_CLOBBERED_SET(a_pCurEntry) \
+     (   (a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
+       & ~(   (a_pCurEntry)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 \
+            | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
+            | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_CALL].bm64) )
+
+ /**
+  * Construct a mask of the (EFLAGS) bits whose calculation can be postponed.
+  *
+  * The postponement is for avoiding EFLAGS status bit calculations in the
+  * primary code stream whenever possible, and instead only doing these in the
+  * TLB load and TB exit code paths, which shouldn't be traveled quite as often.
+  * A requirement, though, is that the status bits will be clobbered later in
+  * the TB.
+  *
+  * Users need to apply IEMLIVENESSBIT_STATUS_EFL_MASK if appropriate/necessary.
+  *
+  * @param a_pCurEntry   The current liveness entry.
+  * @note Used by actual code.
+  */
+ # define IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET(a_pCurEntry) \
+     (   (a_pCurEntry)->aBits[IEMLIVENESS_BIT_POTENTIAL_CALL].bm64 \
+       & (a_pCurEntry)->aBits[IEMLIVENESS_BIT_WRITE].bm64 \
+       & ~(   (a_pCurEntry)->aBits[IEMLIVENESS_BIT_READ].bm64 \
+            | (a_pCurEntry)->aBits[IEMLIVENESS_BIT_CALL].bm64) )

  #endif /* IEMLIVENESS_EXTENDED_LAYOUT */
…
  #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
  # define IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(a_pReNative, a_fEflNeeded) \
-     AssertMsg(!((a_pReNative)->fPostponingEFlags & (a_fEflNeeded)), \
-               ("%#x & %#x -> %#x\n", (a_pReNative)->fPostponingEFlags, \
-                a_fEflNeeded, (a_pReNative)->fPostponingEFlags & (a_fEflNeeded) ))
+     AssertMsg(!((a_pReNative)->PostponedEfl.fEFlags & (a_fEflNeeded)), \
+               ("%#x & %#x -> %#x\n", (a_pReNative)->PostponedEfl.fEFlags, \
+                a_fEflNeeded, (a_pReNative)->PostponedEfl.fEFlags & (a_fEflNeeded) ))
  #else
  # define IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(a_pReNative, a_fEflNeeded) ((void)0)
…
  #if defined(IEMNATIVE_WITH_EFLAGS_SKIPPING) && defined(IEMNATIVE_WITH_EFLAGS_POSTPONING)
  # define IEMNATIVE_ASSERT_EFLAGS_SKIPPING_AND_POSTPONING(a_pReNative, a_fEflNeeded) \
-     AssertMsg(!(((a_pReNative)->fSkippingEFlags | (a_pReNative)->fPostponingEFlags) & (a_fEflNeeded)), \
-               ("(%#x | %#x) & %#x -> %#x\n", (a_pReNative)->fSkippingEFlags, (a_pReNative)->fPostponingEFlags, \
-                a_fEflNeeded, ((a_pReNative)->fSkippingEFlags | (a_pReNative)->fPostponingEFlags) & (a_fEflNeeded) ))
+     AssertMsg(!(((a_pReNative)->fSkippingEFlags | (a_pReNative)->PostponedEfl.fEFlags) & (a_fEflNeeded)), \
+               ("(%#x | %#x) & %#x -> %#x\n", (a_pReNative)->fSkippingEFlags, (a_pReNative)->PostponedEfl.fEFlags, \
+                a_fEflNeeded, ((a_pReNative)->fSkippingEFlags | (a_pReNative)->PostponedEfl.fEFlags) & (a_fEflNeeded) ))
  #elif defined(IEMNATIVE_WITH_EFLAGS_SKIPPING)
…
  #else
  # define IEMNATIVE_STRICT_EFLAGS_SKIPPING_EMIT_CHECK(a_pReNative, a_off, a_fEflNeeded) do { } while (0)
+ #endif
+
+
+ /** @def IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS
+  * Number of extra instructions to allocate for each TB exit to account for
+  * postponed EFLAGS calculations.
+  */
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+ # ifdef RT_ARCH_AMD64
+ #  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS  32
+ # elif defined(RT_ARCH_ARM64) || defined(DOXYGEN_RUNNING)
+ #  define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS  32
+ # else
+ #  error "port me"
+ # endif
+ #else
+ # define IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS   0
+ #endif
+
+ /** @def IEMNATIVE_CLEAR_POSTPONED_EFLAGS
+  * Helper macro for calling iemNativeClearPostponedEFlags() when
+  * IEMNATIVE_WITH_EFLAGS_POSTPONING is enabled.
+  */
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+ # define IEMNATIVE_CLEAR_POSTPONED_EFLAGS(a_pReNative, a_fEflClobbered) iemNativeClearPostponedEFlags<a_fEflClobbered>(a_pReNative)
+ #else
+ # define IEMNATIVE_CLEAR_POSTPONED_EFLAGS(a_pReNative, a_fEflClobbered) ((void)0)
  #endif
…

+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+ typedef enum IEMNATIVE_POSTPONED_EFL_OP_T : uint8_t
+ {
+     kIemNativePostponedEflOp_Invalid = 0,
+     /** Logical operation.
+      * Operands: result register.
+      * @note This clears OF, CF and (undefined) AF, thus no need for inputs. */
+     kIemNativePostponedEflOp_Logical,
+     kIemNativePostponedEflOp_End
+ } IEMNATIVE_POSTPONED_EFL_OP_T;
+ #endif /* IEMNATIVE_WITH_EFLAGS_POSTPONING */
+
  /**
   * Conditional stack entry.
…
  #endif
  #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
-     uint32_t                        fPostponingEFlags;
+     struct
+     {
+         /** EFLAGS status bits that we're currently postponing the calculation of. */
+         uint32_t                     fEFlags;
+         /** The postponed EFLAGS status bits calculation operation. */
+         IEMNATIVE_POSTPONED_EFL_OP_T enmOp;
+         /** The bit-width of the postponed EFLAGS calculation. */
+         uint8_t                      cOpBits;
+         /** Host register holding the result or first source for the delayed
+          * operation, UINT8_MAX if not in use. */
+         uint8_t                      idxReg1;
+         /** Host register holding the second source for the delayed operation,
+          * UINT8_MAX if not in use. */
+         uint8_t                      idxReg2;
+     } PostponedEfl;
  #endif
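For readers skimming the IEMN8veRecompiler.h changes above: the new IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET macro is the gatekeeper for the whole postponing feature. The following standalone sketch models its bit logic with plain integers; the struct and bit assignment are hypothetical stand-ins for illustration, not the real IEMLIVENESSENTRY layout:

#include <cstdint>
#include <cstdio>

/* Simplified stand-in for the extended-layout liveness entry: one 64-bit
   bitmap per liveness aspect, all indexed the same way. */
struct LivenessEntry
{
    uint64_t fRead;          /* models IEMLIVENESS_BIT_READ           */
    uint64_t fWrite;         /* models IEMLIVENESS_BIT_WRITE          */
    uint64_t fCall;          /* models IEMLIVENESS_BIT_CALL           */
    uint64_t fPotentialCall; /* models IEMLIVENESS_BIT_POTENTIAL_CALL */
};

/* Mirrors IEMLIVENESS_STATE_GET_CAN_BE_POSTPONED_SET above: a status flag can
   be postponed if only a potential call (TLB load / TB exit) may consume it,
   it is clobbered later in the TB, and no definite read or call needs it. */
static uint64_t canBePostponedSet(const LivenessEntry &entry)
{
    return entry.fPotentialCall
         & entry.fWrite
         & ~(entry.fRead | entry.fCall);
}

int main()
{
    LivenessEntry entry = {};
    entry.fWrite         = UINT64_C(1) << 0; /* bit 0: clobbered later in the TB...   */
    entry.fPotentialCall = UINT64_C(1) << 0; /* ...and only possibly consumed en route */
    printf("postponable: %#llx\n", (unsigned long long)canBePostponedSet(entry));
    return 0; /* prints 0x1: this flag's calculation may be deferred */
}

Per the doc comments in the diff, the real code additionally ANDs the result with IEMLIVENESSBIT_STATUS_EFL_MASK before making a postponement decision.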
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
r106144 → r106180

      pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, uImm32 & UINT32_C(0xffff), 0, false /*f64Bit*/);
      pCodeBuf[off++] = Armv8A64MkInstrMovK(iGpr, uImm32 >> 16, 1, false /*f64Bit*/);
+     }
+
+ #else
+ # error "port me"
+ #endif
+     return off;
+ }
+
+
+ /**
+  * Variant of iemNativeEmitLoadGpr32Imm where the caller ensures sufficient
+  * buffer space.
+  *
+  * Max buffer consumption:
+  *     - AMD64: 6 instruction bytes.
+  *     - ARM64: 2 instruction words (8 bytes).
+  *
+  * @note The top 32 bits will be cleared.
+  */
+ template<uint32_t const a_uImm32>
+ DECL_FORCE_INLINE(uint32_t) iemNativeEmitLoadGpr32ImmExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr)
+ {
+ #ifdef RT_ARCH_AMD64
+     if (a_uImm32 == 0)
+     {
+         /* xor gpr, gpr */
+         if (iGpr >= 8)
+             pCodeBuf[off++] = X86_OP_REX_R | X86_OP_REX_B;
+         pCodeBuf[off++] = 0x33;
+         pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, iGpr & 7, iGpr & 7);
+     }
+     else
+     {
+         /* mov gpr, imm32 */
+         if (iGpr >= 8)
+             pCodeBuf[off++] = X86_OP_REX_B;
+         pCodeBuf[off++] = 0xb8 + (iGpr & 7);
+         pCodeBuf[off++] = RT_BYTE1(a_uImm32);
+         pCodeBuf[off++] = RT_BYTE2(a_uImm32);
+         pCodeBuf[off++] = RT_BYTE3(a_uImm32);
+         pCodeBuf[off++] = RT_BYTE4(a_uImm32);
+     }
+
+ #elif defined(RT_ARCH_ARM64)
+     if RT_CONSTEXPR((a_uImm32 >> 16) == 0)
+         /* movz gpr, imm16 */
+         pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32, 0, false /*f64Bit*/);
+     else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == 0)
+         /* movz gpr, imm16, lsl #16 */
+         pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32 >> 16, 1, false /*f64Bit*/);
+     else if RT_CONSTEXPR((a_uImm32 & UINT32_C(0xffff)) == UINT32_C(0xffff))
+         /* movn gpr, imm16, lsl #16 */
+         pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32 >> 16, 1, false /*f64Bit*/);
+     else if RT_CONSTEXPR((a_uImm32 >> 16) == UINT32_C(0xffff))
+         /* movn gpr, imm16 */
+         pCodeBuf[off++] = Armv8A64MkInstrMovN(iGpr, ~a_uImm32, 0, false /*f64Bit*/);
+     else
+     {
+         pCodeBuf[off++] = Armv8A64MkInstrMovZ(iGpr, a_uImm32 & UINT32_C(0xffff), 0, false /*f64Bit*/);
+         pCodeBuf[off++] = Armv8A64MkInstrMovK(iGpr, a_uImm32 >> 16, 1, false /*f64Bit*/);
      }
…
   * Emits a call to a 64-bit address.
   */
+ template<bool const a_fSkipEflChecks = false>
  DECL_INLINE_THROW(uint32_t) iemNativeEmitCallImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uintptr_t uPfn)
  {
+     if RT_CONSTEXPR(!a_fSkipEflChecks)
+     {
+         IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS);
+         IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(  pReNative, X86_EFL_STATUS_BITS);
+     }
+
  #ifdef RT_ARCH_AMD64
      off = iemNativeEmitLoadGprImm64(pReNative, off, X86_GREG_xAX, uPfn);
…
  *********************************************************************************************************************************/
+
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+ /* IEMAllN8veEmit-x86.h: */
+ template<uint32_t const a_bmInputRegs>
+ DECL_FORCE_INLINE_THROW(uint32_t)
+ iemNativeDoPostponedEFlagsAtTbExitEx(PIEMRECOMPILERSTATE pReNative, uint32_t off, PIEMNATIVEINSTR pCodeBuf);
+
+ template<uint32_t const a_bmInputRegs>
+ DECL_FORCE_INLINE_THROW(uint32_t)
+ iemNativeDoPostponedEFlagsAtTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off);
+ #endif
+
+
  /**
   * Helper for marking the current conditional branch as exiting the TB.
…

  /**
+  * Unconditionally exits the translation block via a branch instruction.
+  *
+  * @note In case a delayed EFLAGS calculation is pending, this may emit an
+  *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
+  */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fActuallyExitingTb = true, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off)
+ {
+     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
+
+     if RT_CONSTEXPR(a_fActuallyExitingTb)
+         iemNativeMarkCurCondBranchAsExiting(pReNative);
+
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         off = iemNativeDoPostponedEFlagsAtTbExitEx<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off,
+                                                                                                            pCodeBuf);
+ #endif
+
+ #ifdef RT_ARCH_AMD64
+     /* jmp rel32 */
+     pCodeBuf[off++] = 0xe9;
+     iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
+     pCodeBuf[off++] = 0xfe;
+     pCodeBuf[off++] = 0xff;
+     pCodeBuf[off++] = 0xff;
+     pCodeBuf[off++] = 0xff;
+
+ #elif defined(RT_ARCH_ARM64)
+     iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
+     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
+
+ #else
+ # error "Port me!"
+ #endif
+     return off;
+ }
+
+
+ /**
+  * Unconditionally exits the translation block via a branch instruction.
+  *
+  * @note In case a delayed EFLAGS calculation is pending, this may emit an
+  *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
+  */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fActuallyExitingTb = true, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+ {
+     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
+
+     if RT_CONSTEXPR(a_fActuallyExitingTb)
+         iemNativeMarkCurCondBranchAsExiting(pReNative);
+
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         off = iemNativeDoPostponedEFlagsAtTbExit<IEMNATIVELABELTYPE_GET_INPUT_REG_MASK(a_enmExitReason)>(pReNative, off);
+ #endif
+
+ #ifdef RT_ARCH_AMD64
+     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
+
+     /* jmp rel32 */
+     pCodeBuf[off++] = 0xe9;
+     iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
+     pCodeBuf[off++] = 0xfe;
+     pCodeBuf[off++] = 0xff;
+     pCodeBuf[off++] = 0xff;
+     pCodeBuf[off++] = 0xff;
+
+ #elif defined(RT_ARCH_ARM64)
+     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
+     iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
+     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
+
+ #else
+ # error "Port me!"
+ #endif
+     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+     return off;
+ }
+
+
+ /**
   * Emits a Jcc rel32 / B.cc imm19 to the given label (ASSUMED requiring fixup).
-  */
+  *
+  * @note In case a delayed EFLAGS calculation is pending, this may emit an
+  *       additional IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS instructions.
+  */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
  DECL_FORCE_INLINE_THROW(uint32_t)
- iemNativeEmitJccTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
-                          IEMNATIVELABELTYPE enmExitReason, IEMNATIVEINSTRCOND enmCond)
+ iemNativeEmitTbExitJccEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off, IEMNATIVEINSTRCOND enmCond)
  {
      IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
+
+ #ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         if (pReNative->PostponedEfl.fEFlags)
+         {
+             /* Jcc l_NonPrimaryCodeStreamTarget */
+             uint32_t const offFixup1 = off;
+             off = iemNativeEmitJccToFixedEx(pCodeBuf, off, off + 1, enmCond);
+
+             /* JMP l_PrimaryCodeStreamResume */
+             uint32_t const offFixup2 = off;
+             off = iemNativeEmitJmpToFixedEx(pCodeBuf, off, off + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+
+             /* l_NonPrimaryCodeStreamTarget: */
+             iemNativeFixupFixedJump(pReNative, offFixup1, off);
+             off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
+
+             /* l_PrimaryCodeStreamResume: */
+             iemNativeFixupFixedJump(pReNative, offFixup2, off);
+             IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+             return off;
+         }
+ #endif

  #if defined(RT_ARCH_AMD64)
      /* jcc rel32 */
      pCodeBuf[off++] = 0x0f;
      pCodeBuf[off++] = (uint8_t)enmCond | 0x80;
-     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
+     iemNativeAddTbExitFixup(pReNative, off, a_enmExitReason);
      pCodeBuf[off++] = 0x00;
      pCodeBuf[off++] = 0x00;
…
      /* ARM64 doesn't have the necessary jump range, so we jump via local label
         just like when we keep everything local. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
      off = iemNativeEmitJccToLabelEx(pReNative, pCodeBuf, off, idxLabel, enmCond);
  #endif
…
   * Emits a Jcc rel32 / B.cc imm19 to the epilog.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitJccTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason, IEMNATIVEINSTRCOND enmCond)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJcc(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVEINSTRCOND enmCond)
  {
      IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
-
- #ifdef RT_ARCH_AMD64
-     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 6), off, enmExitReason, enmCond);
- #elif defined(RT_ARCH_ARM64)
-     off = iemNativeEmitJccTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 2), off, enmExitReason, enmCond);
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
+
+ #ifdef RT_ARCH_AMD64
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS + 5);
+ #elif defined(RT_ARCH_ARM64)
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS + 1);
  #else
  # error "Port me!"
  #endif
+     off = iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, enmCond);
      IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
      return off;
…
   * Emits a JNZ/JNE rel32 / B.NE imm19 to the TB exit routine with the given reason.
   */
- DECL_INLINE_THROW(uint32_t) iemNativeEmitJnzTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
- {
- #ifdef RT_ARCH_AMD64
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_ne);
- #elif defined(RT_ARCH_ARM64)
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Ne);
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJnz(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+ {
+ #ifdef RT_ARCH_AMD64
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_ne);
+ #elif defined(RT_ARCH_ARM64)
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Ne);
  #else
  # error "Port me!"
…
   * Emits a JZ/JE rel32 / B.EQ imm19 to the TB exit routine with the given reason.
   */
- DECL_INLINE_THROW(uint32_t) iemNativeEmitJzTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
- {
- #ifdef RT_ARCH_AMD64
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_e);
- #elif defined(RT_ARCH_ARM64)
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Eq);
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJz(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+ {
+ #ifdef RT_ARCH_AMD64
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_e);
+ #elif defined(RT_ARCH_ARM64)
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Eq);
  #else
  # error "Port me!"
…
   * Emits a JA/JNBE rel32 / B.HI imm19 to the TB exit.
   */
- DECL_INLINE_THROW(uint32_t) iemNativeEmitJaTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
- {
- #ifdef RT_ARCH_AMD64
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_nbe);
- #elif defined(RT_ARCH_ARM64)
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Hi);
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJa(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+ {
+ #ifdef RT_ARCH_AMD64
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_nbe);
+ #elif defined(RT_ARCH_ARM64)
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Hi);
  #else
  # error "Port me!"
…
   * Emits a JL/JNGE rel32 / B.LT imm19 to the TB exit with the given reason.
   */
- DECL_INLINE_THROW(uint32_t) iemNativeEmitJlTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
- {
- #ifdef RT_ARCH_AMD64
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_l);
- #elif defined(RT_ARCH_ARM64)
-     return iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kArmv8InstrCond_Lt);
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t) iemNativeEmitTbExitJl(PIEMRECOMPILERSTATE pReNative, uint32_t off)
+ {
+ #ifdef RT_ARCH_AMD64
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_l);
+ #elif defined(RT_ARCH_ARM64)
+     return iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kArmv8InstrCond_Lt);
  #else
  # error "Port me!"
…


- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off, IEMNATIVELABELTYPE enmExitReason)
- {
-     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here. */
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
-
-     iemNativeMarkCurCondBranchAsExiting(pReNative);
-
- #ifdef RT_ARCH_AMD64
-     /* jmp rel32 */
-     pCodeBuf[off++] = 0xe9;
-     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
-     pCodeBuf[off++] = 0xfe;
-     pCodeBuf[off++] = 0xff;
-     pCodeBuf[off++] = 0xff;
-     pCodeBuf[off++] = 0xff;
-
- #elif defined(RT_ARCH_ARM64)
-     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
-     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
-
- #else
- # error "Port me!"
- #endif
-     return off;
- }
-
-
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off, IEMNATIVELABELTYPE enmExitReason,
-                     bool fActuallyExitingTb = true)
- {
-     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here. */
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
-
-     if (fActuallyExitingTb)
-         iemNativeMarkCurCondBranchAsExiting(pReNative);
-
- #ifdef RT_ARCH_AMD64
-     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6);
-
-     /* jmp rel32 */
-     pCodeBuf[off++] = 0xe9;
-     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
-     pCodeBuf[off++] = 0xfe;
-     pCodeBuf[off++] = 0xff;
-     pCodeBuf[off++] = 0xff;
-     pCodeBuf[off++] = 0xff;
-
- #elif defined(RT_ARCH_ARM64)
-     PIEMNATIVEINSTR pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
-     iemNativeAddTbExitFixup(pReNative, off, enmExitReason);
-     pCodeBuf[off++] = Armv8A64MkInstrB(-1);
-
- #else
- # error "Port me!"
- #endif
-     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-     return off;
- }
-
-
- /**
-  * Emits a jump to the TB exit with @a enmExitReason on the condition _any_ of the bits in @a fBits
-  * are set in @a iGprSrc.
-  */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestAnyBitsInGprAndTbExitIfAnySet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                                uint8_t iGprSrc, uint64_t fBits, IEMNATIVELABELTYPE enmExitReason)
+ /**
+  * Emits a jump to the TB exit with @a a_enmExitReason on the condition _any_ of
+  * the bits in @a fBits are set in @a iGprSrc.
+  */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfAnyBitsSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
  {
      Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));

      off = iemNativeEmitTestAnyBitsInGpr(pReNative, off, iGprSrc, fBits);
-     return iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
- }
-
-
+     return iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
+ }
+
+
+ #if 0 /* unused */
  /**
   * Emits a jump to @a idxLabel on the condition _none_ of the bits in @a fBits
   * are set in @a iGprSrc.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestAnyBitsInGprAndTbExitIfNoneSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                                 uint8_t iGprSrc, uint64_t fBits, IEMNATIVELABELTYPE enmExitReason)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfNoBitsSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t fBits)
  {
      Assert(fBits); Assert(!RT_IS_POWER_OF_TWO(fBits));

      off = iemNativeEmitTestAnyBitsInGpr(pReNative, off, iGprSrc, fBits);
-     return iemNativeEmitJzTbExit(pReNative, off, enmExitReason);
- }
-
-
+     return iemNativeEmitJzTbExit<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
+ }
+ #endif
+
+
+ #if 0 /* unused */
  /**
   * Emits code that exits the TB with the given reason if @a iGprLeft and @a iGprRight
   * differ.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprNotEqualGprAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                            uint8_t iGprLeft, uint8_t iGprRight, IEMNATIVELABELTYPE enmExitReason)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGprNotEqualGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprLeft, uint8_t iGprRight)
  {
      off = iemNativeEmitCmpGprWithGpr(pReNative, off, iGprLeft, iGprRight);
-     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
-     return off;
- }
+     off = iemNativeEmitJnzTbExit<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
+     return off;
+ }
+ #endif
…
   * @a uImm.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGpr32NotEqualImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                              uint8_t iGprSrc, uint32_t uImm, IEMNATIVELABELTYPE enmExitReason)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGpr32NotEqualImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint32_t uImm)
  {
      off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
-     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
+     off = iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
      return off;
  }
…
  /**
   * Emits code that exits the current TB if @a iGprSrc differs from @a uImm.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprNotEqualImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                            uint8_t iGprSrc, uint64_t uImm, IEMNATIVELABELTYPE enmExitReason)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGprNotEqualImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint64_t uImm)
  {
      off = iemNativeEmitCmpGprWithImm(pReNative, off, iGprSrc, uImm);
-     off = iemNativeEmitJnzTbExit(pReNative, off, enmExitReason);
+     off = iemNativeEmitTbExitJnz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
      return off;
  }
…
  /**
   * Emits code that exits the current TB with the given reason if 32-bit @a iGprSrc equals @a uImm.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGpr32EqualsImmAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                            uint8_t iGprSrc, uint32_t uImm, IEMNATIVELABELTYPE enmExitReason)
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGpr32EqualsImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint32_t uImm)
  {
      off = iemNativeEmitCmpGpr32WithImm(pReNative, off, iGprSrc, uImm);
-     off = iemNativeEmitJzTbExit(pReNative, off, enmExitReason);
-     return off;
- }
-
-
- /**
-  * Emits code to exit the current TB with the reason @a enmExitReason on the condition that bit @a iBitNo _is_ _set_ in
-  * @a iGprSrc.
+     off = iemNativeEmitTbExitJz<a_enmExitReason, a_fPostponedEfl>(pReNative, off);
+     return off;
+ }
+
+
+ /**
+  * Emits code to exit the current TB with the reason @a a_enmExitReason on the
+  * condition that bit @a iBitNo _is_ _set_ in @a iGprSrc.
   *
   * @note On ARM64 the range is only +/-8191 instructions.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestBitInGprAndTbExitIfSet(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                         uint8_t iGprSrc, uint8_t iBitNo, IEMNATIVELABELTYPE enmExitReason)
- {
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfBitSetInGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, uint8_t iBitNo)
+ {
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));

  #if defined(RT_ARCH_AMD64)
…
          pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, iGprSrc & 7);
          pbCodeBuf[off++] = (uint8_t)1 << iBitNo;
-         off = iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_ne);
+         IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+         off = iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_ne);
      }
      else
…
          pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 4, iGprSrc & 7);
          pbCodeBuf[off++] = iBitNo;
-         off = iemNativeEmitJccTbExit(pReNative, off, enmExitReason, kIemNativeInstrCond_c);
-     }
-     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-     return off;
-
- #else
-     /* ARM64 doesn't have the necessary jump range, so we jump via local label
-        just like when we keep everything local. */
+         IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+         off = iemNativeEmitTbExitJcc<a_enmExitReason, a_fPostponedEfl>(pReNative, off, kIemNativeInstrCond_c);
+     }
+     return off;
+
+ #elif defined(RT_ARCH_ARM64)
      IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+     /** @todo Perhaps we should always apply the PostponedEfl code pattern here,
+      *        it's the same number of instructions as the TST + B.CC stuff? */
+ # ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         if (pReNative->PostponedEfl.fEFlags)
+         {
+             PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off,
+                                                                      3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+             pCodeBuf[off++] = Armv8A64MkInstrTbnz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, iBitNo);
+             uint32_t const offFixup = off;
+             pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
+             /* l_NonPrimaryCodeStreamTarget: */
+             off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
+             /* l_PrimaryCodeStreamResume: */
+             iemNativeFixupFixedJump(pReNative, offFixup, off);
+             IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+             return off;
+         }
+ # endif
+     /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
+        we go via a local trampoline. */
+     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
      return iemNativeEmitTestBitInGprAndJmpToLabelIfCc(pReNative, off, iGprSrc, iBitNo, idxLabel, true /*fJmpIfSet*/);
- #endif
- }
-
-
- /**
-  * Emits code that exits the current TB with @a enmExitReason if @a iGprSrc is not zero.
+ #else
+ # error "port me"
+ #endif
+ }
+
+
+ /**
+  * Emits code that exits the current TB with @a a_enmExitReason if @a iGprSrc is
+  * not zero.
   *
   * The operand size is given by @a f64Bit.
   */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
  DECL_FORCE_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprIsNotZeroAndTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
-                                            uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
- {
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
+ iemNativeEmitTbExitIfGprIsNotZeroEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
+                                     uint8_t iGprSrc, bool f64Bit)
+ {
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));

  #if defined(RT_ARCH_AMD64)
…

      /* jnz idxLabel */
-     return iemNativeEmitJccTbExitEx(pReNative, pCodeBuf, off, enmExitReason, kIemNativeInstrCond_ne);
-
- #else
-     /* ARM64 doesn't have the necessary jump range, so we jump via local label
-        just like when we keep everything local. */
+     return iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, kIemNativeInstrCond_ne);
+
+ #elif defined(RT_ARCH_ARM64)
      IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+ # ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         if (pReNative->PostponedEfl.fEFlags)
+         {
+             pCodeBuf[off++] = Armv8A64MkInstrCbnz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, f64Bit);
+             uint32_t const offFixup = off;
+             pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
+             /* l_NonPrimaryCodeStreamTarget: */
+             off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
+             /* l_PrimaryCodeStreamResume: */
+             iemNativeFixupFixedJump(pReNative, offFixup, off);
+             IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+             return off;
+         }
+ # endif
+     /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
+        we go via a local trampoline. */
+     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
      return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, iGprSrc,
                                                                  f64Bit, true /*fJmpIfNotZero*/, idxLabel);
- #endif
- }
-
-
- /**
-  * Emits code to exit the current TB with the given reason @a enmExitReason if @a iGprSrc is not zero.
+ #else
+ # error "port me"
+ #endif
+ }
+
+
+ /**
+  * Emits code to exit the current TB with the given reason @a a_enmExitReason if
+  * @a iGprSrc is not zero.
   *
   * The operand size is given by @a f64Bit.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprIsNotZeroAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                          uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
- {
- #if defined(RT_ARCH_AMD64)
-     off = iemNativeEmitTestIfGprIsNotZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
-                                                      off, iGprSrc, f64Bit, enmExitReason);
-     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-     return off;
- #else
-     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
-     return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, true /*fJmpIfNotZero*/, idxLabel);
- #endif
- }
-
-
- /**
-  * Emits code that exits the current TB with @a enmExitReason if @a iGprSrc is zero.
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGprIsNotZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit)
+ {
+ #if defined(RT_ARCH_AMD64)
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+
+ #else
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+ #endif
+     off = iemNativeEmitTbExitIfGprIsNotZeroEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, iGprSrc, f64Bit);
+     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+     return off;
+ }
+
+
+ /**
+  * Emits code that exits the current TB with @a a_enmExitReason if @a iGprSrc is
+  * zero.
   *
   * The operand size is given by @a f64Bit.
   */
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
  DECL_FORCE_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprIsZeroAndTbExitEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
-                                         uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
- {
-     Assert(IEMNATIVELABELTYPE_IS_EXIT_REASON(enmExitReason));
+ iemNativeEmitTbExitIfGprIsZeroEx(PIEMRECOMPILERSTATE pReNative, PIEMNATIVEINSTR pCodeBuf, uint32_t off,
+                                  uint8_t iGprSrc, bool f64Bit)
+ {
+     AssertCompile(IEMNATIVELABELTYPE_IS_EXIT_REASON(a_enmExitReason));
+
  #if defined(RT_ARCH_AMD64)
      /* test reg32,reg32 / test reg64,reg64 */
…

      /* jz idxLabel */
-     return iemNativeEmitJccTbExitEx(pReNative, pCodeBuf, off, enmExitReason, kIemNativeInstrCond_e);
-
- #else
-     /* ARM64 doesn't have the necessary jump range, so we jump via local label
-        just like when we keep everything local. */
+     return iemNativeEmitTbExitJccEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, kIemNativeInstrCond_e);
+
+ #elif defined(RT_ARCH_ARM64)
      IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
+ # ifdef IEMNATIVE_WITH_EFLAGS_POSTPONING
+     if RT_CONSTEXPR(a_fPostponedEfl)
+         if (pReNative->PostponedEfl.fEFlags)
+         {
+             pCodeBuf[off++] = Armv8A64MkInstrCbz(1 /*l_NonPrimaryCodeStreamTarget*/, iGprSrc, f64Bit);
+             uint32_t const offFixup = off;
+             pCodeBuf[off++] = Armv8A64MkInstrB(0 /*l_PrimaryCodeStreamResume*/);
+             /* l_NonPrimaryCodeStreamTarget: */
+             off = iemNativeEmitTbExitEx<a_enmExitReason, false /*a_fActuallyExitingTb*/, true>(pReNative, pCodeBuf, off);
+             /* l_PrimaryCodeStreamResume: */
+             iemNativeFixupFixedJump(pReNative, offFixup, off);
+             IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+             return off;
+         }
+ # endif
+     /* ARM64 doesn't have the necessary range to reach the per-chunk code, so
+        we go via a local trampoline. */
+     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, a_enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
      return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabelEx(pReNative, pCodeBuf, off, iGprSrc,
                                                                  f64Bit, false /*fJmpIfNotZero*/, idxLabel);
- #endif
- }
-
-
- /**
-  * Emits code to exit the current TB with the given reason @a enmExitReason if @a iGprSrc is zero.
+ #else
+ # error "port me"
+ #endif
+ }
+
+
+ /**
+  * Emits code to exit the current TB with the given reason @a a_enmExitReason if @a iGprSrc is zero.
   *
   * The operand size is given by @a f64Bit.
   */
- DECL_INLINE_THROW(uint32_t)
- iemNativeEmitTestIfGprIsZeroAndTbExit(PIEMRECOMPILERSTATE pReNative, uint32_t off,
-                                       uint8_t iGprSrc, bool f64Bit, IEMNATIVELABELTYPE enmExitReason)
- {
- #if defined(RT_ARCH_AMD64)
-     off = iemNativeEmitTestIfGprIsZeroAndTbExitEx(pReNative, iemNativeInstrBufEnsure(pReNative, off, 3 + 6),
-                                                   off, iGprSrc, f64Bit, enmExitReason);
-     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
-     return off;
- #else
-     IEMNATIVE_ASSERT_EFLAGS_SKIPPING_ONLY(pReNative, X86_EFL_STATUS_BITS);
-     IEMNATIVE_ASSERT_EFLAGS_POSTPONING_ONLY(pReNative, X86_EFL_STATUS_BITS); /** @todo emit postponed stuff here and invert the condition. */
-     uint32_t const idxLabel = iemNativeLabelCreate(pReNative, enmExitReason, UINT32_MAX /*offWhere*/, 0 /*uData*/);
-     return iemNativeEmitTestIfGprIsZeroOrNotZeroAndJmpToLabel(pReNative, off, iGprSrc, f64Bit, false /*fJmpIfNotZero*/, idxLabel);
- #endif
- }
+ template<IEMNATIVELABELTYPE const a_enmExitReason, bool const a_fPostponedEfl = true>
+ DECL_INLINE_THROW(uint32_t)
+ iemNativeEmitTbExitIfGprIsZero(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprSrc, bool f64Bit)
+ {
+ #if defined(RT_ARCH_AMD64)
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + 6 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+
+ #else
+     PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3 + IEMNATIVE_MAX_POSTPONED_EFLAGS_INSTRUCTIONS);
+ #endif
+     off = iemNativeEmitTbExitIfGprIsZeroEx<a_enmExitReason, a_fPostponedEfl>(pReNative, pCodeBuf, off, iGprSrc, f64Bit);
+     IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+     return off;
+ }
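The bulk of the IEMN8veRecompilerEmit.h churn is mechanical: every TB-exit emitter now takes the exit reason as a template parameter instead of a runtime argument. A hypothetical call-site (the exit reason and condition are illustrative, not taken from this changeset) would change like this:

/* Before r106180: the exit reason was a runtime argument, so nothing
   reason-specific could be decided at compile time. */
off = iemNativeEmitJnzTbExit(pReNative, off, kIemNativeLabelType_ReturnBreak);

/* After r106180: the reason is a compile-time template argument, letting the
   emitter evaluate IEMNATIVELABELTYPE_GET_INPUT_REG_MASK() as a constant and
   reserve room for flushing any postponed EFLAGS work on the exit path. */
off = iemNativeEmitTbExitJnz<kIemNativeLabelType_ReturnBreak>(pReNative, off);

The a_fPostponedEfl default of true means existing call-sites get the postponed-EFLAGS flush automatically; passing false opts out where the caller knows no calculation is pending.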