Changeset 103555 in vbox
- Timestamp: Feb 24, 2024 2:14:09 AM (9 months ago)
- Location: trunk/src/VBox/VMM
- Files: 3 edited
Legend: Unmodified | Added | Removed
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r103542 r103555 2900 2900 'IEM_MC_AND_GREG_U64': (McBlock.parseMcGeneric, True, True, False, ), 2901 2901 'IEM_MC_AND_GREG_U8': (McBlock.parseMcGeneric, True, True, False, ), 2902 'IEM_MC_AND_LOCAL_U16': (McBlock.parseMcGeneric, False, False, False,),2903 'IEM_MC_AND_LOCAL_U32': (McBlock.parseMcGeneric, False, False, False,),2904 'IEM_MC_AND_LOCAL_U64': (McBlock.parseMcGeneric, False, False, False,),2905 'IEM_MC_AND_LOCAL_U8': (McBlock.parseMcGeneric, False, False, False,),2902 'IEM_MC_AND_LOCAL_U16': (McBlock.parseMcGeneric, False, False, True, ), 2903 'IEM_MC_AND_LOCAL_U32': (McBlock.parseMcGeneric, False, False, True, ), 2904 'IEM_MC_AND_LOCAL_U64': (McBlock.parseMcGeneric, False, False, True, ), 2905 'IEM_MC_AND_LOCAL_U8': (McBlock.parseMcGeneric, False, False, True, ), 2906 2906 'IEM_MC_ARG': (McBlock.parseMcArg, False, False, True, ), 2907 2907 'IEM_MC_ARG_CONST': (McBlock.parseMcArgConst, False, False, True, ), … … 3159 3159 'IEM_MC_OR_GREG_U64': (McBlock.parseMcGeneric, True, True, False, ), 3160 3160 'IEM_MC_OR_GREG_U8': (McBlock.parseMcGeneric, True, True, False, ), 3161 'IEM_MC_OR_LOCAL_U16': (McBlock.parseMcGeneric, False, False, False,),3162 'IEM_MC_OR_LOCAL_U32': (McBlock.parseMcGeneric, False, False, False,),3163 'IEM_MC_OR_LOCAL_U8': (McBlock.parseMcGeneric, False, False, False,),3161 'IEM_MC_OR_LOCAL_U16': (McBlock.parseMcGeneric, False, False, True, ), 3162 'IEM_MC_OR_LOCAL_U32': (McBlock.parseMcGeneric, False, False, True, ), 3163 'IEM_MC_OR_LOCAL_U8': (McBlock.parseMcGeneric, False, False, True, ), 3164 3164 'IEM_MC_POP_GREG_U16': (McBlock.parseMcGeneric, True, True, True, ), 3165 3165 'IEM_MC_POP_GREG_U32': (McBlock.parseMcGeneric, True, True, True, ), -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r103404 r103555 9809 9809 return off; 9810 9810 } 9811 9812 9813 /********************************************************************************************************************************* 9814 * Local variable manipulation (add, sub, and, or). * 9815 *********************************************************************************************************************************/ 9816 9817 #define IEM_MC_AND_LOCAL_U8(a_u8Local, a_u8Mask) \ 9818 off = iemNativeEmitAndLocal(pReNative, off, a_u8Local, a_u8Mask, sizeof(uint8_t)) 9819 9820 #define IEM_MC_AND_LOCAL_U16(a_u16Local, a_u16Mask) \ 9821 off = iemNativeEmitAndLocal(pReNative, off, a_u16Local, a_u16Mask, sizeof(uint16_t)) 9822 9823 #define IEM_MC_AND_LOCAL_U32(a_u32Local, a_u32Mask) \ 9824 off = iemNativeEmitAndLocal(pReNative, off, a_u32Local, a_u32Mask, sizeof(uint32_t)) 9825 9826 #define IEM_MC_AND_LOCAL_U64(a_u64Local, a_u64Mask) \ 9827 off = iemNativeEmitAndLocal(pReNative, off, a_u64Local, a_u64Mask, sizeof(uint64_t)) 9828 9829 /** Emits code for AND'ing a local and a constant value. 
*/ 9830 DECL_INLINE_THROW(uint32_t) 9831 iemNativeEmitAndLocal(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVar, uint64_t uMask, uint8_t cbMask) 9832 { 9833 Assert(pReNative->Core.aVars[idxVar].cbVar == cbMask); 9834 #ifdef VBOX_STRICT 9835 switch (cbMask) 9836 { 9837 case sizeof(uint8_t): Assert((uint8_t)uMask == uMask); break; 9838 case sizeof(uint16_t): Assert((uint16_t)uMask == uMask); break; 9839 case sizeof(uint32_t): Assert((uint32_t)uMask == uMask); break; 9840 case sizeof(uint64_t): break; 9841 default: AssertFailedBreak(); 9842 } 9843 #endif 9844 9845 uint8_t const idxVarReg = iemNativeVarRegisterAcquire(pReNative, idxVar, &off, true /*fInitialized*/); 9846 if (cbMask <= sizeof(uint32_t)) 9847 off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxVarReg, uMask); 9848 else 9849 off = iemNativeEmitAndGprByImm(pReNative, off, idxVarReg, uMask); 9850 iemNativeVarRegisterRelease(pReNative, idxVar); 9851 return off; 9852 } 9853 9854 9855 #define IEM_MC_OR_LOCAL_U8(a_u8Local, a_u8Mask) \ 9856 off = iemNativeEmitOrLocal(pReNative, off, a_u8Local, a_u8Mask, sizeof(uint8_t)) 9857 9858 #define IEM_MC_OR_LOCAL_U16(a_u16Local, a_u16Mask) \ 9859 off = iemNativeEmitOrLocal(pReNative, off, a_u16Local, a_u16Mask, sizeof(uint16_t)) 9860 9861 #define IEM_MC_OR_LOCAL_U32(a_u32Local, a_u32Mask) \ 9862 off = iemNativeEmitOrLocal(pReNative, off, a_u32Local, a_u32Mask, sizeof(uint32_t)) 9863 9864 #define IEM_MC_OR_LOCAL_U64(a_u64Local, a_u64Mask) \ 9865 off = iemNativeEmitOrLocal(pReNative, off, a_u64Local, a_u64Mask, sizeof(uint64_t)) 9866 9867 /** Emits code for OR'ing a local and a constant value. 
*/ 9868 DECL_INLINE_THROW(uint32_t) 9869 iemNativeEmitOrLocal(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVar, uint64_t uMask, uint8_t cbMask) 9870 { 9871 Assert(pReNative->Core.aVars[idxVar].cbVar == cbMask); 9872 #ifdef VBOX_STRICT 9873 switch (cbMask) 9874 { 9875 case sizeof(uint8_t): Assert((uint8_t)uMask == uMask); break; 9876 case sizeof(uint16_t): Assert((uint16_t)uMask == uMask); break; 9877 case sizeof(uint32_t): Assert((uint32_t)uMask == uMask); break; 9878 case sizeof(uint64_t): break; 9879 default: AssertFailedBreak(); 9880 } 9881 #endif 9882 9883 uint8_t const idxVarReg = iemNativeVarRegisterAcquire(pReNative, idxVar, &off, true /*fInitialized*/); 9884 if (cbMask <= sizeof(uint32_t)) 9885 off = iemNativeEmitOrGpr32ByImm(pReNative, off, idxVarReg, uMask); 9886 else 9887 off = iemNativeEmitOrGprByImm(pReNative, off, idxVarReg, uMask); 9888 iemNativeVarRegisterRelease(pReNative, idxVar); 9889 return off; 9890 } 9891 9811 9892 9812 9893 … … 13094 13175 RT_NOREF(cbMinRead); 13095 13176 return VERR_NO_DATA; 13177 } 13178 13179 13180 DECLHIDDEN(const char *) iemNativeDbgVCpuOffsetToName(uint32_t off) 13181 { 13182 static struct { uint32_t off; const char *pszName; } const s_aMembers[] = 13183 { 13184 #define ENTRY(a_Member) { RT_UOFFSETOF(VMCPUCC, a_Member), #a_Member } 13185 ENTRY(fLocalForcedActions), 13186 ENTRY(iem.s.rcPassUp), 13187 ENTRY(iem.s.fExec), 13188 ENTRY(iem.s.pbInstrBuf), 13189 ENTRY(iem.s.uInstrBufPc), 13190 ENTRY(iem.s.GCPhysInstrBuf), 13191 ENTRY(iem.s.cbInstrBufTotal), 13192 ENTRY(iem.s.idxTbCurInstr), 13193 #ifdef VBOX_WITH_STATISTICS 13194 ENTRY(iem.s.StatNativeTlbHitsForFetch), 13195 ENTRY(iem.s.StatNativeTlbHitsForStore), 13196 ENTRY(iem.s.StatNativeTlbHitsForStack), 13197 ENTRY(iem.s.StatNativeTlbHitsForMapped), 13198 ENTRY(iem.s.StatNativeCodeTlbMissesNewPage), 13199 ENTRY(iem.s.StatNativeCodeTlbHitsForNewPage), 13200 ENTRY(iem.s.StatNativeCodeTlbMissesNewPageWithOffset), 13201 
ENTRY(iem.s.StatNativeCodeTlbHitsForNewPageWithOffset), 13202 #endif 13203 ENTRY(iem.s.DataTlb.aEntries), 13204 ENTRY(iem.s.DataTlb.uTlbRevision), 13205 ENTRY(iem.s.DataTlb.uTlbPhysRev), 13206 ENTRY(iem.s.DataTlb.cTlbHits), 13207 ENTRY(iem.s.CodeTlb.aEntries), 13208 ENTRY(iem.s.CodeTlb.uTlbRevision), 13209 ENTRY(iem.s.CodeTlb.uTlbPhysRev), 13210 ENTRY(iem.s.CodeTlb.cTlbHits), 13211 ENTRY(pVMR3), 13212 ENTRY(cpum.GstCtx.rax), 13213 ENTRY(cpum.GstCtx.ah), 13214 ENTRY(cpum.GstCtx.rcx), 13215 ENTRY(cpum.GstCtx.ch), 13216 ENTRY(cpum.GstCtx.rdx), 13217 ENTRY(cpum.GstCtx.dh), 13218 ENTRY(cpum.GstCtx.rbx), 13219 ENTRY(cpum.GstCtx.bh), 13220 ENTRY(cpum.GstCtx.rsp), 13221 ENTRY(cpum.GstCtx.rbp), 13222 ENTRY(cpum.GstCtx.rsi), 13223 ENTRY(cpum.GstCtx.rdi), 13224 ENTRY(cpum.GstCtx.r8), 13225 ENTRY(cpum.GstCtx.r9), 13226 ENTRY(cpum.GstCtx.r10), 13227 ENTRY(cpum.GstCtx.r11), 13228 ENTRY(cpum.GstCtx.r12), 13229 ENTRY(cpum.GstCtx.r13), 13230 ENTRY(cpum.GstCtx.r14), 13231 ENTRY(cpum.GstCtx.r15), 13232 ENTRY(cpum.GstCtx.es.Sel), 13233 ENTRY(cpum.GstCtx.es.u64Base), 13234 ENTRY(cpum.GstCtx.es.u32Limit), 13235 ENTRY(cpum.GstCtx.es.Attr), 13236 ENTRY(cpum.GstCtx.cs.Sel), 13237 ENTRY(cpum.GstCtx.cs.u64Base), 13238 ENTRY(cpum.GstCtx.cs.u32Limit), 13239 ENTRY(cpum.GstCtx.cs.Attr), 13240 ENTRY(cpum.GstCtx.ss.Sel), 13241 ENTRY(cpum.GstCtx.ss.u64Base), 13242 ENTRY(cpum.GstCtx.ss.u32Limit), 13243 ENTRY(cpum.GstCtx.ss.Attr), 13244 ENTRY(cpum.GstCtx.ds.Sel), 13245 ENTRY(cpum.GstCtx.ds.u64Base), 13246 ENTRY(cpum.GstCtx.ds.u32Limit), 13247 ENTRY(cpum.GstCtx.ds.Attr), 13248 ENTRY(cpum.GstCtx.fs.Sel), 13249 ENTRY(cpum.GstCtx.fs.u64Base), 13250 ENTRY(cpum.GstCtx.fs.u32Limit), 13251 ENTRY(cpum.GstCtx.fs.Attr), 13252 ENTRY(cpum.GstCtx.gs.Sel), 13253 ENTRY(cpum.GstCtx.gs.u64Base), 13254 ENTRY(cpum.GstCtx.gs.u32Limit), 13255 ENTRY(cpum.GstCtx.gs.Attr), 13256 ENTRY(cpum.GstCtx.rip), 13257 ENTRY(cpum.GstCtx.eflags), 13258 ENTRY(cpum.GstCtx.uRipInhibitInt), 13259 #undef ENTRY 13260 }; 13261 #ifdef 
VBOX_STRICT 13262 static bool s_fOrderChecked = false; 13263 if (!s_fOrderChecked) 13264 { 13265 s_fOrderChecked = true; 13266 uint32_t offPrev = s_aMembers[0].off; 13267 for (unsigned i = 1; i < RT_ELEMENTS(s_aMembers); i++) 13268 { 13269 Assert(s_aMembers[i].off > offPrev); 13270 offPrev = s_aMembers[i].off; 13271 } 13272 } 13273 #endif 13274 13275 /* 13276 * Binary lookup. 13277 */ 13278 unsigned iStart = 0; 13279 unsigned iEnd = RT_ELEMENTS(s_aMembers); 13280 for (;;) 13281 { 13282 unsigned const iCur = iStart + (iEnd - iStart) / 2; 13283 uint32_t const offCur = s_aMembers[iCur].off; 13284 if (off < offCur) 13285 { 13286 if (iCur != iStart) 13287 iEnd = iCur; 13288 else 13289 break; 13290 } 13291 else if (off > offCur) 13292 { 13293 if (iCur + 1 < iEnd) 13294 iStart = iCur + 1; 13295 else 13296 break; 13297 } 13298 else 13299 return s_aMembers[iCur].pszName; 13300 } 13301 #ifdef VBOX_WITH_STATISTICS 13302 if (off - RT_UOFFSETOF(VMCPUCC, iem.s.acThreadedFuncStats) < RT_SIZEOFMEMB(VMCPUCC, iem.s.acThreadedFuncStats)) 13303 return "iem.s.acThreadedFuncStats[iFn]"; 13304 #endif 13305 return NULL; 13096 13306 } 13097 13307 … … 13488 13698 # endif 13489 13699 { 13700 const char *pszAnnotation = NULL; 13490 13701 # ifdef RT_ARCH_AMD64 13491 13702 DISFormatYasmEx(&Dis, szDisBuf, sizeof(szDisBuf), … … 13493 13704 | DIS_FMT_FLAGS_RELATIVE_BRANCH | DIS_FMT_FLAGS_C_HEX, 13494 13705 NULL /*pfnGetSymbol*/, NULL /*pvUser*/); 13495 # elif defined(RT_ARCH_ARM64) 13706 PCDISOPPARAM pMemOp; 13707 if (DISUSE_IS_EFFECTIVE_ADDR(Dis.Param1.fUse)) 13708 pMemOp = &Dis.Param1; 13709 else if (DISUSE_IS_EFFECTIVE_ADDR(Dis.Param2.fUse)) 13710 pMemOp = &Dis.Param2; 13711 else if (DISUSE_IS_EFFECTIVE_ADDR(Dis.Param3.fUse)) 13712 pMemOp = &Dis.Param3; 13713 else 13714 pMemOp = NULL; 13715 if ( pMemOp 13716 && pMemOp->x86.Base.idxGenReg == IEMNATIVE_REG_FIXED_PVMCPU 13717 && (pMemOp->fUse & (DISUSE_BASE | DISUSE_REG_GEN64)) == (DISUSE_BASE | DISUSE_REG_GEN64)) 13718 pszAnnotation = 
iemNativeDbgVCpuOffsetToName(pMemOp->fUse & DISUSE_DISPLACEMENT32 13719 ? pMemOp->x86.uDisp.u32 : pMemOp->x86.uDisp.u8); 13720 13721 #elif defined(RT_ARCH_ARM64) 13496 13722 DISFormatArmV8Ex(&Dis, szDisBuf, sizeof(szDisBuf), 13497 13723 DIS_FMT_FLAGS_BYTES_LEFT | DIS_FMT_FLAGS_RELATIVE_BRANCH | DIS_FMT_FLAGS_C_HEX, … … 13500 13726 # error "Port me" 13501 13727 # endif 13728 if (pszAnnotation) 13729 { 13730 static unsigned const s_offAnnotation = 55; 13731 size_t const cchAnnotation = strlen(pszAnnotation); 13732 size_t cchDis = strlen(szDisBuf); 13733 if (RT_MAX(cchDis, s_offAnnotation) + sizeof(" ; ") + cchAnnotation <= sizeof(szDisBuf)) 13734 { 13735 if (cchDis < s_offAnnotation) 13736 { 13737 memset(&szDisBuf[cchDis], ' ', s_offAnnotation - cchDis); 13738 cchDis = s_offAnnotation; 13739 } 13740 szDisBuf[cchDis++] = ' '; 13741 szDisBuf[cchDis++] = ';'; 13742 szDisBuf[cchDis++] = ' '; 13743 memcpy(&szDisBuf[cchDis], pszAnnotation, cchAnnotation + 1); 13744 } 13745 } 13502 13746 pHlp->pfnPrintf(pHlp, " %p: %s\n", pNativeCur, szDisBuf); 13503 13747 } -
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
r103404 r103555 4411 4411 4412 4412 4413 /** 4414 * Emits code for OR'ing a 64-bit GPRs with a constant. 4415 */ 4416 DECL_INLINE_THROW(uint32_t) 4417 iemNativeEmitOrGprByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint64_t uImm) 4418 { 4419 #if defined(RT_ARCH_AMD64) 4420 if ((int64_t)uImm == (int8_t)uImm) 4421 { 4422 /* or Ev, imm8 */ 4423 uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 4); 4424 pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_B); 4425 pbCodeBuf[off++] = 0x83; 4426 pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 1, iGprDst & 7); 4427 pbCodeBuf[off++] = (uint8_t)uImm; 4428 } 4429 else if ((int64_t)uImm == (int32_t)uImm) 4430 { 4431 /* or Ev, imm32 */ 4432 uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7); 4433 pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_B); 4434 pbCodeBuf[off++] = 0x81; 4435 pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 1, iGprDst & 7); 4436 pbCodeBuf[off++] = RT_BYTE1(uImm); 4437 pbCodeBuf[off++] = RT_BYTE2(uImm); 4438 pbCodeBuf[off++] = RT_BYTE3(uImm); 4439 pbCodeBuf[off++] = RT_BYTE4(uImm); 4440 } 4441 else 4442 { 4443 /* Use temporary register for the 64-bit immediate. */ 4444 uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm); 4445 off = iemNativeEmitOrGprByGprEx(iemNativeInstrBufEnsure(pReNative, off, 3), off, iGprDst, iTmpReg); 4446 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 4447 iemNativeRegFreeTmpImm(pReNative, iTmpReg); 4448 } 4449 4450 #elif defined(RT_ARCH_ARM64) 4451 uint32_t uImmR = 0; 4452 uint32_t uImmNandS = 0; 4453 if (Armv8A64ConvertMask64ToImmRImmS(uImm, &uImmNandS, &uImmR)) 4454 { 4455 uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); 4456 pu32CodeBuf[off++] = Armv8A64MkInstrOrrImm(iGprDst, iGprDst, uImmNandS, uImmR); 4457 } 4458 else 4459 { 4460 /* Use temporary register for the 64-bit immediate. 
*/ 4461 uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm); 4462 off = iemNativeEmitOrGprByGprEx(iemNativeInstrBufEnsure(pReNative, off, 1), off, iGprDst, iTmpReg); 4463 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 4464 iemNativeRegFreeTmpImm(pReNative, iTmpReg); 4465 } 4466 4467 #else 4468 # error "Port me" 4469 #endif 4470 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 4471 return off; 4472 } 4473 4474 4475 /** 4476 * Emits code for OR'ing an 32-bit GPRs with a constant. 4477 * @note Bits 32 thru 63 in the destination will be zero after the operation. 4478 * @note For ARM64 this only supports @a uImm values that can be expressed using 4479 * the two 6-bit immediates of the OR instructions. The caller must make 4480 * sure this is possible! 4481 */ 4482 DECL_FORCE_INLINE_THROW(uint32_t) 4483 iemNativeEmitOrGpr32ByImmEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprDst, uint32_t uImm) 4484 { 4485 #if defined(RT_ARCH_AMD64) 4486 /* or Ev, imm */ 4487 if (iGprDst >= 8) 4488 pCodeBuf[off++] = X86_OP_REX_B; 4489 if ((int32_t)uImm == (int8_t)uImm) 4490 { 4491 pCodeBuf[off++] = 0x83; 4492 pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 1, iGprDst & 7); 4493 pCodeBuf[off++] = (uint8_t)uImm; 4494 } 4495 else 4496 { 4497 pCodeBuf[off++] = 0x81; 4498 pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 1, iGprDst & 7); 4499 pCodeBuf[off++] = RT_BYTE1(uImm); 4500 pCodeBuf[off++] = RT_BYTE2(uImm); 4501 pCodeBuf[off++] = RT_BYTE3(uImm); 4502 pCodeBuf[off++] = RT_BYTE4(uImm); 4503 } 4504 4505 #elif defined(RT_ARCH_ARM64) 4506 uint32_t uImmR = 0; 4507 uint32_t uImmNandS = 0; 4508 if (Armv8A64ConvertMask32ToImmRImmS(uImm, &uImmNandS, &uImmR)) 4509 pCodeBuf[off++] = Armv8A64MkInstrOrrImm(iGprDst, iGprDst, uImmNandS, uImmR, false /*f64Bit*/); 4510 else 4511 # ifdef IEM_WITH_THROW_CATCH 4512 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IEM_IPE_9)); 4513 # else 4514 AssertReleaseFailedStmt(off = UINT32_MAX); 4515 # endif 4516 4517 #else 4518 # error "Port me" 
4519 #endif 4520 return off; 4521 } 4522 4523 4524 /** 4525 * Emits code for OR'ing an 32-bit GPRs with a constant. 4526 * 4527 * @note Bits 32 thru 63 in the destination will be zero after the operation. 4528 */ 4529 DECL_INLINE_THROW(uint32_t) 4530 iemNativeEmitOrGpr32ByImm(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint32_t uImm) 4531 { 4532 #if defined(RT_ARCH_AMD64) 4533 off = iemNativeEmitOrGpr32ByImmEx(iemNativeInstrBufEnsure(pReNative, off, 7), off, iGprDst, uImm); 4534 4535 #elif defined(RT_ARCH_ARM64) 4536 uint32_t uImmR = 0; 4537 uint32_t uImmNandS = 0; 4538 if (Armv8A64ConvertMask32ToImmRImmS(uImm, &uImmNandS, &uImmR)) 4539 { 4540 uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); 4541 pu32CodeBuf[off++] = Armv8A64MkInstrOrrImm(iGprDst, iGprDst, uImmNandS, uImmR, false /*f64Bit*/); 4542 } 4543 else 4544 { 4545 /* Use temporary register for the 64-bit immediate. */ 4546 uint8_t iTmpReg = iemNativeRegAllocTmpImm(pReNative, &off, uImm); 4547 off = iemNativeEmitOrGpr32ByGpr(pReNative, off, iGprDst, iTmpReg); 4548 iemNativeRegFreeTmpImm(pReNative, iTmpReg); 4549 } 4550 4551 #else 4552 # error "Port me" 4553 #endif 4554 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 4555 return off; 4556 } 4557 4413 4558 4414 4559 /**
Note: See TracChangeset for help on using the changeset viewer.