Changeset 102385 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
- Timestamp: Nov 29, 2023 9:09:08 PM
- File: 1 edited
Legend:
- Unmodified (no prefix)
- Added (+)
- Removed (-)
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r102370 r102385 149 149 DECL_FORCE_INLINE(void) iemNativeRegClearGstRegShadowingOne(PIEMRECOMPILERSTATE pReNative, uint8_t idxHstReg, 150 150 IEMNATIVEGSTREG enmGstReg, uint32_t off); 151 152 static uint8_t iemNativeVarGetStackSlot(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar); 151 153 152 154 … … 2415 2417 uint32_t const idxVar = ASMBitFirstSetU32(fVars) - 1; 2416 2418 uint8_t const idxReg = pReNative->Core.aVars[idxVar].idxReg; 2419 /** @todo Prevent active variables from changing here... */ 2417 2420 if ( idxReg < RT_ELEMENTS(pReNative->Core.aHstRegs) 2418 2421 && (RT_BIT_32(idxReg) & fRegMask) … … 2430 2433 if (pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Stack) 2431 2434 { 2432 AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS, 2433 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_8)); 2434 *poff = iemNativeEmitStoreGprByBp(pReNative, *poff, iemNativeVarCalcBpDisp(pReNative, idxVar), idxReg); 2435 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar); 2436 *poff = iemNativeEmitStoreGprByBp(pReNative, *poff, iemNativeStackCalcBpDisp(idxStackSlot), idxReg); 2435 2437 } 2436 2438 … … 2549 2551 * Otherwise we must spill the register onto the stack. 2550 2552 */ 2551 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot;2553 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar); 2552 2554 Log12(("iemNativeRegMoveOrSpillStackVar: spilling idxVar=%d/idxReg=%d onto the stack (slot %#x bp+%d, off=%#x)\n", 2553 2555 idxVar, idxRegOld, idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off)); 2554 AssertStmt(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_7));2555 2556 off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxRegOld); 2556 2557 … … 5338 5339 5339 5340 /** 5341 * Gets the stack slot for a stack variable, allocating one if necessary. 5342 * 5343 * Calling this function implies that the stack slot will contain a valid 5344 * variable value. The caller deals with any register currently assigned to the 5345 * variable, typically by spilling it into the stack slot. 5346 * 5347 * @returns The stack slot number. 5348 * @param pReNative The recompiler state. 5349 * @param idxVar The variable. 5350 * @throws VERR_IEM_VAR_OUT_OF_STACK_SLOTS 5351 */ 5352 static uint8_t iemNativeVarGetStackSlot(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar) 5353 { 5354 IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVar); 5355 Assert(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Stack); 5356 5357 /* Already got a slot? */ 5358 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 5359 if (idxStackSlot != UINT8_MAX) 5360 { 5361 Assert(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS); 5362 return idxStackSlot; 5363 } 5364 5365 /* 5366 * A single slot is easy to allocate. 5367 * Allocate them from the top end, closest to BP, to reduce the displacement. 5368 */ 5369 if (pReNative->Core.aVars[idxVar].cbVar <= sizeof(uint64_t)) 5370 { 5371 unsigned const iSlot = ASMBitLastSetU32(~pReNative->Core.bmStack) - 1; 5372 AssertStmt(iSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5373 pReNative->Core.bmStack |= RT_BIT_32(iSlot); 5374 pReNative->Core.aVars[idxVar].idxStackSlot = (uint8_t)iSlot; 5375 Log11(("iemNativeVarSetKindToStack: idxVar=%d iSlot=%#x\n", idxVar, iSlot)); 5376 return (uint8_t)iSlot; 5377 } 5378 5379 /* 5380 * We need more than one stack slot. 
5381 * 5382 * cbVar -> fBitAlignMask: 16 -> 1; 32 -> 3; 64 -> 7; 5383 */ 5384 AssertCompile(RT_IS_POWER_OF_TWO(IEMNATIVE_FRAME_VAR_SLOTS)); /* If not we have to add an overflow check. */ 5385 Assert(pReNative->Core.aVars[idxVar].cbVar <= 64); 5386 uint32_t const fBitAlignMask = RT_BIT_32(ASMBitLastSetU32(pReNative->Core.aVars[idxVar].cbVar) - 4) - 1; 5387 uint32_t fBitAllocMask = RT_BIT_32((pReNative->Core.aVars[idxVar].cbVar + 7) >> 3) - 1; 5388 uint32_t bmStack = ~pReNative->Core.bmStack; 5389 while (bmStack != UINT32_MAX) 5390 { 5391 /** @todo allocate from the top to reduce BP displacement. */ 5392 unsigned const iSlot = ASMBitFirstSetU32(bmStack) - 1; 5393 AssertStmt(iSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5394 if (!(iSlot & fBitAlignMask)) 5395 { 5396 if ((bmStack & (fBitAllocMask << iSlot)) == (fBitAllocMask << iSlot)) 5397 { 5398 pReNative->Core.bmStack |= (fBitAllocMask << iSlot); 5399 pReNative->Core.aVars[idxVar].idxStackSlot = (uint8_t)iSlot; 5400 Log11(("iemNativeVarSetKindToStack: idxVar=%d iSlot=%#x/%#x (cbVar=%#x)\n", 5401 idxVar, iSlot, fBitAllocMask, pReNative->Core.aVars[idxVar].cbVar)); 5402 return (uint8_t)iSlot; 5403 } 5404 } 5405 bmStack |= fBitAlignMask << (iSlot & ~fBitAlignMask); 5406 } 5407 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5408 } 5409 5410 5411 /** 5340 5412 * Changes the variable to a stack variable. 5341 5413 * … … 5345 5417 * @param pReNative The recompiler state. 5346 5418 * @param idxVar The variable. 5347 * @throws VERR_IEM_VAR_ OUT_OF_STACK_SLOTS, VERR_IEM_VAR_IPE_25419 * @throws VERR_IEM_VAR_IPE_2 5348 5420 */ 5349 5421 static void iemNativeVarSetKindToStack(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar) … … 5360 5432 pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_Stack; 5361 5433 5362 if (pReNative->Core.aVars[idxVar].idxStackSlot == UINT8_MAX) 5363 { 5364 if (pReNative->Core.aVars[idxVar].cbVar <= sizeof(uint64_t)) 5365 { 5366 unsigned const iSlot = ASMBitFirstSetU32(~pReNative->Core.bmStack) - 1; 5367 AssertStmt(iSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5368 pReNative->Core.bmStack |= RT_BIT_32(iSlot); 5369 pReNative->Core.aVars[idxVar].idxStackSlot = iSlot; 5370 Log11(("iemNativeVarSetKindToStack: idxVar=%d iSlot=%#x\n", idxVar, iSlot)); 5371 return; 5372 } 5373 /* cbVar -> fBitAlignMask: 16 -> 1; 32 -> 3; 64 -> 7;*/ 5374 AssertCompile(RT_IS_POWER_OF_TWO(IEMNATIVE_FRAME_VAR_SLOTS)); /* If not we have to add an overflow check. 
*/ 5375 Assert(pReNative->Core.aVars[idxVar].cbVar <= 64); 5376 uint32_t const fBitAlignMask = RT_BIT_32(ASMBitLastSetU32(pReNative->Core.aVars[idxVar].cbVar) - 4) - 1; 5377 uint32_t fBitAllocMask = RT_BIT_32((pReNative->Core.aVars[idxVar].cbVar + 7) >> 3) - 1; 5378 uint32_t bmStack = ~pReNative->Core.bmStack; 5379 while (bmStack != UINT32_MAX) 5380 { 5381 unsigned const iSlot = ASMBitFirstSetU32(bmStack) - 1; 5382 AssertStmt(iSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5383 if (!(iSlot & fBitAlignMask)) 5384 { 5385 if ((bmStack & (fBitAllocMask << iSlot)) == (fBitAllocMask << iSlot)) 5386 { 5387 pReNative->Core.bmStack |= (fBitAllocMask << iSlot); 5388 pReNative->Core.aVars[idxVar].idxStackSlot = iSlot; 5389 Log11(("iemNativeVarSetKindToStack: idxVar=%d iSlot=%#x/%#x (cbVar=%#x)\n", 5390 idxVar, iSlot, fBitAllocMask, pReNative->Core.aVars[idxVar].cbVar)); 5391 return; 5392 } 5393 } 5394 bmStack |= fBitAlignMask << (iSlot & ~fBitAlignMask); 5395 } 5396 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_OUT_OF_STACK_SLOTS)); 5397 } 5434 /* Note! We don't allocate a stack slot here, that's only done when a 5435 slot is actually needed to hold a variable value. */ 5398 5436 } 5399 5437 } … … 5545 5583 * @param idxVar The variable. 5546 5584 * @param poff Pointer to the instruction buffer offset. 5547 * In case a register needs to be freed up. 5548 */ 5549 DECL_HIDDEN_THROW(uint8_t) iemNativeVarAllocRegister(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, uint32_t *poff) 5585 * In case a register needs to be freed up or the value 5586 * loaded off the stack. 5587 * @param fInitialized Set if the variable must already have been initialized. 5588 * Will throw VERR_IEM_VAR_NOT_INITIALIZED if this is not 5589 * the case. 5590 */ 5591 DECL_HIDDEN_THROW(uint8_t) iemNativeVarAllocRegister(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar, 5592 uint32_t *poff, bool fInitialized = false) 5550 5593 { 5551 5594 IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVar); 5552 5595 Assert(pReNative->Core.aVars[idxVar].cbVar <= 8); 5596 /** @todo we must mark the variable as active and add a release function to 5597 * mark it as inactive, otherwise temporary register allocations may 5598 * cause the variable to be spilled onto the stack. */ 5553 5599 5554 5600 uint8_t idxReg = pReNative->Core.aVars[idxVar].idxReg; … … 5619 5665 iemNativeRegMarkAllocated(pReNative, idxReg, kIemNativeWhat_Var, idxVar); 5620 5666 pReNative->Core.aVars[idxVar].idxReg = idxReg; 5667 5668 /* 5669 * Load it off the stack if we've got a stack slot. 
5670 */ 5671 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 5672 if (idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS) 5673 { 5674 int32_t const offDispBp = iemNativeStackCalcBpDisp(idxStackSlot); 5675 switch (pReNative->Core.aVars[idxVar].cbVar) 5676 { 5677 case 1: *poff = iemNativeEmitLoadGprByBpU8( pReNative, *poff, idxReg, offDispBp); break; 5678 case 2: *poff = iemNativeEmitLoadGprByBpU16(pReNative, *poff, idxReg, offDispBp); break; 5679 case 3: AssertFailed(); RT_FALL_THRU(); 5680 case 4: *poff = iemNativeEmitLoadGprByBpU32(pReNative, *poff, idxReg, offDispBp); break; 5681 default: AssertFailed(); RT_FALL_THRU(); 5682 case 8: *poff = iemNativeEmitLoadGprByBp( pReNative, *poff, idxReg, offDispBp); break; 5683 } 5684 } 5685 else 5686 { 5687 Assert(idxStackSlot == UINT8_MAX); 5688 AssertStmt(!fInitialized, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED)); 5689 } 5621 5690 return idxReg; 5622 5691 } … … 5709 5778 else 5710 5779 { 5711 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 5712 AssertStmt(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_7)); 5713 int32_t const offDispBp = iemNativeStackCalcBpDisp(idxStackSlot); 5780 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar); 5781 int32_t const offDispBp = iemNativeStackCalcBpDisp(idxStackSlot); 5714 5782 switch (pReNative->Core.aVars[idxVar].cbVar) 5715 5783 { … … 5778 5846 { 5779 5847 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 5780 Assert(idxStackSlot == UINT8_MAX || idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS);5781 5848 if (idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS) 5782 5849 { … … 5790 5857 pReNative->Core.aVars[idxVar].idxStackSlot = UINT8_MAX; 5791 5858 } 5859 else 5860 Assert(idxStackSlot == UINT8_MAX); 5792 5861 } 5793 5862 … … 6028 6097 if (idxRegOld < RT_ELEMENTS(pReNative->Core.aHstRegs)) 6029 6098 { 6030 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot;6099 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxVar); 6031 6100 Log12(("iemNativeEmitCallCommon: spilling idxVar=%d/idxReg=%d (referred to by %d) onto the stack (slot %#x bp+%d, off=%#x)\n", 6032 6101 idxVar, idxRegOld, pReNative->Core.aVars[idxVar].idxReferrerVar, 6033 6102 idxStackSlot, iemNativeStackCalcBpDisp(idxStackSlot), off)); 6034 AssertStmt(idxStackSlot < IEMNATIVE_FRAME_VAR_SLOTS, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_REG_IPE_7));6035 6103 off = iemNativeEmitStoreGprByBp(pReNative, off, iemNativeStackCalcBpDisp(idxStackSlot), idxRegOld); 6036 6104 … … 6128 6196 { 6129 6197 case kIemNativeVarKind_Stack: 6130 AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX, 6131 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_3)); 6198 { 6199 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 6200 AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED)); 6132 6201 off = iemNativeEmitLoadGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG /* is free */, 6133 iemNative VarCalcBpDisp(pReNative, idxVar));6202 iemNativeStackCalcBpDisp(idxStackSlot)); 6134 6203 off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, IEMNATIVE_CALL_ARG0_GREG); 6135 6204 continue; 6205 } 6136 6206 6137 6207 case kIemNativeVarKind_Immediate: … … 6141 6211 case kIemNativeVarKind_VarRef: 6142 6212 { 6143 uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar;6213 uint8_t const idxOtherVar = 
pReNative->Core.aVars[idxVar].u.idxRefVar; 6144 6214 Assert(idxOtherVar < RT_ELEMENTS(pReNative->Core.aVars)); 6145 AssertStmt( pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX 6146 && pReNative->Core.aVars[idxOtherVar].idxReg == UINT8_MAX, 6147 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4)); 6148 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, 6149 iemNativeStackCalcBpDisp(pReNative->Core.aVars[idxOtherVar].idxStackSlot)); 6215 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxOtherVar); 6216 int32_t const offBpDispOther = iemNativeStackCalcBpDisp(idxStackSlot); 6217 uint8_t const idxRegOther = pReNative->Core.aVars[idxOtherVar].idxReg; 6218 if (idxRegOther < RT_ELEMENTS(pReNative->Core.aHstRegs)) 6219 { 6220 off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDispOther, idxRegOther); 6221 pReNative->Core.aVars[idxOtherVar].idxReg = UINT8_MAX; 6222 } 6223 Assert( pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX 6224 && pReNative->Core.aVars[idxOtherVar].idxReg == UINT8_MAX); 6225 off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG, offBpDispOther); 6150 6226 off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, IEMNATIVE_CALL_ARG0_GREG); 6151 6227 continue; … … 6202 6278 { 6203 6279 case kIemNativeVarKind_Stack: 6204 AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX, 6205 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_3)); 6206 off = iemNativeEmitLoadGprByBp(pReNative, off, idxArgReg, iemNativeVarCalcBpDisp(pReNative, idxVar)); 6280 { 6281 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVar].idxStackSlot; 6282 AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED)); 6283 off = iemNativeEmitLoadGprByBp(pReNative, off, idxArgReg, iemNativeStackCalcBpDisp(idxStackSlot)); 6207 6284 continue; 6285 } 6208 6286 6209 6287 case kIemNativeVarKind_Immediate: … … 6213 6291 case kIemNativeVarKind_VarRef: 6214 6292 { 6215 uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar;6293 uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar; 6216 6294 Assert(idxOtherVar < RT_ELEMENTS(pReNative->Core.aVars)); 6217 AssertStmt( pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX 6218 && pReNative->Core.aVars[idxOtherVar].idxReg == UINT8_MAX, 6219 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4)); 6220 off = iemNativeEmitLeaGprByBp(pReNative, off, idxArgReg, 6221 iemNativeStackCalcBpDisp(pReNative->Core.aVars[idxOtherVar].idxStackSlot)); 6295 uint8_t const idxStackSlot = iemNativeVarGetStackSlot(pReNative, idxOtherVar); 6296 int32_t const offBpDispOther = iemNativeStackCalcBpDisp(idxStackSlot); 6297 uint8_t const idxRegOther = pReNative->Core.aVars[idxOtherVar].idxReg; 6298 if (idxRegOther < RT_ELEMENTS(pReNative->Core.aHstRegs)) 6299 { 6300 off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDispOther, idxRegOther); 6301 pReNative->Core.aVars[idxOtherVar].idxReg = UINT8_MAX; 6302 } 6303 Assert( pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX 6304 && pReNative->Core.aVars[idxOtherVar].idxReg == UINT8_MAX); 6305 off = iemNativeEmitLeaGprByBp(pReNative, off, idxArgReg, offBpDispOther); 6222 6306 continue; 6223 6307 } … … 6782 6866 else 6783 6867 { 6784 AssertStmt(pReNative->Core.aVars[idxValueVar].idxStackSlot != UINT8_MAX,6785 IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4));6868 uint8_t const idxStackSlot = pReNative->Core.aVars[idxValueVar].idxStackSlot; 6869 AssertStmt(idxStackSlot != 
UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED)); 6786 6870 if (idxGstTmpReg >= 8) 6787 6871 pbCodeBuf[off++] = X86_OP_REX_R; 6788 6872 pbCodeBuf[off++] = 0x8b; 6789 off = iemNativeEmitGprByBpDisp(pbCodeBuf, off, idxGstTmpReg, iemNative VarCalcBpDisp(pReNative, idxValueVar), pReNative);6873 off = iemNativeEmitGprByBpDisp(pbCodeBuf, off, idxGstTmpReg, iemNativeStackCalcBpDisp(idxStackSlot), pReNative); 6790 6874 } 6791 6875 6792 6876 #elif defined(RT_ARCH_ARM64) 6793 6877 /* bfi w1, w2, 0, 16 - moves bits 15:0 from idxVarReg to idxGstTmpReg bits 15:0. */ 6794 uint8_t const idxVarReg = iemNativeVarAllocRegister(pReNative, idxValueVar, &off );6878 uint8_t const idxVarReg = iemNativeVarAllocRegister(pReNative, idxValueVar, &off, true /*fInitialized*/); 6795 6879 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); 6796 6880 pu32CodeBuf[off++] = Armv8A64MkInstrBfi(idxGstTmpReg, idxVarReg, 0, 16); … … 7516 7600 { 7517 7601 off = iemNativeEmitLoadGprImm64(pReNative, off, idxRegRet, u32EffAddr); 7518 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);7519 pu32CodeBuf[off++] = Armv8A64MkInstrMovZ(idxRegRet, u32EffAddr);7520 7602 if (idxRegBase != UINT8_MAX) 7603 { 7604 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1); 7521 7605 pu32CodeBuf[off++] = Armv8A64MkInstrAddSubReg(false /*fSub*/, idxRegRet, idxRegRet, idxRegBase, false /*f64Bit*/); 7606 } 7522 7607 } 7523 7608 if (idxRegIndex != UINT8_MAX) … … 7715 7800 else 7716 7801 { 7802 uint8_t const idxStackSlot = pReNative->Core.aVars[idxVarGCPtrMem].idxStackSlot; 7803 AssertStmt(idxStackSlot != UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_NOT_INITIALIZED)); 7717 7804 AssertFailed(); /** @todo This was probably caused by iemNativeRegMoveAndFreeAndFlushAtCall above. Improve... */ 7718 off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgGCPtrMem, iemNative VarCalcBpDisp(pReNative, idxVarGCPtrMem));7805 off = iemNativeEmitLoadGprByBp(pReNative, off, idxRegArgGCPtrMem, iemNativeStackCalcBpDisp(idxStackSlot)); 7719 7806 if (offDisp) 7720 7807 off = iemNativeEmitAddGprImm(pReNative, off, idxRegArgGCPtrMem, offDisp);
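
The most intricate part of the new iemNativeVarGetStackSlot() is the multi-slot branch: a variable larger than 8 bytes is packed into a naturally aligned run of 8-byte slots tracked by the Core.bmStack bitmap, using the masks documented in its comment (cbVar -> fBitAlignMask: 16 -> 1; 32 -> 3; 64 -> 7). The following standalone sketch of that mask arithmetic is illustration only, not VirtualBox code: it uses plain C with hypothetical names (canPlaceAt, bitLastSet), and it tests the allocated-slot bitmap directly rather than the inverted copy the changeset scans.

#include <assert.h>
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Stand-in for ASMBitLastSetU32: 1-based index of the most significant set bit, 0 if none. */
static unsigned bitLastSet(uint32_t u)
{
    unsigned iBit = 0;
    while (u)
    {
        iBit++;
        u >>= 1;
    }
    return iBit;
}

/* Check whether a variable of cbVar bytes (9..64) could be placed at slot iSlot of a
   bitmap in which a set bit means "8-byte slot already allocated". */
static bool canPlaceAt(uint32_t bmAllocated, uint32_t cbVar, unsigned iSlot)
{
    /* cbVar -> fBitAlignMask: 16 -> 1; 32 -> 3; 64 -> 7 (table from the changeset comment). */
    uint32_t const fBitAlignMask = (UINT32_C(1) << (bitLastSet(cbVar) - 4)) - 1;
    /* One bit per 8-byte slot the variable occupies: 16 -> 0x3, 32 -> 0xf, 64 -> 0xff. */
    uint32_t const fBitAllocMask = (UINT32_C(1) << ((cbVar + 7) >> 3)) - 1;

    assert(cbVar > 8 && cbVar <= 64);
    if (iSlot & fBitAlignMask)                            /* the run must start on an aligned slot index */
        return false;
    return (bmAllocated & (fBitAllocMask << iSlot)) == 0; /* and every slot in the run must be free */
}

int main(void)
{
    uint32_t const bmAllocated = UINT32_C(0x0000000d); /* slots 0, 2 and 3 already taken */
    assert(!canPlaceAt(bmAllocated, 32, 0));  /* needs slots 0..3, but some are taken      */
    assert(!canPlaceAt(bmAllocated, 32, 2));  /* slot 2 is not aligned to a 4-slot boundary */
    assert( canPlaceAt(bmAllocated, 32, 4));  /* slots 4..7 are free and 4 is aligned       */
    printf("32-byte variable fits at slot 4\n");
    return 0;
}

Worked through the same way as in the changeset, a 32-byte variable gives fBitAlignMask = 3 (the run must start at a slot index that is a multiple of four) and fBitAllocMask = 0xf (it occupies four consecutive slots).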