Changeset 106465 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp:
- Oct 18, 2024 12:27:52 AM (4 months ago)
- svn:sync-xref-src-repo-rev:
- 165291
- Location:
- trunk/src/VBox/VMM/VMMAll
- Files:
- 5 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/IEMAllN8veExecMem.cpp
r106405 r106465 323 323 typedef struct IEMEXECMEMALLOCHDR 324 324 { 325 RT_GCC_EXTENSION 325 326 union 326 327 { -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompBltIn.cpp
r106443 r106465 1306 1306 if (IEM_F_MODE_X86_IS_FLAT(pReNative->fExec)) 1307 1307 { 1308 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, idxRegTmp, RT_UOFFSETOF(VMCPUCC, cpum.GstCtx.cs.u64Base));1308 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_CsBase>(pReNative, off, idxRegTmp); 1309 1309 # ifdef RT_ARCH_ARM64 1310 1310 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2); … … 1619 1619 if (IEM_F_MODE_X86_IS_FLAT(pReNative->fExec)) 1620 1620 { 1621 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, idxRegTmp, RT_UOFFSETOF(VMCPUCC, cpum.GstCtx.cs.u64Base));1621 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_CsBase>(pReNative, off, idxRegTmp); 1622 1622 # ifdef RT_ARCH_ARM64 1623 1623 uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2); -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompFuncs.h
r106453 r106465 143 143 Log4(("uPcUpdatingDebug=rip+%ld cBits=%d off=%#x\n", offDisp, cBits, off)); 144 144 pReNative->Core.fDebugPcInitialized = true; 145 off = iemNativeEmitLoadGpr FromVCpuU64Ex(pCodeBuf, off, idxTmpReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));145 off = iemNativeEmitLoadGprWithGstRegExT<kIemNativeGstReg_Pc>(pCodeBuf, off, idxTmpReg); 146 146 } 147 147 … … 444 444 | CPUMCTX_DBG_DBGF_MASK); 445 445 off = iemNativeEmitAndGpr32ByImm(pReNative, off, idxEflReg, ~(uint32_t)(X86_EFL_RF | CPUMCTX_INHIBIT_SHADOW)); 446 off = iemNativeEmitStoreGprTo VCpuU32(pReNative, off, idxEflReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));446 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_EFlags>(pReNative, off, idxEflReg); 447 447 448 448 /* Free but don't flush the EFLAGS register. */ … … 583 583 # if defined(IEMNATIVE_REG_FIXED_PC_DBG) 584 584 if (!pReNative->Core.offPc) 585 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, IEMNATIVE_REG_FIXED_PC_DBG, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));585 off = iemNativeEmitLoadGprWithGstShadowRegT<kIemNativeGstReg_Pc>(pNative, off, IEMNATIVE_REG_FIXED_PC_DBG); 586 586 # endif 587 587 … … 591 591 /* Perform the addition and store the result. */ 592 592 off = iemNativeEmitAddGprImm8(pReNative, off, idxPcReg, cbInstr); 593 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));593 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 594 594 595 595 /* Free but don't flush the PC register. */ … … 629 629 # ifdef IEMNATIVE_REG_FIXED_PC_DBG 630 630 if (!pReNative->Core.offPc) 631 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, IEMNATIVE_REG_FIXED_PC_DBG, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));631 off = iemNativeEmitLoadGprWithGstShadowRegT<kIemNativeGstReg_Pc>(pReNative, off, IEMNATIVE_REG_FIXED_PC_DBG); 632 632 # endif 633 633 … … 637 637 /* Perform the addition and store the result. 
*/ 638 638 off = iemNativeEmitAddGpr32Imm8(pReNative, off, idxPcReg, cbInstr); 639 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));639 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 640 640 641 641 /* Free but don't flush the PC register. */ … … 675 675 # if defined(IEMNATIVE_REG_FIXED_PC_DBG) 676 676 if (!pReNative->Core.offPc) 677 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, IEMNATIVE_REG_FIXED_PC_DBG, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));677 off = iemNativeEmitLoadGprWithGstShadowRegT<kIemNativeGstReg_Pc>(pReNative, off, IEMNATIVE_REG_FIXED_PC_DBG); 678 678 # endif 679 679 … … 684 684 off = iemNativeEmitAddGpr32Imm8(pReNative, off, idxPcReg, cbInstr); 685 685 off = iemNativeEmitClear16UpGpr(pReNative, off, idxPcReg); 686 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));686 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 687 687 688 688 /* Free but don't flush the PC register. */ … … 850 850 /* Undo the PC adjustment and store the old PC value. 
*/ 851 851 off = iemNativeEmitSubGprImm(pReNative, off, idxAddrReg, offDisp, iTmpReg); 852 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxAddrReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));852 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxAddrReg); 853 853 854 854 off = iemNativeEmitTbExit<kIemNativeLabelType_RaiseGp0, false /*a_fActuallyExitingTb*/>(pReNative, off); … … 943 943 { 944 944 idxOldPcReg = iTmpReg; 945 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, idxOldPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));945 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxOldPcReg); 946 946 } 947 947 off = iemNativeEmitAddGprImm(pReNative, off, idxOldPcReg, pReNative->Core.offPc); 948 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxOldPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));948 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxOldPcReg); 949 949 950 950 off = iemNativeEmitTbExit<kIemNativeLabelType_RaiseGp0, false /*a_fActuallyExitingTb*/>(pReNative, off); … … 1049 1049 { 1050 1050 idxOldPcReg = idxAddrReg; 1051 off = iemNativeEmitLoadGpr FromVCpuU64(pReNative, off, idxOldPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1051 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxOldPcReg); 1052 1052 } 1053 1053 off = iemNativeEmitAddGprImm(pReNative, off, idxOldPcReg, pReNative->Core.offPc); 1054 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxOldPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1054 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxOldPcReg); 1055 1055 # ifdef IEMNATIVE_WITH_INSTRUCTION_COUNTING 1056 1056 off = iemNativeEmitStoreImmToVCpuU8(pReNative, off, idxInstr, RT_UOFFSETOF(VMCPUCC, iem.s.idxTbCurInstr)); … … 1210 1210 #endif 1211 1211 1212 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1212 off = 
iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 1213 1213 1214 1214 /* Free but don't flush the PC register. */ … … 1332 1332 #endif 1333 1333 1334 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1334 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 1335 1335 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING 1336 1336 pReNative->Core.offPc = 0; … … 1395 1395 off = iemNativeEmitPcDebugCheckWithReg(pReNative, off, idxPcReg); 1396 1396 #endif 1397 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1397 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 1398 1398 1399 1399 /* Free but don't flush the PC register. */ … … 1510 1510 1511 1511 /* Store the result. */ 1512 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1512 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 1513 1513 1514 1514 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING … … 1808 1808 1809 1809 #if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK) 1810 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxRegRsp, RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.rsp));1810 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Rsp>(pReNative, off, idxRegRsp); 1811 1811 #endif 1812 1812 iemNativeRegFreeTmp(pReNative, idxRegRsp); … … 1888 1888 /* Get a register with the new PC loaded from idxVarPc. 1889 1889 Note! This ASSUMES that the high bits of the GPR is zeroed. */ 1890 uint8_t const idx NewPcReg= iemNativeVarRegisterAcquire(pReNative, idxVarPc, &off);1890 uint8_t const idxPcRegNew = iemNativeVarRegisterAcquire(pReNative, idxVarPc, &off); 1891 1891 1892 1892 /* Check limit (may #GP(0) + exit TB). */ 1893 1893 if (!f64Bit) 1894 1894 /** @todo we can skip this test in FLAT 32-bit mode. 
*/ 1895 off = iemNativeEmitCheckGpr32AgainstCsSegLimitMaybeRaiseGp0(pReNative, off, idx NewPcReg, idxInstr);1895 off = iemNativeEmitCheckGpr32AgainstCsSegLimitMaybeRaiseGp0(pReNative, off, idxPcRegNew, idxInstr); 1896 1896 /* Check that the address is canonical, raising #GP(0) + exit TB if it isn't. */ 1897 1897 else if (cbVar > sizeof(uint32_t)) 1898 off = iemNativeEmitCheckGprCanonicalMaybeRaiseGp0(pReNative, off, idx NewPcReg, idxInstr);1898 off = iemNativeEmitCheckGprCanonicalMaybeRaiseGp0(pReNative, off, idxPcRegNew, idxInstr); 1899 1899 1900 1900 #if 1 … … 1935 1935 1936 1936 /* Store the result. */ 1937 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxNewPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));1937 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcRegNew); 1938 1938 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 1939 off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idx NewPcReg, RT_UOFFSETOF(VMCPU, iem.s.uPcUpdatingDebug));1939 off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, iem.s.uPcUpdatingDebug)); 1940 1940 pReNative->Core.fDebugPcInitialized = true; 1941 1941 Log4(("uPcUpdatingDebug=rip/indirect-call off=%#x\n", off)); … … 1944 1944 #if 1 1945 1945 /* Need to transfer the shadow information to the new RIP register. */ 1946 iemNativeRegClearAndMarkAsGstRegShadow(pReNative, idx NewPcReg, kIemNativeGstReg_Pc, off);1946 iemNativeRegClearAndMarkAsGstRegShadow(pReNative, idxPcRegNew, kIemNativeGstReg_Pc, off); 1947 1947 #else 1948 1948 /* Sync the new PC. */ 1949 off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxPcReg, idx NewPcReg);1949 off = iemNativeEmitLoadGprFromGpr(pReNative, off, idxPcReg, idxPcRegNew); 1950 1950 #endif 1951 1951 iemNativeVarRegisterRelease(pReNative, idxVarPc); … … 2034 2034 2035 2035 /* Store the result. 
*/ 2036 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));2036 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcRegNew); 2037 2037 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 2038 2038 off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, iem.s.uPcUpdatingDebug)); … … 2100 2100 2101 2101 /* Store the result. */ 2102 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));2102 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcRegNew); 2103 2103 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 2104 2104 off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, iem.s.uPcUpdatingDebug)); … … 2164 2164 2165 2165 /* Store the result. */ 2166 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));2166 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcRegNew); 2167 2167 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 2168 2168 off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxPcRegNew, RT_UOFFSETOF(VMCPU, iem.s.uPcUpdatingDebug)); … … 2512 2512 2513 2513 /* Commit the result and clear any current guest shadows for RIP. 
*/ 2514 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxRegRsp, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp));2515 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxRegMemResult, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));2514 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Rsp>(pReNative, off, idxRegRsp); 2515 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>( pReNative, off, idxRegMemResult); 2516 2516 iemNativeRegClearAndMarkAsGstRegShadow(pReNative, idxRegMemResult, kIemNativeGstReg_Pc, off); 2517 2517 #ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG … … 5924 5924 } 5925 5925 else 5926 off = iemNativeEmitLoadGpr FromVCpuU32(pReNative, off, idxVarReg, RT_UOFFSETOF(VMCPUCC, cpum.GstCtx.eflags));5926 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_EFlags>(pReNative, off, idxVarReg); 5927 5927 iemNativeVarRegisterRelease(pReNative, idxVarEFlags); 5928 5928 } … … 6023 6023 6024 6024 iemNativeRegClearAndMarkAsGstRegShadow(pReNative, idxReg, kIemNativeGstReg_EFlags, off); 6025 off = iemNativeEmitStoreGprTo VCpuU32(pReNative, off, idxReg, RT_UOFFSETOF_DYN(VMCPUCC, cpum.GstCtx.eflags));6025 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_EFlags>(pReNative, off, idxReg); 6026 6026 iemNativeVarRegisterRelease(pReNative, idxVarEFlags); 6027 6027 return off; … … 6067 6067 6068 6068 /** @todo No delayed writeback for EFLAGS right now. */ 6069 off = iemNativeEmitStoreGprTo VCpuU32(pReNative, off, idxEflReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));6069 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_EFlags>(pReNative, off, idxEflReg); 6070 6070 6071 6071 /* Free but don't flush the EFLAGS register. */ … … 7391 7391 /* Perform the addition and store the result. 
*/ 7392 7392 off = iemNativeEmitAddGprImm(pReNative, off, idxPcReg, pReNative->Core.offPc); 7393 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));7393 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 7394 7394 # ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 7395 7395 off = iemNativeEmitPcDebugCheckWithReg(pReNative, off, idxPcReg); … … 7527 7527 /* Restore the original value. */ 7528 7528 off = iemNativeEmitSubGprImm(pReNative, off, idxPcReg, pReNative->Core.offPc); 7529 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));7529 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 7530 7530 7531 7531 /* Free and flush the PC register. */ … … 8463 8463 else 8464 8464 { 8465 off = iemNativeEmitLoadGprFromVCpuU32(pReNative, off, TlbState.idxReg1, 8466 RT_UOFFSETOF(VMCPUCC, cpum.GstCtx.eflags)); 8465 off = iemNativeEmitLoadGprWithGstRegT<kIemNativeGstReg_EFlags>(pReNative, off, TlbState.idxReg1); 8467 8466 off = iemNativeEmitAndGpr32ByImm(pReNative, off, TlbState.idxReg1, 8468 8467 UINT32_C(0xffff0000) & ~X86_EFL_RAZ_MASK); … … 8514 8513 8515 8514 #if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK) 8516 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxRegRsp, RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.rsp));8515 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Rsp>(pReNative, off, idxRegRsp); 8517 8516 #endif 8518 8517 iemNativeRegFreeTmp(pReNative, idxRegRsp); … … 8864 8863 8865 8864 #if !defined(IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK) 8866 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxRegRsp, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp));8865 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Rsp>(pReNative, off, idxRegRsp); 8867 8866 #endif 8868 8867 … … 10928 10927 #ifndef IEMNATIVE_WITH_DELAYED_REGISTER_WRITEBACK 10929 10928 /* Writeback the MXCSR register value (there is no 
delayed writeback for such registers at the moment). */ 10930 off = iemNativeEmitStoreGprTo VCpuU32(pReNative, off, idxRegMxCsr, RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.XState.x87.MXCSR));10929 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_MxCsr>(pReNative, off, idxRegMxCsr); 10931 10930 #endif 10932 10931 -
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r106453 r106465 6035 6035 /* Perform the addition and store the result. */ 6036 6036 off = iemNativeEmitAddGprImm(pReNative, off, idxPcReg, pReNative->Core.offPc); 6037 off = iemNativeEmitStoreGprTo VCpuU64(pReNative, off, idxPcReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip));6037 off = iemNativeEmitStoreGprToGstRegT<kIemNativeGstReg_Pc>(pReNative, off, idxPcReg); 6038 6038 # ifdef IEMNATIVE_WITH_DELAYED_PC_UPDATING_DEBUG 6039 6039 off = iemNativeEmitPcDebugCheckWithReg(pReNative, off, idxPcReg); -
trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h
r106453 r106465 198 198 199 199 /********************************************************************************************************************************* 200 * Guest Register Load & Store Helpers * 201 *********************************************************************************************************************************/ 202 203 204 /** 205 * Alternative to iemNativeEmitLoadGprWithGstShadowRegEx() and 206 * iemNativeEmitLoadGprWithGstShadowReg() which should be more efficient as it 207 * lets the compiler do the equivalent of the g_aGstShadowInfo lookup. 208 * 209 * @note This does not mark @a idxHstReg as having a shadow copy of @a a_enmGstReg, 210 * that is something the caller needs to do if applicable. 211 */ 212 template<IEMNATIVEGSTREG const a_enmGstReg> 213 DECL_INLINE_THROW(uint32_t) iemNativeEmitLoadGprWithGstRegExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxHstReg) 214 { 215 /* 64-bit registers: */ 216 if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Pc) 217 return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip)); 218 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Rsp) 219 return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp)); 220 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_CsBase) 221 return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cs.u64Base)); 222 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr0) 223 // return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr0)); 224 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr4) 225 // return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr4)); 226 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Xcr0) 227 // return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, 
cpum.GstCtx.aXcr[0])); 228 229 /* 32-bit registers: */ 230 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_EFlags) 231 return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags)); 232 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_MxCsr) 233 return iemNativeEmitLoadGprFromVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.MXCSR)); 234 235 /* 16-bit registers */ 236 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFcw) 237 return iemNativeEmitLoadGprFromVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FCW)); 238 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFsw) 239 return iemNativeEmitLoadGprFromVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FSW)); 240 #if RT_CPLUSPLUS_PREREQ(201700) && !defined(__clang_major__) 241 else 242 { 243 AssertCompile(false); 244 return off; 245 } 246 #endif 247 } 248 249 250 /** See iemNativeEmitLoadGprWithGstRegExT(). */ 251 template<IEMNATIVEGSTREG const a_enmGstReg> 252 DECL_INLINE_THROW(uint32_t) iemNativeEmitLoadGprWithGstRegT(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg) 253 { 254 #ifdef RT_ARCH_AMD64 255 PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 16); 256 #elif defined(RT_ARCH_ARM64) 257 PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); 258 #else 259 # error "port me" 260 #endif 261 off = iemNativeEmitLoadGprWithGstRegExT<a_enmGstReg>(pCodeBuf, off, idxHstReg); 262 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 263 return off; 264 } 265 266 267 /** 268 * Store companion to iemNativeEmitLoadGprWithGstRegExT(). 
269 */ 270 template<IEMNATIVEGSTREG const a_enmGstReg> 271 DECL_INLINE_THROW(uint32_t) iemNativeEmitStoreGprToGstRegExT(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t idxHstReg, 272 uint8_t idxTmpReg = IEMNATIVE_REG_FIXED_TMP0) 273 { 274 /* 64-bit registers: */ 275 if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Pc) 276 return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rip), idxTmpReg); 277 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Rsp) 278 return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.rsp), idxTmpReg); 279 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr0) 280 // return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr0), idxTmpReg); 281 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Cr4) 282 // return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.cr4), idxTmpReg); 283 //else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_Xcr0) 284 // return iemNativeEmitStoreGprToVCpuU64Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.aXcr[0]), idxTmpReg); 285 /* 32-bit registers: */ 286 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_EFlags) 287 return iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags), idxTmpReg); 288 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_MxCsr) 289 return iemNativeEmitStoreGprToVCpuU32Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.MXCSR), idxTmpReg); 290 /* 16-bit registers */ 291 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFcw) 292 return iemNativeEmitStoreGprToVCpuU16Ex(pCodeBuf, off, idxHstReg, RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FCW), idxTmpReg); 293 else if RT_CONSTEXPR_IF(a_enmGstReg == kIemNativeGstReg_FpuFsw) 294 return iemNativeEmitStoreGprToVCpuU16Ex(pCodeBuf, off, idxHstReg, 
RT_UOFFSETOF(VMCPU, cpum.GstCtx.XState.x87.FSW), idxTmpReg); 295 #if RT_CPLUSPLUS_PREREQ(201700) && !defined(__clang_major__) 296 else 297 { 298 AssertCompile(false); 299 return off; 300 } 301 #endif 302 } 303 304 305 /** See iemNativeEmitLoadGprWithGstRegExT(). */ 306 template<IEMNATIVEGSTREG const a_enmGstReg> 307 DECL_INLINE_THROW(uint32_t) iemNativeEmitStoreGprToGstRegT(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxHstReg) 308 { 309 #ifdef RT_ARCH_AMD64 310 PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 16); 311 #elif defined(RT_ARCH_ARM64) 312 PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 6); 313 #else 314 # error "port me" 315 #endif 316 off = iemNativeEmitStoreGprToGstRegExT<a_enmGstReg>(pCodeBuf, off, idxHstReg); 317 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 318 return off; 319 } 320 321 322 323 /********************************************************************************************************************************* 200 324 * EFLAGS * 201 325 *********************************************************************************************************************************/ … … 468 592 idxRegEfl = ASMBitFirstSetU32(bmAvailableRegs) - 1; 469 593 bmAvailableRegs &= ~RT_BIT_32(idxRegTmp); 470 off = iemNativeEmitLoadGpr FromVCpuU32Ex(pCodeBuf, off, idxRegEfl, RT_UOFFSETOF(VMCPU, cpum.GstCtx.eflags));594 off = iemNativeEmitLoadGprWithGstRegExT<kIemNativeGstReg_EFlags>(pCodeBuf, off, idxRegEfl); 471 595 } 472 596 Assert(bmAvailableRegs != 0);
Note: See TracChangeset for help on using the changeset viewer.