- Timestamp: Jun 30, 2021 11:02:04 AM (4 years ago)
- svn:sync-xref-src-repo-rev: 145433
- Location: trunk
- Files: 4 edited
trunk/include/VBox/vmm/iem.h
(r82968 → r89974)

@@ -288 +288 @@
                                          const void *pvOpcodeBytes, size_t cbOpcodeBytes,
                                          uint32_t *pcbWritten);
+VMMDECL(VBOXSTRICTRC)       IEMExecOneIgnoreLock(PVMCPUCC pVCpu);
 VMMDECL(VBOXSTRICTRC)       IEMExecLots(PVMCPUCC pVCpu, uint32_t cMaxInstructions, uint32_t cPollRate, uint32_t *pcInstructions);
 /** Statistics returned by IEMExecForExits. */
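The new IEMExecOneIgnoreLock export is the entry point the rest of the VMM can use when the host's split-lock detection would turn a guest's misaligned locked access into a host #AC: it emulates exactly one guest instruction with LOCK semantics disregarded. A minimal caller sketch follows; the handler name and the fHostSplitLockAC parameter are invented for illustration, only IEMExecOneIgnoreLock and IEMExecOne are taken from iem.h:

    #include <VBox/vmm/iem.h>

    /* Hypothetical caller (not part of this changeset): retry a guest
     * instruction in IEM, dropping the lock when the host has flagged a
     * split-lock #AC for it.  fHostSplitLockAC is an invented parameter. */
    static VBOXSTRICTRC exampleEmulateInstruction(PVMCPUCC pVCpu, bool fHostSplitLockAC)
    {
        if (fHostSplitLockAC)
            return IEMExecOneIgnoreLock(pVCpu);    /* no bus/cache-line lock taken */
        return IEMExecOne(pVCpu);                  /* normal path, LOCK honoured */
    }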
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
(r89973 → r89974)

@@ -1194 +1194 @@
  *                      calling thread.
  * @param   fBypassHandlers Whether to bypass access handlers.
- */
-DECLINLINE(void) iemInitDecoder(PVMCPUCC pVCpu, bool fBypassHandlers)
+ * @param   fDisregardLock  Whether to disregard the LOCK prefix.
+ */
+DECLINLINE(void) iemInitDecoder(PVMCPUCC pVCpu, bool fBypassHandlers, bool fDisregardLock)
 {
     IEM_CTX_ASSERT(pVCpu, IEM_CPUMCTX_EXTRN_MUST_MASK);

@@ -1250 +1251 @@
     pVCpu->iem.s.rcPassUp = VINF_SUCCESS;
     pVCpu->iem.s.fBypassHandlers = fBypassHandlers;
+    pVCpu->iem.s.fDisregardLock = fDisregardLock;

 #ifdef DBGFTRACE_ENABLED

@@ -1376 +1378 @@
  *                      calling thread.
  * @param   fBypassHandlers Whether to bypass access handlers.
- */
-IEM_STATIC VBOXSTRICTRC iemInitDecoderAndPrefetchOpcodes(PVMCPUCC pVCpu, bool fBypassHandlers)
-{
-    iemInitDecoder(pVCpu, fBypassHandlers);
+ * @param   fDisregardLock  Whether to disregard LOCK prefixes.
+ *
+ * @todo Combine fDisregardLock and fBypassHandlers into a flag parameter and
+ *       store them as such.
+ */
+IEM_STATIC VBOXSTRICTRC iemInitDecoderAndPrefetchOpcodes(PVMCPUCC pVCpu, bool fBypassHandlers, bool fDisregardLock)
+{
+    iemInitDecoder(pVCpu, fBypassHandlers, fDisregardLock);

 #ifdef IEM_WITH_CODE_TLB

@@ -8149 +8155 @@
     if (   (fAccess & IEM_ACCESS_TYPE_WRITE)
         && !(fFlags & X86_PTE_RW)
-        && (   (pVCpu->iem.s.uCpl == 3
+        && (   (   pVCpu->iem.s.uCpl == 3
                 && !(fAccess & IEM_ACCESS_WHAT_SYS))
             || (pVCpu->cpum.GstCtx.cr0 & X86_CR0_WP)))

@@ -14042 +14048 @@
         && EMIsInhibitInterruptsActive(pVCpu))
     {
-        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, pVCpu->iem.s.fBypassHandlers);
+        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, pVCpu->iem.s.fBypassHandlers, pVCpu->iem.s.fDisregardLock);
         if (rcStrict == VINF_SUCCESS)
         {

@@ -14104 +14110 @@
      * Do the decoding and emulation.
      */
-    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false);
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, false);
     if (rcStrict == VINF_SUCCESS)
         rcStrict = iemExecOneInner(pVCpu, true, "IEMExecOne");

@@ -14122 +14128 @@

     uint32_t const cbOldWritten = pVCpu->iem.s.cbWritten;
-    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false);
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, false);
     if (rcStrict == VINF_SUCCESS)
     {

@@ -14145 +14151 @@
         && pVCpu->cpum.GstCtx.rip == OpcodeBytesPC)
     {
-        iemInitDecoder(pVCpu, false);
+        iemInitDecoder(pVCpu, false, false);
 #ifdef IEM_WITH_CODE_TLB
         pVCpu->iem.s.uInstrBufPc = OpcodeBytesPC;

@@ -14159 +14165 @@
     }
     else
-        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false);
+        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, false);
     if (rcStrict == VINF_SUCCESS)
         rcStrict = iemExecOneInner(pVCpu, true, "IEMExecOneWithPrefetchedByPC");

@@ -14174 +14180 @@

     uint32_t const cbOldWritten = pVCpu->iem.s.cbWritten;
-    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true);
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true, false);
     if (rcStrict == VINF_SUCCESS)
     {

@@ -14197 +14203 @@
         && pVCpu->cpum.GstCtx.rip == OpcodeBytesPC)
     {
-        iemInitDecoder(pVCpu, true);
+        iemInitDecoder(pVCpu, true, false);
 #ifdef IEM_WITH_CODE_TLB
         pVCpu->iem.s.uInstrBufPc = OpcodeBytesPC;

@@ -14211 +14217 @@
     }
     else
-        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true);
+        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true, false);
     if (rcStrict == VINF_SUCCESS)
         rcStrict = iemExecOneInner(pVCpu, false, "IEMExecOneBypassWithPrefetchedByPC");

@@ -14245 +14251 @@
         && pVCpu->cpum.GstCtx.rip == OpcodeBytesPC)
     {
-        iemInitDecoder(pVCpu, true);
+        iemInitDecoder(pVCpu, true, false);
 #ifdef IEM_WITH_CODE_TLB
         pVCpu->iem.s.uInstrBufPc = OpcodeBytesPC;

@@ -14259 +14265 @@
     }
     else
-        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true);
+        rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, true, false);
     if (rcStrict == VINF_SUCCESS)
     {

@@ -14269 +14275 @@
         iemMemRollback(pVCpu);

+    return rcStrict;
+}
+
+
+/**
+ * For handling split cacheline lock operations when the host has split-lock
+ * detection enabled.
+ *
+ * This will cause the interpreter to disregard the lock prefix and implicit
+ * locking (xchg).
+ *
+ * @returns Strict VBox status code.
+ * @param   pVCpu   The cross context virtual CPU structure of the calling EMT.
+ */
+VMMDECL(VBOXSTRICTRC) IEMExecOneIgnoreLock(PVMCPUCC pVCpu)
+{
+    /*
+     * Do the decoding and emulation.
+     */
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, true /*fDisregardLock*/);
+    if (rcStrict == VINF_SUCCESS)
+        rcStrict = iemExecOneInner(pVCpu, true, "IEMExecOneIgnoreLock");
+    else if (pVCpu->iem.s.cActiveMappings > 0)
+        iemMemRollback(pVCpu);
+
+    if (rcStrict != VINF_SUCCESS)
+        LogFlow(("IEMExecOneIgnoreLock: cs:rip=%04x:%08RX64 ss:rsp=%04x:%08RX64 EFL=%06x - rcStrict=%Rrc\n",
+                 pVCpu->cpum.GstCtx.cs.Sel, pVCpu->cpum.GstCtx.rip, pVCpu->cpum.GstCtx.ss.Sel, pVCpu->cpum.GstCtx.rsp, pVCpu->cpum.GstCtx.eflags.u, VBOXSTRICTRC_VAL(rcStrict)));
     return rcStrict;
 }

@@ -14328 +14362 @@
      * Initial decoder init w/ prefetch, then setup setjmp.
      */
-    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false);
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, false);
     if (rcStrict == VINF_SUCCESS)
     {

@@ -14471 +14505 @@
      * Initial decoder init w/ prefetch, then setup setjmp.
      */
-    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false);
+    VBOXSTRICTRC rcStrict = iemInitDecoderAndPrefetchOpcodes(pVCpu, false, false);
     if (rcStrict == VINF_SUCCESS)
     {

@@ -14628 +14662 @@
                                          uint8_t cbInstr)
 {
-    iemInitDecoder(pVCpu, false);
+    iemInitDecoder(pVCpu, false, false);
 #ifdef DBGFTRACE_ENABLED
     RTTraceBufAddMsgF(pVCpu->CTX_SUFF(pVM)->CTX_SUFF(hTraceBuf), "IEMInjectTrap: %x %d %x %llx",
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
(r87740 → r89974)

@@ -581 +581 @@
 ;
 BEGINCODE
-BEGINPROC_FASTCALL iemAImpl_xchg_u8, 8
+BEGINPROC_FASTCALL iemAImpl_xchg_u8_locked, 8
         PROLOGUE_2_ARGS
         mov     T0_8, [A1]
@@ -587 +587 @@
         mov     [A1], T0_8
         EPILOGUE_2_ARGS
-ENDPROC iemAImpl_xchg_u8
-
-BEGINPROC_FASTCALL iemAImpl_xchg_u16, 8
+ENDPROC iemAImpl_xchg_u8_locked
+
+BEGINPROC_FASTCALL iemAImpl_xchg_u16_locked, 8
         PROLOGUE_2_ARGS
         mov     T0_16, [A1]
@@ -595 +595 @@
         mov     [A1], T0_16
         EPILOGUE_2_ARGS
-ENDPROC iemAImpl_xchg_u16
-
-BEGINPROC_FASTCALL iemAImpl_xchg_u32, 8
+ENDPROC iemAImpl_xchg_u16_locked
+
+BEGINPROC_FASTCALL iemAImpl_xchg_u32_locked, 8
         PROLOGUE_2_ARGS
         mov     T0_32, [A1]
@@ -603 +603 @@
         mov     [A1], T0_32
         EPILOGUE_2_ARGS
-ENDPROC iemAImpl_xchg_u32
+ENDPROC iemAImpl_xchg_u32_locked

 %ifdef RT_ARCH_AMD64
-BEGINPROC_FASTCALL iemAImpl_xchg_u64, 8
+BEGINPROC_FASTCALL iemAImpl_xchg_u64_locked, 8
         PROLOGUE_2_ARGS
         mov     T0, [A1]
@@ -612 +612 @@
         mov     [A1], T0
         EPILOGUE_2_ARGS
-ENDPROC iemAImpl_xchg_u64
+ENDPROC iemAImpl_xchg_u64_locked
+%endif
+
+; Unlocked variants for fDisregardLock mode.
+
+BEGINPROC_FASTCALL iemAImpl_xchg_u8_unlocked, 8
+        PROLOGUE_2_ARGS
+        mov     T0_8, [A1]
+        mov     T1_8, [A0]
+        mov     [A0], T0_8
+        mov     [A1], T1_8
+        EPILOGUE_2_ARGS
+ENDPROC iemAImpl_xchg_u8_unlocked
+
+BEGINPROC_FASTCALL iemAImpl_xchg_u16_unlocked, 8
+        PROLOGUE_2_ARGS
+        mov     T0_16, [A1]
+        mov     T1_16, [A0]
+        mov     [A0], T0_16
+        mov     [A1], T1_16
+        EPILOGUE_2_ARGS
+ENDPROC iemAImpl_xchg_u16_unlocked
+
+BEGINPROC_FASTCALL iemAImpl_xchg_u32_unlocked, 8
+        PROLOGUE_2_ARGS
+        mov     T0_32, [A1]
+        mov     T1_32, [A0]
+        mov     [A0], T0_32
+        mov     [A1], T1_32
+        EPILOGUE_2_ARGS
+ENDPROC iemAImpl_xchg_u32_unlocked
+
+%ifdef RT_ARCH_AMD64
+BEGINPROC_FASTCALL iemAImpl_xchg_u64_unlocked, 8
+        PROLOGUE_2_ARGS
+        mov     T0, [A1]
+        mov     T1, [A0]
+        mov     [A0], T0
+        mov     [A1], T1
+        EPILOGUE_2_ARGS
+ENDPROC iemAImpl_xchg_u64_unlocked
 %endif

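For readers who don't read the NASM helpers, here is a C-level paraphrase of the difference between the _locked and _unlocked workers. It is only an illustration: the function names and the GCC/Clang atomic builtin are not part of the changeset, and the real workers remain the assembly routines above.

    #include <stdint.h>

    /* Locked flavour: the real helper leans on the implicit bus/cache-line
     * lock of XCHG with a memory operand; an atomic exchange expresses the
     * same semantics in C. */
    static void example_xchg_u32_locked(uint32_t *puMem, uint32_t *puReg)
    {
        *puReg = __atomic_exchange_n(puMem, *puReg, __ATOMIC_SEQ_CST);
    }

    /* Unlocked flavour (fDisregardLock): two plain loads and two plain
     * stores, no atomicity, so a misaligned operand can never trip the
     * host's split-lock detection. */
    static void example_xchg_u32_unlocked(uint32_t *puMem, uint32_t *puReg)
    {
        uint32_t const uMem = *puMem;
        uint32_t const uReg = *puReg;
        *puMem = uReg;
        *puReg = uMem;
    }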
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h
(r84476 → r89974)

@@ -3624 +3624 @@
                 IEM_MC_MEM_MAP(pu8Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8, pu8Mem, pu8Reg);
+                if (!pVCpu->iem.s.fDisregardLock)
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_locked, pu8Mem, pu8Reg);
+                else
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u8_unlocked, pu8Mem, pu8Reg);
                 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Mem, IEM_ACCESS_DATA_RW);

@@ -3713 +3716 @@
                 IEM_MC_MEM_MAP(pu16Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16, pu16Mem, pu16Reg);
+                if (!pVCpu->iem.s.fDisregardLock)
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
+                else
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u16_unlocked, pu16Mem, pu16Reg);
                 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Mem, IEM_ACCESS_DATA_RW);

@@ -3729 +3735 @@
                 IEM_MC_MEM_MAP(pu32Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32, pu32Mem, pu32Reg);
+                if (!pVCpu->iem.s.fDisregardLock)
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_locked, pu32Mem, pu32Reg);
+                else
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u32_unlocked, pu32Mem, pu32Reg);
                 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Mem, IEM_ACCESS_DATA_RW);

@@ -3746 +3755 @@
                 IEM_MC_MEM_MAP(pu64Mem, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/);
                 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-                IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64, pu64Mem, pu64Reg);
+                if (!pVCpu->iem.s.fDisregardLock)
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_locked, pu64Mem, pu64Reg);
+                else
+                    IEM_MC_CALL_VOID_AIMPL_2(iemAImpl_xchg_u64_unlocked, pu64Mem, pu64Reg);
                 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Mem, IEM_ACCESS_DATA_RW);

@@ -10592 +10604 @@
 {
     IEMOP_HLP_CLEAR_REX_NOT_BEFORE_OPCODE("lock");
-    pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;
+    if (!pVCpu->iem.s.fDisregardLock)
+        pVCpu->iem.s.fPrefixes |= IEM_OP_PRF_LOCK;

     uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b);
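Explicitly locked instructions need no per-opcode changes beyond the xchg cases above, because their bodies typically pick the locked worker from the recorded prefix bits; with fDisregardLock set, the 0xF0 prefix decoder simply never records IEM_OP_PRF_LOCK. A hypothetical selector, written only to illustrate that pattern (the PFNEXAMPLEWORKER type and the function are invented, not VirtualBox code):

    /* Invented helper: shows why suppressing the prefix bit is enough for
     * instructions carrying an explicit LOCK prefix. */
    typedef void (*PFNEXAMPLEWORKER)(void *pvDst, void *pvSrc);

    static PFNEXAMPLEWORKER exampleSelectWorker(PVMCPUCC pVCpu,
                                                PFNEXAMPLEWORKER pfnNormal,
                                                PFNEXAMPLEWORKER pfnLocked)
    {
        /* With fDisregardLock the decoder leaves IEM_OP_PRF_LOCK clear,
         * so this check always routes to the non-locked worker. */
        if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)
            return pfnLocked;
        return pfnNormal;
    }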