Changeset 73959 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp.h
- Timestamp: Aug 29, 2018 3:24:49 PM
- File: trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp.h (1 edited)
Legend:
- Unmodified (no prefix)
- Added (prefixed with +)
- Removed (prefixed with -)
trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp.h
--- r73937
+++ r73959

@@ -1041 +1041 @@ iemVmxVmwrite
  * @param   pVCpu           The cross context virtual CPU structure.
  * @param   cbInstr         The instruction length.
+ * @param   iEffSeg         The effective segment register to use with @a u64Val.
+ *                          Pass UINT8_MAX if it is a register access.
+ * @param   enmEffAddrMode  The effective addressing mode.
+ * @param   u64Val          The value to write (or guest linear address to the
+ *                          value), @a iEffSeg will indicate if it's a memory
+ *                          operand.
  * @param   uFieldEnc       The VMCS field encoding.
- * @param   u64Val          The value to write (or guest linear address to the
- *                          value), @a pExitInstrInfo will indicate whether it's a
- *                          memory or register operand.
- * @param   pExitInstrInfo  Pointer to the VM-exit instruction information field.
- * @param   GCPtrDisp       The displacement field for @a GCPtrVmcs if any.
- */
-IEM_STATIC VBOXSTRICTRC iemVmxVmwrite(PVMCPU pVCpu, uint8_t cbInstr, uint32_t uFieldEnc, uint64_t u64Val,
-                                      PCVMXEXITINSTRINFO pExitInstrInfo, RTGCPTR GCPtrDisp)
+ * @param   pExitInfo       Pointer to the VM-exit information struct.
+ */
+IEM_STATIC VBOXSTRICTRC iemVmxVmwrite(PVMCPU pVCpu, uint8_t cbInstr, uint8_t iEffSeg, IEMMODE enmEffAddrMode,
+                                      uint64_t u64Val, uint32_t uFieldEnc, PCVMXVEXITINFO pExitInfo)
 {
     if (IEM_IS_VMX_NON_ROOT_MODE(pVCpu))
     {
-        RT_NOREF(GCPtrDisp);
+        RT_NOREF(pExitInfo);
         /** @todo NSTVMX: intercept. */
         /** @todo NSTVMX: VMCS shadowing intercept (VMREAD/VMWRITE bitmap). */

@@ -1089 +1091 @@ iemVmxVmwrite
 
     /* If the VMWRITE instruction references memory, access the specified in memory operand. */
-    if (!pExitInstrInfo->VmreadVmwrite.fIsRegOperand)
-    {
-        uint8_t const uAddrSize = pExitInstrInfo->VmreadVmwrite.u3AddrSize;
-        static uint64_t const s_auAddrSizeMasks[] = { UINT64_C(0xffff), UINT64_C(0xffffffff), UINT64_C(0xffffffffffffffff), 0 };
-        AssertRCReturn(uAddrSize != 3, VERR_IEM_IPE_1);
-        RTGCPTR const GCPtrVal = u64Val & s_auAddrSizeMasks[uAddrSize];
+    bool const fIsRegOperand = iEffSeg == UINT8_MAX;
+    if (!fIsRegOperand)
+    {
+        static uint64_t const s_auAddrSizeMasks[] = { UINT64_C(0xffff), UINT64_C(0xffffffff), UINT64_C(0xffffffffffffffff) };
+        Assert(enmEffAddrMode < RT_ELEMENTS(s_auAddrSizeMasks));
+        RTGCPTR const GCPtrVal = u64Val & s_auAddrSizeMasks[enmEffAddrMode];
 
         /* Read the value from the specified guest memory location. */
         VBOXSTRICTRC rcStrict;
         if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT)
-            rcStrict = iemMemFetchDataU64(pVCpu, &u64Val, pExitInstrInfo->VmreadVmwrite.iSegReg, GCPtrVal);
+            rcStrict = iemMemFetchDataU64(pVCpu, &u64Val, iEffSeg, GCPtrVal);
         else
         {
             uint32_t u32Val;
-            rcStrict = iemMemFetchDataU32(pVCpu, &u32Val, pExitInstrInfo->VmreadVmwrite.iSegReg, GCPtrVal);
+            rcStrict = iemMemFetchDataU32(pVCpu, &u32Val, iEffSeg, GCPtrVal);
             u64Val = u32Val;
         }

@@ -1182 +1184 @@ iemVmxVmclear
  * @param   pVCpu           The cross context virtual CPU structure.
  * @param   cbInstr         The instruction length.
+ * @param   iEffSeg         The effective segment register to use with @a GCPtrVmcs.
  * @param   GCPtrVmcs       The linear address of the VMCS pointer.
- * @param   pExitInstrInfo  Pointer to the VM-exit instruction information field.
- * @param   GCPtrDisp       The displacement field for @a GCPtrVmcs if any.
+ * @param   pExitInfo       Pointer to the VM-exit information struct. Optional, can
+ *                          be NULL.
  *
  * @remarks Common VMX instruction checks are already expected to by the caller,
  *          i.e. VMX operation, CR4.VMXE, Real/V86 mode, EFER/CS.L checks.
  */
-IEM_STATIC VBOXSTRICTRC iemVmxVmclear(PVMCPU pVCpu, uint8_t cbInstr, RTGCPHYS GCPtrVmcs, PCVMXEXITINSTRINFO pExitInstrInfo,
-                                      RTGCPTR GCPtrDisp)
+IEM_STATIC VBOXSTRICTRC iemVmxVmclear(PVMCPU pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPHYS GCPtrVmcs,
+                                      PCVMXVEXITINFO pExitInfo)
 {
     if (IEM_IS_VMX_NON_ROOT_MODE(pVCpu))
     {
-        RT_NOREF(GCPtrDisp);
+        RT_NOREF(pExitInfo);
         /** @todo NSTVMX: intercept. */
     }

@@ -1209 +1212 @@ iemVmxVmclear
     /* Get the VMCS pointer from the location specified by the source memory operand. */
     RTGCPHYS GCPhysVmcs;
-    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmcs, pExitInstrInfo->VmxXsave.iSegReg, GCPtrVmcs);
+    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmcs, iEffSeg, GCPtrVmcs);
     if (RT_UNLIKELY(rcStrict != VINF_SUCCESS))
     {

@@ -1292 +1295 @@ iemVmxVmptrst
  * @param   pVCpu           The cross context virtual CPU structure.
  * @param   cbInstr         The instruction length.
+ * @param   iEffSeg         The effective segment register to use with @a GCPtrVmcs.
  * @param   GCPtrVmcs       The linear address of where to store the current VMCS
  *                          pointer.
- * @param   pExitInstrInfo  Pointer to the VM-exit instruction information field.
- * @param   GCPtrDisp       The displacement field for @a GCPtrVmcs if any.
+ * @param   pExitInfo       Pointer to the VM-exit information struct. Optional, can
+ *                          be NULL.
  *
  * @remarks Common VMX instruction checks are already expected to by the caller,
  *          i.e. VMX operation, CR4.VMXE, Real/V86 mode, EFER/CS.L checks.
  */
-IEM_STATIC VBOXSTRICTRC iemVmxVmptrst(PVMCPU pVCpu, uint8_t cbInstr, RTGCPHYS GCPtrVmcs, PCVMXEXITINSTRINFO pExitInstrInfo,
-                                      RTGCPTR GCPtrDisp)
+IEM_STATIC VBOXSTRICTRC iemVmxVmptrst(PVMCPU pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPHYS GCPtrVmcs,
+                                      PCVMXVEXITINFO pExitInfo)
 {
     if (IEM_IS_VMX_NON_ROOT_MODE(pVCpu))
     {
-        RT_NOREF(GCPtrDisp);
+        RT_NOREF(pExitInfo);
         /** @todo NSTVMX: intercept. */
     }

@@ -1320 +1324 @@ iemVmxVmptrst
     /* Set the VMCS pointer to the location specified by the destination memory operand. */
     AssertCompile(NIL_RTGCPHYS == ~(RTGCPHYS)0U);
-    VBOXSTRICTRC rcStrict = iemMemStoreDataU64(pVCpu, pExitInstrInfo->VmxXsave.iSegReg, GCPtrVmcs,
-                                               IEM_VMX_GET_CURRENT_VMCS(pVCpu));
+    VBOXSTRICTRC rcStrict = iemMemStoreDataU64(pVCpu, iEffSeg, GCPtrVmcs, IEM_VMX_GET_CURRENT_VMCS(pVCpu));
     if (RT_LIKELY(rcStrict == VINF_SUCCESS))
     {

@@ -1342 +1345 @@ iemVmxVmptrld
  * @param   cbInstr         The instruction length.
  * @param   GCPtrVmcs       The linear address of the current VMCS pointer.
- * @param   pExitInstrInfo  Pointer to the VM-exit instruction information field.
- * @param   GCPtrDisp       The displacement field for @a GCPtrVmcs if any.
+ * @param   pExitInfo       Pointer to the virtual VM-exit information struct.
+ *                          Optional, can be NULL.
  *
  * @remarks Common VMX instruction checks are already expected to by the caller,
  *          i.e. VMX operation, CR4.VMXE, Real/V86 mode, EFER/CS.L checks.
  */
-IEM_STATIC VBOXSTRICTRC iemVmxVmptrld(PVMCPU pVCpu, uint8_t cbInstr, RTGCPHYS GCPtrVmcs, PCVMXEXITINSTRINFO pExitInstrInfo,
-                                      RTGCPTR GCPtrDisp)
+IEM_STATIC VBOXSTRICTRC iemVmxVmptrld(PVMCPU pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPHYS GCPtrVmcs,
+                                      PCVMXVEXITINFO pExitInfo)
 {
     if (IEM_IS_VMX_NON_ROOT_MODE(pVCpu))
     {
-        RT_NOREF(GCPtrDisp);
+        RT_NOREF(pExitInfo);
         /** @todo NSTVMX: intercept. */
     }

@@ -1368 +1371 @@ iemVmxVmptrld
     /* Get the VMCS pointer from the location specified by the source memory operand. */
     RTGCPHYS GCPhysVmcs;
-    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmcs, pExitInstrInfo->VmxXsave.iSegReg, GCPtrVmcs);
+    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmcs, iEffSeg, GCPtrVmcs);
     if (RT_UNLIKELY(rcStrict != VINF_SUCCESS))
     {

@@ -1473 +1476 @@ iemVmxVmxon
  * @param   pVCpu           The cross context virtual CPU structure.
  * @param   cbInstr         The instruction length.
+ * @param   iEffSeg         The effective segment register to use with @a
+ *                          GCPtrVmxon.
  * @param   GCPtrVmxon      The linear address of the VMXON pointer.
- * @param   pExitInstrInfo  Pointer to the VM-exit instruction information field.
- * @param   GCPtrDisp       The displacement field for @a GCPtrVmxon if any.
+ * @param   pExitInfo       Pointer to the VM-exit instruction information struct.
+ *                          Optional, can be NULL.
  *
  * @remarks Common VMX instruction checks are already expected to by the caller,
  *          i.e. CR4.VMXE, Real/V86 mode, EFER/CS.L checks.
  */
-IEM_STATIC VBOXSTRICTRC iemVmxVmxon(PVMCPU pVCpu, uint8_t cbInstr, RTGCPHYS GCPtrVmxon, PCVMXEXITINSTRINFO pExitInstrInfo,
-                                    RTGCPTR GCPtrDisp)
+IEM_STATIC VBOXSTRICTRC iemVmxVmxon(PVMCPU pVCpu, uint8_t cbInstr, uint8_t iEffSeg, RTGCPHYS GCPtrVmxon,
+                                    PCVMXVEXITINFO pExitInfo)
 {
 #if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)
-    RT_NOREF5(pVCpu, cbInstr, GCPtrVmxon, pExitInstrInfo, GCPtrDisp);
+    RT_NOREF5(pVCpu, cbInstr, iEffSeg, GCPtrVmxon, pExitInfo);
     return VINF_EM_RAW_EMULATE_INSTR;
 #else

@@ -1534 +1539 @@ iemVmxVmxon
     /* Get the VMXON pointer from the location specified by the source memory operand. */
     RTGCPHYS GCPhysVmxon;
-    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmxon, pExitInstrInfo->VmxXsave.iSegReg, GCPtrVmxon);
+    VBOXSTRICTRC rcStrict = iemMemFetchDataU64(pVCpu, &GCPhysVmxon, iEffSeg, GCPtrVmxon);
     if (RT_UNLIKELY(rcStrict != VINF_SUCCESS))
     {

@@ -1625 +1630 @@ iemVmxVmxon
     else if (IEM_IS_VMX_NON_ROOT_MODE(pVCpu))
     {
-        RT_NOREF(GCPtrDisp);
+        RT_NOREF(pExitInfo);
         /** @todo NSTVMX: intercept. */
     }

@@ -1651 +1656 @@ iemCImpl_vmxon
  * Implements 'VMXON'.
  */
-IEM_CIMPL_DEF_1(iemCImpl_vmxon, RTGCPTR, GCPtrVmxon)
-{
-    RTGCPTR GCPtrDisp;
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMXON, VMX_INSTR_ID_NONE, &GCPtrDisp);
-    return iemVmxVmxon(pVCpu, cbInstr, GCPtrVmxon, &ExitInstrInfo, GCPtrDisp);
+IEM_CIMPL_DEF_2(iemCImpl_vmxon, uint8_t, iEffSeg, RTGCPTR, GCPtrVmxon)
+{
+    return iemVmxVmxon(pVCpu, cbInstr, iEffSeg, GCPtrVmxon, NULL /* pExitInfo */);
 }

@@ -1725 +1727 @@ iemCImpl_vmptrld
  * Implements 'VMPTRLD'.
  */
-IEM_CIMPL_DEF_1(iemCImpl_vmptrld, RTGCPTR, GCPtrVmcs)
-{
-    RTGCPTR GCPtrDisp;
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMPTRLD, VMX_INSTR_ID_NONE, &GCPtrDisp);
-    return iemVmxVmptrld(pVCpu, cbInstr, GCPtrVmcs, &ExitInstrInfo, GCPtrDisp);
+IEM_CIMPL_DEF_2(iemCImpl_vmptrld, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs)
+{
+    return iemVmxVmptrld(pVCpu, cbInstr, iEffSeg, GCPtrVmcs, NULL /* pExitInfo */);
 }

@@ -1737 +1736 @@ iemCImpl_vmptrst
  * Implements 'VMPTRST'.
  */
-IEM_CIMPL_DEF_1(iemCImpl_vmptrst, RTGCPTR, GCPtrVmcs)
-{
-    RTGCPTR GCPtrDisp;
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMPTRST, VMX_INSTR_ID_NONE, &GCPtrDisp);
-    return iemVmxVmptrst(pVCpu, cbInstr, GCPtrVmcs, &ExitInstrInfo, GCPtrDisp);
+IEM_CIMPL_DEF_2(iemCImpl_vmptrst, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs)
+{
+    return iemVmxVmptrst(pVCpu, cbInstr, iEffSeg, GCPtrVmcs, NULL /* pExitInfo */);
 }

@@ -1749 +1745 @@ iemCImpl_vmclear
  * Implements 'VMCLEAR'.
  */
-IEM_CIMPL_DEF_1(iemCImpl_vmclear, RTGCPTR, GCPtrVmcs)
-{
-    RTGCPTR GCPtrDisp;
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMCLEAR, VMX_INSTR_ID_NONE, &GCPtrDisp);
-    return iemVmxVmclear(pVCpu, cbInstr, GCPtrVmcs, &ExitInstrInfo, GCPtrDisp);
+IEM_CIMPL_DEF_2(iemCImpl_vmclear, uint8_t, iEffSeg, RTGCPTR, GCPtrVmcs)
+{
+    return iemVmxVmclear(pVCpu, cbInstr, iEffSeg, GCPtrVmcs, NULL /* pExitInfo */);
 }

@@ -1761 +1754 @@ iemCImpl_vmwrite_reg
  * Implements 'VMWRITE' register.
  */
-IEM_CIMPL_DEF_2(iemCImpl_vmwrite_reg, uint32_t, u32VmcsFieldEnc, uint64_t, u64Val)
-{
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMWRITE, VMX_INSTR_ID_NONE, NULL /* pGCPtrDisp */);
-    return iemVmxVmwrite(pVCpu, cbInstr, u32VmcsFieldEnc, u64Val, &ExitInstrInfo, 0 /* GCPtrDisp */);
+IEM_CIMPL_DEF_2(iemCImpl_vmwrite_reg, uint64_t, u64Val, uint32_t, uFieldEnc)
+{
+    return iemVmxVmwrite(pVCpu, cbInstr, UINT8_MAX /*iEffSeg*/, IEMMODE_64BIT /* N/A */, u64Val, uFieldEnc, NULL /* pExitInfo */);
 }

@@ -1772 +1763 @@ iemCImpl_vmwrite_mem
  * Implements 'VMWRITE' memory.
  */
-IEM_CIMPL_DEF_2(iemCImpl_vmwrite_mem, uint32_t, u32VmcsFieldEnc, RTGCUINTPTR64, GCPtrVal)
-{
-    RTGCPTR GCPtrDisp;
-    VMXEXITINSTRINFO ExitInstrInfo;
-    ExitInstrInfo.u = iemVmxGetExitInstrInfo(pVCpu, VMX_EXIT_VMWRITE, VMX_INSTR_ID_NONE, &GCPtrDisp);
-    return iemVmxVmwrite(pVCpu, cbInstr, u32VmcsFieldEnc, GCPtrVal, &ExitInstrInfo, GCPtrDisp);
+IEM_CIMPL_DEF_4(iemCImpl_vmwrite_mem, uint8_t, iEffSeg, IEMMODE, enmEffAddrMode, RTGCPTR, GCPtrVal, uint32_t, uFieldEnc)
+{
+    return iemVmxVmwrite(pVCpu, cbInstr, iEffSeg, enmEffAddrMode, GCPtrVal, uFieldEnc, NULL /* pExitInfo */);
 }
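Across these helpers the VM-exit instruction-information field and displacement parameters are replaced by the decoded effective segment (plus, for VMWRITE, the effective address mode), with pExitInfo passed as NULL by the regular decoder paths. The new convention is that iEffSeg == UINT8_MAX marks a register operand, while for memory operands the operand value is a guest linear address that gets masked to the effective address size before the guest-memory fetch. A minimal standalone sketch of that convention (not VirtualBox code: the ADDRMODE enum and vmwriteOperandAddress helper are invented stand-ins for IEMMODE and the in-function logic of iemVmxVmwrite):

#include <stdint.h>
#include <stdio.h>

/* Stand-in for the IEM effective-address-mode enum (IEMMODE); values chosen so
   they index the mask table the same way: 0 = 16-bit, 1 = 32-bit, 2 = 64-bit. */
typedef enum { ADDRMODE_16BIT = 0, ADDRMODE_32BIT = 1, ADDRMODE_64BIT = 2 } ADDRMODE;

/* Hypothetical helper mirroring the operand handling sketched above:
   iEffSeg == UINT8_MAX means the value came from a register and is used as-is;
   otherwise it is a guest linear address truncated to the effective address size. */
static uint64_t vmwriteOperandAddress(uint8_t iEffSeg, ADDRMODE enmEffAddrMode, uint64_t u64Val)
{
    static const uint64_t s_auAddrSizeMasks[] = { UINT64_C(0xffff), UINT64_C(0xffffffff), UINT64_C(0xffffffffffffffff) };
    if (iEffSeg == UINT8_MAX)                              /* Register operand: nothing to mask. */
        return u64Val;
    return u64Val & s_auAddrSizeMasks[enmEffAddrMode];     /* Memory operand: mask the address. */
}

int main(void)
{
    /* Memory operand with 16-bit addressing: only the low 16 bits of the address remain. */
    printf("%#llx\n", (unsigned long long)vmwriteOperandAddress(3 /* e.g. DS */, ADDRMODE_16BIT, UINT64_C(0x12345678)));
    /* Register operand (iEffSeg == UINT8_MAX): the value passes through unchanged. */
    printf("%#llx\n", (unsigned long long)vmwriteOperandAddress(UINT8_MAX, ADDRMODE_64BIT, UINT64_C(0x12345678)));
    return 0;
}

Compiled and run, the first call prints 0x5678 (16-bit addressing keeps only the low word of the address) while the second prints 0x12345678 unchanged, matching the register/memory split the reworked VMWRITE paths rely on.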