Changeset 74043 in vbox for trunk/src/VBox/VMM
- Timestamp: Sep 3, 2018 1:01:30 PM (6 years ago)
- Location: trunk/src/VBox/VMM
- Files: 5 edited
trunk/src/VBox/VMM/VMMAll/CPUMAllMsrs.cpp
(r73870 → r74043)

The guest VMX capability MSR values are now computed by dedicated getters that IEM can call directly, and the MSR-read callbacks are reduced to thin wrappers around them:

- New functions CPUMGetGuestIa32VmxPinbasedCtls, CPUMGetGuestIa32VmxProcbasedCtls, CPUMGetGuestIa32VmxExitCtls, CPUMGetGuestIa32VmxEntryCtls, CPUMGetGuestIa32VmxMisc, CPUMGetGuestIa32VmxCr0Fixed0, CPUMGetGuestIa32VmxCr0Fixed1, CPUMGetGuestIa32VmxCr4Fixed0, CPUMGetGuestIa32VmxCr4Fixed1, CPUMGetGuestIa32VmxVmcsEnum and CPUMGetGuestIa32VmxProcbasedCtls2 (each VMM_INT_DECL(uint64_t), taking a PVMCPU) return the value of the corresponding IA32_VMX_* MSR, or 0 when the guest does not expose VMX (pGuestFeatures->fVmx is clear).
- The FNCPUMRDMSR callbacks cpumMsrRd_Ia32VmxPinbasedCtls, cpumMsrRd_Ia32VmxProcbasedCtls, cpumMsrRd_Ia32VmxExitCtls, cpumMsrRd_Ia32VmxEntryCtls, cpumMsrRd_Ia32VmxMisc, cpumMsrRd_Ia32VmxCr0Fixed0, cpumMsrRd_Ia32VmxCr0Fixed1, cpumMsrRd_Ia32VmxCr4Fixed0, cpumMsrRd_Ia32VmxCr4Fixed1, cpumMsrRd_Ia32VmxVmcsEnum and cpumMsrRd_Ia32VmxProcBasedCtls2 now simply store the getter's result in *puValue and return VINF_SUCCESS; the CR0_FIXED1 and CR4_FIXED1 callbacks additionally assert that idMsr is MSR_IA32_VMX_CR0_FIXED1 / MSR_IA32_VMX_CR4_FIXED1.
- The consistency check on each control MSR ((fVal & fZap) == fVal) changes from AssertMsgReturn with VERR_CPUM_INVALID_HWVIRT_FEAT_COMBO to a plain AssertMsg, since the getters return the MSR value rather than a status code; the value is still built with RT_MAKE_U64(fVal, fZap).
- The getters for IA32_VMX_MISC, IA32_VMX_CR0_FIXED1 and IA32_VMX_CR4_FIXED1 query the host MSR via HMVmxGetHostMsr with the explicit constants MSR_IA32_VMX_MISC, MSR_IA32_VMX_CR0_FIXED1 and MSR_IA32_VMX_CR4_FIXED1 (instead of the callback's idMsr) and only AssertMsgRC on failure; the CR0/CR4 FIXED1 values still get the VMX_V_CR0_FIXED0 / VMX_V_CR4_FIXED0 (MB1) bits ORed in.
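The getter/wrapper split follows a simple pattern: the 64-bit capability MSR packs the must-be-1 bits (allowed 0-settings) into the low dword and the may-be-1 mask (allowed 1-settings) into the high dword, and the MSR-read callback only forwards to the getter. Below is a minimal standalone sketch of that pattern; all names and constant values are illustrative, not the VBox definitions.

/* Sketch only: how a VMX capability MSR value is composed from "must-be-1" and
 * "may-be-1" control bits, and how a thin MSR-read callback forwards to a getter.
 * Constants and function names are made up for illustration. */
#include <stdint.h>
#include <stdio.h>

#define MY_PIN_CTLS_DEFAULT1   0x00000016u  /* hypothetical must-be-1 (default1) bits */
#define MY_PIN_CTLS_FEATURES   0x000000c9u  /* hypothetical optional feature bits offered */

/* Low dword: allowed 0-settings (bits that must be 1); high dword: allowed 1-settings. */
static uint64_t getPinbasedCtlsMsr(int fVmxExposed)
{
    if (!fVmxExposed)
        return 0;
    uint32_t const fVal = MY_PIN_CTLS_DEFAULT1;
    uint32_t const fZap = MY_PIN_CTLS_FEATURES | MY_PIN_CTLS_DEFAULT1;
    return ((uint64_t)fZap << 32) | fVal;
}

/* Thin "MSR read" wrapper in the spirit of cpumMsrRd_Ia32VmxPinbasedCtls. */
static int msrReadPinbasedCtls(int fVmxExposed, uint64_t *puValue)
{
    *puValue = getPinbasedCtlsMsr(fVmxExposed);
    return 0; /* VINF_SUCCESS */
}

int main(void)
{
    uint64_t uValue;
    msrReadPinbasedCtls(1, &uValue);
    printf("IA32_VMX_PINBASED_CTLS (sketch) = %#018llx\n", (unsigned long long)uValue);
    return 0;
}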
trunk/src/VBox/VMM/VMMAll/HMVMXAll.cpp
(r74022 → r74043)

In the VMX instruction diagnostic descriptor table (the VMX_INSTR_DIAG_DESC entries):

- The existing entries for the internal processing errors (Ipe_1 .. Ipe_9) and for VMXON, VMXOFF, VMPTRLD, VMPTRST, VMCLEAR, VMWRITE and VMREAD are re-aligned (whitespace only) to make room for the longer new names.
- The VMLAUNCH/VMRESUME diagnostics are renamed from kVmxVInstrDiag_VmlaunchVmresume_* to kVmxVInstrDiag_Vmentry_* (BlocKMovSS, Cpl, LongModeCS, PtrInvalid, RealOrV86Mode, VmcsClear, VmcsLaunch, VmxRoot), and the section comment becomes "VMLAUNCH/VMRESUME".
- New VM-entry diagnostics are added: Cr3TargetCount, PinCtlsAllowed1, PinCtlsDisallowed0, ProcCtlsAllowed1, ProcCtlsDisallowed0, ProcCtls2Allowed1, ProcCtls2Disallowed0, PtrReadPhys and Success.
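The table is presumably a plain array of short strings parallel to the kVmxVInstrDiag_* enum, so a failed VMX instruction can be reported by name. A minimal sketch of that shape follows; the enum members, array name and strings are made up for illustration and only mirror the structure of the VMX_INSTR_DIAG_DESC entries.

/* Sketch only: an enum-indexed descriptor table of diagnostic names. */
#include <stdio.h>

typedef enum MYVMXDIAG
{
    kMyDiag_Vmentry_Cpl = 0,
    kMyDiag_Vmentry_PinCtlsDisallowed0,
    kMyDiag_Vmentry_PinCtlsAllowed1,
    kMyDiag_Vmentry_Success
} MYVMXDIAG;

static const char * const g_apszMyDiagDesc[] =
{
    "Cpl",                 /* kMyDiag_Vmentry_Cpl */
    "PinCtlsDisallowed0",  /* kMyDiag_Vmentry_PinCtlsDisallowed0 */
    "PinCtlsAllowed1",     /* kMyDiag_Vmentry_PinCtlsAllowed1 */
    "Success"              /* kMyDiag_Vmentry_Success */
};

int main(void)
{
    MYVMXDIAG enmDiag = kMyDiag_Vmentry_PinCtlsAllowed1;
    printf("diag %d -> %s\n", (int)enmDiag, g_apszMyDiagDesc[enmDiag]);
    return 0;
}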
trunk/src/VBox/VMM/VMMAll/IEMAllCImplVmxInstr.cpp.h
(r74022 → r74043)

- New worker iemVmxVmentryCheckCtls(PVMCPU pVCpu, const char *pszInstr) checks the VM-execution control fields as part of VM-entry (see Intel spec. 26.2.1.1 "VM-Execution Control Fields"): the pin-based and processor-based controls in the current VMCS are validated against CPUMGetGuestIa32VmxPinbasedCtls / CPUMGetGuestIa32VmxProcbasedCtls (no must-be-1 bit may be clear, no bit outside the allowed-1 mask may be set); when VMX_PROC_CTLS_USE_SECONDARY_CTLS is set the secondary processor-based controls are validated the same way; and the CR3-target count must not exceed VMX_V_CR3_TARGET_COUNT. Each failure logs, records the matching kVmxVInstrDiag_Vmentry_* diagnostic and returns VERR_VMX_VMENTRY_FAILED; the remaining control checks are left as a @todo (NSTVMX).
- The VMLAUNCH/VMRESUME execution worker now uses the kVmxVInstrDiag_Vmentry_* diagnostics for its CPL, VMCS-pointer, MOV-SS-blocking, non-clear-VMCS and non-launched-VMCS checks, reads the current VMCS from guest memory with PGMPhysSimpleReadGCPhys (failing with kVmxVInstrDiag_Vmentry_PtrReadPhys), and calls iemVmxVmentryCheckCtls; on a control-check failure it VMFails with VMXINSTRERR_VMENTRY_INVALID_CTLS and advances RIP, otherwise it records kVmxVInstrDiag_Vmentry_Success, signals VM-instruction success and advances RIP. The rest of VM-entry is still unimplemented, so the worker ends by returning VERR_IEM_IPE_2.
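The control checks all reduce to one comparison against the two halves of a capability MSR: the low dword lists the bits that must be 1 (allowed 0-settings) and the high dword lists the bits that may be 1 (allowed 1-settings). A minimal standalone sketch of that check is shown below; the function name, constants and test values are illustrative, not the IEM implementation.

/* Sketch only: validating a 32-bit VM-execution control word against a VMX
 * capability MSR, mirroring the disallowed-0 / allowed-1 checks described above. */
#include <stdint.h>
#include <stdio.h>

static int checkCtls(uint32_t uCtls, uint64_t uCapMsr, const char *pszName)
{
    uint32_t const fDisallowed0 = (uint32_t)uCapMsr;         /* bits that must be 1 */
    uint32_t const fAllowed1    = (uint32_t)(uCapMsr >> 32); /* bits that may be 1 */

    if (~uCtls & fDisallowed0)
    {
        printf("%s: a must-be-1 bit is clear -> VMFail\n", pszName);
        return -1;
    }
    if (uCtls & ~fAllowed1)
    {
        printf("%s: a bit outside the allowed-1 mask is set -> VMFail\n", pszName);
        return -1;
    }
    return 0;
}

int main(void)
{
    uint64_t const uCap  = ((uint64_t)0x0000007fu << 32) | 0x00000016u; /* sketch values */
    uint32_t const uGood = 0x00000016u; /* exactly the must-be-1 bits */
    uint32_t const uBad  = 0x00000116u; /* must-be-1 bits plus one bit outside allowed-1 */
    printf("good: %d\n", checkCtls(uGood, uCap, "PinCtls"));
    printf("bad:  %d\n", checkCtls(uBad,  uCap, "PinCtls"));
    return 0;
}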
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
(r74022 → r74043)

The vmlaunch and vmresume opcode handlers now pass kVmxVInstrDiag_Vmentry (instead of kVmxVInstrDiag_VmlaunchVmresume) to IEMOP_HLP_IN_VMX_OPERATION and IEMOP_HLP_VMX_INSTR before deferring to iemCImpl_vmlaunch / iemCImpl_vmresume.
trunk/src/VBox/VMM/VMMR3/HM.cpp
(r73984 → r74043)

In the per-CPU VM-entry error logging, the check of hm.s.vmx.LastError.u32InstrError is updated to the renamed constant VMXINSTRERR_VMENTRY_INVALID_CTLS (was VMXINSTRERR_VMENTRY_INVALID_CTL) before the PinCtls and related control values are logged.