Changeset 60678 in vbox for trunk/src/VBox/ValidationKit/bootsectors
Timestamp: Apr 24, 2016 2:57:13 PM (9 years ago)
Location:  trunk/src/VBox/ValidationKit/bootsectors
Files:     3 edited
trunk/src/VBox/ValidationKit/bootsectors/bs3-cpu-basic-2-template.c
r60676 → r60678

  */
 # define bs3CpuBasic2_sidt_sgdt_One BS3_CMN_NM(bs3CpuBasic2_sidt_sgdt_One)
-BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode,
+BS3_DECL_NEAR(void) bs3CpuBasic2_sidt_sgdt_One(BS3CB2SIDTSGDT const BS3_FAR *pWorker, uint8_t bTestMode, uint8_t bRing,
                                                uint8_t const *pabExpected)
 {
…
     uint8_t bFiller;
     int off;
+    unsigned cb;
+    uint8_t bDpl;
     uint8_t BS3_FAR *pbTest;
-    Bs3TestPrintf("bs3CpuBasic2_sidt_sgdt_One: %p bTestMode=%#x\n", pWorker, bTestMode);
+    Bs3TestPrintf("bs3CpuBasic2_sidt_sgdt_One: %p bTestMode=%#x bRing=%d\n", pWorker, bTestMode, bRing);

     /* make sure they're allocated */
…
     if (BS3_MODE_IS_16BIT_SYS(bTestMode))
         g_uBs3TrapEipHint = Ctx.rip.u32;
+    if (!BS3_MODE_IS_RM_OR_V86(bTestMode))
+        Bs3RegCtxConvertToRingX(&Ctx, bRing);

     /* For successful SIDT attempts, we'll stop at the UD2. */
…
         uint32_t uFlatBuf = Bs3SelPtrToFlat(abBuf);
         Bs3GdteTestPage00 = Bs3Gdte_DATA16;
+        Bs3GdteTestPage00.Gen.u2Dpl       = bRing;
         Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatBuf;
         Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatBuf >> 16);
         Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatBuf >> 24);

-        CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00;
+        CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;

         /* Expand up (normal). */
…
      */
     if (   BS3_MODE_IS_PAGED(bTestMode)
         && (pbTest = (uint8_t BS3_FAR *)Bs3MemGuardedTestPageAlloc(BS3MEMKIND_TILED)) != NULL)
     {
…
          * first word being written entirely separately from the 2nd dword/qword.
          */
-        for (off = X86_PAGE_4K_SIZE - cbIdtr - 4; off < X86_PAGE_4K_SIZE + 4; off++)
+        for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
         {
-            Bs3MemSet(&pbTest[X86_PAGE_4K_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
+            Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
             Bs3RegCtxSetGrpDsFromCurPtr(&Ctx, &Ctx.rbx, &pbTest[off]);
             Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
-            if (off + cbIdtr <= X86_PAGE_4K_SIZE)
+            if (off + cbIdtr <= X86_PAGE_SIZE)
             {
                 CtxUdExpected.rbx = Ctx.rbx;
…
             else
             {
-                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl ? X86_TRAP_PF_US : 0),
-                                          uFlatTest + RT_MAX(off, X86_PAGE_4K_SIZE));
-                if (   off <= X86_PAGE_4K_SIZE - 2
+                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
+                                          uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
+                if (   off <= X86_PAGE_SIZE - 2
                     && Bs3MemCmp(&pbTest[off], pabExpected, 2) != 0)
                     Bs3TestPrintf("Mismatch (#10): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
                                   pabExpected, &pbTest[off], off);
-                if (   off < X86_PAGE_4K_SIZE - 2
-                    && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_4K_SIZE - off - 2, bFiller))
-                    Bs3TestPrintf("Wrote partial base on #PF (#10): Expected %.*Rhxs, got %.*Rhxs; off=%#x\n",
-                                  X86_PAGE_4K_SIZE - off - 2, pabExpected, X86_PAGE_4K_SIZE - off - 2, &pbTest[off + 2], off);
-                if (off == X86_PAGE_4K_SIZE - 1 && pbTest[off] != bFiller)
+                if (   off < X86_PAGE_SIZE - 2
+                    && !ASMMemIsAllU8(&pbTest[off + 2], X86_PAGE_SIZE - off - 2, bFiller))
+                    Bs3TestPrintf("Wrote partial base on #PF (#10): bFiller=%#x, got %.*Rhxs; off=%#x\n",
+                                  bFiller, X86_PAGE_SIZE - off - 2, &pbTest[off + 2], off);
+                if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
                     Bs3TestPrintf("Wrote partial limit on #PF (#10): Expected %02x, got %02x\n", bFiller, pbTest[off]);
             }
…
                 bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
                 if (Bs3MemCmp(&pbTest[off], pabExpected, cbIdtr) != 0)
-                    Bs3TestFailedF("Mismatch (#10): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &pbTest[off]);
+                    Bs3TestFailedF("Mismatch (#11): expected %.*Rhxs, got %.*Rhxs\n", cbIdtr, pabExpected, cbIdtr, &pbTest[off]);
             }
             else
             {
-                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl ? X86_TRAP_PF_US : 0),
-                                          uFlatTest + RT_MAX(off, X86_PAGE_4K_SIZE));
+                bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0), uFlatTest + off);
                 if (   -off < cbIdtr
                     && !ASMMemIsAllU8(pbTest, cbIdtr + off, bFiller))
-                    Bs3TestPrintf("Wrote partial content on #PF (#11): bFiller=%#x, found %.*Rhxs; off=%d\n",
+                    Bs3TestPrintf("Wrote partial content on #PF (#12): bFiller=%#x, found %.*Rhxs; off=%d\n",
                                   bFiller, cbIdtr + off, pbTest, off);
             }
             if (!ASMMemIsAllU8(&pbTest[RT_MAX(cbIdtr + off, 0)], 16, bFiller))
-                Bs3TestPrintf("Wrote beyond expected area (#12): bFiller=%#x, found %.16Rhxs; off=%d\n",
+                Bs3TestPrintf("Wrote beyond expected area (#13): bFiller=%#x, found %.16Rhxs; off=%d\n",
                               bFiller, &pbTest[RT_MAX(cbIdtr + off, 0)], off);
         }

+        /*
+         * Combine paging and segment limit and check ordering.
+         * This is kind of interesting here since it the instruction seems to
+         * be doing two separate writes.
+         */
+        if (   !BS3_MODE_IS_RM_OR_V86(bTestMode)
+            && !BS3_MODE_IS_64BIT_CODE(bTestMode))
+        {
+            uint16_t cbLimit;
+
+            Bs3GdteTestPage00 = Bs3Gdte_DATA16;
+            Bs3GdteTestPage00.Gen.u2Dpl       = bRing;
+            Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatTest;
+            Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
+            Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
+
+            CtxUdExpected.ds = Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
+
+            /* Expand up (normal), approaching tail guard page. */
+            for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
+            {
+                CtxUdExpected.rbx.u = Ctx.rbx.u = off;
+                for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
+                {
+                    Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
+                    Bs3MemSet(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], bFiller, cbIdtr * 2);
+                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
+                    if (off + cbIdtr <= cbLimit + 1)
+                    {
+                        /* No #GP, but maybe #PF. */
+                        if (off + cbIdtr <= X86_PAGE_SIZE)
+                        {
+                            bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
+                            if (Bs3MemCmp(&pbTest[off], pabExpected, cbIdtr) != 0)
+                                Bs3TestFailedF("Mismatch (#14): expected %.*Rhxs, got %.*Rhxs\n",
+                                               cbIdtr, pabExpected, cbIdtr, &pbTest[off]);
+                        }
+                        else
+                        {
+                            bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
+                                                      uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
+                            if (   off <= X86_PAGE_SIZE - 2
+                                && Bs3MemCmp(&pbTest[off], pabExpected, 2) != 0)
+                                Bs3TestPrintf("Mismatch (#15): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
+                                              pabExpected, &pbTest[off], off);
+                            cb = X86_PAGE_SIZE - off - 2;
+                            if (   off < X86_PAGE_SIZE - 2
+                                && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
+                                Bs3TestPrintf("Wrote partial base on #PF (#15): bFiller=%#x, got %.*Rhxs; off=%#x\n",
+                                              bFiller, cb, &pbTest[off + 2], off);
+                            if (off == X86_PAGE_SIZE - 1 && pbTest[off] != bFiller)
+                                Bs3TestPrintf("Wrote partial limit on #PF (#15): Expected %02x, got %02x\n", bFiller, pbTest[off]);
+                        }
+                    }
+                    else if (off + 2 <= cbLimit + 1)
+                    {
+                        /* [ig]tr.limit writing does not cause #GP, but may cause #PG, if not writing the base causes #GP. */
+                        if (off <= X86_PAGE_SIZE - 2)
+                        {
+                            bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
+                            if (Bs3MemCmp(&pbTest[off], pabExpected, 2) != 0)
+                                Bs3TestPrintf("Mismatch (#16): Expected limit %.2Rhxs, got %.2Rhxs; off=%#x\n",
+                                              pabExpected, &pbTest[off], off);
+                            cb = X86_PAGE_SIZE - off - 2;
+                            if (   off < X86_PAGE_SIZE - 2
+                                && !ASMMemIsAllU8(&pbTest[off + 2], cb, bFiller))
+                                Bs3TestPrintf("Wrote partial base with limit (#16): bFiller=%#x, got %.*Rhxs; off=%#x\n",
+                                              bFiller, cb, &pbTest[off + 2], off);
+                        }
+                        else
+                        {
+                            bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
+                                                      uFlatTest + RT_MAX(off, X86_PAGE_SIZE));
+                            if (   off < X86_PAGE_SIZE
+                                && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
+                                Bs3TestPrintf("Mismatch (#16): Partial limit write on #PF: bFiller=%#x, got %.*Rhxs\n",
+                                              bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
+                        }
+                    }
+                    else
+                    {
+                        /* #GP on limit. */
+                        bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
+                        if (   off < X86_PAGE_SIZE
+                            && !ASMMemIsAllU8(&pbTest[off], X86_PAGE_SIZE - off, bFiller))
+                            Bs3TestPrintf("Mismatch (#17): Partial write on #GP: bFiller=%#x, got %.*Rhxs\n",
+                                          bFiller, X86_PAGE_SIZE - off, &pbTest[off]);
+                    }
+
+                    cb = RT_MIN(cbIdtr * 2, off - (X86_PAGE_SIZE - cbIdtr*2));
+                    if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE - cbIdtr * 2], cb, bFiller))
+                        Bs3TestFailedF("Leading bytes touched (#18): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
+                                       cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE - cbIdtr * 2]);
+
+                    g_usBs3TestStep++;
+
+                    /* Set DS to 0 and check that we get #GP(0). */
+                    Ctx.ds = 0;
+                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
+                    bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
+                    g_usBs3TestStep++;
+                    Ctx.ds = BS3_SEL_TEST_PAGE_00 | bRing;
+                }
+            }
+
+            /* Expand down. */
+            pbTest    -= X86_PAGE_SIZE; /* Note! we're backing up a page to simplify things */
+            uFlatTest -= X86_PAGE_SIZE;
+
+            Bs3GdteTestPage00.Gen.u4Type      = X86_SEL_TYPE_RW_DOWN_ACC;
+            Bs3GdteTestPage00.Gen.u16BaseLow  = (uint16_t)uFlatTest;
+            Bs3GdteTestPage00.Gen.u8BaseHigh1 = (uint8_t)(uFlatTest >> 16);
+            Bs3GdteTestPage00.Gen.u8BaseHigh2 = (uint8_t)(uFlatTest >> 24);
+
+            for (off = X86_PAGE_SIZE - cbIdtr - 4; off < X86_PAGE_SIZE + 4; off++)
+            {
+                CtxUdExpected.rbx.u = Ctx.rbx.u = off;
+                for (cbLimit = X86_PAGE_SIZE - cbIdtr*2; cbLimit < X86_PAGE_SIZE + cbIdtr*2; cbLimit++)
+                {
+                    Bs3GdteTestPage00.Gen.u16LimitLow = cbLimit;
+                    Bs3MemSet(&pbTest[X86_PAGE_SIZE], bFiller, cbIdtr * 2);
+                    Bs3TrapSetJmpAndRestore(&Ctx, &TrapCtx);
+                    if (cbLimit < off && off >= X86_PAGE_SIZE)
+                    {
+                        bs3CpuBasic2_CompareUdCtx(&TrapCtx, &CtxUdExpected);
+                        if (Bs3MemCmp(&pbTest[off], pabExpected, cbIdtr) != 0)
+                            Bs3TestFailedF("Mismatch (#19): expected %.*Rhxs, got %.*Rhxs\n",
+                                           cbIdtr, pabExpected, cbIdtr, &pbTest[off]);
+                        cb = X86_PAGE_SIZE + cbIdtr*2 - off;
+                        if (!ASMMemIsAllU8(&pbTest[off + cbIdtr], cb, bFiller))
+                            Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
+                                           cbIdtr, off, cbLimit, bFiller, cb, pbTest[off + cbIdtr]);
+                    }
+                    else
+                    {
+                        if (cbLimit < off && off < X86_PAGE_SIZE)
+                            bs3CpuBasic2_ComparePfCtx(&TrapCtx, &Ctx, X86_TRAP_PF_RW | (Ctx.bCpl == 3 ? X86_TRAP_PF_US : 0),
+                                                      uFlatTest + off);
+                        else
+                            bs3CpuBasic2_CompareGpCtx(&TrapCtx, &Ctx, 0);
+                        cb = cbIdtr*2;
+                        if (!ASMMemIsAllU8(&pbTest[X86_PAGE_SIZE], cb, bFiller))
+                            Bs3TestFailedF("Trailing bytes touched (#20): cbIdtr=%u off=%u cbLimit=%u bFiller=%#x pbTest=%.*Rhxs\n",
+                                           cbIdtr, off, cbLimit, bFiller, cb, pbTest[X86_PAGE_SIZE]);
+                    }
+                    g_usBs3TestStep++;
+                }
+            }
+
+            pbTest    += X86_PAGE_SIZE;
+            uFlatTest += X86_PAGE_SIZE;
+        }
+
         Bs3MemGuardedTestPageFree(pbTest);
     }

+    /*
+     * Check non-canonical 64-bit space.
+     */
+    if (BS3_MODE_IS_64BIT_CODE(bTestMode))
+    {
+
+    }
 }

…
     unsigned idx;
     unsigned iStep = 0;
-
-    for (idx = 0; idx < cWorkers; idx++)
-        if (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
-        {
-            g_usBs3TestStep = iStep;
-            bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, pabExpected);
-            iStep += 1000;
-        }
+    unsigned bRing = 0;
+
+    for (bRing = 0; bRing <= 3; bRing++)
+    {
+        for (idx = 0; idx < cWorkers; idx++)
+            if (paWorkers[idx].bMode & (bTestMode & BS3_MODE_CODE_MASK))
+            {
+                g_usBs3TestStep = iStep;
+                bs3CpuBasic2_sidt_sgdt_One(&paWorkers[idx], bTestMode, bRing, pabExpected);
+                iStep += 1000;
+            }
+        if (BS3_MODE_IS_RM_OR_V86(bTestMode))
+            break;
+    }
 }

…
     {
         //if (bMode == BS3_MODE_PE16_V86)
-        if (bMode & BS3_MODE_CODE_V86)
+        //if (bMode & BS3_MODE_CODE_V86)
         {
             union
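
Reading note (not part of the changeset): the new #14-#20 sub-tests hinge on the x86 data-segment limit rules for expand-up versus expand-down segments and on how those rules interleave with the guard page. Below is a minimal sketch of the two limit checks the expectations encode; the helper names are made up for illustration, a write of at least one byte is assumed, and the expand-down case assumes a 16-bit (B=0) segment.

    #include <stdbool.h>
    #include <stdint.h>

    /* Expand-up: offsets 0..cbLimit are inside the segment, so a write is fine
       when its last byte is still at or below the limit.  This mirrors the
       "off + cbIdtr <= cbLimit + 1" condition in the expand-up loop above. */
    static bool IsWriteWithinExpandUpLimit(uint32_t off, uint32_t cbWrite, uint32_t cbLimit)
    {
        return off + cbWrite - 1 <= cbLimit;
    }

    /* Expand-down (16-bit): only offsets strictly above cbLimit and below 64 KiB
       are inside the segment, which mirrors the "cbLimit < off" condition in the
       new expand-down loop. */
    static bool IsWriteWithinExpandDownLimit16(uint32_t off, uint32_t cbWrite, uint32_t cbLimit)
    {
        return off > cbLimit && off + cbWrite - 1 <= UINT32_C(0xFFFF);
    }

When the write passes the limit check but straddles the guard page, the tests instead expect #PF, with X86_TRAP_PF_US in the error code only for the ring-3 contexts (Ctx.bCpl == 3).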
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-PagingProtect.c
r60676 → r60678

 * Defined Constants And Macros                                                                                                  *
 *********************************************************************************************************************************/
-#if 1
+#if 0
 # define BS3PAGING_DPRINTF1(a) Bs3TestPrintf a
 #else
trunk/src/VBox/ValidationKit/bootsectors/bs3kit/bs3-cmn-RegCtxSetGrpSegFromFlat.c
r60676 → r60678

     /* Adjust CS to the right ring, if not ring-0 or V86 context. */
     if (   pRegCtx->bCpl != 0
-        && !BS3_MODE_IS_RM_OR_V86(pRegCtx->bMode)
-        && BS3_SEL_IS_IN_R0_RANGE(*pSel))
+        && !BS3_MODE_IS_RM_OR_V86(pRegCtx->bMode))
     {
-        *pSel += (uint16_t)pRegCtx->bCpl << BS3_SEL_RING_SHIFT;
+        if (BS3_SEL_IS_IN_R0_RANGE(*pSel))
+            *pSel += (uint16_t)pRegCtx->bCpl << BS3_SEL_RING_SHIFT;
         *pSel |= pRegCtx->bCpl;
     }
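
Reading note (not part of the changeset): this hunk makes the RPL bits get ORed into the selector even when it is not one of the ring-0 range selectors that have ring-specific aliases; only the alias adjustment stays conditional. A rough sketch of the resulting logic, where SKETCH_SEL_RING_SHIFT is an assumed stand-in for the real BS3_SEL_RING_SHIFT from bs3kit:

    #include <stdint.h>

    #define SKETCH_SEL_RING_SHIFT 8   /* assumption for this sketch only */

    static uint16_t AdjustSelForRing(uint16_t uSel, uint8_t bCpl, int fInRing0Range)
    {
        if (fInRing0Range)
            uSel += (uint16_t)bCpl << SKETCH_SEL_RING_SHIFT;  /* switch to the ring-specific alias */
        return (uint16_t)(uSel | bCpl);                       /* the low two selector bits are the RPL */
    }

This pairs with the new bRing loop in bs3-cpu-basic-2-template.c, where the test-page selectors are likewise ORed with the target ring (BS3_SEL_TEST_PAGE_00 | bRing).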