Changeset 65607 in vbox for trunk/src/VBox/VMM
Timestamp: Feb 3, 2017 8:00:37 PM
File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
--- IEMAllInstructions.cpp.h (r65605)
+++ IEMAllInstructions.cpp.h (r65607)
@@ -1611,107 +1611,104 @@
 
 
-/** Opcode 0x0f 0x10. */
-FNIEMOP_STUB(iemOp_movups_Vps_Wps);
-
-/** Opcode 66h 0x0f 0x10. */
-FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);
-
-/** Opcode f3h 0x0f 0x10. */
-FNIEMOP_STUB(iemOp_movss_Vss_Hx_Wss);
-
-/** Opcode f2h 0x0f 0x10. */
-FNIEMOP_STUB(iemOp_movsd_Vsd_Hx_Wsd);
-
-
-/** Opcode 0x0f 0x11. */
-FNIEMOP_DEF(iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd)
-{
-    /* Quick hack. Need to restructure all of this later some time. */
-    uint32_t const fRelevantPrefix = pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ);
-    if (fRelevantPrefix == 0)
-    {
-        IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
-        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-        {
-            /*
-             * Register, register.
-             */
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
-            IEM_MC_BEGIN(0, 0);
-            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-            IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
-                                  ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-            IEM_MC_ADVANCE_RIP();
-            IEM_MC_END();
-        }
-        else
-        {
-            /*
-             * Memory, register.
-             */
-            IEM_MC_BEGIN(0, 2);
-            IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
-            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
-            IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-
-            IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-            IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-
-            IEM_MC_ADVANCE_RIP();
-            IEM_MC_END();
-        }
-    }
-    else if (fRelevantPrefix == IEM_OP_PRF_REPNZ)
-    {
-        IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
-        uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-        {
-            /*
-             * Register, register.
-             */
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-            IEM_MC_BEGIN(0, 1);
-            IEM_MC_LOCAL(uint64_t, uSrc);
-
-            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-            IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
-            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-            IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
-
-            IEM_MC_ADVANCE_RIP();
-            IEM_MC_END();
-        }
-        else
-        {
-            /*
-             * Memory, register.
-             */
-            IEM_MC_BEGIN(0, 2);
-            IEM_MC_LOCAL(uint64_t, uSrc);
-            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-
-            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-            IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-            IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
-            IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-
-            IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
-            IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
-
-            IEM_MC_ADVANCE_RIP();
-            IEM_MC_END();
-        }
+/** Opcode 0x0f 0x10 - vmovups Vps, Wps */
+FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
+/** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x10 - vmovss Vx, Hx, Wss */
+FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
+/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
+FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
+
+
+/** Opcode 0x0f 0x11 - vmovups Wps, Vps */
+FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
+{
+    IEMOP_MNEMONIC(movups_Wps_Vps, "movups Wps,Vps");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES();
+        IEM_MC_BEGIN(0, 0);
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB,
+                              ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
     }
     else
     {
-        IEMOP_BITCH_ABOUT_STUB();
-        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
+        /*
+         * Memory, register.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint128_t, uSrc); /** @todo optimize this one day... */
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); /** @todo check if this is delayed this long for REPZ/NZ - yes it generally is! */
+        IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+
+        IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    return VINF_SUCCESS;
+}
+
+
+/** Opcode 0x66 0x0f 0x11 - vmovupd Wpd,Vpd */
+FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd);
+
+/** Opcode 0xf3 0x0f 0x11 - vmovss Wss, Hx, Vss */
+FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss);
+
+/** Opcode 0xf2 0x0f 0x11 - vmovsd Wsd, Hx, Vsd */
+FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
+{
+    IEMOP_MNEMONIC(movsd_Wsd_Vsd, "movsd Wsd,Vsd");
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_BEGIN(0, 1);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE();
+        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
+    }
+    else
+    {
+        /*
+         * Memory, register.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+        IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
+        IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
+
+        IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
+        IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc);
+
+        IEM_MC_ADVANCE_RIP();
+        IEM_MC_END();
     }
     return VINF_SUCCESS;
@@ -7907,6 +7904,6 @@
     /* 0x0f */ IEMOP_X4(iemOp_3Dnow),
 
-    /* 0x10 */ iemOp_movups_Vps_Wps,  iemOp_movupd_Vpd_Wpd,  iemOp_movss_Vss_Hx_Wss,  iemOp_movsd_Vsd_Hx_Wsd,
-    /* 0x11 */ iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd, iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd, iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd, iemOp_movups_Wps_Vps__movupd_Wpd_Vpd__movss_Wss_Vss__movsd_Vsd_Wsd,
+    /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,
+    /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,
    /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,
    /* 0x13 */ iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_movlps_Mq_Vq__movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,
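
What the change does: the old opcode 0x0f 0x11 handler was, per its own comment, a "quick hack" that inspected pVCpu->iem.s.fPrefixes at runtime to choose between the movups/movupd/movss/movsd forms, falling back to IEMOP_BITCH_ABOUT_STUB() for the unimplemented 0x66 and 0xF3 cases. After r65607, each mandatory-prefix column (none, 0x66, 0xF3, 0xF2) gets its own handler and the two-byte opcode table selects between them, as the four entries per row in the second hunk show. A minimal standalone sketch of that table-per-prefix-column idea; all names here are hypothetical, since the real IEM table layout and dispatcher are not part of this hunk:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical handler type standing in for IEM's FNIEMOP. */
    typedef int (*PFNOPCODE)(void);

    static int opMovups_Wps_Vps(void) { puts("movups Wps,Vps"); return 0; }
    static int opMovupd_Wpd_Vpd(void) { puts("movupd Wpd,Vpd"); return 0; }
    static int opMovss_Wss_Vss(void)  { puts("movss Wss,Vss");  return 0; }
    static int opMovsd_Wsd_Vsd(void)  { puts("movsd Wsd,Vsd");  return 0; }

    /* Four entries per opcode, one per mandatory-prefix column:
       0 = no prefix, 1 = 0x66, 2 = 0xf3, 3 = 0xf2. */
    static PFNOPCODE const g_apfnOpcode0f11[4] =
    {
        opMovups_Wps_Vps, /*      0x0f 0x11 */
        opMovupd_Wpd_Vpd, /* 0x66 0x0f 0x11 */
        opMovss_Wss_Vss,  /* 0xf3 0x0f 0x11 */
        opMovsd_Wsd_Vsd,  /* 0xf2 0x0f 0x11 */
    };

    /* Map the mandatory prefix byte to its table column. */
    static unsigned prefixColumn(uint8_t bPrefix)
    {
        switch (bPrefix)
        {
            case 0x66: return 1;
            case 0xf3: return 2;
            case 0xf2: return 3;
            default:   return 0;
        }
    }

    int main(void)
    {
        /* f2 0f 11 /r now dispatches straight to the movsd handler,
           with no per-call prefix switch inside the handler. */
        return g_apfnOpcode0f11[prefixColumn(0xf2)]();
    }

The upshot is the same one the diff exhibits: the prefix decision moves from a branch chain executed on every instruction into the decode table, and unimplemented columns become plain FNIEMOP_STUB entries instead of an error branch.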
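Both the old and new handlers share the same ModRM arithmetic for picking XMM register indices. A self-contained sketch of those field extractions follows; the X86_MODRM_* values use the standard x86 encoding (mod in bits 7:6, reg in bits 5:3, rm in bits 2:0) and are redefined locally only so the sketch compiles on its own, mirroring the macros used in the diff:

    #include <stdint.h>
    #include <stdio.h>

    /* Standard x86 ModRM field layout: mod = bits 7:6, reg = bits 5:3,
       rm = bits 2:0.  Values mirror VBox's X86_MODRM_* macros but are
       redefined here so the sketch stands alone. */
    #define X86_MODRM_RM_MASK    UINT8_C(0x07)
    #define X86_MODRM_REG_SHIFT  3
    #define X86_MODRM_REG_SMASK  UINT8_C(0x07)  /* reg mask after shifting */
    #define X86_MODRM_MOD_SHIFT  6
    #define X86_MODRM_MOD_MASK   UINT8_C(0xc0)

    int main(void)
    {
        uint8_t  bRm     = 0xd1; /* mod=3, reg=2 (xmm2), rm=1 (xmm1) */
        unsigned uRexReg = 0;    /* REX.R contribution, assumed pre-shifted to bit 3 */
        unsigned uRexB   = 0;    /* REX.B contribution, assumed pre-shifted to bit 3 */

        /* The mod==3 test used by both handlers: register-direct operand. */
        int fRegReg = (bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT);

        /* Source index from the reg field, destination from the rm field,
           the same expressions fed to IEM_MC_FETCH_XREG_U64 and
           IEM_MC_STORE_XREG_U64 in the hunk above. */
        unsigned iSrc = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | uRexReg;
        unsigned iDst = (bRm & X86_MODRM_RM_MASK) | uRexB;

        printf("reg-reg=%d  src=xmm%u  dst=xmm%u\n", fRegReg, iSrc, iDst);
        return 0;
    }

Judging by the plain OR in the diff, uRexB and uRexReg hold the REX.B/REX.R bits already shifted into bit 3 of the register index; the sketch assumes the same rather than asserting it of IEM's internals.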