Changeset 99324 in vbox
- Timestamp: Apr 6, 2023 11:34:00 PM (20 months ago)
- Location: trunk/src/VBox/VMM
- Files: 6 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
(diff r99220 → r99324)

Changes to the VEX map 1 (0F) instruction decoders in this file:

- The 128-bit code paths of the common workers and of several individual decoders now raise exceptions with IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() instead of IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(), since these encodings only require AVX.
- The "done decoding" helper calls were switched to their feature-checking _EX variants so that decoding verifies the guest CPU actually exposes AVX:
      IEMOP_HLP_DONE_VEX_DECODING()                -> IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx)
      IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV()        -> IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx)
      IEMOP_HLP_DONE_VEX_DECODING_L0()             -> IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fAvx)
      IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV() -> IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx)
  This touches vmovss, vmovsd, vmovhlps, vmovlhps and the many other move, store and compare decoders in this file.
- Decoders that branch on VEX.L, such as vpmovmskb, now do the "done decoding" check inside the branches: the 128-bit path requires fAvx and the 256-bit path requires fAvx2.
- The commented-out vldmxcsr stub dropped its explicit fSse check and now uses IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(fAvx); the live decoder next to it likewise passes fAvx now.
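As a side note on why these checks differ: AVX and AVX2 are reported through separate CPUID bits, and the instructions touched above are plain AVX instructions, so gating them on the AVX2 feature was too strict. A minimal user-space sketch of that distinction (illustration only, not IEM code; it assumes a GCC/Clang build on x86 and omits the XCR0/CR4.OSXSAVE state checks that the real IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() performs):

    #include <stdbool.h>
    #include <stdint.h>
    #include <cpuid.h>   /* GCC/Clang helper header (assumption about the toolchain). */

    static bool HostReportsAvx(void)
    {
        uint32_t uEax, uEbx, uEcx, uEdx;
        if (!__get_cpuid(1, &uEax, &uEbx, &uEcx, &uEdx))
            return false;
        /* CPUID.01H:ECX bit 27 = OSXSAVE, bit 28 = AVX. */
        return (uEcx & (1u << 27)) && (uEcx & (1u << 28));
    }

    static bool HostReportsAvx2(void)
    {
        uint32_t uEax, uEbx, uEcx, uEdx;
        if (!__get_cpuid_count(7, 0, &uEax, &uEbx, &uEcx, &uEdx))
            return false;
        return (uEbx & (1u << 5)) != 0;   /* CPUID.(EAX=07H,ECX=0):EBX bit 5 = AVX2. */
    }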
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
(diff r99220 → r99324)

Changes to the VEX map 2 (0F 38) instruction decoders:

- Three 128-bit worker paths now raise exceptions with IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() instead of the AVX2 variant.
- A decoder that splits on VEX.L had its IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV() calls replaced by the feature-checking variants: the 128-bit path passes fAvx, while the 256-bit path (which keeps IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT()) passes fAvx2.
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h
(diff r98103 → r99324)

Changes to the VEX map 3 (0F 3A) instruction decoders:

- In the existing immediate-byte common worker, the 128-bit register and memory paths now raise exceptions with IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() instead of the AVX2 variant.
- A new common worker, iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, was added for the imm8-controlled blends; it performs the AVX cpuid check for both the 128-bit and the 256-bit forms. vblendps and vblendpd (opcodes 0x0c and 0x0d) now dispatch through it with IEM_SELECT_HOST_OR_FALLBACK(fAvx, ...) instead of through the AVX2 worker.
- A new common worker, iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, was added for the register-selected blends vblendvps and vblendvpd (opcodes 0x4a and 0x4b), which take the blend mask from a fourth XMM/YMM register encoded in the high nibble of a trailing immediate byte. It too gates both vector lengths on AVX; the host/fallback implementation selection still passes fAvx2.
- The opcode doc comments were annotated with the required CPUID features: vblendps, vblendpd, vblendvps and vblendvpd are "AVX, AVX", while vpblendw (0x0e), vpalignr (0x0f) and vpblendvb (0x4c) are "AVX, AVX2" and keep their AVX2 workers.
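For reference, the element selection the new blend workers hand to pImpl->pfnU128/pfnU256 is simple to state. A rough sketch of the 128-bit vblendps case (a simplified illustration with a stand-in vector type, not the actual IEM fallback):

    #include <stdint.h>

    typedef union BLENDVEC128 { uint32_t au32[4]; } BLENDVEC128;

    static void SketchBlendPsU128(BLENDVEC128 *puDst, const BLENDVEC128 *puSrc1,
                                  const BLENDVEC128 *puSrc2, uint8_t bImm)
    {
        /* imm8 bit i set: take dword i from the second source; clear: keep the
         * dword from the first source. */
        for (unsigned i = 0; i < 4; i++)
            puDst->au32[i] = (bImm & (1u << i)) ? puSrc2->au32[i] : puSrc1->au32[i];
    }

vblendvps works the same way except that the selector for each element is the most significant bit of the corresponding element of the fourth (register) operand rather than an immediate bit.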
trunk/src/VBox/VMM/include/IEMMc.h
(diff r99304 → r99324)

Changes to the IEM_MC_* exception macros:

- IEM_MC_MAYBE_RAISE_DEVICE_NOT_AVAILABLE(), IEM_MC_MAYBE_RAISE_WAIT_DEVICE_NOT_AVAILABLE() and IEM_MC_MAYBE_RAISE_FPU_XCPT() were rewritten to the inverted form "if (!(<condition>)) { } else return iemRaiseXxx(pVCpu);", making the no-exception case the fall-through path.
- IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT() and IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT() swapped places so the AVX variant is defined first; both still raise #UD when XCR0 does not enable SSE and YMM state, CR4.OSXSAVE is clear, or the guest lacks the AVX/AVX2 feature, and #NM when CR0.TS is set.
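A standalone sketch of the inverted statement shape these macros now use (made-up macro name and spelled-out CR0 bits; not VirtualBox code). The empty "if" body is the expected fall-through path, and the raise sits on the else side, which compilers typically lay out as the cold branch:

    #include <stdint.h>

    #define SKETCH_MAYBE_RAISE_DEVICE_NOT_AVAILABLE(a_cr0, a_rcRaise) \
        do { \
            if (!((a_cr0) & ((UINT64_C(1) << 2) /*EM*/ | (UINT64_C(1) << 3) /*TS*/))) \
            { /* likely: no exception to raise */ } \
            else \
                return (a_rcRaise); \
        } while (0)

    static int SketchEmulateFpuInsn(uint64_t cr0)
    {
        SKETCH_MAYBE_RAISE_DEVICE_NOT_AVAILABLE(cr0, -1 /* stand-in for the #NM status */);
        /* ... emulate the instruction ... */
        return 0;
    }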
trunk/src/VBox/VMM/include/IEMOpHlp.h
(diff r98910 → r99324)

Changes to the opcode decoding helpers:

- The doc comment for IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX() was corrected: the macro raises #UD if the a_fFeature is not present in the guest CPU (the old text said "is present").
- A new helper was added for decoders that require VEX.L=0 and an unused VEX.vvvv field while also checking a CPUID feature:

      /**
       * Done decoding VEX, no V, L=0.
       * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if
       * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=0, or if the a_fFeature
       * is not present in the guest CPU.
       */
      #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) \
          do \
          { \
              if (RT_LIKELY(   !(  pVCpu->iem.s.fPrefixes \
                                 & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \
                            && pVCpu->iem.s.uVexLength == 0 \
                            && pVCpu->iem.s.uVex3rdReg == 0 \
                            && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \
                            && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \
              { /* likely */ } \
              else \
                  return IEMOP_RAISE_INVALID_OPCODE(); \
          } while (0)
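The uVexLength and uVex3rdReg fields tested by the new helper come straight from the VEX prefix. A hypothetical fragment for the two-byte VEX form (made-up names, not IEM's actual prefix parser): bit 2 of the byte following 0xC5 is VEX.L, and bits 6:3 hold VEX.vvvv in inverted form, so the "no register" encoding 1111b decodes to 0, which is exactly what the uVex3rdReg == 0 test expects.

    #include <stdint.h>

    typedef struct VEX2FIELDS
    {
        uint8_t uVexLength;   /* VEX.L: 0 = 128-bit, 1 = 256-bit operation. */
        uint8_t uVex3rdReg;   /* VEX.vvvv after inversion: 0 means "unused". */
    } VEX2FIELDS;

    static VEX2FIELDS SketchDecodeVex2(uint8_t bVexByte) /* the byte after 0xC5 */
    {
        VEX2FIELDS Fields;
        Fields.uVexLength = (uint8_t)((bVexByte >> 2) & 1);
        Fields.uVex3rdReg = (uint8_t)(((bVexByte >> 3) & 0xf) ^ 0xf);
        return Fields;
    }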
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
(diff r99309 → r99324)

- A no-op stub for the new helper was added next to the existing decoding stubs so the MC syntax-check testcase keeps compiling:

      #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) do { } while (0)