Changeset 66469 in vbox
Timestamp: Apr 7, 2017 9:32:59 AM
Location: trunk/src/VBox/VMM
Files: 6 edited, 1 copied
trunk/src/VBox/VMM/Makefile.kmk
(r66334 → r66469)

…
 	$(PATH_SUB_CURRENT)/VMMAll/IEMAllInstructions.cpp.h \
 	$(PATH_SUB_CURRENT)/VMMAll/IEMAllInstructionsOneByte.cpp.h \
-	$(PATH_SUB_CURRENT)/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
+	$(PATH_SUB_CURRENT)/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h \
+	$(PATH_SUB_CURRENT)/VMMAll/IEMAllInstructionsVexMap1.cpp.h
 	$(QUIET)$(call MSG_GENERATE,VBoxVMM,$@,$<)
…
 # Alias the IEM templates to the object in which they are instantiated.
 IEMInternal.o \
-	IEMAllInstructions.cpp.o        IEMAllInstructions.cpp.obj \
-	IEMAllInstructionsOneByte.cpp.o IEMAllInstructionsOneByte.cpp.obj \
+	IEMAllInstructions.cpp.o          IEMAllInstructions.cpp.obj \
+	IEMAllInstructionsOneByte.cpp.o   IEMAllInstructionsOneByte.cpp.obj \
 	IEMAllInstructionsTwoByte0f.cpp.o IEMAllInstructionsTwoByte0f.cpp.obj \
-	IEMAllCImpl.cpp.o               IEMAllCImpl.cpp.obj \
-	IEMAllCImplStrInstr.cpp.o       IEMAllCImplStrInstr.cpp.obj: IEMAll.o
+	IEMAllInstructionsVexMap1.cpp.o   IEMAllInstructionsVexMap1.cpp.obj \
+	IEMAllCImpl.cpp.o                 IEMAllCImpl.cpp.obj \
+	IEMAllCImplStrInstr.cpp.o         IEMAllCImplStrInstr.cpp.obj: IEMAll.o
…
 # Alias the switcher templates.
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
(r66463 → r66469)

…
  */
 #include "IEMAllInstructionsTwoByte0f.cpp.h"
+#ifdef IEM_WITH_VEX
+# include "IEMAllInstructionsVexMap1.cpp.h"
+#endif
 #include "IEMAllInstructionsOneByte.cpp.h"
…
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h
(r66463 → r66469)

…
 {
     case 1: /* 0x0f lead opcode byte. */
+#ifdef IEM_WITH_VEX
         return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
+#else
+        IEMOP_BITCH_ABOUT_STUB();
+        return VERR_IEM_INSTR_NOT_IMPLEMENTED;
+#endif

     case 2: /* 0x0f 0x38 lead opcode bytes. */
…
     pVCpu->iem.s.idxPrefix = bRm & 0x3;

+#ifdef IEM_WITH_VEX
     return FNIEMOP_CALL(g_apfnVexMap1[(uintptr_t)bOpcode * 4 + pVCpu->iem.s.idxPrefix]);
+#else
+    IEMOP_BITCH_ABOUT_STUB();
+    return VERR_IEM_INSTR_NOT_IMPLEMENTED;
+#endif
 }
…
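Editorial note on the dispatch above: the index expression packs two dimensions into one flat array, the escaped opcode byte selects a row of four entries and the SIMD-prefix index (for VEX, the pp field of the prefix, masked with 0x3 as in the hunk above) selects the column. The following is a minimal standalone sketch of that table layout; the function-pointer type, handlers, and table name are illustrative stand-ins for the real FNIEMOP/g_apfnVexMap1 machinery, not the VirtualBox sources.

#include <cstdio>

typedef int (*PFNOP)(void);

static int OpStub(void)    { std::puts("not implemented"); return -1; }
static int OpVMovUps(void) { std::puts("vmovups");         return 0;  }

static PFNOP g_apfnMap1[256 * 4]; /* 256 opcodes x 4 prefix columns: none, 0x66, 0xF3, 0xF2 */

static int DispatchVexMap1(unsigned char bOpcode, unsigned char bPp /* VEX.pp, 0..3 */)
{
    /* Same indexing as the patch: opcode * 4 + prefix column. */
    return g_apfnMap1[static_cast<unsigned>(bOpcode) * 4u + (bPp & 0x3u)]();
}

int main()
{
    for (unsigned i = 0; i < 256 * 4; i++)
        g_apfnMap1[i] = OpStub;
    g_apfnMap1[0x10 * 4 + 0] = OpVMovUps; /* VEX.0F 0x10, no implied prefix */
    return DispatchVexMap1(0x10, 0);
}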
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
(r66464 → r66469)

…
         # We only look for comments, so only lines with a slash might possibly
         # influence the parser state.
-        if sLine.find('/') >= 0:
-            #self.debug('line %d: slash' % (self.iLine,));
-
-            offLine = 0;
-            while offLine < len(sLine):
-                if self.iState == self.kiCode:
-                    offHit = sLine.find('/*', offLine); # only multiline comments for now.
-                    if offHit >= 0:
-                        self.checkCodeForMacro(sLine[offLine:offHit]);
-                        self.sComment     = '';
-                        self.iCommentLine = self.iLine;
-                        self.iState       = self.kiCommentMulti;
-                        offLine = offHit + 2;
-                    else:
-                        self.checkCodeForMacro(sLine[offLine:]);
-                        offLine = len(sLine);
-                elif self.iState == self.kiCommentMulti:
-                    offHit = sLine.find('*/', offLine);
-                    if offHit >= 0:
-                        self.sComment += sLine[offLine:offHit];
-                        self.iState    = self.kiCode;
-                        offLine = offHit + 2;
-                        self.parseComment();
-                    else:
-                        self.sComment += sLine[offLine:];
-                        offLine = len(sLine);
-                else:
-                    assert False;
+        offSlash = sLine.find('/');
+        if offSlash >= 0:
+            if offSlash + 1 >= len(sLine)  or  sLine[offSlash + 1] != '/'  or  self.iState != self.kiCode:
+                offLine = 0;
+                while offLine < len(sLine):
+                    if self.iState == self.kiCode:
+                        offHit = sLine.find('/*', offLine); # only multiline comments for now.
+                        if offHit >= 0:
+                            self.checkCodeForMacro(sLine[offLine:offHit]);
+                            self.sComment     = '';
+                            self.iCommentLine = self.iLine;
+                            self.iState       = self.kiCommentMulti;
+                            offLine = offHit + 2;
+                        else:
+                            self.checkCodeForMacro(sLine[offLine:]);
+                            offLine = len(sLine);
+                    elif self.iState == self.kiCommentMulti:
+                        offHit = sLine.find('*/', offLine);
+                        if offHit >= 0:
+                            self.sComment += sLine[offLine:offHit];
+                            self.iState    = self.kiCode;
+                            offLine = offHit + 2;
+                            self.parseComment();
+                        else:
+                            self.sComment += sLine[offLine:];
+                            offLine = len(sLine);
+                    else:
+                        assert False;
+            # C++ line comment.
+            elif offSlash > 0:
+                self.checkCodeForMacro(sLine[:offSlash]);

         # No slash, but append the line if in multi-line comment.
…
     cErrors = 0;
     for sDefaultMap, sName in [
-        ( 'one',        'IEMAllInstructionsOneByte.cpp.h'),
-        ( 'two0f',      'IEMAllInstructionsTwoByte0f.cpp.h'),
+        ( 'one',        'IEMAllInstructionsOneByte.cpp.h'),
+        ( 'two0f',      'IEMAllInstructionsTwoByte0f.cpp.h'),
+        ( 'vexmap1',    'IEMAllInstructionsVexMap1.cpp.h'),
     ]:
         cErrors += __parseFileByName(os.path.join(sSrcDir, sName), sDefaultMap);
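Editorial note on the parser fix above: the old scanner treated every '/' as a potential "/*" and ran the multi-line scan over the whole line, so a "//" line comment hit while in code state was mishandled. The new code checks the character after the first slash and, for a line comment, feeds only the text before it to the macro checker. Below is a C++ rendering of the same control flow (the shipped parser is Python; the class and method names here merely mirror it) to make the state machine easier to follow:

#include <cassert>
#include <string>

enum State { kCode, kCommentMulti };

struct Scanner
{
    State       iState = kCode;
    std::string sComment;

    void checkCodeForMacro(const std::string &s) { (void)s; /* macro matching elided */ }
    void parseComment() { sComment.clear(); }

    void parseLine(const std::string &sLine)
    {
        size_t const offSlash = sLine.find('/');
        if (offSlash != std::string::npos)
        {
            if (offSlash + 1 >= sLine.size() || sLine[offSlash + 1] != '/' || iState != kCode)
            {
                size_t offLine = 0;
                while (offLine < sLine.size())
                {
                    if (iState == kCode)
                    {
                        size_t const offHit = sLine.find("/*", offLine); // only multi-line comments here
                        if (offHit != std::string::npos)
                        {
                            checkCodeForMacro(sLine.substr(offLine, offHit - offLine));
                            sComment.clear();
                            iState  = kCommentMulti;
                            offLine = offHit + 2;
                        }
                        else
                        {
                            checkCodeForMacro(sLine.substr(offLine));
                            offLine = sLine.size();
                        }
                    }
                    else if (iState == kCommentMulti)
                    {
                        size_t const offHit = sLine.find("*/", offLine);
                        if (offHit != std::string::npos)
                        {
                            sComment += sLine.substr(offLine, offHit - offLine);
                            iState    = kCode;
                            offLine   = offHit + 2;
                            parseComment();
                        }
                        else
                        {
                            sComment += sLine.substr(offLine);
                            offLine   = sLine.size();
                        }
                    }
                    else
                        assert(false);
                }
            }
            else if (offSlash > 0) // C++ line comment: check only the code before it
                checkCodeForMacro(sLine.substr(0, offSlash));
        }
        else if (iState == kCommentMulti)
            sComment += sLine; // no slash: just accumulate inside a multi-line comment
    }
};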
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
(r66464 → r66469)

 /** @file
  * IEM - Instruction Decoding and Emulation.
+ *
+ * @remarks IEMAllInstructionsVexMap1.cpp.h is a VEX mirror of this file.
+ *          Any update here is likely needed in that file too.
  */

 /*
- * Copyright (C) 2011-2016 Oracle Corporation
+ * Copyright (C) 2011-2017 Oracle Corporation
  *
…
  * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
  */
-
-
-/*******************************************************************************
-*   Global Variables                                                           *
-*******************************************************************************/
-extern const PFNIEMOP g_apfnOneByteMap[256]; /* not static since we need to forward declare it. */
-
-
-/** @name ..... opcodes.
- *
- * @{
- */
-
-/** @} */
…
 /** Opcode 0x0f 0x10 - vmovups Vps, Wps */
-FNIEMOP_STUB(iemOp_vmovups_Vps_Wps);
+FNIEMOP_STUB(iemOp_movups_Vps_Wps);
 /** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */
-FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd);
+FNIEMOP_STUB(iemOp_movupd_Vpd_Wpd);
…
  * @optest      op1=1 op2=2 -> op1=2
  * @optest      op1=0 op2=-22 -> op1=-22
- * @oponly
  */
 FNIEMOP_DEF(iemOp_movss_Vss_Wss)
…
-/** Opcode VEX 0xf3 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
-FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss);
-
-/** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */
-FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd);
+/** Opcode VEX 0xf3 0x0f 0x10 - movsd Vx, Wsd */
+FNIEMOP_STUB(iemOp_movss_Vx_Wss);
+
+/** Opcode 0xf2 0x0f 0x10 - movsd Vx, Wsd */
+FNIEMOP_STUB(iemOp_movsd_Vx_Wsd);
…
  * @optest      op1=0 op2=-42 -> op1=-42
  */
-FNIEMOP_DEF(iemOp_vmovups_Wps_Vps)
+FNIEMOP_DEF(iemOp_movups_Wps_Vps)
 {
     IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd)
+FNIEMOP_DEF(iemOp_movupd_Wpd_Vpd)
 {
     IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss)
+FNIEMOP_DEF(iemOp_movss_Wss_Vss)
 {
     IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd)
+FNIEMOP_DEF(iemOp_movsd_Wsd_Vsd)
 {
     IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps)
+FNIEMOP_DEF(iemOp_movlps_Vq_Mq__movhlps)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
…
- * @opfunction  iemOp_vmovlps_Vq_Hq_Mq__vmovhlps
+ * @opfunction  iemOp_movlps_Vq_Mq__vmovhlps
  */
 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq)
+FNIEMOP_DEF(iemOp_movlpd_Vq_Mq)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
…
-FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx)
+FNIEMOP_DEF(iemOp_movsldup_Vdq_Wdq)
 {
     IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
-FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx)
+FNIEMOP_DEF(iemOp_movddup_Vdq_Wdq)
 {
     IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
 /** Opcode 0x0f 0x13 - vmovlps Mq, Vq */
-FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq);
+FNIEMOP_STUB(iemOp_movlps_Mq_Vq);

 /** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */
-FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq)
+FNIEMOP_DEF(iemOp_movlpd_Mq_Vq)
 {
     IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq");
…
 /* Opcode 0xf2 0x0f 0x13 - invalid */

-/** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/
-FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx);
-/** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */
-FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx);
+/** Opcode 0x0f 0x14 - unpcklps Vx, Wx*/
+FNIEMOP_STUB(iemOp_unpcklps_Vx_Wx);
+/** Opcode 0x66 0x0f 0x14 - unpcklpd Vx, Wx */
+FNIEMOP_STUB(iemOp_unpcklpd_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x14 - invalid */
 /* Opcode 0xf2 0x0f 0x14 - invalid */
-/** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx);
-/** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */
-FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx);
+/** Opcode 0x0f 0x15 - unpckhps Vx, Wx */
+FNIEMOP_STUB(iemOp_unpckhps_Vx_Wx);
+/** Opcode 0x66 0x0f 0x15 - unpckhpd Vx, Wx */
+FNIEMOP_STUB(iemOp_unpckhpd_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x15 - invalid */
 /* Opcode 0xf2 0x0f 0x15 - invalid */
-/** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */
-FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq);  //NEXT
-/** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */
-FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq);  //NEXT
-/** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */
-FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT
+/** Opcode 0x0f 0x16 - movhpsv1 Vdq, Mq movlhps Vdq, Uq */
+FNIEMOP_STUB(iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq);  //NEXT
+/** Opcode 0x66 0x0f 0x16 - movhpdv1 Vdq, Mq */
+FNIEMOP_STUB(iemOp_movhpdv1_Vdq_Mq);  //NEXT
+/** Opcode 0xf3 0x0f 0x16 - movshdup Vx, Wx */
+FNIEMOP_STUB(iemOp_movshdup_Vx_Wx); //NEXT
 /* Opcode 0xf2 0x0f 0x16 - invalid */
-/** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */
-FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq);  //NEXT
-/** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */
-FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq);  //NEXT
+/** Opcode 0x0f 0x17 - movhpsv1 Mq, Vq */
+FNIEMOP_STUB(iemOp_movhpsv1_Mq_Vq);  //NEXT
+/** Opcode 0x66 0x0f 0x17 - movhpdv1 Mq, Vq */
+FNIEMOP_STUB(iemOp_movhpdv1_Mq_Vq);  //NEXT
 /* Opcode 0xf3 0x0f 0x17 - invalid */
 /* Opcode 0xf2 0x0f 0x17 - invalid */
…
-/** Opcode 0x0f 0x28 - vmovaps Vps, Wps */
-FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps)
+/** Opcode 0x0f 0x28 - movaps Vps, Wps */
+FNIEMOP_DEF(iemOp_movaps_Vps_Wps)
 {
     IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr");
…
-/** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */
-FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd)
+/** Opcode 0x66 0x0f 0x28 - movapd Vpd, Wpd */
+FNIEMOP_DEF(iemOp_movapd_Vpd_Wpd)
 {
     IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr");
…
 /* Opcode 0xf2 0x0f 0x28 - invalid */

-/** Opcode 0x0f 0x29 - vmovaps Wps, Vps */
-FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps)
+/** Opcode 0x0f 0x29 - movaps Wps, Vps */
+FNIEMOP_DEF(iemOp_movaps_Wps_Vps)
 {
     IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps");
…
-/** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */
-FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd)
+/** Opcode 0x66 0x0f 0x29 - movapd Wpd,Vpd */
+FNIEMOP_DEF(iemOp_movapd_Wpd_Vpd)
 {
     IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd");
…
 FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT
 /** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */
-FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT
+FNIEMOP_STUB(iemOp_cvtsi2ss_Vss_Ey); //NEXT
 /** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */
-FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT
+FNIEMOP_STUB(iemOp_cvtsi2sd_Vsd_Ey); //NEXT

 /** Opcode 0x0f 0x2b - vmovntps Mps, Vps */
-FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps)
+FNIEMOP_DEF(iemOp_movntps_Mps_Vps)
 {
     IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps");
…
-/** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */
-FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd)
+/** Opcode 0x66 0x0f 0x2b - movntpd Mpd, Vpd */
+FNIEMOP_DEF(iemOp_movntpd_Mpd_Vpd)
 {
     IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd");
…
 /** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */
 FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd);
-/** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */
-FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss);
-/** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */
-FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd);
+/** Opcode 0xf3 0x0f 0x2c - cvttss2si Gy, Wss */
+FNIEMOP_STUB(iemOp_cvttss2si_Gy_Wss);
+/** Opcode 0xf2 0x0f 0x2c - cvttsd2si Gy, Wsd */
+FNIEMOP_STUB(iemOp_cvttsd2si_Gy_Wsd);

 /** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */
…
 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */
 FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd);
-/** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */
-FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss);
-/** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */
-FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd);
-
-/** Opcode 0x0f 0x2e - vucomiss Vss, Wss */
-FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT
-/** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */
-FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT
+/** Opcode 0xf3 0x0f 0x2d - cvtss2si Gy, Wss */
+FNIEMOP_STUB(iemOp_cvtss2si_Gy_Wss);
+/** Opcode 0xf2 0x0f 0x2d - cvtsd2si Gy, Wsd */
+FNIEMOP_STUB(iemOp_cvtsd2si_Gy_Wsd);
+
+/** Opcode 0x0f 0x2e - ucomiss Vss, Wss */
+FNIEMOP_STUB(iemOp_ucomiss_Vss_Wss); // NEXT
+/** Opcode 0x66 0x0f 0x2e - ucomisd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_ucomisd_Vsd_Wsd); // NEXT
 /* Opcode 0xf3 0x0f 0x2e - invalid */
 /* Opcode 0xf2 0x0f 0x2e - invalid */

-/** Opcode 0x0f 0x2f - vcomiss Vss, Wss */
-FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss);
-/** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */
-FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd);
+/** Opcode 0x0f 0x2f - comiss Vss, Wss */
+FNIEMOP_STUB(iemOp_comiss_Vss_Wss);
+/** Opcode 0x66 0x0f 0x2f - comisd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_comisd_Vsd_Wsd);
 /* Opcode 0xf3 0x0f 0x2f - invalid */
 /* Opcode 0xf2 0x0f 0x2f - invalid */
…
 #undef CMOV_X

-/** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */
-FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups);
-/** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */
-FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd);
+/** Opcode 0x0f 0x50 - movmskps Gy, Ups */
+FNIEMOP_STUB(iemOp_movmskps_Gy_Ups);
+/** Opcode 0x66 0x0f 0x50 - movmskpd Gy, Upd */
+FNIEMOP_STUB(iemOp_movmskpd_Gy_Upd);
 /* Opcode 0xf3 0x0f 0x50 - invalid */
 /* Opcode 0xf2 0x0f 0x50 - invalid */

-/** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */
-FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps);
-/** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */
-FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd);
-/** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x51 - sqrtps Vps, Wps */
+FNIEMOP_STUB(iemOp_sqrtps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x51 - sqrtpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_sqrtpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x51 - sqrtss Vss, Wss */
+FNIEMOP_STUB(iemOp_sqrtss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x51 - sqrtsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_sqrtsd_Vsd_Wsd);

-/** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */
-FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps);
+/** Opcode 0x0f 0x52 - rsqrtps Vps, Wps */
+FNIEMOP_STUB(iemOp_rsqrtps_Vps_Wps);
 /* Opcode 0x66 0x0f 0x52 - invalid */
-/** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss);
+/** Opcode 0xf3 0x0f 0x52 - rsqrtss Vss, Wss */
+FNIEMOP_STUB(iemOp_rsqrtss_Vss_Wss);
 /* Opcode 0xf2 0x0f 0x52 - invalid */

-/** Opcode 0x0f 0x53 - vrcpps Vps, Wps */
-FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps);
+/** Opcode 0x0f 0x53 - rcpps Vps, Wps */
+FNIEMOP_STUB(iemOp_rcpps_Vps_Wps);
 /* Opcode 0x66 0x0f 0x53 - invalid */
-/** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss);
+/** Opcode 0xf3 0x0f 0x53 - rcpss Vss, Wss */
+FNIEMOP_STUB(iemOp_rcpss_Vss_Wss);
 /* Opcode 0xf2 0x0f 0x53 - invalid */

-/** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd);
+/** Opcode 0x0f 0x54 - andps Vps, Wps */
+FNIEMOP_STUB(iemOp_andps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x54 - andpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_andpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x54 - invalid */
 /* Opcode 0xf2 0x0f 0x54 - invalid */

-/** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd);
+/** Opcode 0x0f 0x55 - andnps Vps, Wps */
+FNIEMOP_STUB(iemOp_andnps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x55 - andnpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_andnpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x55 - invalid */
 /* Opcode 0xf2 0x0f 0x55 - invalid */

-/** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd);
+/** Opcode 0x0f 0x56 - orps Vps, Wps */
+FNIEMOP_STUB(iemOp_orps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x56 - orpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_orpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x56 - invalid */
 /* Opcode 0xf2 0x0f 0x56 - invalid */

-/** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd);
+/** Opcode 0x0f 0x57 - xorps Vps, Wps */
+FNIEMOP_STUB(iemOp_xorps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x57 - xorpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_xorpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x57 - invalid */
 /* Opcode 0xf2 0x0f 0x57 - invalid */

-/** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x58 - addps Vps, Wps */
+FNIEMOP_STUB(iemOp_addps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x58 - addpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_addpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x58 - addss Vss, Wss */
+FNIEMOP_STUB(iemOp_addss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x58 - addsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_addsd_Vsd_Wsd);

-/** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x59 - mulps Vps, Wps */
+FNIEMOP_STUB(iemOp_mulps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x59 - mulpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_mulpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x59 - mulss Vss, Wss */
+FNIEMOP_STUB(iemOp_mulss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x59 - mulsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_mulsd_Vsd_Wsd);

-/** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */
-FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps);
-/** Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */
-FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd);
-/** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */
-FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss);
-/** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */
-FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd);
+/** Opcode 0x0f 0x5a - cvtps2pd Vpd, Wps */
+FNIEMOP_STUB(iemOp_cvtps2pd_Vpd_Wps);
+/** Opcode 0x66 0x0f 0x5a - cvtpd2ps Vps, Wpd */
+FNIEMOP_STUB(iemOp_cvtpd2ps_Vps_Wpd);
+/** Opcode 0xf3 0x0f 0x5a - cvtss2sd Vsd, Wss */
+FNIEMOP_STUB(iemOp_cvtss2sd_Vsd_Wss);
+/** Opcode 0xf2 0x0f 0x5a - cvtsd2ss Vss, Wsd */
+FNIEMOP_STUB(iemOp_cvtsd2ss_Vss_Wsd);

-/** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */
-FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq);
-/** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */
-FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps);
-/** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */
-FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps);
+/** Opcode 0x0f 0x5b - cvtdq2ps Vps, Wdq */
+FNIEMOP_STUB(iemOp_cvtdq2ps_Vps_Wdq);
+/** Opcode 0x66 0x0f 0x5b - cvtps2dq Vdq, Wps */
+FNIEMOP_STUB(iemOp_cvtps2dq_Vdq_Wps);
+/** Opcode 0xf3 0x0f 0x5b - cvttps2dq Vdq, Wps */
+FNIEMOP_STUB(iemOp_cvttps2dq_Vdq_Wps);
 /* Opcode 0xf2 0x0f 0x5b - invalid */

-/** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x5c - subps Vps, Wps */
+FNIEMOP_STUB(iemOp_subps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x5c - subpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_subpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x5c - subss Vss, Wss */
+FNIEMOP_STUB(iemOp_subss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x5c - subsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_subsd_Vsd_Wsd);

-/** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x5d - minps Vps, Wps */
+FNIEMOP_STUB(iemOp_minps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x5d - minpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_minpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x5d - minss Vss, Wss */
+FNIEMOP_STUB(iemOp_minss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x5d - minsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_minsd_Vsd_Wsd);

-/** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x5e - divps Vps, Wps */
+FNIEMOP_STUB(iemOp_divps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x5e - divpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_divpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x5e - divss Vss, Wss */
+FNIEMOP_STUB(iemOp_divss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x5e - divsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_divsd_Vsd_Wsd);

-/** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps);
-/** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd);
-/** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */
-FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss);
-/** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */
-FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd);
+/** Opcode 0x0f 0x5f - maxps Vps, Wps */
+FNIEMOP_STUB(iemOp_maxps_Vps_Wps);
+/** Opcode 0x66 0x0f 0x5f - maxpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_maxpd_Vpd_Wpd);
+/** Opcode 0xf3 0x0f 0x5f - maxss Vss, Wss */
+FNIEMOP_STUB(iemOp_maxss_Vss_Wss);
+/** Opcode 0xf2 0x0f 0x5f - maxsd Vsd, Wsd */
+FNIEMOP_STUB(iemOp_maxsd_Vsd_Wsd);
…
-/** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */
-FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
-    return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw);
-}
+/** Opcode 0x66 0x0f 0x60 - punpcklbw Vx, W */
+FNIEMOP_DEF(iemOp_punpcklbw_Vx_Wx)
+{
+    IEMOP_MNEMONIC(vpunpcklbw_Vx_Wx, "vpunpcklbw Vx, Wx");
+    return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
+}
…
-/** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
+/** Opcode 0x66 0x0f 0x61 - punpcklwd Vx, Wx */
+FNIEMOP_DEF(iemOp_punpcklwd_Vx_Wx)
+{
+    IEMOP_MNEMONIC(vpunpcklwd_Vx_Wx, "punpcklwd Vx, Wx");
     return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
 }
…
-/** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
+/** Opcode 0x66 0x0f 0x62 - punpckldq Vx, Wx */
+FNIEMOP_DEF(iemOp_punpckldq_Vx_Wx)
+{
+    IEMOP_MNEMONIC(punpckldq_Vx_Wx, "punpckldq Vx, Wx");
     return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
 }
…
 /** Opcode 0x0f 0x63 - packsswb Pq, Qq */
 FNIEMOP_STUB(iemOp_packsswb_Pq_Qq);
-/** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0x63 - packsswb Vx, Wx */
+FNIEMOP_STUB(iemOp_packsswb_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x63 - invalid */

 /** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */
 FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq);
-/** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0x64 - pcmpgtb Vx, Wx */
+FNIEMOP_STUB(iemOp_pcmpgtb_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x64 - invalid */

 /** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */
 FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq);
-/** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0x65 - pcmpgtw Vx, Wx */
+FNIEMOP_STUB(iemOp_pcmpgtw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x65 - invalid */

 /** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */
 FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq);
-/** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0x66 - pcmpgtd Vx, Wx */
+FNIEMOP_STUB(iemOp_pcmpgtd_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x66 - invalid */

 /** Opcode 0x0f 0x67 - packuswb Pq, Qq */
 FNIEMOP_STUB(iemOp_packuswb_Pq_Qq);
-/** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0x67 - packuswb Vx, W */
+FNIEMOP_STUB(iemOp_packuswb_Vx_W);
 /* Opcode 0xf3 0x0f 0x67 - invalid */
…
-/** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
+/** Opcode 0x66 0x0f 0x68 - punpckhbw Vx, Wx */
+FNIEMOP_DEF(iemOp_punpckhbw_Vx_Wx)
+{
+    IEMOP_MNEMONIC(vpunpckhbw_Vx_Wx, "vpunpckhbw Vx, Wx");
     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
 }
…
-/** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
+/** Opcode 0x66 0x0f 0x69 - punpckhwd Vx, Hx, Wx */
+FNIEMOP_DEF(iemOp_punpckhwd_Vx_Wx)
+{
+    IEMOP_MNEMONIC(punpckhwd_Vx_Wx, "punpckhwd Vx, Wx");
     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
…
-/** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */
-FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
-{
-    IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
+/** Opcode 0x66 0x0f 0x6a - punpckhdq Vx, W */
+FNIEMOP_DEF(iemOp_punpckhdq_Vx_W)
+{
+    IEMOP_MNEMONIC(punpckhdq_Vx_W, "punpckhdq Vx, W");
     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
 }
…
 /** Opcode 0x0f 0x6b - packssdw Pq, Qd */
 FNIEMOP_STUB(iemOp_packssdw_Pq_Qd);
-/** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0x6b - packssdw Vx, Wx */
+FNIEMOP_STUB(iemOp_packssdw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0x6b - invalid */

 /* Opcode 0x0f 0x6c - invalid */

-/** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx");
+/** Opcode 0x66 0x0f 0x6c - punpcklqdq Vx, Wx */
+FNIEMOP_DEF(iemOp_punpcklqdq_Vx_Wx)
+{
+    IEMOP_MNEMONIC(punpcklqdq, "punpcklqdq Vx, Wx");
     return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq);
 }
…
 /* Opcode 0x0f 0x6d - invalid */

-/** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */
-FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W)
-{
-    IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq");
+/** Opcode 0x66 0x0f 0x6d - punpckhqdq Vx, W */
+FNIEMOP_DEF(iemOp_punpckhqdq_Vx_W)
+{
+    IEMOP_MNEMONIC(punpckhqdq_Vx_W, "punpckhqdq Vx,W");
     return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq);
 }
…
-/** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */
-FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey)
+/** Opcode 0x66 0x0f 0x6e - movd/q Vy, Ey */
+FNIEMOP_DEF(iemOp_movd_q_Vy_Ey)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-        IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq");
-    else
-        IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed");
+        IEMOP_MNEMONIC(movdq_Wq_Eq, "movq Wq,Eq");
+    else
+        IEMOP_MNEMONIC(movdq_Wd_Ed, "movd Wd,Ed");
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
     {
…
-/** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */
-FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx)
+/** Opcode 0x66 0x0f 0x6f - movdqa Vx, Wx */
+FNIEMOP_DEF(iemOp_movdqa_Vx_Wx)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
…
-/** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */
-FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx)
+/** Opcode 0xf3 0x0f 0x6f - movdqu Vx, Wx */
+FNIEMOP_DEF(iemOp_movdqu_Vx_Wx)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
…
-/** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */
-FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib)
-{
-    IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib");
+/** Opcode 0x66 0x0f 0x70 - pshufd Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_pshufd_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(pshufd_Vx_Wx_Ib, "pshufd Vx,Wx,Ib");
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
…
-/** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */
-FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib)
-{
-    IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib");
+/** Opcode 0xf3 0x0f 0x70 - pshufhw Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_pshufhw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(pshufhw_Vx_Wx_Ib, "pshufhw Vx,Wx,Ib");
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
…
-/** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */
-FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib)
-{
-    IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib");
+/** Opcode 0xf2 0x0f 0x70 - pshuflw Vx, Wx, Ib */
+FNIEMOP_DEF(iemOp_pshuflw_Vx_Wx_Ib)
+{
+    IEMOP_MNEMONIC(pshuflw_Vx_Wx_Ib, "pshuflw Vx,Wx,Ib");
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
…
 /** Opcode 0x0f 0x71 11/2. */
-FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x71 11/2. */
-FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x0f 0x71 11/4. */
-FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x71 11/4. */
-FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psraw_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x0f 0x71 11/6. */
-FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x71 11/6. */
-FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp12_psllw_Ux_Ib, uint8_t, bRm);
…
 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /2 */ iemOp_Grp12_psrlw_Nq_Ib,  iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /2 */ iemOp_Grp12_psrlw_Nq_Ib,  iemOp_Grp12_psrlw_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /4 */ iemOp_Grp12_psraw_Nq_Ib,  iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /4 */ iemOp_Grp12_psraw_Nq_Ib,  iemOp_Grp12_psraw_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /6 */ iemOp_Grp12_psllw_Nq_Ib,  iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /6 */ iemOp_Grp12_psllw_Nq_Ib,  iemOp_Grp12_psllw_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
…
 /** Opcode 0x0f 0x72 11/2. */
-FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x72 11/2. */
-FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_psrld_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x0f 0x72 11/4. */
-FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x72 11/4. */
-FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_psrad_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x0f 0x72 11/6. */
-FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib,     uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x72 11/6. */
-FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp13_pslld_Ux_Ib, uint8_t, bRm);
…
 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /2 */ iemOp_Grp13_psrld_Nq_Ib,  iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /2 */ iemOp_Grp13_psrld_Nq_Ib,  iemOp_Grp13_psrld_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /4 */ iemOp_Grp13_psrad_Nq_Ib,  iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /4 */ iemOp_Grp13_psrad_Nq_Ib,  iemOp_Grp13_psrad_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /6 */ iemOp_Grp13_pslld_Nq_Ib,  iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /6 */ iemOp_Grp13_pslld_Nq_Ib,  iemOp_Grp13_pslld_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8)
…
 /** Opcode 0x66 0x0f 0x73 11/2. */
-FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x73 11/3. */
-FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
+FNIEMOP_STUB_1(iemOp_Grp14_psrldq_Ux_Ib, uint8_t, bRm); //NEXT

 /** Opcode 0x0f 0x73 11/6. */
…
 /** Opcode 0x66 0x0f 0x73 11/6. */
-FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm);
+FNIEMOP_STUB_1(iemOp_Grp14_psllq_Ux_Ib, uint8_t, bRm);

 /** Opcode 0x66 0x0f 0x73 11/7. */
-FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT
+FNIEMOP_STUB_1(iemOp_Grp14_pslldq_Ux_Ib, uint8_t, bRm); //NEXT
…
 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_vpsrlq_Hx_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
-/* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /2 */ iemOp_Grp14_psrlq_Nq_Ib,     iemOp_Grp14_psrlq_Ux_Ib,      iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_psrldq_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8),
-/* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_vpsllq_Hx_Ux_Ib,  iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
-/* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /6 */ iemOp_Grp14_psllq_Nq_Ib,     iemOp_Grp14_psllq_Ux_Ib,      iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
+/* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_pslldq_Ux_Ib,     iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8,
 };
 AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4);
…
-/** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb");
+/** Opcode 0x66 0x0f 0x74 - pcmpeqb Vx, Wx */
+FNIEMOP_DEF(iemOp_pcmpeqb_Vx_Wx)
+{
+    IEMOP_MNEMONIC(vpcmpeqb_Vx_Wx, "pcmpeqb");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb);
 }
…
-/** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw");
+/** Opcode 0x66 0x0f 0x75 - pcmpeqw Vx, Wx */
+FNIEMOP_DEF(iemOp_pcmpeqw_Vx_Wx)
+{
+    IEMOP_MNEMONIC(pcmpeqw_Vx_Wx, "pcmpeqw");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw);
 }
…
-/** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd");
+/** Opcode 0x66 0x0f 0x76 - pcmpeqd Vx, Wx */
+FNIEMOP_DEF(iemOp_pcmpeqd_Vx_Wx)
+{
+    IEMOP_MNEMONIC(pcmpeqd_Vx_Wx, "vpcmpeqd");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd);
 }
…
-/** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */
-FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv);
+/** Opcode 0x0f 0x77 - emms (vex has vzeroall and vzeroupper here) */
+FNIEMOP_STUB(iemOp_emms);
 /* Opcode 0x66 0x0f 0x77 - invalid */
 /* Opcode 0xf3 0x0f 0x77 - invalid */
…
 /* Opcode 0x0f 0x7c - invalid */
-/** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd);
+/** Opcode 0x66 0x0f 0x7c - haddpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_haddpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x7c - invalid */
-/** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps);
+/** Opcode 0xf2 0x0f 0x7c - haddps Vps, Wps */
+FNIEMOP_STUB(iemOp_haddps_Vps_Wps);

 /* Opcode 0x0f 0x7d - invalid */
-/** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd);
+/** Opcode 0x66 0x0f 0x7d - hsubpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_hsubpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0x7d - invalid */
-/** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps);
+/** Opcode 0xf2 0x0f 0x7d - hsubps Vps, Wps */
+FNIEMOP_STUB(iemOp_hsubps_Vps_Wps);
…
-/** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */
-FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy)
+/** Opcode 0x66 0x0f 0x7e - movd_q Ey, Vy */
+FNIEMOP_DEF(iemOp_movd_q_Ey_Vy)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
-        IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq");
-    else
-        IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd");
+        IEMOP_MNEMONIC(movq_Eq_Wq, "movq Eq,Wq");
+    else
+        IEMOP_MNEMONIC(movd_Ed_Wd, "movd Ed,Wd");
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
     {
…
-/** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */
-FNIEMOP_STUB(iemOp_vmovq_Vq_Wq);
+/** Opcode 0xf3 0x0f 0x7e - movq Vq, Wq */
+FNIEMOP_STUB(iemOp_movq_Vq_Wq);
 /* Opcode 0xf2 0x0f 0x7e - invalid */
…
-/** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */
-FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx)
-{
-    IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx");
+/** Opcode 0x66 0x0f 0x7f - movdqa Wx,Vx */
+FNIEMOP_DEF(iemOp_movdqa_Wx_Vx)
+{
+    IEMOP_MNEMONIC(movdqa_Wdq_Vdq, "movdqa Wx,Vx");
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
…
-/** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */
-FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx");
+/** Opcode 0xf3 0x0f 0x7f - movdqu Wx,Vx */
+FNIEMOP_DEF(iemOp_movdqu_Wx_Vx)
+{
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    IEMOP_MNEMONIC(movdqu_Wdq_Vdq, "movdqu Wx,Vx");
     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
     {
…
-/**
- * @opmaps      vexgrp15
- * @opcode      !11/3
- * @oppfx       none
- * @opcpuid     avx
- * @opgroup     og_avx_mxcsrsm
- * @opxcpttype  5
- * @optest      mxcsr=0      -> op1=0
- * @optest      mxcsr=0x2083 -> op1=0x2083
- * @optest      mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7
- * @optest      !amd / mxcsr=0x2085 cr0|=em -> op1=0x2085
- * @optest      amd  / mxcsr=0x2085 cr0|=em -> value.xcpt=0x6
- * @optest      mxcsr=0x2086 cr0|=mp -> op1=0x2086
- * @optest      mxcsr=0x2087 cr4&~=osfxsr -> op1=0x2087
- * @optest      mxcsr=0x208f cr4&~=osxsave -> value.xcpt=0x6
- * @optest      mxcsr=0x2087 cr4&~=osfxsr,osxsave -> value.xcpt=0x6
- * @optest      !amd / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x7
- * @optest      amd  / mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6
- * @optest      !amd / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> op1=0x2089
- * @optest      amd  / mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6
- * @optest      !amd / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x7
- * @optest      amd  / mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6
- * @optest      !amd / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x7
- * @optest      amd  / mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6
- * @optest      !amd / mxcsr=0x208c xcr0&~=all_avx -> value.xcpt=0x6
- * @optest      amd  / mxcsr=0x208c xcr0&~=all_avx -> op1=0x208c
- * @optest      !amd / mxcsr=0x208d xcr0&~=all_avx_sse -> value.xcpt=0x6
- * @optest      amd  / mxcsr=0x208d xcr0&~=all_avx_sse -> op1=0x208d
- * @optest      !amd / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x6
- * @optest      amd  / mxcsr=0x208e xcr0&~=all_avx cr0|=ts -> value.xcpt=0x7
- * @optest      mxcsr=0x2082 cr0|=ts cr4&~=osxsave -> value.xcpt=0x6
- * @optest      mxcsr=0x2081 xcr0&~=all_avx cr0|=ts cr4&~=osxsave
- *              -> value.xcpt=0x6
- * @remarks     AMD Jaguar CPU (f0x16,m0,s1) \#UD when CR0.EM is set.  It also
- *              doesn't seem to check XCR0[2:1] != 11b.  This does not match the
- *              APMv4 rev 3.17 page 509.
- * @todo        Test this instruction on AMD Ryzen.
- */
-FNIEMOP_DEF_1(iemOp_VGrp15_vstmxcsr,  uint8_t, bRm)
-{
-    IEMOP_MNEMONIC1(VEX_M_MEM, VSTMXCSR, vstmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
-    if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fAvx)
-        return IEMOP_RAISE_INVALID_OPCODE();
-
-    IEM_MC_BEGIN(2, 0);
-    IEM_MC_ARG(uint8_t,         iEffSeg,                                 0);
-    IEM_MC_ARG(RTGCPTR,         GCPtrEff,                                1);
-    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0);
-    IEMOP_HLP_DONE_VEX_DECODING_L_ZERO_NO_VVV();
-    IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ();
-    IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg);
-    IEM_MC_CALL_CIMPL_2(iemCImpl_vstmxcsr, iEffSeg, GCPtrEff);
-    IEM_MC_END();
-    return VINF_SUCCESS;
-}
-
-
 /**
- * @opmaps      vexgrp15
+ * @opmaps      grp15
  * @opcode      !11/4
  * @oppfx       none
…
 /**
- * @opmaps      vexgrp15
+ * @opmaps      grp15
  * @opcode      !11/5
  * @oppfx       none
…
     return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
                                               + pVCpu->iem.s.idxPrefix], bRm);
 }
-
-
-/**
- * Vex group 15 jump table for register variant.
- * @todo work in progress
- */
-IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15RegReg[] =
-{   /* pfx:  none,                 066h,                  0f3h,                  0f2h */
-    /* /0 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_rdfsbase,  iemOp_InvalidWithRM,
-    /* /1 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_rdgsbase,  iemOp_InvalidWithRM,
-    /* /2 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_wrfsbase,  iemOp_InvalidWithRM,
-    /* /3 */ iemOp_InvalidWithRM,  iemOp_InvalidWithRM,   iemOp_Grp15_wrgsbase,  iemOp_InvalidWithRM,
-    /* /4 */ IEMOP_X4(iemOp_InvalidWithRM),
-    /* /5 */ iemOp_Grp15_lfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /6 */ iemOp_Grp15_mfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /7 */ iemOp_Grp15_sfence,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-};
-AssertCompile(RT_ELEMENTS(g_apfnVexGroup15RegReg) == 8*4);
-
-
-/**
- * Vex group 15 jump table for memory variant.
- * @todo work in progress
- */
-IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] =
-{   /* pfx:  none,                 066h,                    0f3h,                  0f2h */
-    /* /0 */ iemOp_Grp15_fxsave,   iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /1 */ iemOp_Grp15_fxrstor,  iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /2 */ iemOp_Grp15_ldmxcsr,  iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM,    iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /4 */ iemOp_Grp15_xsave,    iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /5 */ iemOp_Grp15_xrstor,   iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM,     iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-    /* /7 */ iemOp_Grp15_clflush,  iemOp_Grp15_clflushopt,  iemOp_InvalidWithRM,   iemOp_InvalidWithRM,
-};
-AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4);
-
-
-/** Opcode vex. 0xae. */
-FNIEMOP_DEF(iemOp_VGrp15)
-{
-    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
-    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
-        /* register, register */
-        return FNIEMOP_CALL_1(g_apfnVexGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
-                                                     + pVCpu->iem.s.idxPrefix], bRm);
-    /* memory, register */
-    return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
-                                                 + pVCpu->iem.s.idxPrefix], bRm);
-}
…
-/** Opcode 0x0f 0xc2 - vcmpps Vps,Hps,Wps,Ib */
-FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib);
-/** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */
-FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib);
-/** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */
-FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib);
-/** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */
-FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib);
+/** Opcode 0x0f 0xc2 - cmpps Vps,Wps,Ib */
+FNIEMOP_STUB(iemOp_cmpps_Vps_Wps_Ib);
+/** Opcode 0x66 0x0f 0xc2 - cmppd Vpd,Wpd,Ib */
+FNIEMOP_STUB(iemOp_cmppd_Vpd_Wpd_Ib);
+/** Opcode 0xf3 0x0f 0xc2 - cmpss Vss,Wss,Ib */
+FNIEMOP_STUB(iemOp_cmpss_Vss_Wss_Ib);
+/** Opcode 0xf2 0x0f 0xc2 - cmpsd Vsd,Wsd,Ib */
+FNIEMOP_STUB(iemOp_cmpsd_Vsd_Wsd_Ib);
…
 /* Opcode 0xf2 0x0f 0xc3 - invalid */

 /** Opcode 0x0f 0xc4 - pinsrw Pq, Ry/Mw,Ib */
 FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib);
-/** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */
-FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib);
+/** Opcode 0x66 0x0f 0xc4 - pinsrw Vdq, Ry/Mw,Ib */
+FNIEMOP_STUB(iemOp_pinsrw_Vdq_RyMw_Ib);
 /* Opcode 0xf3 0x0f 0xc4 - invalid */
 /* Opcode 0xf2 0x0f 0xc4 - invalid */

 /** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */
 FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib);
-/** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */
-FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib);
+/** Opcode 0x66 0x0f 0xc5 - pextrw Gd, Udq, Ib */
+FNIEMOP_STUB(iemOp_pextrw_Gd_Udq_Ib);
 /* Opcode 0xf3 0x0f 0xc5 - invalid */
 /* Opcode 0xf2 0x0f 0xc5 - invalid */

-/** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */
-FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib);
-/** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */
-FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib);
+/** Opcode 0x0f 0xc6 - shufps Vps, Wps, Ib */
+FNIEMOP_STUB(iemOp_shufps_Vps_Wps_Ib);
+/** Opcode 0x66 0x0f 0xc6 - shufpd Vpd, Wpd, Ib */
+FNIEMOP_STUB(iemOp_shufpd_Vpd_Wpd_Ib);
 /* Opcode 0xf3 0x0f 0xc6 - invalid */
 /* Opcode 0xf2 0x0f 0xc6 - invalid */
…
 /* Opcode 0x0f 0xd0 - invalid */
-/** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */
-FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd);
+/** Opcode 0x66 0x0f 0xd0 - addsubpd Vpd, Wpd */
+FNIEMOP_STUB(iemOp_addsubpd_Vpd_Wpd);
 /* Opcode 0xf3 0x0f 0xd0 - invalid */
-/** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */
-FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps);
+/** Opcode 0xf2 0x0f 0xd0 - addsubps Vps, Wps */
+FNIEMOP_STUB(iemOp_addsubps_Vps_Wps);

 /** Opcode 0x0f 0xd1 - psrlw Pq, Qq */
 FNIEMOP_STUB(iemOp_psrlw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xd1 - psrlw Vx, W */
+FNIEMOP_STUB(iemOp_psrlw_Vx_W);
 /* Opcode 0xf3 0x0f 0xd1 - invalid */
 /* Opcode 0xf2 0x0f 0xd1 - invalid */

 /** Opcode 0x0f 0xd2 - psrld Pq, Qq */
 FNIEMOP_STUB(iemOp_psrld_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xd2 - psrld Vx, Wx */
+FNIEMOP_STUB(iemOp_psrld_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xd2 - invalid */
 /* Opcode 0xf2 0x0f 0xd2 - invalid */

 /** Opcode 0x0f 0xd3 - psrlq Pq, Qq */
 FNIEMOP_STUB(iemOp_psrlq_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xd3 - psrlq Vx, Wx */
+FNIEMOP_STUB(iemOp_psrlq_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xd3 - invalid */
 /* Opcode 0xf2 0x0f 0xd3 - invalid */

 /** Opcode 0x0f 0xd4 - paddq Pq, Qq */
 FNIEMOP_STUB(iemOp_paddq_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xd4 - paddq Vx, W */
+FNIEMOP_STUB(iemOp_paddq_Vx_W);
 /* Opcode 0xf3 0x0f 0xd4 - invalid */
 /* Opcode 0xf2 0x0f 0xd4 - invalid */

 /** Opcode 0x0f 0xd5 - pmullw Pq, Qq */
 FNIEMOP_STUB(iemOp_pmullw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xd5 - pmullw Vx, Wx */
+FNIEMOP_STUB(iemOp_pmullw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xd5 - invalid */
 /* Opcode 0xf2 0x0f 0xd5 - invalid */
…
  * @optest      op1=0 op2=-42 -> op1=-42
  */
-FNIEMOP_DEF(iemOp_vmovq_Wq_Vq)
+FNIEMOP_DEF(iemOp_movq_Wq_Vq)
 {
     IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE);
…
 /** Opcode 0x66 0x0f 0xd7 -  */
-FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux)
+FNIEMOP_DEF(iemOp_pmovmskb_Gd_Ux)
 {
     /* Note! Taking the lazy approch here wrt the high 32-bits of the GREG. */
…
 /** Opcode 0x0f 0xd8 - psubusb Pq, Qq */
 FNIEMOP_STUB(iemOp_psubusb_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xd8 - psubusb Vx, W */
+FNIEMOP_STUB(iemOp_psubusb_Vx_W);
 /* Opcode 0xf3 0x0f 0xd8 - invalid */
 /* Opcode 0xf2 0x0f 0xd8 - invalid */

 /** Opcode 0x0f 0xd9 - psubusw Pq, Qq */
 FNIEMOP_STUB(iemOp_psubusw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xd9 - psubusw Vx, Wx */
+FNIEMOP_STUB(iemOp_psubusw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xd9 - invalid */
 /* Opcode 0xf2 0x0f 0xd9 - invalid */

 /** Opcode 0x0f 0xda - pminub Pq, Qq */
 FNIEMOP_STUB(iemOp_pminub_Pq_Qq);
-/** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xda - pminub Vx, Wx */
+FNIEMOP_STUB(iemOp_pminub_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xda - invalid */
 /* Opcode 0xf2 0x0f 0xda - invalid */

 /** Opcode 0x0f 0xdb - pand Pq, Qq */
 FNIEMOP_STUB(iemOp_pand_Pq_Qq);
-/** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xdb - pand Vx, W */
+FNIEMOP_STUB(iemOp_pand_Vx_W);
 /* Opcode 0xf3 0x0f 0xdb - invalid */
 /* Opcode 0xf2 0x0f 0xdb - invalid */

 /** Opcode 0x0f 0xdc - paddusb Pq, Qq */
 FNIEMOP_STUB(iemOp_paddusb_Pq_Qq);
-/** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xdc - paddusb Vx, Wx */
+FNIEMOP_STUB(iemOp_paddusb_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xdc - invalid */
 /* Opcode 0xf2 0x0f 0xdc - invalid */

 /** Opcode 0x0f 0xdd - paddusw Pq, Qq */
 FNIEMOP_STUB(iemOp_paddusw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xdd - paddusw Vx, Wx */
+FNIEMOP_STUB(iemOp_paddusw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xdd - invalid */
 /* Opcode 0xf2 0x0f 0xdd - invalid */

 /** Opcode 0x0f 0xde - pmaxub Pq, Qq */
 FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq);
-/** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xde - pmaxub Vx, W */
+FNIEMOP_STUB(iemOp_pmaxub_Vx_W);
 /* Opcode 0xf3 0x0f 0xde - invalid */
 /* Opcode 0xf2 0x0f 0xde - invalid */

 /** Opcode 0x0f 0xdf - pandn Pq, Qq */
 FNIEMOP_STUB(iemOp_pandn_Pq_Qq);
-/** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xdf - pandn Vx, Wx */
+FNIEMOP_STUB(iemOp_pandn_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xdf - invalid */
 /* Opcode 0xf2 0x0f 0xdf - invalid */

 /** Opcode 0x0f 0xe0 - pavgb Pq, Qq */
 FNIEMOP_STUB(iemOp_pavgb_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xe0 - pavgb Vx, Wx */
+FNIEMOP_STUB(iemOp_pavgb_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xe0 - invalid */
 /* Opcode 0xf2 0x0f 0xe0 - invalid */

 /** Opcode 0x0f 0xe1 - psraw Pq, Qq */
 FNIEMOP_STUB(iemOp_psraw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xe1 - psraw Vx, W */
+FNIEMOP_STUB(iemOp_psraw_Vx_W);
 /* Opcode 0xf3 0x0f 0xe1 - invalid */
 /* Opcode 0xf2 0x0f 0xe1 - invalid */

 /** Opcode 0x0f 0xe2 - psrad Pq, Qq */
 FNIEMOP_STUB(iemOp_psrad_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xe2 - psrad Vx, Wx */
+FNIEMOP_STUB(iemOp_psrad_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xe2 - invalid */
 /* Opcode 0xf2 0x0f 0xe2 - invalid */

 /** Opcode 0x0f 0xe3 - pavgw Pq, Qq */
 FNIEMOP_STUB(iemOp_pavgw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xe3 - pavgw Vx, Wx */
+FNIEMOP_STUB(iemOp_pavgw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xe3 - invalid */
 /* Opcode 0xf2 0x0f 0xe3 - invalid */

 /** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */
 FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xe4 - pmulhuw Vx, W */
+FNIEMOP_STUB(iemOp_pmulhuw_Vx_W);
 /* Opcode 0xf3 0x0f 0xe4 - invalid */
 /* Opcode 0xf2 0x0f 0xe4 - invalid */

 /** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */
 FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xe5 - pmulhw Vx, Wx */
+FNIEMOP_STUB(iemOp_pmulhw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xe5 - invalid */
 /* Opcode 0xf2 0x0f 0xe5 - invalid */

 /* Opcode 0x0f 0xe6 - invalid */
-/** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */
-FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd);
-/** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */
-FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd);
-/** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */
-FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd);
+/** Opcode 0x66 0x0f 0xe6 - cvttpd2dq Vx, Wpd */
+FNIEMOP_STUB(iemOp_cvttpd2dq_Vx_Wpd);
+/** Opcode 0xf3 0x0f 0xe6 - cvtdq2pd Vx, Wpd */
+FNIEMOP_STUB(iemOp_cvtdq2pd_Vx_Wpd);
+/** Opcode 0xf2 0x0f 0xe6 - cvtpd2dq Vx, Wpd */
+FNIEMOP_STUB(iemOp_cvtpd2dq_Vx_Wpd);
…
-/** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */
-FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx)
+/** Opcode 0x66 0x0f 0xe7 - movntdq Mx, Vx */
+FNIEMOP_DEF(iemOp_movntdq_Mx_Vx)
 {
     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
…
     {
         /* Register, memory. */
-        IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx");
+        IEMOP_MNEMONIC(movntdq_Mx_Vx, "movntdq Mx,Vx");
         IEM_MC_BEGIN(0, 2);
         IEM_MC_LOCAL(RTUINT128U, uSrc);
…
 /** Opcode 0x0f 0xe8 - psubsb Pq, Qq */
 FNIEMOP_STUB(iemOp_psubsb_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xe8 - psubsb Vx, W */
+FNIEMOP_STUB(iemOp_psubsb_Vx_W);
 /* Opcode 0xf3 0x0f 0xe8 - invalid */
 /* Opcode 0xf2 0x0f 0xe8 - invalid */

 /** Opcode 0x0f 0xe9 - psubsw Pq, Qq */
 FNIEMOP_STUB(iemOp_psubsw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xe9 - psubsw Vx, Wx */
+FNIEMOP_STUB(iemOp_psubsw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xe9 - invalid */
 /* Opcode 0xf2 0x0f 0xe9 - invalid */

 /** Opcode 0x0f 0xea - pminsw Pq, Qq */
 FNIEMOP_STUB(iemOp_pminsw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xea - pminsw Vx, Wx */
+FNIEMOP_STUB(iemOp_pminsw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xea - invalid */
 /* Opcode 0xf2 0x0f 0xea - invalid */

 /** Opcode 0x0f 0xeb - por Pq, Qq */
 FNIEMOP_STUB(iemOp_por_Pq_Qq);
-/** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xeb - por Vx, W */
+FNIEMOP_STUB(iemOp_por_Vx_W);
 /* Opcode 0xf3 0x0f 0xeb - invalid */
 /* Opcode 0xf2 0x0f 0xeb - invalid */

 /** Opcode 0x0f 0xec - paddsb Pq, Qq */
 FNIEMOP_STUB(iemOp_paddsb_Pq_Qq);
-/** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xec - paddsb Vx, Wx */
+FNIEMOP_STUB(iemOp_paddsb_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xec - invalid */
 /* Opcode 0xf2 0x0f 0xec - invalid */

 /** Opcode 0x0f 0xed - paddsw Pq, Qq */
 FNIEMOP_STUB(iemOp_paddsw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */
-FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx);
+/** Opcode 0x66 0x0f 0xed - paddsw Vx, Wx */
+FNIEMOP_STUB(iemOp_paddsw_Vx_Wx);
 /* Opcode 0xf3 0x0f 0xed - invalid */
 /* Opcode 0xf2 0x0f 0xed - invalid */

 /** Opcode 0x0f 0xee - pmaxsw Pq, Qq */
 FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq);
-/** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */
-FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W);
+/** Opcode 0x66 0x0f 0xee - pmaxsw Vx, W */
+FNIEMOP_STUB(iemOp_pmaxsw_Vx_W);
 /* Opcode 0xf3 0x0f 0xee - invalid */
 /* Opcode 0xf2 0x0f 0xee - invalid */
…
-/** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */
-FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx)
-{
-    IEMOP_MNEMONIC(vpxor, "vpxor");
+/** Opcode 0x66 0x0f 0xef - pxor Vx, Wx */
+FNIEMOP_DEF(iemOp_pxor_Vx_Wx)
+{
+    IEMOP_MNEMONIC(pxor_Vx_Wx, "pxor");
     return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pxor);
 }
…
 /* Opcode 0x0f 0xf0 - invalid */
 /* Opcode 0x66 0x0f 0xf0 - invalid */
-/** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */
-FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx);
+/** Opcode 0xf2 0x0f 0xf0 - lddqu Vx, Mx */
+FNIEMOP_STUB(iemOp_lddqu_Vx_Mx);

 /** Opcode 0x0f 0xf1 - psllw Pq, Qq */
…
FNIEMOP_STUB(iemOp_psllw_Pq_Qq); 8299 /** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */8300 FNIEMOP_STUB(iemOp_ vpsllw_Vx_Hx_W);8180 /** Opcode 0x66 0x0f 0xf1 - psllw Vx, W */ 8181 FNIEMOP_STUB(iemOp_psllw_Vx_W); 8301 8182 /* Opcode 0xf2 0x0f 0xf1 - invalid */ 8302 8183 8303 8184 /** Opcode 0x0f 0xf2 - pslld Pq, Qq */ 8304 8185 FNIEMOP_STUB(iemOp_pslld_Pq_Qq); 8305 /** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */8306 FNIEMOP_STUB(iemOp_ vpslld_Vx_Hx_Wx);8186 /** Opcode 0x66 0x0f 0xf2 - pslld Vx, Wx */ 8187 FNIEMOP_STUB(iemOp_pslld_Vx_Wx); 8307 8188 /* Opcode 0xf2 0x0f 0xf2 - invalid */ 8308 8189 8309 8190 /** Opcode 0x0f 0xf3 - psllq Pq, Qq */ 8310 8191 FNIEMOP_STUB(iemOp_psllq_Pq_Qq); 8311 /** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */8312 FNIEMOP_STUB(iemOp_ vpsllq_Vx_Hx_Wx);8192 /** Opcode 0x66 0x0f 0xf3 - psllq Vx, Wx */ 8193 FNIEMOP_STUB(iemOp_psllq_Vx_Wx); 8313 8194 /* Opcode 0xf2 0x0f 0xf3 - invalid */ 8314 8195 8315 8196 /** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */ 8316 8197 FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq); 8317 /** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */8318 FNIEMOP_STUB(iemOp_ vpmuludq_Vx_Hx_W);8198 /** Opcode 0x66 0x0f 0xf4 - pmuludq Vx, W */ 8199 FNIEMOP_STUB(iemOp_pmuludq_Vx_W); 8319 8200 /* Opcode 0xf2 0x0f 0xf4 - invalid */ 8320 8201 8321 8202 /** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */ 8322 8203 FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq); 8323 /** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */8324 FNIEMOP_STUB(iemOp_ vpmaddwd_Vx_Hx_Wx);8204 /** Opcode 0x66 0x0f 0xf5 - pmaddwd Vx, Wx */ 8205 FNIEMOP_STUB(iemOp_pmaddwd_Vx_Wx); 8325 8206 /* Opcode 0xf2 0x0f 0xf5 - invalid */ 8326 8207 8327 8208 /** Opcode 0x0f 0xf6 - psadbw Pq, Qq */ 8328 8209 FNIEMOP_STUB(iemOp_psadbw_Pq_Qq); 8329 /** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */8330 FNIEMOP_STUB(iemOp_ vpsadbw_Vx_Hx_Wx);8210 /** Opcode 0x66 0x0f 0xf6 - psadbw Vx, Wx */ 8211 FNIEMOP_STUB(iemOp_psadbw_Vx_Wx); 8331 8212 /* Opcode 0xf2 0x0f 0xf6 - invalid */ 8332 8213 8333 8214 /** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */ 8334 8215 FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq); 8335 /** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */8336 FNIEMOP_STUB(iemOp_ vmaskmovdqu_Vdq_Udq);8216 /** Opcode 0x66 0x0f 0xf7 - maskmovdqu Vdq, Udq */ 8217 FNIEMOP_STUB(iemOp_maskmovdqu_Vdq_Udq); 8337 8218 /* Opcode 0xf2 0x0f 0xf7 - invalid */ 8338 8219 8339 8220 /** Opcode 0x0f 0xf8 - psubb Pq, Qq */ 8340 8221 FNIEMOP_STUB(iemOp_psubb_Pq_Qq); 8341 /** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */8342 FNIEMOP_STUB(iemOp_ vpsubb_Vx_Hx_W);8222 /** Opcode 0x66 0x0f 0xf8 - psubb Vx, W */ 8223 FNIEMOP_STUB(iemOp_psubb_Vx_W); 8343 8224 /* Opcode 0xf2 0x0f 0xf8 - invalid */ 8344 8225 8345 8226 /** Opcode 0x0f 0xf9 - psubw Pq, Qq */ 8346 8227 FNIEMOP_STUB(iemOp_psubw_Pq_Qq); 8347 /** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */8348 FNIEMOP_STUB(iemOp_ vpsubw_Vx_Hx_Wx);8228 /** Opcode 0x66 0x0f 0xf9 - psubw Vx, Wx */ 8229 FNIEMOP_STUB(iemOp_psubw_Vx_Wx); 8349 8230 /* Opcode 0xf2 0x0f 0xf9 - invalid */ 8350 8231 8351 8232 /** Opcode 0x0f 0xfa - psubd Pq, Qq */ 8352 8233 FNIEMOP_STUB(iemOp_psubd_Pq_Qq); 8353 /** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */8354 FNIEMOP_STUB(iemOp_ vpsubd_Vx_Hx_Wx);8234 /** Opcode 0x66 0x0f 0xfa - psubd Vx, Wx */ 8235 FNIEMOP_STUB(iemOp_psubd_Vx_Wx); 8355 8236 /* Opcode 0xf2 0x0f 0xfa - invalid */ 8356 8237 8357 8238 /** Opcode 0x0f 0xfb - psubq Pq, Qq */ 8358 8239 FNIEMOP_STUB(iemOp_psubq_Pq_Qq); 8359 /** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */8360 FNIEMOP_STUB(iemOp_ vpsubq_Vx_Hx_W);8240 /** Opcode 0x66 0x0f 0xfb - psubq Vx, W */ 8241 
FNIEMOP_STUB(iemOp_psubq_Vx_W); 8361 8242 /* Opcode 0xf2 0x0f 0xfb - invalid */ 8362 8243 8363 8244 /** Opcode 0x0f 0xfc - paddb Pq, Qq */ 8364 8245 FNIEMOP_STUB(iemOp_paddb_Pq_Qq); 8365 /** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */8366 FNIEMOP_STUB(iemOp_ vpaddb_Vx_Hx_Wx);8246 /** Opcode 0x66 0x0f 0xfc - paddb Vx, Wx */ 8247 FNIEMOP_STUB(iemOp_paddb_Vx_Wx); 8367 8248 /* Opcode 0xf2 0x0f 0xfc - invalid */ 8368 8249 8369 8250 /** Opcode 0x0f 0xfd - paddw Pq, Qq */ 8370 8251 FNIEMOP_STUB(iemOp_paddw_Pq_Qq); 8371 /** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */8372 FNIEMOP_STUB(iemOp_ vpaddw_Vx_Hx_Wx);8252 /** Opcode 0x66 0x0f 0xfd - paddw Vx, Wx */ 8253 FNIEMOP_STUB(iemOp_paddw_Vx_Wx); 8373 8254 /* Opcode 0xf2 0x0f 0xfd - invalid */ 8374 8255 8375 8256 /** Opcode 0x0f 0xfe - paddd Pq, Qq */ 8376 8257 FNIEMOP_STUB(iemOp_paddd_Pq_Qq); 8377 /** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */8378 FNIEMOP_STUB(iemOp_ vpaddd_Vx_Hx_W);8258 /** Opcode 0x66 0x0f 0xfe - paddd Vx, W */ 8259 FNIEMOP_STUB(iemOp_paddd_Vx_W); 8379 8260 /* Opcode 0xf2 0x0f 0xfe - invalid */ 8380 8261 … … 8426 8307 /* 0x0f */ IEMOP_X4(iemOp_3Dnow), 8427 8308 8428 /* 0x10 */ iemOp_ vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_vmovsd_Vx_Hx_Wsd,8429 /* 0x11 */ iemOp_ vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,8430 /* 0x12 */ iemOp_ vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,8431 /* 0x13 */ iemOp_ vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8432 /* 0x14 */ iemOp_ vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8433 /* 0x15 */ iemOp_ vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8434 /* 0x16 */ iemOp_ vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,8435 /* 0x17 */ iemOp_ vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8309 /* 0x10 */ iemOp_movups_Vps_Wps, iemOp_movupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_movsd_Vx_Wsd, 8310 /* 0x11 */ iemOp_movups_Wps_Vps, iemOp_movupd_Wpd_Vpd, iemOp_movss_Wss_Vss, iemOp_movsd_Wsd_Vsd, 8311 /* 0x12 */ iemOp_movlps_Vq_Mq__movhlps, iemOp_movlpd_Vq_Mq, iemOp_movsldup_Vdq_Wdq, iemOp_movddup_Vdq_Wdq, 8312 /* 0x13 */ iemOp_movlps_Mq_Vq, iemOp_movlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8313 /* 0x14 */ iemOp_unpcklps_Vx_Wx, iemOp_unpcklpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8314 /* 0x15 */ iemOp_unpckhps_Vx_Wx, iemOp_unpckhpd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8315 /* 0x16 */ iemOp_movhpsv1_Vdq_Mq__movlhps_Vdq_Uq, iemOp_movhpdv1_Vdq_Mq, iemOp_movshdup_Vx_Wx, iemOp_InvalidNeedRM, 8316 /* 0x17 */ iemOp_movhpsv1_Mq_Vq, iemOp_movhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8436 8317 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16), 8437 8318 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev), … … 8451 8332 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, 8452 8333 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, 8453 /* 0x28 */ iemOp_ vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8454 /* 0x29 */ iemOp_ vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8455 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_ vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,8456 /* 0x2b */ iemOp_ vmovntps_Mps_Vps, 
iemOp_vmovntpd_Mpd_Vpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8457 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_ vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,8458 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_ vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,8459 /* 0x2e */ iemOp_ vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8460 /* 0x2f */ iemOp_ vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8334 /* 0x28 */ iemOp_movaps_Vps_Wps, iemOp_movapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8335 /* 0x29 */ iemOp_movaps_Wps_Vps, iemOp_movapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8336 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_cvtsi2ss_Vss_Ey, iemOp_cvtsi2sd_Vsd_Ey, 8337 /* 0x2b */ iemOp_movntps_Mps_Vps, iemOp_movntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8338 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_cvttss2si_Gy_Wss, iemOp_cvttsd2si_Gy_Wsd, 8339 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_cvtss2si_Gy_Wss, iemOp_cvtsd2si_Gy_Wsd, 8340 /* 0x2e */ iemOp_ucomiss_Vss_Wss, iemOp_ucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8341 /* 0x2f */ iemOp_comiss_Vss_Wss, iemOp_comisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8461 8342 8462 8343 /* 0x30 */ IEMOP_X4(iemOp_wrmsr), … … 8494 8375 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev), 8495 8376 8496 /* 0x50 */ iemOp_ vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8497 /* 0x51 */ iemOp_ vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,8498 /* 0x52 */ iemOp_ vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss,iemOp_InvalidNeedRM,8499 /* 0x53 */ iemOp_ vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss,iemOp_InvalidNeedRM,8500 /* 0x54 */ iemOp_ vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8501 /* 0x55 */ iemOp_ vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8502 /* 0x56 */ iemOp_ vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8503 /* 0x57 */ iemOp_ vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8504 /* 0x58 */ iemOp_ vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,8505 /* 0x59 */ iemOp_ vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,8506 /* 0x5a */ iemOp_ vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,8507 /* 0x5b */ iemOp_ vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps,iemOp_InvalidNeedRM,8508 /* 0x5c */ iemOp_ vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,8509 /* 0x5d */ iemOp_ vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,8510 /* 0x5e */ iemOp_ vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,8511 /* 0x5f */ iemOp_ vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,8512 8513 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_ vpunpcklbw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8514 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_ vpunpcklwd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8515 /* 0x62 */ iemOp_punpckldq_Pq_Qd, 
iemOp_ vpunpckldq_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8516 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_ vpacksswb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8517 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_ vpcmpgtb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8518 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_ vpcmpgtw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8519 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_ vpcmpgtd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8520 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_ vpackuswb_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8521 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_ vpunpckhbw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8522 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_ vpunpckhwd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8523 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_ vpunpckhdq_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8524 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_ vpackssdw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8525 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_ vpunpcklqdq_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8526 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_ vpunpckhqdq_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8527 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_ vmovd_q_Vy_Ey,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8528 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_ vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx,iemOp_InvalidNeedRM,8529 8530 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_ vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,8377 /* 0x50 */ iemOp_movmskps_Gy_Ups, iemOp_movmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8378 /* 0x51 */ iemOp_sqrtps_Vps_Wps, iemOp_sqrtpd_Vpd_Wpd, iemOp_sqrtss_Vss_Wss, iemOp_sqrtsd_Vsd_Wsd, 8379 /* 0x52 */ iemOp_rsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rsqrtss_Vss_Wss, iemOp_InvalidNeedRM, 8380 /* 0x53 */ iemOp_rcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_rcpss_Vss_Wss, iemOp_InvalidNeedRM, 8381 /* 0x54 */ iemOp_andps_Vps_Wps, iemOp_andpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8382 /* 0x55 */ iemOp_andnps_Vps_Wps, iemOp_andnpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8383 /* 0x56 */ iemOp_orps_Vps_Wps, iemOp_orpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8384 /* 0x57 */ iemOp_xorps_Vps_Wps, iemOp_xorpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8385 /* 0x58 */ iemOp_addps_Vps_Wps, iemOp_addpd_Vpd_Wpd, iemOp_addss_Vss_Wss, iemOp_addsd_Vsd_Wsd, 8386 /* 0x59 */ iemOp_mulps_Vps_Wps, iemOp_mulpd_Vpd_Wpd, iemOp_mulss_Vss_Wss, iemOp_mulsd_Vsd_Wsd, 8387 /* 0x5a */ iemOp_cvtps2pd_Vpd_Wps, iemOp_cvtpd2ps_Vps_Wpd, iemOp_cvtss2sd_Vsd_Wss, iemOp_cvtsd2ss_Vss_Wsd, 8388 /* 0x5b */ iemOp_cvtdq2ps_Vps_Wdq, iemOp_cvtps2dq_Vdq_Wps, iemOp_cvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM, 8389 /* 0x5c */ iemOp_subps_Vps_Wps, iemOp_subpd_Vpd_Wpd, iemOp_subss_Vss_Wss, iemOp_subsd_Vsd_Wsd, 8390 /* 0x5d */ iemOp_minps_Vps_Wps, iemOp_minpd_Vpd_Wpd, iemOp_minss_Vss_Wss, iemOp_minsd_Vsd_Wsd, 8391 /* 0x5e */ iemOp_divps_Vps_Wps, iemOp_divpd_Vpd_Wpd, iemOp_divss_Vss_Wss, iemOp_divsd_Vsd_Wsd, 8392 /* 0x5f */ iemOp_maxps_Vps_Wps, iemOp_maxpd_Vpd_Wpd, iemOp_maxss_Vss_Wss, iemOp_maxsd_Vsd_Wsd, 8393 8394 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_punpcklbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8395 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_punpcklwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8396 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_punpckldq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8397 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_packsswb_Vx_Wx, 
iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8398 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_pcmpgtb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8399 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_pcmpgtw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8400 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_pcmpgtd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8401 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_packuswb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8402 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_punpckhbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8403 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_punpckhwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8404 /* 0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_punpckhdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8405 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_packssdw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8406 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_punpcklqdq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8407 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_punpckhqdq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8408 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_movd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8409 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_movdqa_Vx_Wx, iemOp_movdqu_Vx_Wx, iemOp_InvalidNeedRM, 8410 8411 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_pshufd_Vx_Wx_Ib, iemOp_pshufhw_Vx_Wx_Ib, iemOp_pshuflw_Vx_Wx_Ib, 8531 8412 /* 0x71 */ IEMOP_X4(iemOp_Grp12), 8532 8413 /* 0x72 */ IEMOP_X4(iemOp_Grp13), 8533 8414 /* 0x73 */ IEMOP_X4(iemOp_Grp14), 8534 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_ vpcmpeqb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8535 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_ vpcmpeqw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8536 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_ vpcmpeqd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8537 /* 0x77 */ iemOp_emms __vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,iemOp_InvalidNeedRM,8415 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_pcmpeqb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8416 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_pcmpeqw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8417 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_pcmpeqd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8418 /* 0x77 */ iemOp_emms, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8538 8419 8539 8420 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, … … 8541 8422 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8542 8423 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8543 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_ vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,8544 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_ vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,8545 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_ vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq,iemOp_InvalidNeedRM,8546 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_ vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx,iemOp_InvalidNeedRM,8424 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_haddpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_haddps_Vps_Wps, 8425 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_hsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_hsubps_Vps_Wps, 8426 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_movd_q_Ey_Vy, iemOp_movq_Vq_Wq, iemOp_InvalidNeedRM, 8427 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_movdqa_Wx_Vx, iemOp_movdqu_Wx_Vx, iemOp_InvalidNeedRM, 8547 8428 8548 8429 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv), … … 8616 8497 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb), 
8617 8498 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv), 8618 /* 0xc2 */ iemOp_ vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,8499 /* 0xc2 */ iemOp_cmpps_Vps_Wps_Ib, iemOp_cmppd_Vpd_Wpd_Ib, iemOp_cmpss_Vss_Wss_Ib, iemOp_cmpsd_Vsd_Wsd_Ib, 8619 8500 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8620 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_ vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,8621 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_ vpextrw_Gd_Udq_Ib,iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,8622 /* 0xc6 */ iemOp_ vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,8501 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_pinsrw_Vdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8502 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_pextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8503 /* 0xc6 */ iemOp_shufps_Vps_Wps_Ib, iemOp_shufpd_Vpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8623 8504 /* 0xc7 */ IEMOP_X4(iemOp_Grp9), 8624 8505 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8), … … 8631 8512 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15), 8632 8513 8633 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_ vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,8634 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_ vpsrlw_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8635 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_ vpsrld_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8636 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_ vpsrlq_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8637 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_ vpaddq_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8638 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_ vpmullw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8639 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_ vmovq_Wq_Vq,iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq,8640 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_ vpmovmskb_Gd_Ux,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8641 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_ vpsubusb_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8642 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_ vpsubusw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8643 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_ vpminub_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8644 /* 0xdb */ iemOp_pand_Pq_Qq, iemOp_ vpand_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8645 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_ vpaddusb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8646 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_ vpaddusw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8647 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_ vpmaxub_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8648 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_ vpandn_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8649 8650 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_ vpavgb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8651 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_ vpsraw_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8652 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_ vpsrad_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8653 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_ vpavgw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8654 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_ vpmulhuw_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8655 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_ vpmulhw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8656 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_ vcvttpd2dq_Vx_Wpd, 
iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,8657 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_ vmovntdq_Mx_Vx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8658 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_ vpsubsb_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8659 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_ vpsubsw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8660 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_ vpminsw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8661 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_ vpor_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8662 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_ vpaddsb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8663 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_ vpaddsw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8664 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_ vpmaxsw_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8665 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_ vpxor_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8666 8667 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_ vlddqu_Vx_Mx,8668 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_ vpsllw_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8669 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_ vpslld_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8670 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_ vpsllq_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8671 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_ vpmuludq_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8672 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_ vpmaddwd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8673 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_ vpsadbw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8674 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_ vmaskmovdqu_Vdq_Udq,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8675 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_ vpsubb_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8676 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_ vpsubw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8677 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_ vpsubd_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8678 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_ vpsubq_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8679 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_ vpaddb_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8680 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_ vpaddw_Vx_Hx_Wx,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8681 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_ vpaddd_Vx_Hx_W,iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8514 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_addsubpd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_addsubps_Vps_Wps, 8515 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_psrlw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8516 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_psrld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8517 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_psrlq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8518 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_paddq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8519 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_pmullw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8520 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_movq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq, 8521 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_pmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8522 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_psubusb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8523 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_psubusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8524 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_pminub_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8525 /* 0xdb */ 
iemOp_pand_Pq_Qq, iemOp_pand_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8526 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_paddusb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8527 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_paddusw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8528 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_pmaxub_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8529 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_pandn_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8530 8531 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_pavgb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8532 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_psraw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8533 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_psrad_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8534 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_pavgw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8535 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_pmulhuw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8536 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_pmulhw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8537 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_cvttpd2dq_Vx_Wpd, iemOp_cvtdq2pd_Vx_Wpd, iemOp_cvtpd2dq_Vx_Wpd, 8538 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_movntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8539 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_psubsb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8540 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_psubsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8541 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_pminsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8542 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_por_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8543 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_paddsb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8544 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_paddsw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8545 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_pmaxsw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8546 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_pxor_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8547 8548 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_lddqu_Vx_Mx, 8549 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_psllw_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8550 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_pslld_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8551 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_psllq_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8552 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_pmuludq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8553 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_pmaddwd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8554 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_psadbw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8555 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_maskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8556 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_psubb_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8557 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_psubw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8558 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_psubd_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8559 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_psubq_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8560 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_paddb_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8561 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_paddw_Vx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8562 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_paddd_Vx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8682 8563 /* 0xff */ IEMOP_X4(iemOp_ud0), 8683 8564 }; 8684 
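Each row of the map above holds four handlers, one per mandatory-prefix column (none, 0x66, 0xF3, 0xF2), which is why the compile-time check that follows expects 256 * 4 = 1024 entries. Rows whose handler does not vary with the prefix are filled via the IEMOP_X4 helper; its actual definition lives elsewhere in the IEM sources, but a sketch of what it presumably expands to:

/* Illustrative sketch only (assumed definition, not quoted from the sources):
   replicate one handler across all four prefix columns of an opcode row. */
#define IEMOP_X4(a_pfnOp)   (a_pfnOp), (a_pfnOp), (a_pfnOp), (a_pfnOp)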
8565 AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024); 8685 8566 8686 8687 /**8688 * VEX opcode map \#1.8689 *8690 * @remarks This is (currently) a subset of g_apfnTwoByteMap, so please check if8691 * it it needs updating too when making changes.8692 */8693 IEM_STATIC const PFNIEMOP g_apfnVexMap1[] =8694 {8695 /* no prefix, 066h prefix f3h prefix, f2h prefix */8696 /* 0x00 */ IEMOP_X4(iemOp_InvalidNeedRM),8697 /* 0x01 */ IEMOP_X4(iemOp_InvalidNeedRM),8698 /* 0x02 */ IEMOP_X4(iemOp_InvalidNeedRM),8699 /* 0x03 */ IEMOP_X4(iemOp_InvalidNeedRM),8700 /* 0x04 */ IEMOP_X4(iemOp_InvalidNeedRM),8701 /* 0x05 */ IEMOP_X4(iemOp_InvalidNeedRM),8702 /* 0x06 */ IEMOP_X4(iemOp_InvalidNeedRM),8703 /* 0x07 */ IEMOP_X4(iemOp_InvalidNeedRM),8704 /* 0x08 */ IEMOP_X4(iemOp_InvalidNeedRM),8705 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM),8706 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM),8707 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM),8708 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM),8709 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM),8710 /* 0x0e */ IEMOP_X4(iemOp_InvalidNeedRM),8711 /* 0x0f */ IEMOP_X4(iemOp_InvalidNeedRM),8712 8713 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_vmovss_Vx_Hx_Wss, iemOp_vmovsd_Vx_Hx_Wsd,8714 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd,8715 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx,8716 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8717 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8718 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8719 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM,8720 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8721 /* 0x18 */ IEMOP_X4(iemOp_InvalidNeedRM),8722 /* 0x19 */ IEMOP_X4(iemOp_InvalidNeedRM),8723 /* 0x1a */ IEMOP_X4(iemOp_InvalidNeedRM),8724 /* 0x1b */ IEMOP_X4(iemOp_InvalidNeedRM),8725 /* 0x1c */ IEMOP_X4(iemOp_InvalidNeedRM),8726 /* 0x1d */ IEMOP_X4(iemOp_InvalidNeedRM),8727 /* 0x1e */ IEMOP_X4(iemOp_InvalidNeedRM),8728 /* 0x1f */ IEMOP_X4(iemOp_InvalidNeedRM),8729 8730 /* 0x20 */ IEMOP_X4(iemOp_InvalidNeedRM),8731 /* 0x21 */ IEMOP_X4(iemOp_InvalidNeedRM),8732 /* 0x22 */ IEMOP_X4(iemOp_InvalidNeedRM),8733 /* 0x23 */ IEMOP_X4(iemOp_InvalidNeedRM),8734 /* 0x24 */ IEMOP_X4(iemOp_InvalidNeedRM),8735 /* 0x25 */ IEMOP_X4(iemOp_InvalidNeedRM),8736 /* 0x26 */ IEMOP_X4(iemOp_InvalidNeedRM),8737 /* 0x27 */ IEMOP_X4(iemOp_InvalidNeedRM),8738 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8739 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8740 /* 0x2a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey,8741 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8742 /* 0x2c */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd,8743 /* 0x2d */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd,8744 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8745 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, 
iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8746 8747 /* 0x30 */ IEMOP_X4(iemOp_InvalidNeedRM),8748 /* 0x31 */ IEMOP_X4(iemOp_InvalidNeedRM),8749 /* 0x32 */ IEMOP_X4(iemOp_InvalidNeedRM),8750 /* 0x33 */ IEMOP_X4(iemOp_InvalidNeedRM),8751 /* 0x34 */ IEMOP_X4(iemOp_InvalidNeedRM),8752 /* 0x35 */ IEMOP_X4(iemOp_InvalidNeedRM),8753 /* 0x36 */ IEMOP_X4(iemOp_InvalidNeedRM),8754 /* 0x37 */ IEMOP_X4(iemOp_InvalidNeedRM),8755 /* 0x38 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8756 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8757 /* 0x3a */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8758 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8759 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8760 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8761 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8762 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeedRM), /** @todo check that there is no escape table stuff here */8763 8764 /* 0x40 */ IEMOP_X4(iemOp_InvalidNeedRM),8765 /* 0x41 */ IEMOP_X4(iemOp_InvalidNeedRM),8766 /* 0x42 */ IEMOP_X4(iemOp_InvalidNeedRM),8767 /* 0x43 */ IEMOP_X4(iemOp_InvalidNeedRM),8768 /* 0x44 */ IEMOP_X4(iemOp_InvalidNeedRM),8769 /* 0x45 */ IEMOP_X4(iemOp_InvalidNeedRM),8770 /* 0x46 */ IEMOP_X4(iemOp_InvalidNeedRM),8771 /* 0x47 */ IEMOP_X4(iemOp_InvalidNeedRM),8772 /* 0x48 */ IEMOP_X4(iemOp_InvalidNeedRM),8773 /* 0x49 */ IEMOP_X4(iemOp_InvalidNeedRM),8774 /* 0x4a */ IEMOP_X4(iemOp_InvalidNeedRM),8775 /* 0x4b */ IEMOP_X4(iemOp_InvalidNeedRM),8776 /* 0x4c */ IEMOP_X4(iemOp_InvalidNeedRM),8777 /* 0x4d */ IEMOP_X4(iemOp_InvalidNeedRM),8778 /* 0x4e */ IEMOP_X4(iemOp_InvalidNeedRM),8779 /* 0x4f */ IEMOP_X4(iemOp_InvalidNeedRM),8780 8781 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8782 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd,8783 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM,8784 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM,8785 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8786 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8787 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8788 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8789 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd,8790 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd,8791 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd,8792 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM,8793 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd,8794 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, 
iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd,8795 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd,8796 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd,8797 8798 /* 0x60 */ iemOp_InvalidNeedRM, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8799 /* 0x61 */ iemOp_InvalidNeedRM, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8800 /* 0x62 */ iemOp_InvalidNeedRM, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8801 /* 0x63 */ iemOp_InvalidNeedRM, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8802 /* 0x64 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8803 /* 0x65 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8804 /* 0x66 */ iemOp_InvalidNeedRM, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8805 /* 0x67 */ iemOp_InvalidNeedRM, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8806 /* 0x68 */ iemOp_InvalidNeedRM, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8807 /* 0x69 */ iemOp_InvalidNeedRM, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8808 /* 0x6a */ iemOp_InvalidNeedRM, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8809 /* 0x6b */ iemOp_InvalidNeedRM, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8810 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8811 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8812 /* 0x6e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8813 /* 0x6f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM,8814 8815 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib,8816 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8817 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8818 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8819 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8820 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8821 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8822 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8823 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM),8824 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM),8825 /* 0x7a */ IEMOP_X4(iemOp_InvalidNeedRM),8826 /* 0x7b */ IEMOP_X4(iemOp_InvalidNeedRM),8827 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps,8828 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps,8829 /* 0x7e */ iemOp_InvalidNeedRM, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM,8830 /* 0x7f */ iemOp_InvalidNeedRM, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM,8831 8832 /* 0x80 */ IEMOP_X4(iemOp_InvalidNeedRM),8833 /* 0x81 */ IEMOP_X4(iemOp_InvalidNeedRM),8834 /* 0x82 */ IEMOP_X4(iemOp_InvalidNeedRM),8835 /* 0x83 */ 
IEMOP_X4(iemOp_InvalidNeedRM),8836 /* 0x84 */ IEMOP_X4(iemOp_InvalidNeedRM),8837 /* 0x85 */ IEMOP_X4(iemOp_InvalidNeedRM),8838 /* 0x86 */ IEMOP_X4(iemOp_InvalidNeedRM),8839 /* 0x87 */ IEMOP_X4(iemOp_InvalidNeedRM),8840 /* 0x88 */ IEMOP_X4(iemOp_InvalidNeedRM),8841 /* 0x89 */ IEMOP_X4(iemOp_InvalidNeedRM),8842 /* 0x8a */ IEMOP_X4(iemOp_InvalidNeedRM),8843 /* 0x8b */ IEMOP_X4(iemOp_InvalidNeedRM),8844 /* 0x8c */ IEMOP_X4(iemOp_InvalidNeedRM),8845 /* 0x8d */ IEMOP_X4(iemOp_InvalidNeedRM),8846 /* 0x8e */ IEMOP_X4(iemOp_InvalidNeedRM),8847 /* 0x8f */ IEMOP_X4(iemOp_InvalidNeedRM),8848 8849 /* 0x90 */ IEMOP_X4(iemOp_InvalidNeedRM),8850 /* 0x91 */ IEMOP_X4(iemOp_InvalidNeedRM),8851 /* 0x92 */ IEMOP_X4(iemOp_InvalidNeedRM),8852 /* 0x93 */ IEMOP_X4(iemOp_InvalidNeedRM),8853 /* 0x94 */ IEMOP_X4(iemOp_InvalidNeedRM),8854 /* 0x95 */ IEMOP_X4(iemOp_InvalidNeedRM),8855 /* 0x96 */ IEMOP_X4(iemOp_InvalidNeedRM),8856 /* 0x97 */ IEMOP_X4(iemOp_InvalidNeedRM),8857 /* 0x98 */ IEMOP_X4(iemOp_InvalidNeedRM),8858 /* 0x99 */ IEMOP_X4(iemOp_InvalidNeedRM),8859 /* 0x9a */ IEMOP_X4(iemOp_InvalidNeedRM),8860 /* 0x9b */ IEMOP_X4(iemOp_InvalidNeedRM),8861 /* 0x9c */ IEMOP_X4(iemOp_InvalidNeedRM),8862 /* 0x9d */ IEMOP_X4(iemOp_InvalidNeedRM),8863 /* 0x9e */ IEMOP_X4(iemOp_InvalidNeedRM),8864 /* 0x9f */ IEMOP_X4(iemOp_InvalidNeedRM),8865 8866 /* 0xa0 */ IEMOP_X4(iemOp_InvalidNeedRM),8867 /* 0xa1 */ IEMOP_X4(iemOp_InvalidNeedRM),8868 /* 0xa2 */ IEMOP_X4(iemOp_InvalidNeedRM),8869 /* 0xa3 */ IEMOP_X4(iemOp_InvalidNeedRM),8870 /* 0xa4 */ IEMOP_X4(iemOp_InvalidNeedRM),8871 /* 0xa5 */ IEMOP_X4(iemOp_InvalidNeedRM),8872 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM),8873 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM),8874 /* 0xa8 */ IEMOP_X4(iemOp_InvalidNeedRM),8875 /* 0xa9 */ IEMOP_X4(iemOp_InvalidNeedRM),8876 /* 0xaa */ IEMOP_X4(iemOp_InvalidNeedRM),8877 /* 0xab */ IEMOP_X4(iemOp_InvalidNeedRM),8878 /* 0xac */ IEMOP_X4(iemOp_InvalidNeedRM),8879 /* 0xad */ IEMOP_X4(iemOp_InvalidNeedRM),8880 /* 0xae */ IEMOP_X4(iemOp_VGrp15),8881 /* 0xaf */ IEMOP_X4(iemOp_InvalidNeedRM),8882 8883 /* 0xb0 */ IEMOP_X4(iemOp_InvalidNeedRM),8884 /* 0xb1 */ IEMOP_X4(iemOp_InvalidNeedRM),8885 /* 0xb2 */ IEMOP_X4(iemOp_InvalidNeedRM),8886 /* 0xb3 */ IEMOP_X4(iemOp_InvalidNeedRM),8887 /* 0xb4 */ IEMOP_X4(iemOp_InvalidNeedRM),8888 /* 0xb5 */ IEMOP_X4(iemOp_InvalidNeedRM),8889 /* 0xb6 */ IEMOP_X4(iemOp_InvalidNeedRM),8890 /* 0xb7 */ IEMOP_X4(iemOp_InvalidNeedRM),8891 /* 0xb8 */ IEMOP_X4(iemOp_InvalidNeedRM),8892 /* 0xb9 */ IEMOP_X4(iemOp_InvalidNeedRM),8893 /* 0xba */ IEMOP_X4(iemOp_InvalidNeedRM),8894 /* 0xbb */ IEMOP_X4(iemOp_InvalidNeedRM),8895 /* 0xbc */ IEMOP_X4(iemOp_InvalidNeedRM),8896 /* 0xbd */ IEMOP_X4(iemOp_InvalidNeedRM),8897 /* 0xbe */ IEMOP_X4(iemOp_InvalidNeedRM),8898 /* 0xbf */ IEMOP_X4(iemOp_InvalidNeedRM),8899 8900 /* 0xc0 */ IEMOP_X4(iemOp_InvalidNeedRM),8901 /* 0xc1 */ IEMOP_X4(iemOp_InvalidNeedRM),8902 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib,8903 /* 0xc3 */ IEMOP_X4(iemOp_InvalidNeedRM),8904 /* 0xc4 */ iemOp_InvalidNeedRM, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,8905 /* 0xc5 */ iemOp_InvalidNeedRM, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8,8906 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8,iemOp_InvalidNeedRMImm8,8907 /* 0xc7 */ IEMOP_X4(iemOp_InvalidNeedRM),8908 /* 0xc8 */ IEMOP_X4(iemOp_InvalidNeedRM),8909 /* 0xc9 */ 
IEMOP_X4(iemOp_InvalidNeedRM),8910 /* 0xca */ IEMOP_X4(iemOp_InvalidNeedRM),8911 /* 0xcb */ IEMOP_X4(iemOp_InvalidNeedRM),8912 /* 0xcc */ IEMOP_X4(iemOp_InvalidNeedRM),8913 /* 0xcd */ IEMOP_X4(iemOp_InvalidNeedRM),8914 /* 0xce */ IEMOP_X4(iemOp_InvalidNeedRM),8915 /* 0xcf */ IEMOP_X4(iemOp_InvalidNeedRM),8916 8917 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps,8918 /* 0xd1 */ iemOp_InvalidNeedRM, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8919 /* 0xd2 */ iemOp_InvalidNeedRM, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8920 /* 0xd3 */ iemOp_InvalidNeedRM, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8921 /* 0xd4 */ iemOp_InvalidNeedRM, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8922 /* 0xd5 */ iemOp_InvalidNeedRM, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8923 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8924 /* 0xd7 */ iemOp_InvalidNeedRM, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8925 /* 0xd8 */ iemOp_InvalidNeedRM, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8926 /* 0xd9 */ iemOp_InvalidNeedRM, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8927 /* 0xda */ iemOp_InvalidNeedRM, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8928 /* 0xdb */ iemOp_InvalidNeedRM, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8929 /* 0xdc */ iemOp_InvalidNeedRM, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8930 /* 0xdd */ iemOp_InvalidNeedRM, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8931 /* 0xde */ iemOp_InvalidNeedRM, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8932 /* 0xdf */ iemOp_InvalidNeedRM, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8933 8934 /* 0xe0 */ iemOp_InvalidNeedRM, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8935 /* 0xe1 */ iemOp_InvalidNeedRM, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8936 /* 0xe2 */ iemOp_InvalidNeedRM, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8937 /* 0xe3 */ iemOp_InvalidNeedRM, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8938 /* 0xe4 */ iemOp_InvalidNeedRM, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8939 /* 0xe5 */ iemOp_InvalidNeedRM, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8940 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd,8941 /* 0xe7 */ iemOp_InvalidNeedRM, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8942 /* 0xe8 */ iemOp_InvalidNeedRM, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8943 /* 0xe9 */ iemOp_InvalidNeedRM, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8944 /* 0xea */ iemOp_InvalidNeedRM, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8945 /* 0xeb */ iemOp_InvalidNeedRM, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8946 /* 0xec */ iemOp_InvalidNeedRM, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8947 /* 0xed */ iemOp_InvalidNeedRM, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8948 /* 0xee */ iemOp_InvalidNeedRM, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8949 /* 0xef */ iemOp_InvalidNeedRM, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, 
iemOp_InvalidNeedRM,8950 8951 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx,8952 /* 0xf1 */ iemOp_InvalidNeedRM, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8953 /* 0xf2 */ iemOp_InvalidNeedRM, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8954 /* 0xf3 */ iemOp_InvalidNeedRM, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8955 /* 0xf4 */ iemOp_InvalidNeedRM, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8956 /* 0xf5 */ iemOp_InvalidNeedRM, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8957 /* 0xf6 */ iemOp_InvalidNeedRM, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8958 /* 0xf7 */ iemOp_InvalidNeedRM, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8959 /* 0xf8 */ iemOp_InvalidNeedRM, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8960 /* 0xf9 */ iemOp_InvalidNeedRM, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8961 /* 0xfa */ iemOp_InvalidNeedRM, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8962 /* 0xfb */ iemOp_InvalidNeedRM, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8963 /* 0xfc */ iemOp_InvalidNeedRM, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8964 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8965 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM,8966 /* 0xff */ IEMOP_X4(iemOp_ud0),8967 };8968 AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024);8969 /** @} */8970 8971 -
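With both tables laid out as 256 opcode rows of four prefix columns, the decoder can pick a handler with a single indexed load. A minimal sketch of that lookup, assuming the column index encodes the mandatory prefix in the order given by the table comments (0 = none, 1 = 0x66, 2 = 0xF3, 3 = 0xF2); the helper name here is illustrative, not part of the IEM API:

/* Hedged sketch: select the handler for bOpcode under the given
 * mandatory-prefix column.  256 opcodes x 4 columns matches the
 * AssertCompile(RT_ELEMENTS(...) == 1024) checks above. */
static PFNIEMOP iemLookupMapEntry(PFNIEMOP const *papfnMap, uint8_t bOpcode, uint8_t idxPrefix)
{
    Assert(idxPrefix < 4);   /* 0=none, 1=0x66, 2=0xF3, 3=0xF2 */
    return papfnMap[(uintptr_t)bOpcode * 4 + idxPrefix];
}

A VEX-encoded map-1 instruction would then dispatch through something like iemLookupMapEntry(g_apfnVexMap1, bOpcode, idxPrefix), with idxPrefix taken from the decoder's prefix bookkeeping.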
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
r66464 r66469 2 2 /** @file 3 3 * IEM - Instruction Decoding and Emulation. 4 * 5 * @remarks IEMAllInstructionsTwoByte0f.cpp.h is a legacy mirror of this file. 6 * Any update here is likely needed in that file too. 4 7 */ 5 8 … … 23 26 24 27 25 /** @name ..... opcodes. 26 * 28 29 /** @name VEX Opcode Map 1 27 30 * @{ 28 31 */ 29 32 30 /** @} */ 31 32 33 /** @name Two byte opcodes (first byte 0x0f). 34 * 35 * @{ 36 */ 37 38 /** Opcode 0x0f 0x00 /0. */ 39 FNIEMOPRM_DEF(iemOp_Grp6_sldt) 33 34 /* Opcode VEX.0F 0x00 - invalid */ 35 /* Opcode VEX.0F 0x01 - invalid */ 36 /* Opcode VEX.0F 0x02 - invalid */ 37 /* Opcode VEX.0F 0x03 - invalid */ 38 /* Opcode VEX.0F 0x04 - invalid */ 39 /* Opcode VEX.0F 0x05 - invalid */ 40 /* Opcode VEX.0F 0x06 - invalid */ 41 /* Opcode VEX.0F 0x07 - invalid */ 42 /* Opcode VEX.0F 0x08 - invalid */ 43 /* Opcode VEX.0F 0x09 - invalid */ 44 /* Opcode VEX.0F 0x0a - invalid */ 45 46 /** Opcode VEX.0F 0x0b. */ 47 FNIEMOP_DEF(iemOp_vud2) 40 48 { 41 IEMOP_MNEMONIC(sldt, "sldt Rv/Mw"); 42 IEMOP_HLP_MIN_286(); 43 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 44 45 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 46 { 47 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 48 switch (pVCpu->iem.s.enmEffOpSize) 49 { 50 case IEMMODE_16BIT: 51 IEM_MC_BEGIN(0, 1); 52 IEM_MC_LOCAL(uint16_t, u16Ldtr); 53 IEM_MC_FETCH_LDTR_U16(u16Ldtr); 54 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Ldtr); 55 IEM_MC_ADVANCE_RIP(); 56 IEM_MC_END(); 57 break; 58 59 case IEMMODE_32BIT: 60 IEM_MC_BEGIN(0, 1); 61 IEM_MC_LOCAL(uint32_t, u32Ldtr); 62 IEM_MC_FETCH_LDTR_U32(u32Ldtr); 63 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Ldtr); 64 IEM_MC_ADVANCE_RIP(); 65 IEM_MC_END(); 66 break; 67 68 case IEMMODE_64BIT: 69 IEM_MC_BEGIN(0, 1); 70 IEM_MC_LOCAL(uint64_t, u64Ldtr); 71 IEM_MC_FETCH_LDTR_U64(u64Ldtr); 72 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Ldtr); 73 IEM_MC_ADVANCE_RIP(); 74 IEM_MC_END(); 75 break; 76 77 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 78 } 79 } 80 else 81 { 82 IEM_MC_BEGIN(0, 2); 83 IEM_MC_LOCAL(uint16_t, u16Ldtr); 84 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 85 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 86 IEMOP_HLP_DECODED_NL_1(OP_SLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 87 IEM_MC_FETCH_LDTR_U16(u16Ldtr); 88 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Ldtr); 89 IEM_MC_ADVANCE_RIP(); 90 IEM_MC_END(); 91 } 92 return VINF_SUCCESS; 93 } 94 95 96 /** Opcode 0x0f 0x00 /1. 
*/ 97 FNIEMOPRM_DEF(iemOp_Grp6_str) 98 { 99 IEMOP_MNEMONIC(str, "str Rv/Mw"); 100 IEMOP_HLP_MIN_286(); 101 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 102 103 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 104 { 105 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 106 switch (pVCpu->iem.s.enmEffOpSize) 107 { 108 case IEMMODE_16BIT: 109 IEM_MC_BEGIN(0, 1); 110 IEM_MC_LOCAL(uint16_t, u16Tr); 111 IEM_MC_FETCH_TR_U16(u16Tr); 112 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tr); 113 IEM_MC_ADVANCE_RIP(); 114 IEM_MC_END(); 115 break; 116 117 case IEMMODE_32BIT: 118 IEM_MC_BEGIN(0, 1); 119 IEM_MC_LOCAL(uint32_t, u32Tr); 120 IEM_MC_FETCH_TR_U32(u32Tr); 121 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tr); 122 IEM_MC_ADVANCE_RIP(); 123 IEM_MC_END(); 124 break; 125 126 case IEMMODE_64BIT: 127 IEM_MC_BEGIN(0, 1); 128 IEM_MC_LOCAL(uint64_t, u64Tr); 129 IEM_MC_FETCH_TR_U64(u64Tr); 130 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tr); 131 IEM_MC_ADVANCE_RIP(); 132 IEM_MC_END(); 133 break; 134 135 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 136 } 137 } 138 else 139 { 140 IEM_MC_BEGIN(0, 2); 141 IEM_MC_LOCAL(uint16_t, u16Tr); 142 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 143 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 144 IEMOP_HLP_DECODED_NL_1(OP_STR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 145 IEM_MC_FETCH_TR_U16(u16Tr); 146 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tr); 147 IEM_MC_ADVANCE_RIP(); 148 IEM_MC_END(); 149 } 150 return VINF_SUCCESS; 151 } 152 153 154 /** Opcode 0x0f 0x00 /2. */ 155 FNIEMOPRM_DEF(iemOp_Grp6_lldt) 156 { 157 IEMOP_MNEMONIC(lldt, "lldt Ew"); 158 IEMOP_HLP_MIN_286(); 159 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 160 161 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 162 { 163 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS); 164 IEM_MC_BEGIN(1, 0); 165 IEM_MC_ARG(uint16_t, u16Sel, 0); 166 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 167 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel); 168 IEM_MC_END(); 169 } 170 else 171 { 172 IEM_MC_BEGIN(1, 1); 173 IEM_MC_ARG(uint16_t, u16Sel, 0); 174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 175 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 176 IEMOP_HLP_DECODED_NL_1(OP_LLDT, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS); 177 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */ 178 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 179 IEM_MC_CALL_CIMPL_1(iemCImpl_lldt, u16Sel); 180 IEM_MC_END(); 181 } 182 return VINF_SUCCESS; 183 } 184 185 186 /** Opcode 0x0f 0x00 /3. 
*/ 187 FNIEMOPRM_DEF(iemOp_Grp6_ltr) 188 { 189 IEMOP_MNEMONIC(ltr, "ltr Ew"); 190 IEMOP_HLP_MIN_286(); 191 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 192 193 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 194 { 195 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 196 IEM_MC_BEGIN(1, 0); 197 IEM_MC_ARG(uint16_t, u16Sel, 0); 198 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 199 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel); 200 IEM_MC_END(); 201 } 202 else 203 { 204 IEM_MC_BEGIN(1, 1); 205 IEM_MC_ARG(uint16_t, u16Sel, 0); 206 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 207 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 208 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 209 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); /** @todo test order */ 210 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 211 IEM_MC_CALL_CIMPL_1(iemCImpl_ltr, u16Sel); 212 IEM_MC_END(); 213 } 214 return VINF_SUCCESS; 215 } 216 217 218 /** Common worker for verr/verw, opcode 0x0f 0x00 /4 and /5. */ 219 FNIEMOP_DEF_2(iemOpCommonGrp6VerX, uint8_t, bRm, bool, fWrite) 220 { 221 IEMOP_HLP_MIN_286(); 222 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 223 224 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 225 { 226 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_REG, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 227 IEM_MC_BEGIN(2, 0); 228 IEM_MC_ARG(uint16_t, u16Sel, 0); 229 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); 230 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 231 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg); 232 IEM_MC_END(); 233 } 234 else 235 { 236 IEM_MC_BEGIN(2, 1); 237 IEM_MC_ARG(uint16_t, u16Sel, 0); 238 IEM_MC_ARG_CONST(bool, fWriteArg, fWrite, 1); 239 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 240 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 241 IEMOP_HLP_DECODED_NL_1(fWrite ? OP_VERW : OP_VERR, IEMOPFORM_M_MEM, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 242 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 243 IEM_MC_CALL_CIMPL_2(iemCImpl_VerX, u16Sel, fWriteArg); 244 IEM_MC_END(); 245 } 246 return VINF_SUCCESS; 247 } 248 249 250 /** Opcode 0x0f 0x00 /4. */ 251 FNIEMOPRM_DEF(iemOp_Grp6_verr) 252 { 253 IEMOP_MNEMONIC(verr, "verr Ew"); 254 IEMOP_HLP_MIN_286(); 255 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, false); 256 } 257 258 259 /** Opcode 0x0f 0x00 /5. */ 260 FNIEMOPRM_DEF(iemOp_Grp6_verw) 261 { 262 IEMOP_MNEMONIC(verw, "verw Ew"); 263 IEMOP_HLP_MIN_286(); 264 return FNIEMOP_CALL_2(iemOpCommonGrp6VerX, bRm, true); 265 } 266 267 268 /** 269 * Group 6 jump table. 270 */ 271 IEM_STATIC const PFNIEMOPRM g_apfnGroup6[8] = 272 { 273 iemOp_Grp6_sldt, 274 iemOp_Grp6_str, 275 iemOp_Grp6_lldt, 276 iemOp_Grp6_ltr, 277 iemOp_Grp6_verr, 278 iemOp_Grp6_verw, 279 iemOp_InvalidWithRM, 280 iemOp_InvalidWithRM 281 }; 282 283 /** Opcode 0x0f 0x00. */ 284 FNIEMOP_DEF(iemOp_Grp6) 285 { 286 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 287 return FNIEMOP_CALL_1(g_apfnGroup6[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm); 288 } 289 290 291 /** Opcode 0x0f 0x01 /0. 
*/ 292 FNIEMOP_DEF_1(iemOp_Grp7_sgdt, uint8_t, bRm) 293 { 294 IEMOP_MNEMONIC(sgdt, "sgdt Ms"); 295 IEMOP_HLP_MIN_286(); 296 IEMOP_HLP_64BIT_OP_SIZE(); 297 IEM_MC_BEGIN(2, 1); 298 IEM_MC_ARG(uint8_t, iEffSeg, 0); 299 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1); 300 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 301 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 302 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 303 IEM_MC_CALL_CIMPL_2(iemCImpl_sgdt, iEffSeg, GCPtrEffSrc); 304 IEM_MC_END(); 305 return VINF_SUCCESS; 306 } 307 308 309 /** Opcode 0x0f 0x01 /0. */ 310 FNIEMOP_DEF(iemOp_Grp7_vmcall) 311 { 312 IEMOP_BITCH_ABOUT_STUB(); 49 IEMOP_MNEMONIC(vud2, "vud2"); 313 50 return IEMOP_RAISE_INVALID_OPCODE(); 314 51 } 315 52 316 317 /** Opcode 0x0f 0x01 /0. */ 318 FNIEMOP_DEF(iemOp_Grp7_vmlaunch) 319 { 320 IEMOP_BITCH_ABOUT_STUB(); 321 return IEMOP_RAISE_INVALID_OPCODE(); 322 } 323 324 325 /** Opcode 0x0f 0x01 /0. */ 326 FNIEMOP_DEF(iemOp_Grp7_vmresume) 327 { 328 IEMOP_BITCH_ABOUT_STUB(); 329 return IEMOP_RAISE_INVALID_OPCODE(); 330 } 331 332 333 /** Opcode 0x0f 0x01 /0. */ 334 FNIEMOP_DEF(iemOp_Grp7_vmxoff) 335 { 336 IEMOP_BITCH_ABOUT_STUB(); 337 return IEMOP_RAISE_INVALID_OPCODE(); 338 } 339 340 341 /** Opcode 0x0f 0x01 /1. */ 342 FNIEMOP_DEF_1(iemOp_Grp7_sidt, uint8_t, bRm) 343 { 344 IEMOP_MNEMONIC(sidt, "sidt Ms"); 345 IEMOP_HLP_MIN_286(); 346 IEMOP_HLP_64BIT_OP_SIZE(); 347 IEM_MC_BEGIN(2, 1); 348 IEM_MC_ARG(uint8_t, iEffSeg, 0); 349 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1); 350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 351 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 352 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 353 IEM_MC_CALL_CIMPL_2(iemCImpl_sidt, iEffSeg, GCPtrEffSrc); 354 IEM_MC_END(); 355 return VINF_SUCCESS; 356 } 357 358 359 /** Opcode 0x0f 0x01 /1. */ 360 FNIEMOP_DEF(iemOp_Grp7_monitor) 361 { 362 IEMOP_MNEMONIC(monitor, "monitor"); 363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo Verify that monitor is allergic to lock prefixes. */ 364 return IEM_MC_DEFER_TO_CIMPL_1(iemCImpl_monitor, pVCpu->iem.s.iEffSeg); 365 } 366 367 368 /** Opcode 0x0f 0x01 /1. */ 369 FNIEMOP_DEF(iemOp_Grp7_mwait) 370 { 371 IEMOP_MNEMONIC(mwait, "mwait"); /** @todo Verify that mwait is allergic to lock prefixes. */ 372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 373 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_mwait); 374 } 375 376 377 /** Opcode 0x0f 0x01 /2. */ 378 FNIEMOP_DEF_1(iemOp_Grp7_lgdt, uint8_t, bRm) 379 { 380 IEMOP_MNEMONIC(lgdt, "lgdt"); 381 IEMOP_HLP_64BIT_OP_SIZE(); 382 IEM_MC_BEGIN(3, 1); 383 IEM_MC_ARG(uint8_t, iEffSeg, 0); 384 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1); 385 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/pVCpu->iem.s.enmEffOpSize, 2); 386 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 387 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 388 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 389 IEM_MC_CALL_CIMPL_3(iemCImpl_lgdt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg); 390 IEM_MC_END(); 391 return VINF_SUCCESS; 392 } 393 394 395 /** Opcode 0x0f 0x01 0xd0. */ 396 FNIEMOP_DEF(iemOp_Grp7_xgetbv) 397 { 398 IEMOP_MNEMONIC(xgetbv, "xgetbv"); 399 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor) 400 { 401 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); 402 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xgetbv); 403 } 404 return IEMOP_RAISE_INVALID_OPCODE(); 405 } 406 407 408 /** Opcode 0x0f 0x01 0xd1. 
*/ 409 FNIEMOP_DEF(iemOp_Grp7_xsetbv) 410 { 411 IEMOP_MNEMONIC(xsetbv, "xsetbv"); 412 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor) 413 { 414 IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES(); 415 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_xsetbv); 416 } 417 return IEMOP_RAISE_INVALID_OPCODE(); 418 } 419 420 421 /** Opcode 0x0f 0x01 /3. */ 422 FNIEMOP_DEF_1(iemOp_Grp7_lidt, uint8_t, bRm) 423 { 424 IEMOP_MNEMONIC(lidt, "lidt"); 425 IEMMODE enmEffOpSize = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT 426 ? IEMMODE_64BIT 427 : pVCpu->iem.s.enmEffOpSize; 428 IEM_MC_BEGIN(3, 1); 429 IEM_MC_ARG(uint8_t, iEffSeg, 0); 430 IEM_MC_ARG(RTGCPTR, GCPtrEffSrc, 1); 431 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSizeArg,/*=*/enmEffOpSize, 2); 432 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 433 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 434 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 435 IEM_MC_CALL_CIMPL_3(iemCImpl_lidt, iEffSeg, GCPtrEffSrc, enmEffOpSizeArg); 436 IEM_MC_END(); 437 return VINF_SUCCESS; 438 } 439 440 441 #ifdef VBOX_WITH_NESTED_HWVIRT 442 /** Opcode 0x0f 0x01 0xd8. */ 443 FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun) 444 { 445 IEMOP_MNEMONIC(vmrun, "vmrun"); 446 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun); 447 } 448 449 /** Opcode 0x0f 0x01 0xd9. */ 450 FNIEMOP_DEF(iemOp_Grp7_Amd_vmmcall) 451 { 452 IEMOP_MNEMONIC(vmmcall, "vmmcall"); 453 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmmcall); 454 } 455 456 457 /** Opcode 0x0f 0x01 0xda. */ 458 FNIEMOP_DEF(iemOp_Grp7_Amd_vmload) 459 { 460 IEMOP_MNEMONIC(vmload, "vmload"); 461 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload); 462 } 463 464 465 /** Opcode 0x0f 0x01 0xdb. */ 466 FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave) 467 { 468 IEMOP_MNEMONIC(vmsave, "vmsave"); 469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave); 470 } 471 472 473 /** Opcode 0x0f 0x01 0xdc. */ 474 FNIEMOP_DEF(iemOp_Grp7_Amd_stgi) 475 { 476 IEMOP_MNEMONIC(stgi, "stgi"); 477 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi); 478 } 479 480 481 /** Opcode 0x0f 0x01 0xdd. */ 482 FNIEMOP_DEF(iemOp_Grp7_Amd_clgi) 483 { 484 IEMOP_MNEMONIC(clgi, "clgi"); 485 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi); 486 } 487 488 489 /** Opcode 0x0f 0x01 0xdf. */ 490 FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga) 491 { 492 IEMOP_MNEMONIC(invlpga, "invlpga"); 493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga); 494 } 495 #else 496 /** Opcode 0x0f 0x01 0xd8. */ 497 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun); 498 499 /** Opcode 0x0f 0x01 0xd9. */ 500 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall); 501 502 /** Opcode 0x0f 0x01 0xda. */ 503 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload); 504 505 /** Opcode 0x0f 0x01 0xdb. */ 506 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave); 507 508 /** Opcode 0x0f 0x01 0xdc. */ 509 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi); 510 511 /** Opcode 0x0f 0x01 0xdd. */ 512 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi); 513 514 /** Opcode 0x0f 0x01 0xdf. */ 515 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga); 516 #endif /* VBOX_WITH_NESTED_HWVIRT */ 517 518 /** Opcode 0x0f 0x01 0xde. */ 519 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit); 520 521 /** Opcode 0x0f 0x01 /4. 
*/ 522 FNIEMOP_DEF_1(iemOp_Grp7_smsw, uint8_t, bRm) 523 { 524 IEMOP_MNEMONIC(smsw, "smsw"); 525 IEMOP_HLP_MIN_286(); 526 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 527 { 528 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 529 switch (pVCpu->iem.s.enmEffOpSize) 530 { 531 case IEMMODE_16BIT: 532 IEM_MC_BEGIN(0, 1); 533 IEM_MC_LOCAL(uint16_t, u16Tmp); 534 IEM_MC_FETCH_CR0_U16(u16Tmp); 535 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386) 536 { /* likely */ } 537 else if (IEM_GET_TARGET_CPU(pVCpu) >= IEMTARGETCPU_386) 538 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0); 539 else 540 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); 541 IEM_MC_STORE_GREG_U16((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u16Tmp); 542 IEM_MC_ADVANCE_RIP(); 543 IEM_MC_END(); 544 return VINF_SUCCESS; 545 546 case IEMMODE_32BIT: 547 IEM_MC_BEGIN(0, 1); 548 IEM_MC_LOCAL(uint32_t, u32Tmp); 549 IEM_MC_FETCH_CR0_U32(u32Tmp); 550 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 551 IEM_MC_ADVANCE_RIP(); 552 IEM_MC_END(); 553 return VINF_SUCCESS; 554 555 case IEMMODE_64BIT: 556 IEM_MC_BEGIN(0, 1); 557 IEM_MC_LOCAL(uint64_t, u64Tmp); 558 IEM_MC_FETCH_CR0_U64(u64Tmp); 559 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 560 IEM_MC_ADVANCE_RIP(); 561 IEM_MC_END(); 562 return VINF_SUCCESS; 563 564 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 565 } 566 } 567 else 568 { 569 /* Ignore operand size here, memory refs are always 16-bit. */ 570 IEM_MC_BEGIN(0, 2); 571 IEM_MC_LOCAL(uint16_t, u16Tmp); 572 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 573 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 574 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 575 IEM_MC_FETCH_CR0_U16(u16Tmp); 576 if (IEM_GET_TARGET_CPU(pVCpu) > IEMTARGETCPU_386) 577 { /* likely */ } 578 else if (pVCpu->iem.s.uTargetCpu >= IEMTARGETCPU_386) 579 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xffe0); 580 else 581 IEM_MC_OR_LOCAL_U16(u16Tmp, 0xfff0); 582 IEM_MC_STORE_MEM_U16(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u16Tmp); 583 IEM_MC_ADVANCE_RIP(); 584 IEM_MC_END(); 585 return VINF_SUCCESS; 586 } 587 } 588 589 590 /** Opcode 0x0f 0x01 /6. */ 591 FNIEMOP_DEF_1(iemOp_Grp7_lmsw, uint8_t, bRm) 592 { 593 /* The operand size is effectively ignored, all is 16-bit and only the 594 lower 3-bits are used. */ 595 IEMOP_MNEMONIC(lmsw, "lmsw"); 596 IEMOP_HLP_MIN_286(); 597 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 598 { 599 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 600 IEM_MC_BEGIN(1, 0); 601 IEM_MC_ARG(uint16_t, u16Tmp, 0); 602 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 603 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp); 604 IEM_MC_END(); 605 } 606 else 607 { 608 IEM_MC_BEGIN(1, 1); 609 IEM_MC_ARG(uint16_t, u16Tmp, 0); 610 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 611 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 612 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 613 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 614 IEM_MC_CALL_CIMPL_1(iemCImpl_lmsw, u16Tmp); 615 IEM_MC_END(); 616 } 617 return VINF_SUCCESS; 618 } 619 620 621 /** Opcode 0x0f 0x01 /7. */ 622 FNIEMOP_DEF_1(iemOp_Grp7_invlpg, uint8_t, bRm) 623 { 624 IEMOP_MNEMONIC(invlpg, "invlpg"); 625 IEMOP_HLP_MIN_486(); 626 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 627 IEM_MC_BEGIN(1, 1); 628 IEM_MC_ARG(RTGCPTR, GCPtrEffDst, 0); 629 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 630 IEM_MC_CALL_CIMPL_1(iemCImpl_invlpg, GCPtrEffDst); 631 IEM_MC_END(); 632 return VINF_SUCCESS; 633 } 634 635 636 /** Opcode 0x0f 0x01 /7. 
*/ 637 FNIEMOP_DEF(iemOp_Grp7_swapgs) 638 { 639 IEMOP_MNEMONIC(swapgs, "swapgs"); 640 IEMOP_HLP_ONLY_64BIT(); 641 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 642 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_swapgs); 643 } 644 645 646 /** Opcode 0x0f 0x01 /7. */ 647 FNIEMOP_DEF(iemOp_Grp7_rdtscp) 648 { 649 NOREF(pVCpu); 650 IEMOP_BITCH_ABOUT_STUB(); 651 return VERR_IEM_INSTR_NOT_IMPLEMENTED; 652 } 653 654 53 /* Opcode VEX.0F 0x0c - invalid */ 54 /* Opcode VEX.0F 0x0d - invalid */ 55 /* Opcode VEX.0F 0x0e - invalid */ 56 /* Opcode VEX.0F 0x0f - invalid */ 57 58 59 /** Opcode VEX.0F 0x10 - vmovups Vps, Wps */ 60 FNIEMOP_STUB(iemOp_vmovups_Vps_Wps); 61 /** Opcode VEX.66.0F 0x10 - vmovupd Vpd, Wpd */ 62 FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd); 63 64 65 /** Opcode VEX 0xf3 0x0f 0x10 - vmovsd Vx, Hx, Wsd */ 655 66 /** 656 * Group 7 jump table, memory variant. 67 * @ opcode 0x10 68 * @ oppfx 0xf3 69 * @ opcpuid sse 70 * @ opgroup og_sse_simdfp_datamove 71 * @ opxcpttype 5 72 * @ optest op1=1 op2=2 -> op1=2 73 * @ optest op1=0 op2=-22 -> op1=-22 74 * @ oponly 657 75 */ 658 IEM_STATIC const PFNIEMOPRM g_apfnGroup7Mem[8] = 659 { 660 iemOp_Grp7_sgdt, 661 iemOp_Grp7_sidt, 662 iemOp_Grp7_lgdt, 663 iemOp_Grp7_lidt, 664 iemOp_Grp7_smsw, 665 iemOp_InvalidWithRM, 666 iemOp_Grp7_lmsw, 667 iemOp_Grp7_invlpg 668 }; 669 670 671 /** Opcode 0x0f 0x01. */ 672 FNIEMOP_DEF(iemOp_Grp7) 673 { 674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 675 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 676 return FNIEMOP_CALL_1(g_apfnGroup7Mem[(bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK], bRm); 677 678 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) 679 { 680 case 0: 681 switch (bRm & X86_MODRM_RM_MASK) 682 { 683 case 1: return FNIEMOP_CALL(iemOp_Grp7_vmcall); 684 case 2: return FNIEMOP_CALL(iemOp_Grp7_vmlaunch); 685 case 3: return FNIEMOP_CALL(iemOp_Grp7_vmresume); 686 case 4: return FNIEMOP_CALL(iemOp_Grp7_vmxoff); 687 } 688 return IEMOP_RAISE_INVALID_OPCODE(); 689 690 case 1: 691 switch (bRm & X86_MODRM_RM_MASK) 692 { 693 case 0: return FNIEMOP_CALL(iemOp_Grp7_monitor); 694 case 1: return FNIEMOP_CALL(iemOp_Grp7_mwait); 695 } 696 return IEMOP_RAISE_INVALID_OPCODE(); 697 698 case 2: 699 switch (bRm & X86_MODRM_RM_MASK) 700 { 701 case 0: return FNIEMOP_CALL(iemOp_Grp7_xgetbv); 702 case 1: return FNIEMOP_CALL(iemOp_Grp7_xsetbv); 703 } 704 return IEMOP_RAISE_INVALID_OPCODE(); 705 706 case 3: 707 switch (bRm & X86_MODRM_RM_MASK) 708 { 709 case 0: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmrun); 710 case 1: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmmcall); 711 case 2: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmload); 712 case 3: return FNIEMOP_CALL(iemOp_Grp7_Amd_vmsave); 713 case 4: return FNIEMOP_CALL(iemOp_Grp7_Amd_stgi); 714 case 5: return FNIEMOP_CALL(iemOp_Grp7_Amd_clgi); 715 case 6: return FNIEMOP_CALL(iemOp_Grp7_Amd_skinit); 716 case 7: return FNIEMOP_CALL(iemOp_Grp7_Amd_invlpga); 717 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 718 } 719 720 case 4: 721 return FNIEMOP_CALL_1(iemOp_Grp7_smsw, bRm); 722 723 case 5: 724 return IEMOP_RAISE_INVALID_OPCODE(); 725 726 case 6: 727 return FNIEMOP_CALL_1(iemOp_Grp7_lmsw, bRm); 728 729 case 7: 730 switch (bRm & X86_MODRM_RM_MASK) 731 { 732 case 0: return FNIEMOP_CALL(iemOp_Grp7_swapgs); 733 case 1: return FNIEMOP_CALL(iemOp_Grp7_rdtscp); 734 } 735 return IEMOP_RAISE_INVALID_OPCODE(); 736 737 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 738 } 739 } 740 741 /** Opcode 0x0f 0x00 /3. 
*/ 742 FNIEMOP_DEF_1(iemOpCommonLarLsl_Gv_Ew, bool, fIsLar) 743 { 744 IEMOP_HLP_NO_REAL_OR_V86_MODE(); 745 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 746 747 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 748 { 749 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_REG, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 750 switch (pVCpu->iem.s.enmEffOpSize) 751 { 752 case IEMMODE_16BIT: 753 { 754 IEM_MC_BEGIN(3, 0); 755 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 756 IEM_MC_ARG(uint16_t, u16Sel, 1); 757 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2); 758 759 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 760 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 761 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg); 762 763 IEM_MC_END(); 764 return VINF_SUCCESS; 765 } 766 767 case IEMMODE_32BIT: 768 case IEMMODE_64BIT: 769 { 770 IEM_MC_BEGIN(3, 0); 771 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 772 IEM_MC_ARG(uint16_t, u16Sel, 1); 773 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2); 774 775 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 776 IEM_MC_FETCH_GREG_U16(u16Sel, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 777 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg); 778 779 IEM_MC_END(); 780 return VINF_SUCCESS; 781 } 782 783 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 784 } 785 } 786 else 787 { 788 switch (pVCpu->iem.s.enmEffOpSize) 789 { 790 case IEMMODE_16BIT: 791 { 792 IEM_MC_BEGIN(3, 1); 793 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 794 IEM_MC_ARG(uint16_t, u16Sel, 1); 795 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2); 796 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 797 798 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 799 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 800 801 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 802 IEM_MC_REF_GREG_U16(pu16Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 803 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u16, pu16Dst, u16Sel, fIsLarArg); 804 805 IEM_MC_END(); 806 return VINF_SUCCESS; 807 } 808 809 case IEMMODE_32BIT: 810 case IEMMODE_64BIT: 811 { 812 IEM_MC_BEGIN(3, 1); 813 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 814 IEM_MC_ARG(uint16_t, u16Sel, 1); 815 IEM_MC_ARG_CONST(bool, fIsLarArg, fIsLar, 2); 816 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 817 818 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 819 IEMOP_HLP_DECODED_NL_2(fIsLar ? OP_LAR : OP_LSL, IEMOPFORM_RM_MEM, OP_PARM_Gv, OP_PARM_Ew, DISOPTYPE_DANGEROUS | DISOPTYPE_PRIVILEGED_NOTRAP); 820 /** @todo testcase: make sure it's a 16-bit read. */ 821 822 IEM_MC_FETCH_MEM_U16(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 823 IEM_MC_REF_GREG_U64(pu64Dst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 824 IEM_MC_CALL_CIMPL_3(iemCImpl_LarLsl_u64, pu64Dst, u16Sel, fIsLarArg); 825 826 IEM_MC_END(); 827 return VINF_SUCCESS; 828 } 829 830 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 831 } 832 } 833 } 834 835 836 837 /** Opcode 0x0f 0x02. */ 838 FNIEMOP_DEF(iemOp_lar_Gv_Ew) 839 { 840 IEMOP_MNEMONIC(lar, "lar Gv,Ew"); 841 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, true); 842 } 843 844 845 /** Opcode 0x0f 0x03. 
*/ 846 FNIEMOP_DEF(iemOp_lsl_Gv_Ew) 847 { 848 IEMOP_MNEMONIC(lsl, "lsl Gv,Ew"); 849 return FNIEMOP_CALL_1(iemOpCommonLarLsl_Gv_Ew, false); 850 } 851 852 853 /** Opcode 0x0f 0x05. */ 854 FNIEMOP_DEF(iemOp_syscall) 855 { 856 IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */ 857 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 858 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_syscall); 859 } 860 861 862 /** Opcode 0x0f 0x06. */ 863 FNIEMOP_DEF(iemOp_clts) 864 { 865 IEMOP_MNEMONIC(clts, "clts"); 866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 867 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clts); 868 } 869 870 871 /** Opcode 0x0f 0x07. */ 872 FNIEMOP_DEF(iemOp_sysret) 873 { 874 IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */ 875 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 876 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_sysret); 877 } 878 879 880 /** Opcode 0x0f 0x08. */ 881 FNIEMOP_STUB(iemOp_invd); 882 // IEMOP_HLP_MIN_486(); 883 884 885 /** Opcode 0x0f 0x09. */ 886 FNIEMOP_DEF(iemOp_wbinvd) 887 { 888 IEMOP_MNEMONIC(wbinvd, "wbinvd"); 889 IEMOP_HLP_MIN_486(); 890 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 891 IEM_MC_BEGIN(0, 0); 892 IEM_MC_RAISE_GP0_IF_CPL_NOT_ZERO(); 893 IEM_MC_ADVANCE_RIP(); 894 IEM_MC_END(); 895 return VINF_SUCCESS; /* ignore for now */ 896 } 897 898 899 /** Opcode 0x0f 0x0b. */ 900 FNIEMOP_DEF(iemOp_ud2) 901 { 902 IEMOP_MNEMONIC(ud2, "ud2"); 903 return IEMOP_RAISE_INVALID_OPCODE(); 904 } 905 906 /** Opcode 0x0f 0x0d. */ 907 FNIEMOP_DEF(iemOp_nop_Ev_GrpP) 908 { 909 /* AMD prefetch group, Intel implements this as NOP Ev (and so do we). */ 910 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNowPrefetch) 911 { 912 IEMOP_MNEMONIC(GrpPNotSupported, "GrpP"); 913 return IEMOP_RAISE_INVALID_OPCODE(); 914 } 915 916 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 917 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 918 { 919 IEMOP_MNEMONIC(GrpPInvalid, "GrpP"); 920 return IEMOP_RAISE_INVALID_OPCODE(); 921 } 922 923 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) 924 { 925 case 2: /* Aliased to /0 for the time being. */ 926 case 4: /* Aliased to /0 for the time being. */ 927 case 5: /* Aliased to /0 for the time being. */ 928 case 6: /* Aliased to /0 for the time being. */ 929 case 7: /* Aliased to /0 for the time being. */ 930 case 0: IEMOP_MNEMONIC(prefetch, "prefetch"); break; 931 case 1: IEMOP_MNEMONIC(prefetchw_1, "prefetchw"); break; 932 case 3: IEMOP_MNEMONIC(prefetchw_3, "prefetchw"); break; 933 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 934 } 935 936 IEM_MC_BEGIN(0, 1); 937 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 938 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 939 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 940 /* Currently a NOP. */ 941 NOREF(GCPtrEffSrc); 942 IEM_MC_ADVANCE_RIP(); 943 IEM_MC_END(); 944 return VINF_SUCCESS; 945 } 946 947 948 /** Opcode 0x0f 0x0e. */ 949 FNIEMOP_STUB(iemOp_femms); 950 951 952 /** Opcode 0x0f 0x0f 0x0c. */ 953 FNIEMOP_STUB(iemOp_3Dnow_pi2fw_Pq_Qq); 954 955 /** Opcode 0x0f 0x0f 0x0d. */ 956 FNIEMOP_STUB(iemOp_3Dnow_pi2fd_Pq_Qq); 957 958 /** Opcode 0x0f 0x0f 0x1c. */ 959 FNIEMOP_STUB(iemOp_3Dnow_pf2fw_Pq_Qq); 960 961 /** Opcode 0x0f 0x0f 0x1d. */ 962 FNIEMOP_STUB(iemOp_3Dnow_pf2fd_Pq_Qq); 963 964 /** Opcode 0x0f 0x0f 0x8a. */ 965 FNIEMOP_STUB(iemOp_3Dnow_pfnacc_Pq_Qq); 966 967 /** Opcode 0x0f 0x0f 0x8e. */ 968 FNIEMOP_STUB(iemOp_3Dnow_pfpnacc_Pq_Qq); 969 970 /** Opcode 0x0f 0x0f 0x90. */ 971 FNIEMOP_STUB(iemOp_3Dnow_pfcmpge_Pq_Qq); 972 973 /** Opcode 0x0f 0x0f 0x94. 
*/ 974 FNIEMOP_STUB(iemOp_3Dnow_pfmin_Pq_Qq); 975 976 /** Opcode 0x0f 0x0f 0x96. */ 977 FNIEMOP_STUB(iemOp_3Dnow_pfrcp_Pq_Qq); 978 979 /** Opcode 0x0f 0x0f 0x97. */ 980 FNIEMOP_STUB(iemOp_3Dnow_pfrsqrt_Pq_Qq); 981 982 /** Opcode 0x0f 0x0f 0x9a. */ 983 FNIEMOP_STUB(iemOp_3Dnow_pfsub_Pq_Qq); 984 985 /** Opcode 0x0f 0x0f 0x9e. */ 986 FNIEMOP_STUB(iemOp_3Dnow_pfadd_PQ_Qq); 987 988 /** Opcode 0x0f 0x0f 0xa0. */ 989 FNIEMOP_STUB(iemOp_3Dnow_pfcmpgt_Pq_Qq); 990 991 /** Opcode 0x0f 0x0f 0xa4. */ 992 FNIEMOP_STUB(iemOp_3Dnow_pfmax_Pq_Qq); 993 994 /** Opcode 0x0f 0x0f 0xa6. */ 995 FNIEMOP_STUB(iemOp_3Dnow_pfrcpit1_Pq_Qq); 996 997 /** Opcode 0x0f 0x0f 0xa7. */ 998 FNIEMOP_STUB(iemOp_3Dnow_pfrsqit1_Pq_Qq); 999 1000 /** Opcode 0x0f 0x0f 0xaa. */ 1001 FNIEMOP_STUB(iemOp_3Dnow_pfsubr_Pq_Qq); 1002 1003 /** Opcode 0x0f 0x0f 0xae. */ 1004 FNIEMOP_STUB(iemOp_3Dnow_pfacc_PQ_Qq); 1005 1006 /** Opcode 0x0f 0x0f 0xb0. */ 1007 FNIEMOP_STUB(iemOp_3Dnow_pfcmpeq_Pq_Qq); 1008 1009 /** Opcode 0x0f 0x0f 0xb4. */ 1010 FNIEMOP_STUB(iemOp_3Dnow_pfmul_Pq_Qq); 1011 1012 /** Opcode 0x0f 0x0f 0xb6. */ 1013 FNIEMOP_STUB(iemOp_3Dnow_pfrcpit2_Pq_Qq); 1014 1015 /** Opcode 0x0f 0x0f 0xb7. */ 1016 FNIEMOP_STUB(iemOp_3Dnow_pmulhrw_Pq_Qq); 1017 1018 /** Opcode 0x0f 0x0f 0xbb. */ 1019 FNIEMOP_STUB(iemOp_3Dnow_pswapd_Pq_Qq); 1020 1021 /** Opcode 0x0f 0x0f 0xbf. */ 1022 FNIEMOP_STUB(iemOp_3Dnow_pavgusb_PQ_Qq); 1023 1024 1025 /** Opcode 0x0f 0x0f. */ 1026 FNIEMOP_DEF(iemOp_3Dnow) 1027 { 1028 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->f3DNow) 1029 { 1030 IEMOP_MNEMONIC(Inv3Dnow, "3Dnow"); 1031 return IEMOP_RAISE_INVALID_OPCODE(); 1032 } 1033 1034 /* This is pretty sparse, use switch instead of table. */ 1035 uint8_t b; IEM_OPCODE_GET_NEXT_U8(&b); 1036 switch (b) 1037 { 1038 case 0x0c: return FNIEMOP_CALL(iemOp_3Dnow_pi2fw_Pq_Qq); 1039 case 0x0d: return FNIEMOP_CALL(iemOp_3Dnow_pi2fd_Pq_Qq); 1040 case 0x1c: return FNIEMOP_CALL(iemOp_3Dnow_pf2fw_Pq_Qq); 1041 case 0x1d: return FNIEMOP_CALL(iemOp_3Dnow_pf2fd_Pq_Qq); 1042 case 0x8a: return FNIEMOP_CALL(iemOp_3Dnow_pfnacc_Pq_Qq); 1043 case 0x8e: return FNIEMOP_CALL(iemOp_3Dnow_pfpnacc_Pq_Qq); 1044 case 0x90: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpge_Pq_Qq); 1045 case 0x94: return FNIEMOP_CALL(iemOp_3Dnow_pfmin_Pq_Qq); 1046 case 0x96: return FNIEMOP_CALL(iemOp_3Dnow_pfrcp_Pq_Qq); 1047 case 0x97: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqrt_Pq_Qq); 1048 case 0x9a: return FNIEMOP_CALL(iemOp_3Dnow_pfsub_Pq_Qq); 1049 case 0x9e: return FNIEMOP_CALL(iemOp_3Dnow_pfadd_PQ_Qq); 1050 case 0xa0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpgt_Pq_Qq); 1051 case 0xa4: return FNIEMOP_CALL(iemOp_3Dnow_pfmax_Pq_Qq); 1052 case 0xa6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit1_Pq_Qq); 1053 case 0xa7: return FNIEMOP_CALL(iemOp_3Dnow_pfrsqit1_Pq_Qq); 1054 case 0xaa: return FNIEMOP_CALL(iemOp_3Dnow_pfsubr_Pq_Qq); 1055 case 0xae: return FNIEMOP_CALL(iemOp_3Dnow_pfacc_PQ_Qq); 1056 case 0xb0: return FNIEMOP_CALL(iemOp_3Dnow_pfcmpeq_Pq_Qq); 1057 case 0xb4: return FNIEMOP_CALL(iemOp_3Dnow_pfmul_Pq_Qq); 1058 case 0xb6: return FNIEMOP_CALL(iemOp_3Dnow_pfrcpit2_Pq_Qq); 1059 case 0xb7: return FNIEMOP_CALL(iemOp_3Dnow_pmulhrw_Pq_Qq); 1060 case 0xbb: return FNIEMOP_CALL(iemOp_3Dnow_pswapd_Pq_Qq); 1061 case 0xbf: return FNIEMOP_CALL(iemOp_3Dnow_pavgusb_PQ_Qq); 1062 default: 1063 return IEMOP_RAISE_INVALID_OPCODE(); 1064 } 1065 } 1066 1067 1068 /** Opcode 0x0f 0x10 - vmovups Vps, Wps */ 1069 FNIEMOP_STUB(iemOp_vmovups_Vps_Wps); 1070 /** Opcode 0x66 0x0f 0x10 - vmovupd Vpd, Wpd */ 1071 FNIEMOP_STUB(iemOp_vmovupd_Vpd_Wpd); 76 
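A note on the shape of the tables being filled in here: VEX map 1 gives each opcode byte up to four handlers, selected by the SIMD prefix of the VEX encoding (none, 0x66, 0xF3, 0xF2), and every combination marked "- invalid" above decodes to the invalid-opcode (#UD) path. A minimal, self-contained C sketch of one such row (hypothetical names throughout; this is not the IEM handler type or table layout):

    /* Hypothetical sketch of a four-column SIMD-prefix dispatch row.
       Column 0 = no prefix, 1 = 0x66, 2 = 0xF3, 3 = 0xF2. */
    #include <stdio.h>

    typedef int (*PFNVEXOP)(void);

    static int vexInvalid(void)   { printf("#UD\n");       return -1; } /* invalid prefix combination */
    static int vexVunpcklps(void) { printf("vunpcklps\n"); return 0;  } /* VEX.0F 0x14 */
    static int vexVunpcklpd(void) { printf("vunpcklpd\n"); return 0;  } /* VEX.66.0F 0x14 */

    /* The row for opcode 0x14: the 0xF3 and 0xF2 columns are invalid. */
    static const PFNVEXOP g_aVexRow0x14[4] = { vexVunpcklps, vexVunpcklpd, vexInvalid, vexInvalid };

    int main(void)
    {
        unsigned idxPrefix = 1;            /* pretend the decoder saw a 0x66 prefix */
        return g_aVexRow0x14[idxPrefix](); /* prints "vunpcklpd" */
    }

The actual map layout in IEM may differ; the sketch only illustrates the four-column convention that the stubs and "- invalid" comments in this file follow.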
FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss); 77 //FNIEMOP_DEF(iemOp_movss_Vss_Wss) 78 //{ 79 // IEMOP_MNEMONIC2(RM, VMOVSS, vmovss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 80 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 81 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 82 // { 83 // /* 84 // * Register, register. 85 // */ 86 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 87 // IEM_MC_BEGIN(0, 1); 88 // IEM_MC_LOCAL(uint32_t, uSrc); 89 // 90 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 91 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 92 // IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 93 // IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 94 // 95 // IEM_MC_ADVANCE_RIP(); 96 // IEM_MC_END(); 97 // } 98 // else 99 // { 100 // /* 101 // * Memory, register. 102 // */ 103 // IEM_MC_BEGIN(0, 2); 104 // IEM_MC_LOCAL(uint32_t, uSrc); 105 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 106 // 107 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 108 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 109 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 110 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 111 // 112 // IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 113 // IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 114 // 115 // IEM_MC_ADVANCE_RIP(); 116 // IEM_MC_END(); 117 // } 118 // return VINF_SUCCESS; 119 //} 120 121 /** Opcode VEX.F2.0F 0x10 - vmovsd Vx, Hx, Wsd */ 122 FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd); 1072 123 1073 124 1074 125 /** 1075 * @opcode 0x10 1076 * @oppfx 0xf3 1077 * @opcpuid sse 1078 * @opgroup og_sse_simdfp_datamove 1079 * @opxcpttype 5 1080 * @optest op1=1 op2=2 -> op1=2 1081 * @optest op1=0 op2=-22 -> op1=-22 1082 * @oponly 1083 */ 1084 FNIEMOP_DEF(iemOp_movss_Vss_Wss) 1085 { 1086 IEMOP_MNEMONIC2(RM, MOVSS, movss, VssZxReg, Wss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1087 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1088 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1089 { 1090 /* 1091 * Register, register. 1092 */ 1093 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1094 IEM_MC_BEGIN(0, 1); 1095 IEM_MC_LOCAL(uint32_t, uSrc); 1096 1097 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1098 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1099 IEM_MC_FETCH_XREG_U32(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1100 IEM_MC_STORE_XREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1101 1102 IEM_MC_ADVANCE_RIP(); 1103 IEM_MC_END(); 1104 } 1105 else 1106 { 1107 /* 1108 * Memory, register. 
1109 */ 1110 IEM_MC_BEGIN(0, 2); 1111 IEM_MC_LOCAL(uint32_t, uSrc); 1112 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1113 1114 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1115 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1116 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1117 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1118 1119 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1120 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1121 1122 IEM_MC_ADVANCE_RIP(); 1123 IEM_MC_END(); 1124 } 1125 return VINF_SUCCESS; 1126 } 1127 1128 1129 /** Opcode VEX 0xf3 0x0f 0x10 - vmovsd Vx, Hx, Wsd */ 1130 FNIEMOP_STUB(iemOp_vmovss_Vx_Hx_Wss); 1131 1132 /** Opcode 0xf2 0x0f 0x10 - vmovsd Vx, Hx, Wsd */ 1133 FNIEMOP_STUB(iemOp_vmovsd_Vx_Hx_Wsd); 134 FNIEMOP_STUB(iemOp_vmovups_Wps_Vps); 135 //FNIEMOP_DEF(iemOp_vmovups_Wps_Vps) 136 //{ 137 // IEMOP_MNEMONIC2(MR, VMOVUPS, vmovups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 138 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 139 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 140 // { 141 // /* 142 // * Register, register. 143 // */ 144 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 145 // IEM_MC_BEGIN(0, 0); 146 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 147 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 148 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 149 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 150 // IEM_MC_ADVANCE_RIP(); 151 // IEM_MC_END(); 152 // } 153 // else 154 // { 155 // /* 156 // * Memory, register. 157 // */ 158 // IEM_MC_BEGIN(0, 2); 159 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 160 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 161 // 162 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 163 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 164 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 165 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 166 // 167 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 168 // IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 169 // 170 // IEM_MC_ADVANCE_RIP(); 171 // IEM_MC_END(); 172 // } 173 // return VINF_SUCCESS; 174 //} 1134 175 1135 176 1136 177 /** 1137 * @ opcode 0x111138 * @ oppfx none1139 * @ opcpuid sse1140 * @ opgroup og_sse_simdfp_datamove1141 * @ opxcpttype 4UA1142 * @ optest op1=1 op2=2 -> op1=21143 * @ optest op1=0 op2=-42 -> op1=-42178 * @ opcode 0x11 179 * @ oppfx 0x66 180 * @ opcpuid sse2 181 * @ opgroup og_sse2_pcksclr_datamove 182 * @ opxcpttype 4UA 183 * @ optest op1=1 op2=2 -> op1=2 184 * @ optest op1=0 op2=-42 -> op1=-42 1144 185 */ 1145 FNIEMOP_DEF(iemOp_vmovups_Wps_Vps) 1146 { 1147 IEMOP_MNEMONIC2(MR, MOVUPS, movups, Wps, Vps, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1148 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1149 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1150 { 1151 /* 1152 * Register, register. 1153 */ 1154 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1155 IEM_MC_BEGIN(0, 0); 1156 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1157 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1158 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1159 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1160 IEM_MC_ADVANCE_RIP(); 1161 IEM_MC_END(); 1162 } 1163 else 1164 { 1165 /* 1166 * Memory, register. 1167 */ 1168 IEM_MC_BEGIN(0, 2); 1169 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... 
*/ 1170 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1171 1172 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1173 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1174 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1175 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1176 1177 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1178 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1179 1180 IEM_MC_ADVANCE_RIP(); 1181 IEM_MC_END(); 1182 } 1183 return VINF_SUCCESS; 1184 } 186 FNIEMOP_STUB(iemOp_vmovupd_Wpd_Vpd); 187 //FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd) 188 //{ 189 // IEMOP_MNEMONIC2(MR, VMOVUPD, vmovupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 190 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 191 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 192 // { 193 // /* 194 // * Register, register. 195 // */ 196 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 197 // IEM_MC_BEGIN(0, 0); 198 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 199 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 200 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 201 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 202 // IEM_MC_ADVANCE_RIP(); 203 // IEM_MC_END(); 204 // } 205 // else 206 // { 207 // /* 208 // * Memory, register. 209 // */ 210 // IEM_MC_BEGIN(0, 2); 211 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 212 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 213 // 214 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 215 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 216 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 217 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 218 // 219 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 220 // IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 221 // 222 // IEM_MC_ADVANCE_RIP(); 223 // IEM_MC_END(); 224 // } 225 // return VINF_SUCCESS; 226 //} 1185 227 1186 228 1187 229 /** 1188 * @ opcode 0x111189 * @ oppfx 0x661190 * @ opcpuid sse21191 * @ opgroup og_sse2_pcksclr_datamove1192 * @ opxcpttype 4UA1193 * @ optest op1=1 op2=2 -> op1=21194 * @ optest op1=0 op2=-42 -> op1=-42230 * @ opcode 0x11 231 * @ oppfx 0xf3 232 * @ opcpuid sse 233 * @ opgroup og_sse_simdfp_datamove 234 * @ opxcpttype 5 235 * @ optest op1=1 op2=2 -> op1=2 236 * @ optest op1=0 op2=-22 -> op1=-22 1195 237 */ 1196 FNIEMOP_DEF(iemOp_vmovupd_Wpd_Vpd) 1197 { 1198 IEMOP_MNEMONIC2(MR, MOVUPD, movupd, Wpd, Vpd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1199 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1200 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1201 { 1202 /* 1203 * Register, register. 1204 */ 1205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1206 IEM_MC_BEGIN(0, 0); 1207 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1208 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1209 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1210 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1211 IEM_MC_ADVANCE_RIP(); 1212 IEM_MC_END(); 1213 } 1214 else 1215 { 1216 /* 1217 * Memory, register. 1218 */ 1219 IEM_MC_BEGIN(0, 2); 1220 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... 
*/ 1221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1222 1223 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1224 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1226 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1227 1228 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1229 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1230 1231 IEM_MC_ADVANCE_RIP(); 1232 IEM_MC_END(); 1233 } 1234 return VINF_SUCCESS; 1235 } 238 FNIEMOP_STUB(iemOp_vmovss_Wss_Hx_Vss); 239 //FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss) 240 //{ 241 // IEMOP_MNEMONIC2(MR, VMOVSS, vmovss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 242 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 243 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 244 // { 245 // /* 246 // * Register, register. 247 // */ 248 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 249 // IEM_MC_BEGIN(0, 1); 250 // IEM_MC_LOCAL(uint32_t, uSrc); 251 // 252 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 253 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 254 // IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 255 // IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 256 // 257 // IEM_MC_ADVANCE_RIP(); 258 // IEM_MC_END(); 259 // } 260 // else 261 // { 262 // /* 263 // * Memory, register. 264 // */ 265 // IEM_MC_BEGIN(0, 2); 266 // IEM_MC_LOCAL(uint32_t, uSrc); 267 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 268 // 269 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 270 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 271 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 272 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 273 // 274 // IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 275 // IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 276 // 277 // IEM_MC_ADVANCE_RIP(); 278 // IEM_MC_END(); 279 // } 280 // return VINF_SUCCESS; 281 //} 1236 282 1237 283 1238 284 /** 1239 * @ opcode 0x111240 * @ oppfx 0xf31241 * @ opcpuid sse1242 * @ opgroup og_sse_simdfp_datamove1243 * @ opxcpttype 51244 * @ optest op1=1 op2=2 -> op1=21245 * @ optest op1=0 op2=-22 -> op1=-22285 * @ opcode 0x11 286 * @ oppfx 0xf2 287 * @ opcpuid sse2 288 * @ opgroup og_sse2_pcksclr_datamove 289 * @ opxcpttype 5 290 * @ optest op1=1 op2=2 -> op1=2 291 * @ optest op1=0 op2=-42 -> op1=-42 1246 292 */ 1247 FNIEMOP_DEF(iemOp_vmovss_Wss_Hx_Vss) 1248 { 1249 IEMOP_MNEMONIC2(MR, MOVSS, movss, Wss, Vss, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1250 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1251 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1252 { 1253 /* 1254 * Register, register. 1255 */ 1256 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1257 IEM_MC_BEGIN(0, 1); 1258 IEM_MC_LOCAL(uint32_t, uSrc); 1259 1260 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1261 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1262 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1263 IEM_MC_STORE_XREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 1264 1265 IEM_MC_ADVANCE_RIP(); 1266 IEM_MC_END(); 1267 } 1268 else 1269 { 1270 /* 1271 * Memory, register. 
1272 */ 1273 IEM_MC_BEGIN(0, 2); 1274 IEM_MC_LOCAL(uint32_t, uSrc); 1275 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1276 1277 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1278 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1279 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1280 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1281 1282 IEM_MC_FETCH_XREG_U32(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1283 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1284 1285 IEM_MC_ADVANCE_RIP(); 1286 IEM_MC_END(); 1287 } 1288 return VINF_SUCCESS; 1289 } 293 FNIEMOP_STUB(iemOp_vmovsd_Wsd_Hx_Vsd); 294 //FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd) 295 //{ 296 // IEMOP_MNEMONIC2(MR, VMOVSD, vmovsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 297 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 298 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 299 // { 300 // /* 301 // * Register, register. 302 // */ 303 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 304 // IEM_MC_BEGIN(0, 1); 305 // IEM_MC_LOCAL(uint64_t, uSrc); 306 // 307 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 308 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 309 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 310 // IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 311 // 312 // IEM_MC_ADVANCE_RIP(); 313 // IEM_MC_END(); 314 // } 315 // else 316 // { 317 // /* 318 // * Memory, register. 319 // */ 320 // IEM_MC_BEGIN(0, 2); 321 // IEM_MC_LOCAL(uint64_t, uSrc); 322 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 323 // 324 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 325 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 326 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 327 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 328 // 329 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 330 // IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 331 // 332 // IEM_MC_ADVANCE_RIP(); 333 // IEM_MC_END(); 334 // } 335 // return VINF_SUCCESS; 336 //} 337 338 339 FNIEMOP_STUB(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps); 340 //FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps) 341 //{ 342 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 343 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 344 // { 345 // /** 346 // * @ opcode 0x12 347 // * @ opcodesub 11 mr/reg 348 // * @ oppfx none 349 // * @ opcpuid sse 350 // * @ opgroup og_sse_simdfp_datamove 351 // * @ opxcpttype 5 352 // * @ optest op1=1 op2=2 -> op1=2 353 // * @ optest op1=0 op2=-42 -> op1=-42 354 // */ 355 // IEMOP_MNEMONIC2(RM_REG, VMOVHLPS, vmovhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 356 // 357 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 358 // IEM_MC_BEGIN(0, 1); 359 // IEM_MC_LOCAL(uint64_t, uSrc); 360 // 361 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 362 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 363 // IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 364 // IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 365 // 366 // IEM_MC_ADVANCE_RIP(); 367 // IEM_MC_END(); 368 // } 369 // else 370 // { 371 // /** 372 // * @ opdone 373 // * @ opcode 0x12 374 // * @ opcodesub !11 mr/reg 375 // * @ oppfx none 376 // * @ opcpuid sse 377 // * @ opgroup og_sse_simdfp_datamove 378 // * @ opxcpttype 5 379 // * @ optest op1=1 op2=2 -> op1=2 380 // * @ optest op1=0 op2=-42 -> op1=-42 381 // * @ opfunction 
iemOp_vmovlps_Vq_Hq_Mq__vmovhlps 382 // */ 383 // IEMOP_MNEMONIC2(RM_MEM, VMOVLPS, vmovlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 384 // 385 // IEM_MC_BEGIN(0, 2); 386 // IEM_MC_LOCAL(uint64_t, uSrc); 387 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 388 // 389 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 390 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 391 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 392 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 393 // 394 // IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 395 // IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 396 // 397 // IEM_MC_ADVANCE_RIP(); 398 // IEM_MC_END(); 399 // } 400 // return VINF_SUCCESS; 401 //} 1290 402 1291 403 1292 404 /** 1293 * @opcode 0x11 1294 * @oppfx 0xf2 1295 * @opcpuid sse2 1296 * @opgroup og_sse2_pcksclr_datamove 1297 * @opxcpttype 5 1298 * @optest op1=1 op2=2 -> op1=2 1299 * @optest op1=0 op2=-42 -> op1=-42 405 * @ opcode 0x12 406 * @ opcodesub !11 mr/reg 407 * @ oppfx 0x66 408 * @ opcpuid sse2 409 * @ opgroup og_sse2_pcksclr_datamove 410 * @ opxcpttype 5 411 * @ optest op1=1 op2=2 -> op1=2 412 * @ optest op1=0 op2=-42 -> op1=-42 1300 413 */ 1301 FNIEMOP_DEF(iemOp_vmovsd_Wsd_Hx_Vsd) 1302 { 1303 IEMOP_MNEMONIC2(MR, MOVSD, movsd, Wsd, Vsd, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1304 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1305 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1306 { 1307 /* 1308 * Register, register. 1309 */ 1310 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1311 IEM_MC_BEGIN(0, 1); 1312 IEM_MC_LOCAL(uint64_t, uSrc); 1313 1314 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1315 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1316 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1317 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 1318 1319 IEM_MC_ADVANCE_RIP(); 1320 IEM_MC_END(); 1321 } 1322 else 1323 { 1324 /* 1325 * Memory, register. 
1326 */ 1327 IEM_MC_BEGIN(0, 2); 1328 IEM_MC_LOCAL(uint64_t, uSrc); 1329 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1330 1331 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1332 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1333 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1334 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1335 1336 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1337 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1338 1339 IEM_MC_ADVANCE_RIP(); 1340 IEM_MC_END(); 1341 } 1342 return VINF_SUCCESS; 1343 } 1344 1345 1346 FNIEMOP_DEF(iemOp_vmovlps_Vq_Hq_Mq__vmovhlps) 1347 { 1348 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1349 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1350 { 1351 /** 1352 * @opcode 0x12 1353 * @opcodesub 11 mr/reg 1354 * @oppfx none 1355 * @opcpuid sse 1356 * @opgroup og_sse_simdfp_datamove 1357 * @opxcpttype 5 1358 * @optest op1=1 op2=2 -> op1=2 1359 * @optest op1=0 op2=-42 -> op1=-42 1360 */ 1361 IEMOP_MNEMONIC2(RM_REG, MOVHLPS, movhlps, Vq, UqHi, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1362 1363 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1364 IEM_MC_BEGIN(0, 1); 1365 IEM_MC_LOCAL(uint64_t, uSrc); 1366 1367 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1368 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1369 IEM_MC_FETCH_XREG_HI_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1370 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1371 1372 IEM_MC_ADVANCE_RIP(); 1373 IEM_MC_END(); 1374 } 1375 else 1376 { 1377 /** 1378 * @opdone 1379 * @opcode 0x12 1380 * @opcodesub !11 mr/reg 1381 * @oppfx none 1382 * @opcpuid sse 1383 * @opgroup og_sse_simdfp_datamove 1384 * @opxcpttype 5 1385 * @optest op1=1 op2=2 -> op1=2 1386 * @optest op1=0 op2=-42 -> op1=-42 1387 * @opfunction iemOp_vmovlps_Vq_Hq_Mq__vmovhlps 1388 */ 1389 IEMOP_MNEMONIC2(RM_MEM, MOVLPS, movlps, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1390 1391 IEM_MC_BEGIN(0, 2); 1392 IEM_MC_LOCAL(uint64_t, uSrc); 1393 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1394 1395 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1396 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1397 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1398 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1399 1400 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1401 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1402 1403 IEM_MC_ADVANCE_RIP(); 1404 IEM_MC_END(); 1405 } 1406 return VINF_SUCCESS; 1407 } 414 FNIEMOP_STUB(iemOp_vmovlpd_Vq_Hq_Mq); 415 //FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq) 416 //{ 417 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 418 // if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 419 // { 420 // IEMOP_MNEMONIC2(RM_MEM, VMOVLPD, vmovlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 421 // 422 // IEM_MC_BEGIN(0, 2); 423 // IEM_MC_LOCAL(uint64_t, uSrc); 424 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 425 // 426 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 427 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 428 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 429 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 430 // 431 // IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 432 // IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 433 // 434 // IEM_MC_ADVANCE_RIP(); 435 // IEM_MC_END(); 436 // return VINF_SUCCESS; 437 // } 438 // 439 // /** 440 // * @ opdone 441 // * @ 
opmnemonic ud660f12m3 442 // * @ opcode 0x12 443 // * @ opcodesub 11 mr/reg 444 // * @ oppfx 0x66 445 // * @ opunused immediate 446 // * @ opcpuid sse 447 // * @ optest -> 448 // */ 449 // return IEMOP_RAISE_INVALID_OPCODE(); 450 //} 1408 451 1409 452 1410 453 /** 1411 * @opcode 0x12 1412 * @opcodesub !11 mr/reg 1413 * @oppfx 0x66 1414 * @opcpuid sse2 1415 * @opgroup og_sse2_pcksclr_datamove 1416 * @opxcpttype 5 1417 * @optest op1=1 op2=2 -> op1=2 1418 * @optest op1=0 op2=-42 -> op1=-42 454 * @ opcode 0x12 455 * @ oppfx 0xf3 456 * @ opcpuid sse3 457 * @ opgroup og_sse3_pcksclr_datamove 458 * @ opxcpttype 4 459 * @ optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 -> 460 * op1=0x00000002000000020000000100000001 1419 461 */ 1420 FNIEMOP_DEF(iemOp_vmovlpd_Vq_Hq_Mq) 1421 { 1422 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1423 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 1424 { 1425 IEMOP_MNEMONIC2(RM_MEM, MOVLPD, movlpd, Vq, Mq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1426 1427 IEM_MC_BEGIN(0, 2); 1428 IEM_MC_LOCAL(uint64_t, uSrc); 1429 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1430 1431 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1432 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1433 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1434 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1435 1436 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1437 IEM_MC_STORE_XREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1438 1439 IEM_MC_ADVANCE_RIP(); 1440 IEM_MC_END(); 1441 return VINF_SUCCESS; 1442 } 1443 1444 /** 1445 * @opdone 1446 * @opmnemonic ud660f12m3 1447 * @opcode 0x12 1448 * @opcodesub 11 mr/reg 1449 * @oppfx 0x66 1450 * @opunused immediate 1451 * @opcpuid sse 1452 * @optest -> 1453 */ 1454 return IEMOP_RAISE_INVALID_OPCODE(); 1455 } 462 FNIEMOP_STUB(iemOp_vmovsldup_Vx_Wx); 463 //FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx) 464 //{ 465 // IEMOP_MNEMONIC2(RM, VMOVSLDUP, vmovsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 466 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 467 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 468 // { 469 // /* 470 // * Register, register. 471 // */ 472 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 473 // IEM_MC_BEGIN(2, 0); 474 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 475 // IEM_MC_ARG(PCRTUINT128U, puSrc, 1); 476 // 477 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 478 // IEM_MC_PREPARE_SSE_USAGE(); 479 // 480 // IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 481 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 482 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc); 483 // 484 // IEM_MC_ADVANCE_RIP(); 485 // IEM_MC_END(); 486 // } 487 // else 488 // { 489 // /* 490 // * Register, memory. 
491 // */ 492 // IEM_MC_BEGIN(2, 2); 493 // IEM_MC_LOCAL(RTUINT128U, uSrc); 494 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 495 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 496 // IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); 497 // 498 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 499 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 500 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 501 // IEM_MC_PREPARE_SSE_USAGE(); 502 // 503 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 504 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 505 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc); 506 // 507 // IEM_MC_ADVANCE_RIP(); 508 // IEM_MC_END(); 509 // } 510 // return VINF_SUCCESS; 511 //} 1456 512 1457 513 1458 514 /** 1459 * @ opcode 0x121460 * @ oppfx 0xf31461 * @ opcpuid sse31462 * @ opgroup og_sse3_pcksclr_datamove1463 * @ opxcpttype 41464 * @ optest op1=-1 op2=0xdddddddd00000002eeeeeeee00000001 ->1465 * op1=0x00000002000000020000000100000001515 * @ opcode 0x12 516 * @ oppfx 0xf2 517 * @ opcpuid sse3 518 * @ opgroup og_sse3_pcksclr_datamove 519 * @ opxcpttype 5 520 * @ optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 -> 521 * op1=0x22222222111111112222222211111111 1466 522 */ 1467 FNIEMOP_DEF(iemOp_vmovsldup_Vx_Wx) 1468 { 1469 IEMOP_MNEMONIC2(RM, MOVSLDUP, movsldup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1470 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1471 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1472 { 1473 /* 1474 * Register, register. 1475 */ 1476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1477 IEM_MC_BEGIN(2, 0); 1478 IEM_MC_ARG(PRTUINT128U, puDst, 0); 1479 IEM_MC_ARG(PCRTUINT128U, puSrc, 1); 1480 1481 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 1482 IEM_MC_PREPARE_SSE_USAGE(); 1483 1484 IEM_MC_REF_XREG_U128_CONST(puSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1485 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1486 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc); 1487 1488 IEM_MC_ADVANCE_RIP(); 1489 IEM_MC_END(); 1490 } 1491 else 1492 { 1493 /* 1494 * Register, memory. 1495 */ 1496 IEM_MC_BEGIN(2, 2); 1497 IEM_MC_LOCAL(RTUINT128U, uSrc); 1498 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1499 IEM_MC_ARG(PRTUINT128U, puDst, 0); 1500 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc, uSrc, 1); 1501 1502 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1503 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1504 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 1505 IEM_MC_PREPARE_SSE_USAGE(); 1506 1507 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1508 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1509 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movsldup, puDst, puSrc); 1510 1511 IEM_MC_ADVANCE_RIP(); 1512 IEM_MC_END(); 1513 } 1514 return VINF_SUCCESS; 1515 } 523 FNIEMOP_STUB(iemOp_vmovddup_Vx_Wx); 524 //FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx) 525 //{ 526 // IEMOP_MNEMONIC2(RM, VMOVDDUP, vmovddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 527 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 528 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 529 // { 530 // /* 531 // * Register, register. 
532 // */ 533 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 534 // IEM_MC_BEGIN(2, 0); 535 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 536 // IEM_MC_ARG(uint64_t, uSrc, 1); 537 // 538 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 539 // IEM_MC_PREPARE_SSE_USAGE(); 540 // 541 // IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 542 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 543 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 544 // 545 // IEM_MC_ADVANCE_RIP(); 546 // IEM_MC_END(); 547 // } 548 // else 549 // { 550 // /* 551 // * Register, memory. 552 // */ 553 // IEM_MC_BEGIN(2, 2); 554 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 555 // IEM_MC_ARG(PRTUINT128U, puDst, 0); 556 // IEM_MC_ARG(uint64_t, uSrc, 1); 557 // 558 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 559 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 560 // IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 561 // IEM_MC_PREPARE_SSE_USAGE(); 562 // 563 // IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 564 // IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 565 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 566 // 567 // IEM_MC_ADVANCE_RIP(); 568 // IEM_MC_END(); 569 // } 570 // return VINF_SUCCESS; 571 //} 572 573 574 /** Opcode VEX.0F 0x13 - vmovlps Mq, Vq */ 575 FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq); 576 577 /** Opcode VEX.66.0F 0x13 - vmovlpd Mq, Vq */ 578 FNIEMOP_STUB(iemOp_vmovlpd_Mq_Vq); 579 //FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq) 580 //{ 581 // IEMOP_MNEMONIC(vmovlpd_Mq_Vq, "movlpd Mq,Vq"); 582 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 583 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 584 // { 585 //#if 0 586 // /* 587 // * Register, register. 588 // */ 589 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 590 // IEM_MC_BEGIN(0, 1); 591 // IEM_MC_LOCAL(uint64_t, uSrc); 592 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 593 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 594 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 595 // IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 596 // IEM_MC_ADVANCE_RIP(); 597 // IEM_MC_END(); 598 //#else 599 // return IEMOP_RAISE_INVALID_OPCODE(); 600 //#endif 601 // } 602 // else 603 // { 604 // /* 605 // * Memory, register. 
606 // */ 607 // IEM_MC_BEGIN(0, 2); 608 // IEM_MC_LOCAL(uint64_t, uSrc); 609 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 610 // 611 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 612 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 613 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 614 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 615 // 616 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 617 // IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 618 // 619 // IEM_MC_ADVANCE_RIP(); 620 // IEM_MC_END(); 621 // } 622 // return VINF_SUCCESS; 623 //} 624 625 /* Opcode VEX.F3.0F 0x13 - invalid */ 626 /* Opcode VEX.F2.0F 0x13 - invalid */ 627 628 /** Opcode VEX.0F 0x14 - vunpcklps Vx, Hx, Wx*/ 629 FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx); 630 /** Opcode VEX.66.0F 0x14 - vunpcklpd Vx,Hx,Wx */ 631 FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx); 632 /* Opcode VEX.F3.0F 0x14 - invalid */ 633 /* Opcode VEX.F2.0F 0x14 - invalid */ 634 /** Opcode VEX.0F 0x15 - vunpckhps Vx, Hx, Wx */ 635 FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx); 636 /** Opcode VEX.66.0F 0x15 - vunpckhpd Vx,Hx,Wx */ 637 FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx); 638 /* Opcode VEX.F3.0F 0x15 - invalid */ 639 /* Opcode VEX.F2.0F 0x15 - invalid */ 640 /** Opcode VEX.0F 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */ 641 FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT 642 /** Opcode VEX.66.0F 0x16 - vmovhpdv1 Vdq, Hq, Mq */ 643 FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT 644 /** Opcode VEX.F3.0F 0x16 - vmovshdup Vx, Wx */ 645 FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT 646 /* Opcode VEX.F2.0F 0x16 - invalid */ 647 /** Opcode VEX.0F 0x17 - vmovhpsv1 Mq, Vq */ 648 FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT 649 /** Opcode VEX.66.0F 0x17 - vmovhpdv1 Mq, Vq */ 650 FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT 651 /* Opcode VEX.F3.0F 0x17 - invalid */ 652 /* Opcode VEX.F2.0F 0x17 - invalid */ 653 654 655 /* Opcode VEX.0F 0x18 - invalid */ 656 /* Opcode VEX.0F 0x19 - invalid */ 657 /* Opcode VEX.0F 0x1a - invalid */ 658 /* Opcode VEX.0F 0x1b - invalid */ 659 /* Opcode VEX.0F 0x1c - invalid */ 660 /* Opcode VEX.0F 0x1d - invalid */ 661 /* Opcode VEX.0F 0x1e - invalid */ 662 /* Opcode VEX.0F 0x1f - invalid */ 663 664 /* Opcode VEX.0F 0x20 - invalid */ 665 /* Opcode VEX.0F 0x21 - invalid */ 666 /* Opcode VEX.0F 0x22 - invalid */ 667 /* Opcode VEX.0F 0x23 - invalid */ 668 /* Opcode VEX.0F 0x24 - invalid */ 669 /* Opcode VEX.0F 0x25 - invalid */ 670 /* Opcode VEX.0F 0x26 - invalid */ 671 /* Opcode VEX.0F 0x27 - invalid */ 672 673 /** Opcode VEX.0F 0x28 - vmovaps Vps, Wps */ 674 FNIEMOP_STUB(iemOp_vmovaps_Vps_Wps); 675 //FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps) 676 //{ 677 // IEMOP_MNEMONIC(vmovaps_Vps_Wps, "vmovaps Vps,Wps"); 678 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 679 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 680 // { 681 // /* 682 // * Register, register. 683 // */ 684 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 685 // IEM_MC_BEGIN(0, 0); 686 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 687 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 688 // IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 689 // (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 690 // IEM_MC_ADVANCE_RIP(); 691 // IEM_MC_END(); 692 // } 693 // else 694 // { 695 // /* 696 // * Register, memory. 697 // */ 698 // IEM_MC_BEGIN(0, 2); 699 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... 
*/ 700 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 701 // 702 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 703 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 704 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 705 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 706 // 707 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 708 // IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 709 // 710 // IEM_MC_ADVANCE_RIP(); 711 // IEM_MC_END(); 712 // } 713 // return VINF_SUCCESS; 714 //} 715 716 /** Opcode VEX.66.0F 0x28 - vmovapd Vpd, Wpd */ 717 FNIEMOP_STUB(iemOp_vmovapd_Vpd_Wpd); 718 //FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd) 719 //{ 720 // IEMOP_MNEMONIC(vmovapd_Wpd_Wpd, "vmovapd Wpd,Wpd"); 721 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 722 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 723 // { 724 // /* 725 // * Register, register. 726 // */ 727 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 728 // IEM_MC_BEGIN(0, 0); 729 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 730 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 731 // IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 732 // (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 733 // IEM_MC_ADVANCE_RIP(); 734 // IEM_MC_END(); 735 // } 736 // else 737 // { 738 // /* 739 // * Register, memory. 740 // */ 741 // IEM_MC_BEGIN(0, 2); 742 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 743 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 744 // 745 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 746 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 747 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 748 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 749 // 750 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 751 // IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 752 // 753 // IEM_MC_ADVANCE_RIP(); 754 // IEM_MC_END(); 755 // } 756 // return VINF_SUCCESS; 757 //} 758 759 /* Opcode VEX.F3.0F 0x28 - invalid */ 760 /* Opcode VEX.F2.0F 0x28 - invalid */ 761 762 /** Opcode VEX.0F 0x29 - vmovaps Wps, Vps */ 763 FNIEMOP_STUB(iemOp_vmovaps_Wps_Vps); 764 //FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps) 765 //{ 766 // IEMOP_MNEMONIC(vmovaps_Wps_Vps, "vmovaps Wps,Vps"); 767 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 768 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 769 // { 770 // /* 771 // * Register, register. 772 // */ 773 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 774 // IEM_MC_BEGIN(0, 0); 775 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 776 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 777 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 778 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 779 // IEM_MC_ADVANCE_RIP(); 780 // IEM_MC_END(); 781 // } 782 // else 783 // { 784 // /* 785 // * Memory, register. 786 // */ 787 // IEM_MC_BEGIN(0, 2); 788 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... 
*/ 789 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 790 // 791 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 792 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 793 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 794 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 795 // 796 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 797 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 798 // 799 // IEM_MC_ADVANCE_RIP(); 800 // IEM_MC_END(); 801 // } 802 // return VINF_SUCCESS; 803 //} 804 805 /** Opcode VEX.66.0F 0x29 - vmovapd Wpd,Vpd */ 806 FNIEMOP_STUB(iemOp_vmovapd_Wpd_Vpd); 807 //FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd) 808 //{ 809 // IEMOP_MNEMONIC(vmovapd_Wpd_Vpd, "movapd Wpd,Vpd"); 810 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 811 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 812 // { 813 // /* 814 // * Register, register. 815 // */ 816 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 817 // IEM_MC_BEGIN(0, 0); 818 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 819 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 820 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 821 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 822 // IEM_MC_ADVANCE_RIP(); 823 // IEM_MC_END(); 824 // } 825 // else 826 // { 827 // /* 828 // * Memory, register. 829 // */ 830 // IEM_MC_BEGIN(0, 2); 831 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 832 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 833 // 834 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 835 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 836 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 837 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 838 // 839 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 840 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 841 // 842 // IEM_MC_ADVANCE_RIP(); 843 // IEM_MC_END(); 844 // } 845 // return VINF_SUCCESS; 846 //} 847 848 /* Opcode VEX.F3.0F 0x29 - invalid */ 849 /* Opcode VEX.F2.0F 0x29 - invalid */ 850 851 852 /** Opcode VEX.0F 0x2a - invalid */ 853 /** Opcode VEX.66.0F 0x2a - invalid */ 854 /** Opcode VEX.F3.0F 0x2a - vcvtsi2ss Vss, Hss, Ey */ 855 FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); 856 /** Opcode VEX.F2.0F 0x2a - vcvtsi2sd Vsd, Hsd, Ey */ 857 FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); 858 859 860 /** Opcode VEX.0F 0x2b - vmovntps Mps, Vps */ 861 FNIEMOP_STUB(iemOp_vmovntps_Mps_Vps); 862 //FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps) 863 //{ 864 // IEMOP_MNEMONIC(vmovntps_Mps_Vps, "movntps Mps,Vps"); 865 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 866 // if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 867 // { 868 // /* 869 // * memory, register. 870 // */ 871 // IEM_MC_BEGIN(0, 2); 872 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 873 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 874 // 875 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 876 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 877 // IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 878 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 879 // 880 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 881 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 882 // 883 // IEM_MC_ADVANCE_RIP(); 884 // IEM_MC_END(); 885 // } 886 // /* The register, register encoding is invalid. 
*/ 887 // else 888 // return IEMOP_RAISE_INVALID_OPCODE(); 889 // return VINF_SUCCESS; 890 //} 891 892 /** Opcode VEX.66.0F 0x2b - vmovntpd Mpd, Vpd */ 893 FNIEMOP_STUB(iemOp_vmovntpd_Mpd_Vpd); 894 //FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd) 895 //{ 896 // IEMOP_MNEMONIC(vmovntpd_Mpd_Vpd, "movntpd Mdq,Vpd"); 897 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 898 // if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 899 // { 900 // /* 901 // * memory, register. 902 // */ 903 // IEM_MC_BEGIN(0, 2); 904 // IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 905 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 906 // 907 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 908 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 909 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 910 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 911 // 912 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 913 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 914 // 915 // IEM_MC_ADVANCE_RIP(); 916 // IEM_MC_END(); 917 // } 918 // /* The register, register encoding is invalid. */ 919 // else 920 // return IEMOP_RAISE_INVALID_OPCODE(); 921 // return VINF_SUCCESS; 922 //} 923 /* Opcode VEX.F3.0F 0x2b - invalid */ 924 /* Opcode VEX.F2.0F 0x2b - invalid */ 925 926 927 /* Opcode VEX.0F 0x2c - invalid */ 928 /* Opcode VEX.66.0F 0x2c - invalid */ 929 /** Opcode VEX.F3.0F 0x2c - vcvttss2si Gy, Wss */ 930 FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss); 931 /** Opcode VEX.F2.0F 0x2c - vcvttsd2si Gy, Wsd */ 932 FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd); 933 934 /* Opcode VEX.0F 0x2d - invalid */ 935 /* Opcode VEX.66.0F 0x2d - invalid */ 936 /** Opcode VEX.F3.0F 0x2d - vcvtss2si Gy, Wss */ 937 FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss); 938 /** Opcode VEX.F2.0F 0x2d - vcvtsd2si Gy, Wsd */ 939 FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd); 940 941 /** Opcode VEX.0F 0x2e - vucomiss Vss, Wss */ 942 FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); 943 /** Opcode VEX.66.0F 0x2e - vucomisd Vsd, Wsd */ 944 FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); 945 /* Opcode VEX.F3.0F 0x2e - invalid */ 946 /* Opcode VEX.F2.0F 0x2e - invalid */ 947 948 /** Opcode VEX.0F 0x2f - vcomiss Vss, Wss */ 949 FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss); 950 /** Opcode VEX.66.0F 0x2f - vcomisd Vsd, Wsd */ 951 FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd); 952 /* Opcode VEX.F3.0F 0x2f - invalid */ 953 /* Opcode VEX.F2.0F 0x2f - invalid */ 954 955 /* Opcode VEX.0F 0x30 - invalid */ 956 /* Opcode VEX.0F 0x31 - invalid */ 957 /* Opcode VEX.0F 0x32 - invalid */ 958 /* Opcode VEX.0F 0x33 - invalid */ 959 /* Opcode VEX.0F 0x34 - invalid */ 960 /* Opcode VEX.0F 0x35 - invalid */ 961 /* Opcode VEX.0F 0x36 - invalid */ 962 /* Opcode VEX.0F 0x37 - invalid */ 963 /* Opcode VEX.0F 0x38 - invalid */ 964 /* Opcode VEX.0F 0x39 - invalid */ 965 /* Opcode VEX.0F 0x3a - invalid */ 966 /* Opcode VEX.0F 0x3b - invalid */ 967 /* Opcode VEX.0F 0x3c - invalid */ 968 /* Opcode VEX.0F 0x3d - invalid */ 969 /* Opcode VEX.0F 0x3e - invalid */ 970 /* Opcode VEX.0F 0x3f - invalid */ 971 /* Opcode VEX.0F 0x40 - invalid */ 972 /* Opcode VEX.0F 0x41 - invalid */ 973 /* Opcode VEX.0F 0x42 - invalid */ 974 /* Opcode VEX.0F 0x43 - invalid */ 975 /* Opcode VEX.0F 0x44 - invalid */ 976 /* Opcode VEX.0F 0x45 - invalid */ 977 /* Opcode VEX.0F 0x46 - invalid */ 978 /* Opcode VEX.0F 0x47 - invalid */ 979 /* Opcode VEX.0F 0x48 - invalid */ 980 /* Opcode VEX.0F 0x49 - invalid */ 981 /* Opcode VEX.0F 0x4a - invalid */ 982 /* Opcode VEX.0F 0x4b - invalid */ 983 /* Opcode VEX.0F 
0x4c - invalid */ 984 /* Opcode VEX.0F 0x4d - invalid */ 985 /* Opcode VEX.0F 0x4e - invalid */ 986 /* Opcode VEX.0F 0x4f - invalid */ 987 988 /** Opcode VEX.0F 0x50 - vmovmskps Gy, Ups */ 989 FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups); 990 /** Opcode VEX.66.0F 0x50 - vmovmskpd Gy,Upd */ 991 FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd); 992 /* Opcode VEX.F3.0F 0x50 - invalid */ 993 /* Opcode VEX.F2.0F 0x50 - invalid */ 994 995 /** Opcode VEX.0F 0x51 - vsqrtps Vps, Wps */ 996 FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps); 997 /** Opcode VEX.66.0F 0x51 - vsqrtpd Vpd, Wpd */ 998 FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd); 999 /** Opcode VEX.F3.0F 0x51 - vsqrtss Vss, Hss, Wss */ 1000 FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss); 1001 /** Opcode VEX.F2.0F 0x51 - vsqrtsd Vsd, Hsd, Wsd */ 1002 FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd); 1003 1004 /** Opcode VEX.0F 0x52 - vrsqrtps Vps, Wps */ 1005 FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps); 1006 /* Opcode VEX.66.0F 0x52 - invalid */ 1007 /** Opcode VEX.F3.0F 0x52 - vrsqrtss Vss, Hss, Wss */ 1008 FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss); 1009 /* Opcode VEX.F2.0F 0x52 - invalid */ 1010 1011 /** Opcode VEX.0F 0x53 - vrcpps Vps, Wps */ 1012 FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps); 1013 /* Opcode VEX.66.0F 0x53 - invalid */ 1014 /** Opcode VEX.F3.0F 0x53 - vrcpss Vss, Hss, Wss */ 1015 FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss); 1016 /* Opcode VEX.F2.0F 0x53 - invalid */ 1017 1018 /** Opcode VEX.0F 0x54 - vandps Vps, Hps, Wps */ 1019 FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps); 1020 /** Opcode VEX.66.0F 0x54 - vandpd Vpd, Hpd, Wpd */ 1021 FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd); 1022 /* Opcode VEX.F3.0F 0x54 - invalid */ 1023 /* Opcode VEX.F2.0F 0x54 - invalid */ 1024 1025 /** Opcode VEX.0F 0x55 - vandnps Vps, Hps, Wps */ 1026 FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps); 1027 /** Opcode VEX.66.0F 0x55 - vandnpd Vpd, Hpd, Wpd */ 1028 FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd); 1029 /* Opcode VEX.F3.0F 0x55 - invalid */ 1030 /* Opcode VEX.F2.0F 0x55 - invalid */ 1031 1032 /** Opcode VEX.0F 0x56 - vorps Vps, Hps, Wps */ 1033 FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps); 1034 /** Opcode VEX.66.0F 0x56 - vorpd Vpd, Hpd, Wpd */ 1035 FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd); 1036 /* Opcode VEX.F3.0F 0x56 - invalid */ 1037 /* Opcode VEX.F2.0F 0x56 - invalid */ 1038 1039 /** Opcode VEX.0F 0x57 - vxorps Vps, Hps, Wps */ 1040 FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps); 1041 /** Opcode VEX.66.0F 0x57 - vxorpd Vpd, Hpd, Wpd */ 1042 FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd); 1043 /* Opcode VEX.F3.0F 0x57 - invalid */ 1044 /* Opcode VEX.F2.0F 0x57 - invalid */ 1045 1046 /** Opcode VEX.0F 0x58 - vaddps Vps, Hps, Wps */ 1047 FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps); 1048 /** Opcode VEX.66.0F 0x58 - vaddpd Vpd, Hpd, Wpd */ 1049 FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd); 1050 /** Opcode VEX.F3.0F 0x58 - vaddss Vss, Hss, Wss */ 1051 FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss); 1052 /** Opcode VEX.F2.0F 0x58 - vaddsd Vsd, Hsd, Wsd */ 1053 FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd); 1054 1055 /** Opcode VEX.0F 0x59 - vmulps Vps, Hps, Wps */ 1056 FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps); 1057 /** Opcode VEX.66.0F 0x59 - vmulpd Vpd, Hpd, Wpd */ 1058 FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd); 1059 /** Opcode VEX.F3.0F 0x59 - vmulss Vss, Hss, Wss */ 1060 FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss); 1061 /** Opcode VEX.F2.0F 0x59 - vmulsd Vsd, Hsd, Wsd */ 1062 FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd); 1063 1064 /** Opcode VEX.0F 0x5a - vcvtps2pd Vpd, Wps */ 1065 FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps); 1066 /** Opcode VEX.66.0F 0x5a - vcvtpd2ps Vps, Wpd */ 1067 
FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd); 1068 /** Opcode VEX.F3.0F 0x5a - vcvtss2sd Vsd, Hx, Wss */ 1069 FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss); 1070 /** Opcode VEX.F2.0F 0x5a - vcvtsd2ss Vss, Hx, Wsd */ 1071 FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd); 1072 1073 /** Opcode VEX.0F 0x5b - vcvtdq2ps Vps, Wdq */ 1074 FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq); 1075 /** Opcode VEX.66.0F 0x5b - vcvtps2dq Vdq, Wps */ 1076 FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps); 1077 /** Opcode VEX.F3.0F 0x5b - vcvttps2dq Vdq, Wps */ 1078 FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps); 1079 /* Opcode VEX.F2.0F 0x5b - invalid */ 1080 1081 /** Opcode VEX.0F 0x5c - vsubps Vps, Hps, Wps */ 1082 FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps); 1083 /** Opcode VEX.66.0F 0x5c - vsubpd Vpd, Hpd, Wpd */ 1084 FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd); 1085 /** Opcode VEX.F3.0F 0x5c - vsubss Vss, Hss, Wss */ 1086 FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss); 1087 /** Opcode VEX.F2.0F 0x5c - vsubsd Vsd, Hsd, Wsd */ 1088 FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd); 1089 1090 /** Opcode VEX.0F 0x5d - vminps Vps, Hps, Wps */ 1091 FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps); 1092 /** Opcode VEX.66.0F 0x5d - vminpd Vpd, Hpd, Wpd */ 1093 FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd); 1094 /** Opcode VEX.F3.0F 0x5d - vminss Vss, Hss, Wss */ 1095 FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss); 1096 /** Opcode VEX.F2.0F 0x5d - vminsd Vsd, Hsd, Wsd */ 1097 FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd); 1098 1099 /** Opcode VEX.0F 0x5e - vdivps Vps, Hps, Wps */ 1100 FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps); 1101 /** Opcode VEX.66.0F 0x5e - vdivpd Vpd, Hpd, Wpd */ 1102 FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd); 1103 /** Opcode VEX.F3.0F 0x5e - vdivss Vss, Hss, Wss */ 1104 FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss); 1105 /** Opcode VEX.F2.0F 0x5e - vdivsd Vsd, Hsd, Wsd */ 1106 FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd); 1107 1108 /** Opcode VEX.0F 0x5f - vmaxps Vps, Hps, Wps */ 1109 FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps); 1110 /** Opcode VEX.66.0F 0x5f - vmaxpd Vpd, Hpd, Wpd */ 1111 FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd); 1112 /** Opcode VEX.F3.0F 0x5f - vmaxss Vss, Hss, Wss */ 1113 FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss); 1114 /** Opcode VEX.F2.0F 0x5f - vmaxsd Vsd, Hsd, Wsd */ 1115 FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd); 1116 1117 1118 ///** 1119 // * Common worker for SSE2 instructions on the forms: 1120 // * pxxxx xmm1, xmm2/mem128 1121 // * 1122 // * The 2nd operand is the first half of a register, which in the memory case 1123 // * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit 1124 // * memory accessed for MMX. 1125 // * 1126 // * Exceptions type 4. 1127 // */ 1128 //FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl) 1129 //{ 1130 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1131 // if (!pImpl->pfnU64) 1132 // return IEMOP_RAISE_INVALID_OPCODE(); 1133 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1134 // { 1135 // /* 1136 // * Register, register. 1137 // */ 1138 // /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 1139 // /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? 
*/
1140 //    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1141 //    IEM_MC_BEGIN(2, 0);
1142 //    IEM_MC_ARG(uint64_t *, pDst, 0);
1143 //    IEM_MC_ARG(uint32_t const *, pSrc, 1);
1144 //    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1145 //    IEM_MC_PREPARE_FPU_USAGE();
1146 //    IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1147 //    IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK);
1148 //    IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1149 //    IEM_MC_ADVANCE_RIP();
1150 //    IEM_MC_END();
1151 // }
1152 // else
1153 // {
1154 //    /*
1155 //     * Register, memory.
1156 //     */
1157 //    IEM_MC_BEGIN(2, 2);
1158 //    IEM_MC_ARG(uint64_t *, pDst, 0);
1159 //    IEM_MC_LOCAL(uint32_t, uSrc);
1160 //    IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1);
1161 //    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1162 //
1163 //    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1164 //    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1165 //    IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT();
1166 //    IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
1167 //
1168 //    IEM_MC_PREPARE_FPU_USAGE();
1169 //    IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK);
1170 //    IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc);
1171 //
1172 //    IEM_MC_ADVANCE_RIP();
1173 //    IEM_MC_END();
1174 // }
1175 // return VINF_SUCCESS;
1176 //}
1177
1178
1179 /* Opcode VEX.0F 0x60 - invalid */
1180
1181 /** Opcode VEX.66.0F 0x60 - vpunpcklbw Vx, Hx, W */
1182 FNIEMOP_STUB(iemOp_vpunpcklbw_Vx_Hx_Wx);
1183 //FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx)
1184 //{
1185 // IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx");
1186 // return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklbw);
1187 //}
1188
1189 /* Opcode VEX.F3.0F 0x60 - invalid */
1190
1191
1192 /* Opcode VEX.0F 0x61 - invalid */
1193
1194 /** Opcode VEX.66.0F 0x61 - vpunpcklwd Vx, Hx, Wx */
1195 FNIEMOP_STUB(iemOp_vpunpcklwd_Vx_Hx_Wx);
1196 //FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx)
1197 //{
1198 // IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx");
1199 // return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd);
1200 //}
1201
1202 /* Opcode VEX.F3.0F 0x61 - invalid */
1203
1204
1205 /* Opcode VEX.0F 0x62 - invalid */
1206
1207 /** Opcode VEX.66.0F 0x62 - vpunpckldq Vx, Hx, Wx */
1208 FNIEMOP_STUB(iemOp_vpunpckldq_Vx_Hx_Wx);
1209 //FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx)
1210 //{
1211 // IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx");
1212 // return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq);
1213 //}
1214
1215 /* Opcode VEX.F3.0F 0x62 - invalid */
1216
1217
1218
1219 /* Opcode VEX.0F 0x63 - invalid */
1220 /** Opcode VEX.66.0F 0x63 - vpacksswb Vx, Hx, Wx */
1221 FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx);
1222 /* Opcode VEX.F3.0F 0x63 - invalid */
1223
1224 /* Opcode VEX.0F 0x64 - invalid */
1225 /** Opcode VEX.66.0F 0x64 - vpcmpgtb Vx, Hx, Wx */
1226 FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx);
1227 /* Opcode VEX.F3.0F 0x64 - invalid */
1228
1229 /* Opcode VEX.0F 0x65 - invalid */
1230 /** Opcode VEX.66.0F 0x65 - vpcmpgtw Vx, Hx, Wx */
1231 FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx);
1232 /* Opcode VEX.F3.0F 0x65 - invalid */
1233
1234 /* Opcode VEX.0F 0x66 - invalid */
1235 /** Opcode VEX.66.0F 0x66 - vpcmpgtd Vx, Hx, Wx */
1236 FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx);
1237 /* Opcode VEX.F3.0F 0x66 - invalid */
1238
1239 /* Opcode VEX.0F 0x67 - invalid */
1240 /** Opcode VEX.66.0F 0x67 - vpackuswb Vx, Hx, W */
1241 FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W);
1242 /* Opcode VEX.F3.0F 0x67 - invalid */
1243
1244
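For reference while the vpunpckl* workers above remain commented out, the low-half interleave they are meant to provide can be sketched as a plain C model. This is a minimal sketch of a single 128-bit lane under the usual PUNPCKLBW semantics; it ignores the VEX three-operand form (Hx as first source) and the 256-bit upper lane, and RefPunpcklbwU128 is a hypothetical name rather than an IEM symbol.

    #include <stdint.h>
    #include <string.h>

    /* Interleave the low 8 bytes of the two inputs: even result bytes come
       from the first (destination) operand, odd ones from the second. */
    static void RefPunpcklbwU128(uint8_t pabDst[16], uint8_t const pabSrc[16])
    {
        uint8_t abRes[16];
        for (unsigned i = 0; i < 8; i++)
        {
            abRes[2 * i]     = pabDst[i];
            abRes[2 * i + 1] = pabSrc[i];
        }
        memcpy(pabDst, abRes, sizeof(abRes));
    }

The word and dword variants (vpunpcklwd, vpunpckldq) follow the same pattern with 16-bit and 32-bit elements.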
1245 ///**
1246 // * Common worker for SSE2 instructions on the form:
1247 // *      pxxxx xmm1, xmm2/mem128
1248 // *
1249 // * The 2nd operand is the second half of a register, which in the memory case
1250 // * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access
1251 // * where it may read the full 128 bits or only the upper 64 bits.
1252 // *
1253 // * Exceptions type 4.
1254 // */
1255 //FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl)
1256 //{
1257 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
1258 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
1259 // {
1260 //    /*
1261 //     * Register, register.
1262 //     */
1263 //    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1264 //    IEM_MC_BEGIN(2, 0);
1265 //    IEM_MC_ARG(PRTUINT128U, pDst, 0);
1266 //    IEM_MC_ARG(PCRTUINT128U, pSrc, 1);
1267 //    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1268 //    IEM_MC_PREPARE_SSE_USAGE();
1269 //    IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1270 //    IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB);
1271 //    IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1272 //    IEM_MC_ADVANCE_RIP();
1273 //    IEM_MC_END();
1274 // }
1275 // else
1276 // {
1277 //    /*
1278 //     * Register, memory.
1279 //     */
1280 //    IEM_MC_BEGIN(2, 2);
1281 //    IEM_MC_ARG(PRTUINT128U, pDst, 0);
1282 //    IEM_MC_LOCAL(RTUINT128U, uSrc);
1283 //    IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1);
1284 //    IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
1285 //
1286 //    IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
1287 //    IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
1288 //    IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT();
1289 //    IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only read the high qword. */
1290 //
1291 //    IEM_MC_PREPARE_SSE_USAGE();
1292 //    IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg);
1293 //    IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc);
1294 //
1295 //    IEM_MC_ADVANCE_RIP();
1296 //    IEM_MC_END();
1297 // }
1298 // return VINF_SUCCESS;
1299 //}
1300
1301
1302 /* Opcode VEX.0F 0x68 - invalid */
1303
1304 /** Opcode VEX.66.0F 0x68 - vpunpckhbw Vx, Hx, Wx */
1305 FNIEMOP_STUB(iemOp_vpunpckhbw_Vx_Hx_Wx);
1306 //FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx)
1307 //{
1308 // IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx");
1309 // return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw);
1310 //}
1311 /* Opcode VEX.F3.0F 0x68 - invalid */
1312
1313
1314 /* Opcode VEX.0F 0x69 - invalid */
1315
1316 /** Opcode VEX.66.0F 0x69 - vpunpckhwd Vx, Hx, Wx */
1317 FNIEMOP_STUB(iemOp_vpunpckhwd_Vx_Hx_Wx);
1318 //FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx)
1319 //{
1320 // IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx");
1321 // return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd);
1322 //
1323 //}
1324 /* Opcode VEX.F3.0F 0x69 - invalid */
1325
1326
1327 /* Opcode VEX.0F 0x6a - invalid */
1328
1329 /** Opcode VEX.66.0F 0x6a - vpunpckhdq Vx, Hx, W */
1330 FNIEMOP_STUB(iemOp_vpunpckhdq_Vx_Hx_W);
1331 //FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W)
1332 //{
1333 // IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W");
1334 // return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq);
1335 //}
1336 /* Opcode VEX.F3.0F 0x6a - invalid */
1337
1338
1339 /* Opcode VEX.0F 0x6b - invalid */
1340 /** Opcode VEX.66.0F 0x6b - vpackssdw Vx, Hx, Wx */
1341 FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx);
1342
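The vpackssdw stub above will eventually narrow signed dwords to signed words with saturation. A rough reference model, again covering one 128-bit lane only and using hypothetical helper names (RefSatI32ToI16, RefPackssdwU128 are not IEM symbols):

    #include <stdint.h>

    /* Clamp a signed 32-bit value into the int16_t range. */
    static int16_t RefSatI32ToI16(int32_t i32)
    {
        return i32 > INT16_MAX ? INT16_MAX : i32 < INT16_MIN ? INT16_MIN : (int16_t)i32;
    }

    /* PACKSSDW model: the low four result words come from the first
       (destination) operand's dwords, the high four from the second's. */
    static void RefPackssdwU128(int32_t const pai32Dst[4], int32_t const pai32Src[4], int16_t pai16Res[8])
    {
        for (unsigned i = 0; i < 4; i++)
        {
            pai16Res[i]     = RefSatI32ToI16(pai32Dst[i]);
            pai16Res[i + 4] = RefSatI32ToI16(pai32Src[i]);
        }
    }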
/* Opcode VEX.F3.0F 0x6b - invalid */ 1343 1344 1345 /* Opcode VEX.0F 0x6c - invalid */ 1346 1347 /** Opcode VEX.66.0F 0x6c - vpunpcklqdq Vx, Hx, Wx */ 1348 FNIEMOP_STUB(iemOp_vpunpcklqdq_Vx_Hx_Wx); 1349 //FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx) 1350 //{ 1351 // IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx"); 1352 // return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq); 1353 //} 1354 1355 /* Opcode VEX.F3.0F 0x6c - invalid */ 1356 /* Opcode VEX.F2.0F 0x6c - invalid */ 1357 1358 1359 /* Opcode VEX.0F 0x6d - invalid */ 1360 1361 /** Opcode VEX.66.0F 0x6d - vpunpckhqdq Vx, Hx, W */ 1362 FNIEMOP_STUB(iemOp_vpunpckhqdq_Vx_Hx_W); 1363 //FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W) 1364 //{ 1365 // IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq"); 1366 // return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq); 1367 //} 1368 1369 /* Opcode VEX.F3.0F 0x6d - invalid */ 1370 1371 1372 /* Opcode VEX.0F 0x6e - invalid */ 1373 1374 /** Opcode VEX.66.0F 0x6e - vmovd/q Vy, Ey */ 1375 FNIEMOP_STUB(iemOp_vmovd_q_Vy_Ey); 1376 //FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey) 1377 //{ 1378 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1379 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1380 // IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq"); 1381 // else 1382 // IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed"); 1383 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1384 // { 1385 // /* XMM, greg*/ 1386 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1387 // IEM_MC_BEGIN(0, 1); 1388 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1389 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1390 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1391 // { 1392 // IEM_MC_LOCAL(uint64_t, u64Tmp); 1393 // IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1394 // IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 1395 // } 1396 // else 1397 // { 1398 // IEM_MC_LOCAL(uint32_t, u32Tmp); 1399 // IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1400 // IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 1401 // } 1402 // IEM_MC_ADVANCE_RIP(); 1403 // IEM_MC_END(); 1404 // } 1405 // else 1406 // { 1407 // /* XMM, [mem] */ 1408 // IEM_MC_BEGIN(0, 2); 1409 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1410 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 1411 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 1412 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1413 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1414 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1415 // { 1416 // IEM_MC_LOCAL(uint64_t, u64Tmp); 1417 // IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1418 // IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 1419 // } 1420 // else 1421 // { 1422 // IEM_MC_LOCAL(uint32_t, u32Tmp); 1423 // IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1424 // IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 1425 // } 1426 // IEM_MC_ADVANCE_RIP(); 1427 // IEM_MC_END(); 1428 // } 1429 // return VINF_SUCCESS; 1430 //} 1431 1432 /* Opcode VEX.F3.0F 0x6e - invalid */ 1433 1434 1435 /* Opcode VEX.0F 0x6f - invalid */ 1436 1437 /** Opcode VEX.66.0F 0x6f - vmovdqa Vx, Wx */ 1438 FNIEMOP_STUB(iemOp_vmovdqa_Vx_Wx); 1439 //FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx) 
1440 //{ 1441 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1442 // IEMOP_MNEMONIC(vmovdqa_Vdq_Wdq, "movdqa Vdq,Wdq"); 1443 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1444 // { 1445 // /* 1446 // * Register, register. 1447 // */ 1448 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1449 // IEM_MC_BEGIN(0, 0); 1450 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1451 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1452 // IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1453 // (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1454 // IEM_MC_ADVANCE_RIP(); 1455 // IEM_MC_END(); 1456 // } 1457 // else 1458 // { 1459 // /* 1460 // * Register, memory. 1461 // */ 1462 // IEM_MC_BEGIN(0, 2); 1463 // IEM_MC_LOCAL(RTUINT128U, u128Tmp); 1464 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1465 // 1466 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1467 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1468 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1469 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1470 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1471 // IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp); 1472 // 1473 // IEM_MC_ADVANCE_RIP(); 1474 // IEM_MC_END(); 1475 // } 1476 // return VINF_SUCCESS; 1477 //} 1478 1479 /** Opcode VEX.F3.0F 0x6f - vmovdqu Vx, Wx */ 1480 FNIEMOP_STUB(iemOp_vmovdqu_Vx_Wx); 1481 //FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx) 1482 //{ 1483 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1484 // IEMOP_MNEMONIC(vmovdqu_Vdq_Wdq, "movdqu Vdq,Wdq"); 1485 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1486 // { 1487 // /* 1488 // * Register, register. 1489 // */ 1490 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1491 // IEM_MC_BEGIN(0, 0); 1492 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1493 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1494 // IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1495 // (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1496 // IEM_MC_ADVANCE_RIP(); 1497 // IEM_MC_END(); 1498 // } 1499 // else 1500 // { 1501 // /* 1502 // * Register, memory. 1503 // */ 1504 // IEM_MC_BEGIN(0, 2); 1505 // IEM_MC_LOCAL(RTUINT128U, u128Tmp); 1506 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1507 // 1508 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1509 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1510 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1511 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1512 // IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1513 // IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp); 1514 // 1515 // IEM_MC_ADVANCE_RIP(); 1516 // IEM_MC_END(); 1517 // } 1518 // return VINF_SUCCESS; 1519 //} 1520 1521 1522 /* Opcode VEX.0F 0x70 - invalid */ 1523 1524 /** Opcode VEX.66.0F 0x70 - vpshufd Vx, Wx, Ib */ 1525 FNIEMOP_STUB(iemOp_vpshufd_Vx_Wx_Ib); 1526 //FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib) 1527 //{ 1528 // IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib"); 1529 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1530 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1531 // { 1532 // /* 1533 // * Register, register. 
1534 // */ 1535 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1536 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1537 // 1538 // IEM_MC_BEGIN(3, 0); 1539 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1540 // IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 1541 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1542 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1543 // IEM_MC_PREPARE_SSE_USAGE(); 1544 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1545 // IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1546 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg); 1547 // IEM_MC_ADVANCE_RIP(); 1548 // IEM_MC_END(); 1549 // } 1550 // else 1551 // { 1552 // /* 1553 // * Register, memory. 1554 // */ 1555 // IEM_MC_BEGIN(3, 2); 1556 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1557 // IEM_MC_LOCAL(RTUINT128U, uSrc); 1558 // IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 1559 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1560 // 1561 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1562 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1563 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1564 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1565 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1566 // 1567 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1568 // IEM_MC_PREPARE_SSE_USAGE(); 1569 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1570 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg); 1571 // 1572 // IEM_MC_ADVANCE_RIP(); 1573 // IEM_MC_END(); 1574 // } 1575 // return VINF_SUCCESS; 1576 //} 1577 1578 /** Opcode VEX.F3.0F 0x70 - vpshufhw Vx, Wx, Ib */ 1579 FNIEMOP_STUB(iemOp_vpshufhw_Vx_Wx_Ib); 1580 //FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib) 1581 //{ 1582 // IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib"); 1583 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1584 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1585 // { 1586 // /* 1587 // * Register, register. 1588 // */ 1589 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1590 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1591 // 1592 // IEM_MC_BEGIN(3, 0); 1593 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1594 // IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 1595 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1596 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1597 // IEM_MC_PREPARE_SSE_USAGE(); 1598 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1599 // IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1600 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg); 1601 // IEM_MC_ADVANCE_RIP(); 1602 // IEM_MC_END(); 1603 // } 1604 // else 1605 // { 1606 // /* 1607 // * Register, memory. 
1608 // */ 1609 // IEM_MC_BEGIN(3, 2); 1610 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1611 // IEM_MC_LOCAL(RTUINT128U, uSrc); 1612 // IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 1613 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1614 // 1615 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1616 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1617 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1618 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1619 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1620 // 1621 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1622 // IEM_MC_PREPARE_SSE_USAGE(); 1623 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1624 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg); 1625 // 1626 // IEM_MC_ADVANCE_RIP(); 1627 // IEM_MC_END(); 1628 // } 1629 // return VINF_SUCCESS; 1630 //} 1631 1632 /** Opcode VEX.F2.0F 0x70 - vpshuflw Vx, Wx, Ib */ 1633 FNIEMOP_STUB(iemOp_vpshuflw_Vx_Wx_Ib); 1634 //FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib) 1635 //{ 1636 // IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib"); 1637 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1638 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1639 // { 1640 // /* 1641 // * Register, register. 1642 // */ 1643 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1644 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1645 // 1646 // IEM_MC_BEGIN(3, 0); 1647 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1648 // IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 1649 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1650 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1651 // IEM_MC_PREPARE_SSE_USAGE(); 1652 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1653 // IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1654 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg); 1655 // IEM_MC_ADVANCE_RIP(); 1656 // IEM_MC_END(); 1657 // } 1658 // else 1659 // { 1660 // /* 1661 // * Register, memory. 1662 // */ 1663 // IEM_MC_BEGIN(3, 2); 1664 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1665 // IEM_MC_LOCAL(RTUINT128U, uSrc); 1666 // IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 1667 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1668 // 1669 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1670 // uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 1671 // IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 1672 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1673 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1674 // 1675 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1676 // IEM_MC_PREPARE_SSE_USAGE(); 1677 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1678 // IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg); 1679 // 1680 // IEM_MC_ADVANCE_RIP(); 1681 // IEM_MC_END(); 1682 // } 1683 // return VINF_SUCCESS; 1684 //} 1685 1686 1687 /* Opcode VEX.0F 0x71 11/2 - invalid. */ 1688 /** Opcode VEX.66.0F 0x71 11/2. */ 1689 FNIEMOP_STUB_1(iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm); 1690 1691 /* Opcode VEX.0F 0x71 11/4 - invalid */ 1692 /** Opcode VEX.66.0F 0x71 11/4. */ 1693 FNIEMOP_STUB_1(iemOp_VGrp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm); 1694 1695 /* Opcode VEX.0F 0x71 11/6 - invalid */ 1696 /** Opcode VEX.66.0F 0x71 11/6. 
*/ 1697 FNIEMOP_STUB_1(iemOp_VGrp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm); 1516 1698 1517 1699 1518 1700 /** 1519 * @opcode 0x12 1520 * @oppfx 0xf2 1521 * @opcpuid sse3 1522 * @opgroup og_sse3_pcksclr_datamove 1523 * @opxcpttype 5 1524 * @optest op1=-1 op2=0xddddddddeeeeeeee2222222211111111 -> 1525 * op1=0x22222222111111112222222211111111 1701 * VEX Group 12 jump table for register variant. 1526 1702 */ 1527 FNIEMOP_DEF(iemOp_vmovddup_Vx_Wx) 1528 { 1529 IEMOP_MNEMONIC2(RM, MOVDDUP, movddup, Vdq, Wdq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 1530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1532 { 1533 /* 1534 * Register, register. 1535 */ 1536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1537 IEM_MC_BEGIN(2, 0); 1538 IEM_MC_ARG(PRTUINT128U, puDst, 0); 1539 IEM_MC_ARG(uint64_t, uSrc, 1); 1540 1541 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 1542 IEM_MC_PREPARE_SSE_USAGE(); 1543 1544 IEM_MC_FETCH_XREG_U64(uSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1545 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1546 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 1547 1548 IEM_MC_ADVANCE_RIP(); 1549 IEM_MC_END(); 1550 } 1551 else 1552 { 1553 /* 1554 * Register, memory. 1555 */ 1556 IEM_MC_BEGIN(2, 2); 1557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1558 IEM_MC_ARG(PRTUINT128U, puDst, 0); 1559 IEM_MC_ARG(uint64_t, uSrc, 1); 1560 1561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1562 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1563 IEM_MC_MAYBE_RAISE_SSE3_RELATED_XCPT(); 1564 IEM_MC_PREPARE_SSE_USAGE(); 1565 1566 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1567 IEM_MC_REF_XREG_U128(puDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1568 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_movddup, puDst, uSrc); 1569 1570 IEM_MC_ADVANCE_RIP(); 1571 IEM_MC_END(); 1572 } 1573 return VINF_SUCCESS; 1574 } 1575 1576 1577 /** Opcode 0x0f 0x13 - vmovlps Mq, Vq */ 1578 FNIEMOP_STUB(iemOp_vmovlps_Mq_Vq); 1579 1580 /** Opcode 0x66 0x0f 0x13 - vmovlpd Mq, Vq */ 1581 FNIEMOP_DEF(iemOp_vmovlpd_Mq_Vq) 1582 { 1583 IEMOP_MNEMONIC(movlpd_Mq_Vq, "movlpd Mq,Vq"); 1584 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1585 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1586 { 1587 #if 0 1588 /* 1589 * Register, register. 1590 */ 1591 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1592 IEM_MC_BEGIN(0, 1); 1593 IEM_MC_LOCAL(uint64_t, uSrc); 1594 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1595 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1596 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1597 IEM_MC_STORE_XREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 1598 IEM_MC_ADVANCE_RIP(); 1599 IEM_MC_END(); 1600 #else 1601 return IEMOP_RAISE_INVALID_OPCODE(); 1602 #endif 1603 } 1604 else 1605 { 1606 /* 1607 * Memory, register. 
1608 */ 1609 IEM_MC_BEGIN(0, 2); 1610 IEM_MC_LOCAL(uint64_t, uSrc); 1611 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1612 1613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1615 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1616 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1617 1618 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1619 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1620 1621 IEM_MC_ADVANCE_RIP(); 1622 IEM_MC_END(); 1623 } 1624 return VINF_SUCCESS; 1625 } 1626 1627 /* Opcode 0xf3 0x0f 0x13 - invalid */ 1628 /* Opcode 0xf2 0x0f 0x13 - invalid */ 1629 1630 /** Opcode 0x0f 0x14 - vunpcklps Vx, Hx, Wx*/ 1631 FNIEMOP_STUB(iemOp_vunpcklps_Vx_Hx_Wx); 1632 /** Opcode 0x66 0x0f 0x14 - vunpcklpd Vx,Hx,Wx */ 1633 FNIEMOP_STUB(iemOp_vunpcklpd_Vx_Hx_Wx); 1634 /* Opcode 0xf3 0x0f 0x14 - invalid */ 1635 /* Opcode 0xf2 0x0f 0x14 - invalid */ 1636 /** Opcode 0x0f 0x15 - vunpckhps Vx, Hx, Wx */ 1637 FNIEMOP_STUB(iemOp_vunpckhps_Vx_Hx_Wx); 1638 /** Opcode 0x66 0x0f 0x15 - vunpckhpd Vx,Hx,Wx */ 1639 FNIEMOP_STUB(iemOp_vunpckhpd_Vx_Hx_Wx); 1640 /* Opcode 0xf3 0x0f 0x15 - invalid */ 1641 /* Opcode 0xf2 0x0f 0x15 - invalid */ 1642 /** Opcode 0x0f 0x16 - vmovhpsv1 Vdq, Hq, Mq vmovlhps Vdq, Hq, Uq */ 1643 FNIEMOP_STUB(iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq); //NEXT 1644 /** Opcode 0x66 0x0f 0x16 - vmovhpdv1 Vdq, Hq, Mq */ 1645 FNIEMOP_STUB(iemOp_vmovhpdv1_Vdq_Hq_Mq); //NEXT 1646 /** Opcode 0xf3 0x0f 0x16 - vmovshdup Vx, Wx */ 1647 FNIEMOP_STUB(iemOp_vmovshdup_Vx_Wx); //NEXT 1648 /* Opcode 0xf2 0x0f 0x16 - invalid */ 1649 /** Opcode 0x0f 0x17 - vmovhpsv1 Mq, Vq */ 1650 FNIEMOP_STUB(iemOp_vmovhpsv1_Mq_Vq); //NEXT 1651 /** Opcode 0x66 0x0f 0x17 - vmovhpdv1 Mq, Vq */ 1652 FNIEMOP_STUB(iemOp_vmovhpdv1_Mq_Vq); //NEXT 1653 /* Opcode 0xf3 0x0f 0x17 - invalid */ 1654 /* Opcode 0xf2 0x0f 0x17 - invalid */ 1655 1656 1657 /** Opcode 0x0f 0x18. */ 1658 FNIEMOP_DEF(iemOp_prefetch_Grp16) 1659 { 1660 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1661 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 1662 { 1663 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) 1664 { 1665 case 4: /* Aliased to /0 for the time being according to AMD. */ 1666 case 5: /* Aliased to /0 for the time being according to AMD. */ 1667 case 6: /* Aliased to /0 for the time being according to AMD. */ 1668 case 7: /* Aliased to /0 for the time being according to AMD. */ 1669 case 0: IEMOP_MNEMONIC(prefetchNTA, "prefetchNTA m8"); break; 1670 case 1: IEMOP_MNEMONIC(prefetchT0, "prefetchT0 m8"); break; 1671 case 2: IEMOP_MNEMONIC(prefetchT1, "prefetchT1 m8"); break; 1672 case 3: IEMOP_MNEMONIC(prefetchT2, "prefetchT2 m8"); break; 1673 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 1674 } 1675 1676 IEM_MC_BEGIN(0, 1); 1677 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1678 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1679 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1680 /* Currently a NOP. */ 1681 NOREF(GCPtrEffSrc); 1682 IEM_MC_ADVANCE_RIP(); 1683 IEM_MC_END(); 1684 return VINF_SUCCESS; 1685 } 1686 1687 return IEMOP_RAISE_INVALID_OPCODE(); 1688 } 1689 1690 1691 /** Opcode 0x0f 0x19..0x1f. 
*/ 1692 FNIEMOP_DEF(iemOp_nop_Ev) 1693 { 1694 IEMOP_MNEMONIC(nop_Ev, "nop Ev"); 1695 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1696 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1697 { 1698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1699 IEM_MC_BEGIN(0, 0); 1700 IEM_MC_ADVANCE_RIP(); 1701 IEM_MC_END(); 1702 } 1703 else 1704 { 1705 IEM_MC_BEGIN(0, 1); 1706 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1707 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1708 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1709 /* Currently a NOP. */ 1710 NOREF(GCPtrEffSrc); 1711 IEM_MC_ADVANCE_RIP(); 1712 IEM_MC_END(); 1713 } 1714 return VINF_SUCCESS; 1715 } 1716 1717 1718 /** Opcode 0x0f 0x20. */ 1719 FNIEMOP_DEF(iemOp_mov_Rd_Cd) 1720 { 1721 /* mod is ignored, as is operand size overrides. */ 1722 IEMOP_MNEMONIC(mov_Rd_Cd, "mov Rd,Cd"); 1723 IEMOP_HLP_MIN_386(); 1724 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) 1725 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; 1726 else 1727 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT; 1728 1729 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1730 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg; 1731 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) 1732 { 1733 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */ 1734 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit) 1735 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. */ 1736 iCrReg |= 8; 1737 } 1738 switch (iCrReg) 1739 { 1740 case 0: case 2: case 3: case 4: case 8: 1741 break; 1742 default: 1743 return IEMOP_RAISE_INVALID_OPCODE(); 1744 } 1745 IEMOP_HLP_DONE_DECODING(); 1746 1747 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Cd, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, iCrReg); 1748 } 1749 1750 1751 /** Opcode 0x0f 0x21. */ 1752 FNIEMOP_DEF(iemOp_mov_Rd_Dd) 1753 { 1754 IEMOP_MNEMONIC(mov_Rd_Dd, "mov Rd,Dd"); 1755 IEMOP_HLP_MIN_386(); 1756 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1757 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1758 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R) 1759 return IEMOP_RAISE_INVALID_OPCODE(); 1760 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Rd_Dd, 1761 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB, 1762 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK)); 1763 } 1764 1765 1766 /** Opcode 0x0f 0x22. */ 1767 FNIEMOP_DEF(iemOp_mov_Cd_Rd) 1768 { 1769 /* mod is ignored, as is operand size overrides. */ 1770 IEMOP_MNEMONIC(mov_Cd_Rd, "mov Cd,Rd"); 1771 IEMOP_HLP_MIN_386(); 1772 if (pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT) 1773 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_64BIT; 1774 else 1775 pVCpu->iem.s.enmEffOpSize = pVCpu->iem.s.enmDefOpSize = IEMMODE_32BIT; 1776 1777 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1778 uint8_t iCrReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg; 1779 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK) 1780 { 1781 /* The lock prefix can be used to encode CR8 accesses on some CPUs. */ 1782 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCr8In32Bit) 1783 return IEMOP_RAISE_INVALID_OPCODE(); /* #UD takes precedence over #GP(), see test. 
*/ 1784 iCrReg |= 8; 1785 } 1786 switch (iCrReg) 1787 { 1788 case 0: case 2: case 3: case 4: case 8: 1789 break; 1790 default: 1791 return IEMOP_RAISE_INVALID_OPCODE(); 1792 } 1793 IEMOP_HLP_DONE_DECODING(); 1794 1795 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Cd_Rd, iCrReg, (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB); 1796 } 1797 1798 1799 /** Opcode 0x0f 0x23. */ 1800 FNIEMOP_DEF(iemOp_mov_Dd_Rd) 1801 { 1802 IEMOP_MNEMONIC(mov_Dd_Rd, "mov Dd,Rd"); 1803 IEMOP_HLP_MIN_386(); 1804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1805 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1806 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_REX_R) 1807 return IEMOP_RAISE_INVALID_OPCODE(); 1808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_mov_Dd_Rd, 1809 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK), 1810 (X86_MODRM_RM_MASK & bRm) | pVCpu->iem.s.uRexB); 1811 } 1812 1813 1814 /** Opcode 0x0f 0x24. */ 1815 FNIEMOP_DEF(iemOp_mov_Rd_Td) 1816 { 1817 IEMOP_MNEMONIC(mov_Rd_Td, "mov Rd,Td"); 1818 /** @todo works on 386 and 486. */ 1819 /* The RM byte is not considered, see testcase. */ 1820 return IEMOP_RAISE_INVALID_OPCODE(); 1821 } 1822 1823 1824 /** Opcode 0x0f 0x26. */ 1825 FNIEMOP_DEF(iemOp_mov_Td_Rd) 1826 { 1827 IEMOP_MNEMONIC(mov_Td_Rd, "mov Td,Rd"); 1828 /** @todo works on 386 and 486. */ 1829 /* The RM byte is not considered, see testcase. */ 1830 return IEMOP_RAISE_INVALID_OPCODE(); 1831 } 1832 1833 1834 /** Opcode 0x0f 0x28 - vmovaps Vps, Wps */ 1835 FNIEMOP_DEF(iemOp_vmovaps_Vps_Wps) 1836 { 1837 IEMOP_MNEMONIC(movaps_r_mr, "movaps r,mr"); 1838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1840 { 1841 /* 1842 * Register, register. 1843 */ 1844 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1845 IEM_MC_BEGIN(0, 0); 1846 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1847 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1848 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1849 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1850 IEM_MC_ADVANCE_RIP(); 1851 IEM_MC_END(); 1852 } 1853 else 1854 { 1855 /* 1856 * Register, memory. 1857 */ 1858 IEM_MC_BEGIN(0, 2); 1859 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 1860 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1861 1862 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1863 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1864 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1865 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1866 1867 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1868 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1869 1870 IEM_MC_ADVANCE_RIP(); 1871 IEM_MC_END(); 1872 } 1873 return VINF_SUCCESS; 1874 } 1875 1876 /** Opcode 0x66 0x0f 0x28 - vmovapd Vpd, Wpd */ 1877 FNIEMOP_DEF(iemOp_vmovapd_Vpd_Wpd) 1878 { 1879 IEMOP_MNEMONIC(movapd_r_mr, "movapd r,mr"); 1880 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1881 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1882 { 1883 /* 1884 * Register, register. 1885 */ 1886 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1887 IEM_MC_BEGIN(0, 0); 1888 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1889 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1890 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 1891 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1892 IEM_MC_ADVANCE_RIP(); 1893 IEM_MC_END(); 1894 } 1895 else 1896 { 1897 /* 1898 * Register, memory. 
1899 */ 1900 IEM_MC_BEGIN(0, 2); 1901 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 1902 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1903 1904 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1905 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1906 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1907 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1908 1909 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1910 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, uSrc); 1911 1912 IEM_MC_ADVANCE_RIP(); 1913 IEM_MC_END(); 1914 } 1915 return VINF_SUCCESS; 1916 } 1917 1918 /* Opcode 0xf3 0x0f 0x28 - invalid */ 1919 /* Opcode 0xf2 0x0f 0x28 - invalid */ 1920 1921 /** Opcode 0x0f 0x29 - vmovaps Wps, Vps */ 1922 FNIEMOP_DEF(iemOp_vmovaps_Wps_Vps) 1923 { 1924 IEMOP_MNEMONIC(movaps_mr_r, "movaps Wps,Vps"); 1925 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1927 { 1928 /* 1929 * Register, register. 1930 */ 1931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1932 IEM_MC_BEGIN(0, 0); 1933 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1935 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1936 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1937 IEM_MC_ADVANCE_RIP(); 1938 IEM_MC_END(); 1939 } 1940 else 1941 { 1942 /* 1943 * Memory, register. 1944 */ 1945 IEM_MC_BEGIN(0, 2); 1946 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 1947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1948 1949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1951 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 1952 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1953 1954 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1955 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1956 1957 IEM_MC_ADVANCE_RIP(); 1958 IEM_MC_END(); 1959 } 1960 return VINF_SUCCESS; 1961 } 1962 1963 /** Opcode 0x66 0x0f 0x29 - vmovapd Wpd,Vpd */ 1964 FNIEMOP_DEF(iemOp_vmovapd_Wpd_Vpd) 1965 { 1966 IEMOP_MNEMONIC(movapd_mr_r, "movapd Wpd,Vpd"); 1967 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1968 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1969 { 1970 /* 1971 * Register, register. 1972 */ 1973 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1974 IEM_MC_BEGIN(0, 0); 1975 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1976 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 1977 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1978 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1979 IEM_MC_ADVANCE_RIP(); 1980 IEM_MC_END(); 1981 } 1982 else 1983 { 1984 /* 1985 * Memory, register. 1986 */ 1987 IEM_MC_BEGIN(0, 2); 1988 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... 
*/ 1989 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1990 1991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1993 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1994 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1995 1996 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1997 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 1998 1999 IEM_MC_ADVANCE_RIP(); 2000 IEM_MC_END(); 2001 } 2002 return VINF_SUCCESS; 2003 } 2004 2005 /* Opcode 0xf3 0x0f 0x29 - invalid */ 2006 /* Opcode 0xf2 0x0f 0x29 - invalid */ 2007 2008 2009 /** Opcode 0x0f 0x2a - cvtpi2ps Vps, Qpi */ 2010 FNIEMOP_STUB(iemOp_cvtpi2ps_Vps_Qpi); //NEXT 2011 /** Opcode 0x66 0x0f 0x2a - cvtpi2pd Vpd, Qpi */ 2012 FNIEMOP_STUB(iemOp_cvtpi2pd_Vpd_Qpi); //NEXT 2013 /** Opcode 0xf3 0x0f 0x2a - vcvtsi2ss Vss, Hss, Ey */ 2014 FNIEMOP_STUB(iemOp_vcvtsi2ss_Vss_Hss_Ey); //NEXT 2015 /** Opcode 0xf2 0x0f 0x2a - vcvtsi2sd Vsd, Hsd, Ey */ 2016 FNIEMOP_STUB(iemOp_vcvtsi2sd_Vsd_Hsd_Ey); //NEXT 2017 2018 2019 /** Opcode 0x0f 0x2b - vmovntps Mps, Vps */ 2020 FNIEMOP_DEF(iemOp_vmovntps_Mps_Vps) 2021 { 2022 IEMOP_MNEMONIC(movntps_mr_r, "movntps Mps,Vps"); 2023 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2024 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 2025 { 2026 /* 2027 * memory, register. 2028 */ 2029 IEM_MC_BEGIN(0, 2); 2030 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 2031 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2032 2033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2035 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 2036 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2037 2038 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2039 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2040 2041 IEM_MC_ADVANCE_RIP(); 2042 IEM_MC_END(); 2043 } 2044 /* The register, register encoding is invalid. */ 2045 else 2046 return IEMOP_RAISE_INVALID_OPCODE(); 2047 return VINF_SUCCESS; 2048 } 2049 2050 /** Opcode 0x66 0x0f 0x2b - vmovntpd Mpd, Vpd */ 2051 FNIEMOP_DEF(iemOp_vmovntpd_Mpd_Vpd) 2052 { 2053 IEMOP_MNEMONIC(movntpd_mr_r, "movntpd Mdq,Vpd"); 2054 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2055 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 2056 { 2057 /* 2058 * memory, register. 2059 */ 2060 IEM_MC_BEGIN(0, 2); 2061 IEM_MC_LOCAL(RTUINT128U, uSrc); /** @todo optimize this one day... */ 2062 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2063 2064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2066 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2067 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2068 2069 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2070 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2071 2072 IEM_MC_ADVANCE_RIP(); 2073 IEM_MC_END(); 2074 } 2075 /* The register, register encoding is invalid. 
*/ 2076 else 2077 return IEMOP_RAISE_INVALID_OPCODE(); 2078 return VINF_SUCCESS; 2079 } 2080 /* Opcode 0xf3 0x0f 0x2b - invalid */ 2081 /* Opcode 0xf2 0x0f 0x2b - invalid */ 2082 2083 2084 /** Opcode 0x0f 0x2c - cvttps2pi Ppi, Wps */ 2085 FNIEMOP_STUB(iemOp_cvttps2pi_Ppi_Wps); 2086 /** Opcode 0x66 0x0f 0x2c - cvttpd2pi Ppi, Wpd */ 2087 FNIEMOP_STUB(iemOp_cvttpd2pi_Ppi_Wpd); 2088 /** Opcode 0xf3 0x0f 0x2c - vcvttss2si Gy, Wss */ 2089 FNIEMOP_STUB(iemOp_vcvttss2si_Gy_Wss); 2090 /** Opcode 0xf2 0x0f 0x2c - vcvttsd2si Gy, Wsd */ 2091 FNIEMOP_STUB(iemOp_vcvttsd2si_Gy_Wsd); 2092 2093 /** Opcode 0x0f 0x2d - cvtps2pi Ppi, Wps */ 2094 FNIEMOP_STUB(iemOp_cvtps2pi_Ppi_Wps); 2095 /** Opcode 0x66 0x0f 0x2d - cvtpd2pi Qpi, Wpd */ 2096 FNIEMOP_STUB(iemOp_cvtpd2pi_Qpi_Wpd); 2097 /** Opcode 0xf3 0x0f 0x2d - vcvtss2si Gy, Wss */ 2098 FNIEMOP_STUB(iemOp_vcvtss2si_Gy_Wss); 2099 /** Opcode 0xf2 0x0f 0x2d - vcvtsd2si Gy, Wsd */ 2100 FNIEMOP_STUB(iemOp_vcvtsd2si_Gy_Wsd); 2101 2102 /** Opcode 0x0f 0x2e - vucomiss Vss, Wss */ 2103 FNIEMOP_STUB(iemOp_vucomiss_Vss_Wss); // NEXT 2104 /** Opcode 0x66 0x0f 0x2e - vucomisd Vsd, Wsd */ 2105 FNIEMOP_STUB(iemOp_vucomisd_Vsd_Wsd); // NEXT 2106 /* Opcode 0xf3 0x0f 0x2e - invalid */ 2107 /* Opcode 0xf2 0x0f 0x2e - invalid */ 2108 2109 /** Opcode 0x0f 0x2f - vcomiss Vss, Wss */ 2110 FNIEMOP_STUB(iemOp_vcomiss_Vss_Wss); 2111 /** Opcode 0x66 0x0f 0x2f - vcomisd Vsd, Wsd */ 2112 FNIEMOP_STUB(iemOp_vcomisd_Vsd_Wsd); 2113 /* Opcode 0xf3 0x0f 0x2f - invalid */ 2114 /* Opcode 0xf2 0x0f 0x2f - invalid */ 2115 2116 /** Opcode 0x0f 0x30. */ 2117 FNIEMOP_DEF(iemOp_wrmsr) 2118 { 2119 IEMOP_MNEMONIC(wrmsr, "wrmsr"); 2120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2121 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_wrmsr); 2122 } 2123 2124 2125 /** Opcode 0x0f 0x31. */ 2126 FNIEMOP_DEF(iemOp_rdtsc) 2127 { 2128 IEMOP_MNEMONIC(rdtsc, "rdtsc"); 2129 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2130 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdtsc); 2131 } 2132 2133 2134 /** Opcode 0x0f 0x32. */ 2135 FNIEMOP_DEF(iemOp_rdmsr) 2136 { 2137 IEMOP_MNEMONIC(rdmsr, "rdmsr"); 2138 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2139 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_rdmsr); 2140 } 2141 2142 2143 /** Opcode 0x0f 0x33. */ 2144 FNIEMOP_STUB(iemOp_rdpmc); 2145 /** Opcode 0x0f 0x34. */ 2146 FNIEMOP_STUB(iemOp_sysenter); 2147 /** Opcode 0x0f 0x35. */ 2148 FNIEMOP_STUB(iemOp_sysexit); 2149 /** Opcode 0x0f 0x37. */ 2150 FNIEMOP_STUB(iemOp_getsec); 2151 /** Opcode 0x0f 0x38. */ 2152 FNIEMOP_UD_STUB(iemOp_3byte_Esc_A4); /* Here there be dragons... */ 2153 /** Opcode 0x0f 0x3a. */ 2154 FNIEMOP_UD_STUB(iemOp_3byte_Esc_A5); /* Here there be dragons... */ 2155 2156 2157 /** 2158 * Implements a conditional move. 2159 * 2160 * Wish there was an obvious way to do this where we could share and reduce 2161 * code bloat. 2162 * 2163 * @param a_Cnd The conditional "microcode" operation.
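 *
 * A minimal usage sketch (hypothetical handler name, mirroring the real
 * cmovcc handlers further down): the macro fetches the ModRM byte itself,
 * expands in place and returns, so a handler body is just the condition:
 *
 *      FNIEMOP_DEF(iemOp_cmovz_example)
 *      {
 *          CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
 *      }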
2164 */ 2165 #define CMOV_X(a_Cnd) \ 2166 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \ 2167 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \ 2168 { \ 2169 switch (pVCpu->iem.s.enmEffOpSize) \ 2170 { \ 2171 case IEMMODE_16BIT: \ 2172 IEM_MC_BEGIN(0, 1); \ 2173 IEM_MC_LOCAL(uint16_t, u16Tmp); \ 2174 a_Cnd { \ 2175 IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \ 2176 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \ 2177 } IEM_MC_ENDIF(); \ 2178 IEM_MC_ADVANCE_RIP(); \ 2179 IEM_MC_END(); \ 2180 return VINF_SUCCESS; \ 2181 \ 2182 case IEMMODE_32BIT: \ 2183 IEM_MC_BEGIN(0, 1); \ 2184 IEM_MC_LOCAL(uint32_t, u32Tmp); \ 2185 a_Cnd { \ 2186 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \ 2187 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \ 2188 } IEM_MC_ELSE() { \ 2189 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \ 2190 } IEM_MC_ENDIF(); \ 2191 IEM_MC_ADVANCE_RIP(); \ 2192 IEM_MC_END(); \ 2193 return VINF_SUCCESS; \ 2194 \ 2195 case IEMMODE_64BIT: \ 2196 IEM_MC_BEGIN(0, 1); \ 2197 IEM_MC_LOCAL(uint64_t, u64Tmp); \ 2198 a_Cnd { \ 2199 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); \ 2200 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \ 2201 } IEM_MC_ENDIF(); \ 2202 IEM_MC_ADVANCE_RIP(); \ 2203 IEM_MC_END(); \ 2204 return VINF_SUCCESS; \ 2205 \ 2206 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 2207 } \ 2208 } \ 2209 else \ 2210 { \ 2211 switch (pVCpu->iem.s.enmEffOpSize) \ 2212 { \ 2213 case IEMMODE_16BIT: \ 2214 IEM_MC_BEGIN(0, 2); \ 2215 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \ 2216 IEM_MC_LOCAL(uint16_t, u16Tmp); \ 2217 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \ 2218 IEM_MC_FETCH_MEM_U16(u16Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \ 2219 a_Cnd { \ 2220 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Tmp); \ 2221 } IEM_MC_ENDIF(); \ 2222 IEM_MC_ADVANCE_RIP(); \ 2223 IEM_MC_END(); \ 2224 return VINF_SUCCESS; \ 2225 \ 2226 case IEMMODE_32BIT: \ 2227 IEM_MC_BEGIN(0, 2); \ 2228 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \ 2229 IEM_MC_LOCAL(uint32_t, u32Tmp); \ 2230 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \ 2231 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \ 2232 a_Cnd { \ 2233 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); \ 2234 } IEM_MC_ELSE() { \ 2235 IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); \ 2236 } IEM_MC_ENDIF(); \ 2237 IEM_MC_ADVANCE_RIP(); \ 2238 IEM_MC_END(); \ 2239 return VINF_SUCCESS; \ 2240 \ 2241 case IEMMODE_64BIT: \ 2242 IEM_MC_BEGIN(0, 2); \ 2243 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \ 2244 IEM_MC_LOCAL(uint64_t, u64Tmp); \ 2245 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \ 2246 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \ 2247 a_Cnd { \ 2248 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); \ 2249 } IEM_MC_ENDIF(); \ 2250 IEM_MC_ADVANCE_RIP(); \ 2251 IEM_MC_END(); \ 2252 return VINF_SUCCESS; \ 2253 \ 2254 IEM_NOT_REACHED_DEFAULT_CASE_RET(); \ 2255 } \ 2256 } do {} while (0) 2257 2258 2259 2260 /** Opcode 0x0f 0x40. 
*/ 2261 FNIEMOP_DEF(iemOp_cmovo_Gv_Ev) 2262 { 2263 IEMOP_MNEMONIC(cmovo_Gv_Ev, "cmovo Gv,Ev"); 2264 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF)); 2265 } 2266 2267 2268 /** Opcode 0x0f 0x41. */ 2269 FNIEMOP_DEF(iemOp_cmovno_Gv_Ev) 2270 { 2271 IEMOP_MNEMONIC(cmovno_Gv_Ev, "cmovno Gv,Ev"); 2272 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF)); 2273 } 2274 2275 2276 /** Opcode 0x0f 0x42. */ 2277 FNIEMOP_DEF(iemOp_cmovc_Gv_Ev) 2278 { 2279 IEMOP_MNEMONIC(cmovc_Gv_Ev, "cmovc Gv,Ev"); 2280 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF)); 2281 } 2282 2283 2284 /** Opcode 0x0f 0x43. */ 2285 FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev) 2286 { 2287 IEMOP_MNEMONIC(cmovnc_Gv_Ev, "cmovnc Gv,Ev"); 2288 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF)); 2289 } 2290 2291 2292 /** Opcode 0x0f 0x44. */ 2293 FNIEMOP_DEF(iemOp_cmove_Gv_Ev) 2294 { 2295 IEMOP_MNEMONIC(cmove_Gv_Ev, "cmove Gv,Ev"); 2296 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF)); 2297 } 2298 2299 2300 /** Opcode 0x0f 0x45. */ 2301 FNIEMOP_DEF(iemOp_cmovne_Gv_Ev) 2302 { 2303 IEMOP_MNEMONIC(cmovne_Gv_Ev, "cmovne Gv,Ev"); 2304 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF)); 2305 } 2306 2307 2308 /** Opcode 0x0f 0x46. */ 2309 FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev) 2310 { 2311 IEMOP_MNEMONIC(cmovbe_Gv_Ev, "cmovbe Gv,Ev"); 2312 CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); 2313 } 2314 2315 2316 /** Opcode 0x0f 0x47. */ 2317 FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev) 2318 { 2319 IEMOP_MNEMONIC(cmovnbe_Gv_Ev, "cmovnbe Gv,Ev"); 2320 CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF)); 2321 } 2322 2323 2324 /** Opcode 0x0f 0x48. */ 2325 FNIEMOP_DEF(iemOp_cmovs_Gv_Ev) 2326 { 2327 IEMOP_MNEMONIC(cmovs_Gv_Ev, "cmovs Gv,Ev"); 2328 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF)); 2329 } 2330 2331 2332 /** Opcode 0x0f 0x49. */ 2333 FNIEMOP_DEF(iemOp_cmovns_Gv_Ev) 2334 { 2335 IEMOP_MNEMONIC(cmovns_Gv_Ev, "cmovns Gv,Ev"); 2336 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF)); 2337 } 2338 2339 2340 /** Opcode 0x0f 0x4a. */ 2341 FNIEMOP_DEF(iemOp_cmovp_Gv_Ev) 2342 { 2343 IEMOP_MNEMONIC(cmovp_Gv_Ev, "cmovp Gv,Ev"); 2344 CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF)); 2345 } 2346 2347 2348 /** Opcode 0x0f 0x4b. */ 2349 FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev) 2350 { 2351 IEMOP_MNEMONIC(cmovnp_Gv_Ev, "cmovnp Gv,Ev"); 2352 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF)); 2353 } 2354 2355 2356 /** Opcode 0x0f 0x4c. */ 2357 FNIEMOP_DEF(iemOp_cmovl_Gv_Ev) 2358 { 2359 IEMOP_MNEMONIC(cmovl_Gv_Ev, "cmovl Gv,Ev"); 2360 CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF)); 2361 } 2362 2363 2364 /** Opcode 0x0f 0x4d. */ 2365 FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev) 2366 { 2367 IEMOP_MNEMONIC(cmovnl_Gv_Ev, "cmovnl Gv,Ev"); 2368 CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF)); 2369 } 2370 2371 2372 /** Opcode 0x0f 0x4e. */ 2373 FNIEMOP_DEF(iemOp_cmovle_Gv_Ev) 2374 { 2375 IEMOP_MNEMONIC(cmovle_Gv_Ev, "cmovle Gv,Ev"); 2376 CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); 2377 } 2378 2379 2380 /** Opcode 0x0f 0x4f. 
*/ 2381 FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev) 2382 { 2383 IEMOP_MNEMONIC(cmovnle_Gv_Ev, "cmovnle Gv,Ev"); 2384 CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_AND_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF)); 2385 } 2386 2387 #undef CMOV_X 2388 2389 /** Opcode 0x0f 0x50 - vmovmskps Gy, Ups */ 2390 FNIEMOP_STUB(iemOp_vmovmskps_Gy_Ups); 2391 /** Opcode 0x66 0x0f 0x50 - vmovmskpd Gy,Upd */ 2392 FNIEMOP_STUB(iemOp_vmovmskpd_Gy_Upd); 2393 /* Opcode 0xf3 0x0f 0x50 - invalid */ 2394 /* Opcode 0xf2 0x0f 0x50 - invalid */ 2395 2396 /** Opcode 0x0f 0x51 - vsqrtps Vps, Wps */ 2397 FNIEMOP_STUB(iemOp_vsqrtps_Vps_Wps); 2398 /** Opcode 0x66 0x0f 0x51 - vsqrtpd Vpd, Wpd */ 2399 FNIEMOP_STUB(iemOp_vsqrtpd_Vpd_Wpd); 2400 /** Opcode 0xf3 0x0f 0x51 - vsqrtss Vss, Hss, Wss */ 2401 FNIEMOP_STUB(iemOp_vsqrtss_Vss_Hss_Wss); 2402 /** Opcode 0xf2 0x0f 0x51 - vsqrtsd Vsd, Hsd, Wsd */ 2403 FNIEMOP_STUB(iemOp_vsqrtsd_Vsd_Hsd_Wsd); 2404 2405 /** Opcode 0x0f 0x52 - vrsqrtps Vps, Wps */ 2406 FNIEMOP_STUB(iemOp_vrsqrtps_Vps_Wps); 2407 /* Opcode 0x66 0x0f 0x52 - invalid */ 2408 /** Opcode 0xf3 0x0f 0x52 - vrsqrtss Vss, Hss, Wss */ 2409 FNIEMOP_STUB(iemOp_vrsqrtss_Vss_Hss_Wss); 2410 /* Opcode 0xf2 0x0f 0x52 - invalid */ 2411 2412 /** Opcode 0x0f 0x53 - vrcpps Vps, Wps */ 2413 FNIEMOP_STUB(iemOp_vrcpps_Vps_Wps); 2414 /* Opcode 0x66 0x0f 0x53 - invalid */ 2415 /** Opcode 0xf3 0x0f 0x53 - vrcpss Vss, Hss, Wss */ 2416 FNIEMOP_STUB(iemOp_vrcpss_Vss_Hss_Wss); 2417 /* Opcode 0xf2 0x0f 0x53 - invalid */ 2418 2419 /** Opcode 0x0f 0x54 - vandps Vps, Hps, Wps */ 2420 FNIEMOP_STUB(iemOp_vandps_Vps_Hps_Wps); 2421 /** Opcode 0x66 0x0f 0x54 - vandpd Vpd, Hpd, Wpd */ 2422 FNIEMOP_STUB(iemOp_vandpd_Vpd_Hpd_Wpd); 2423 /* Opcode 0xf3 0x0f 0x54 - invalid */ 2424 /* Opcode 0xf2 0x0f 0x54 - invalid */ 2425 2426 /** Opcode 0x0f 0x55 - vandnps Vps, Hps, Wps */ 2427 FNIEMOP_STUB(iemOp_vandnps_Vps_Hps_Wps); 2428 /** Opcode 0x66 0x0f 0x55 - vandnpd Vpd, Hpd, Wpd */ 2429 FNIEMOP_STUB(iemOp_vandnpd_Vpd_Hpd_Wpd); 2430 /* Opcode 0xf3 0x0f 0x55 - invalid */ 2431 /* Opcode 0xf2 0x0f 0x55 - invalid */ 2432 2433 /** Opcode 0x0f 0x56 - vorps Vps, Hps, Wps */ 2434 FNIEMOP_STUB(iemOp_vorps_Vps_Hps_Wps); 2435 /** Opcode 0x66 0x0f 0x56 - vorpd Vpd, Hpd, Wpd */ 2436 FNIEMOP_STUB(iemOp_vorpd_Vpd_Hpd_Wpd); 2437 /* Opcode 0xf3 0x0f 0x56 - invalid */ 2438 /* Opcode 0xf2 0x0f 0x56 - invalid */ 2439 2440 /** Opcode 0x0f 0x57 - vxorps Vps, Hps, Wps */ 2441 FNIEMOP_STUB(iemOp_vxorps_Vps_Hps_Wps); 2442 /** Opcode 0x66 0x0f 0x57 - vxorpd Vpd, Hpd, Wpd */ 2443 FNIEMOP_STUB(iemOp_vxorpd_Vpd_Hpd_Wpd); 2444 /* Opcode 0xf3 0x0f 0x57 - invalid */ 2445 /* Opcode 0xf2 0x0f 0x57 - invalid */ 2446 2447 /** Opcode 0x0f 0x58 - vaddps Vps, Hps, Wps */ 2448 FNIEMOP_STUB(iemOp_vaddps_Vps_Hps_Wps); 2449 /** Opcode 0x66 0x0f 0x58 - vaddpd Vpd, Hpd, Wpd */ 2450 FNIEMOP_STUB(iemOp_vaddpd_Vpd_Hpd_Wpd); 2451 /** Opcode 0xf3 0x0f 0x58 - vaddss Vss, Hss, Wss */ 2452 FNIEMOP_STUB(iemOp_vaddss_Vss_Hss_Wss); 2453 /** Opcode 0xf2 0x0f 0x58 - vaddsd Vsd, Hsd, Wsd */ 2454 FNIEMOP_STUB(iemOp_vaddsd_Vsd_Hsd_Wsd); 2455 2456 /** Opcode 0x0f 0x59 - vmulps Vps, Hps, Wps */ 2457 FNIEMOP_STUB(iemOp_vmulps_Vps_Hps_Wps); 2458 /** Opcode 0x66 0x0f 0x59 - vmulpd Vpd, Hpd, Wpd */ 2459 FNIEMOP_STUB(iemOp_vmulpd_Vpd_Hpd_Wpd); 2460 /** Opcode 0xf3 0x0f 0x59 - vmulss Vss, Hss, Wss */ 2461 FNIEMOP_STUB(iemOp_vmulss_Vss_Hss_Wss); 2462 /** Opcode 0xf2 0x0f 0x59 - vmulsd Vsd, Hsd, Wsd */ 2463 FNIEMOP_STUB(iemOp_vmulsd_Vsd_Hsd_Wsd); 2464 2465 /** Opcode 0x0f 0x5a - vcvtps2pd Vpd, Wps */ 2466 FNIEMOP_STUB(iemOp_vcvtps2pd_Vpd_Wps); 2467 /** 
Opcode 0x66 0x0f 0x5a - vcvtpd2ps Vps, Wpd */ 2468 FNIEMOP_STUB(iemOp_vcvtpd2ps_Vps_Wpd); 2469 /** Opcode 0xf3 0x0f 0x5a - vcvtss2sd Vsd, Hx, Wss */ 2470 FNIEMOP_STUB(iemOp_vcvtss2sd_Vsd_Hx_Wss); 2471 /** Opcode 0xf2 0x0f 0x5a - vcvtsd2ss Vss, Hx, Wsd */ 2472 FNIEMOP_STUB(iemOp_vcvtsd2ss_Vss_Hx_Wsd); 2473 2474 /** Opcode 0x0f 0x5b - vcvtdq2ps Vps, Wdq */ 2475 FNIEMOP_STUB(iemOp_vcvtdq2ps_Vps_Wdq); 2476 /** Opcode 0x66 0x0f 0x5b - vcvtps2dq Vdq, Wps */ 2477 FNIEMOP_STUB(iemOp_vcvtps2dq_Vdq_Wps); 2478 /** Opcode 0xf3 0x0f 0x5b - vcvttps2dq Vdq, Wps */ 2479 FNIEMOP_STUB(iemOp_vcvttps2dq_Vdq_Wps); 2480 /* Opcode 0xf2 0x0f 0x5b - invalid */ 2481 2482 /** Opcode 0x0f 0x5c - vsubps Vps, Hps, Wps */ 2483 FNIEMOP_STUB(iemOp_vsubps_Vps_Hps_Wps); 2484 /** Opcode 0x66 0x0f 0x5c - vsubpd Vpd, Hpd, Wpd */ 2485 FNIEMOP_STUB(iemOp_vsubpd_Vpd_Hpd_Wpd); 2486 /** Opcode 0xf3 0x0f 0x5c - vsubss Vss, Hss, Wss */ 2487 FNIEMOP_STUB(iemOp_vsubss_Vss_Hss_Wss); 2488 /** Opcode 0xf2 0x0f 0x5c - vsubsd Vsd, Hsd, Wsd */ 2489 FNIEMOP_STUB(iemOp_vsubsd_Vsd_Hsd_Wsd); 2490 2491 /** Opcode 0x0f 0x5d - vminps Vps, Hps, Wps */ 2492 FNIEMOP_STUB(iemOp_vminps_Vps_Hps_Wps); 2493 /** Opcode 0x66 0x0f 0x5d - vminpd Vpd, Hpd, Wpd */ 2494 FNIEMOP_STUB(iemOp_vminpd_Vpd_Hpd_Wpd); 2495 /** Opcode 0xf3 0x0f 0x5d - vminss Vss, Hss, Wss */ 2496 FNIEMOP_STUB(iemOp_vminss_Vss_Hss_Wss); 2497 /** Opcode 0xf2 0x0f 0x5d - vminsd Vsd, Hsd, Wsd */ 2498 FNIEMOP_STUB(iemOp_vminsd_Vsd_Hsd_Wsd); 2499 2500 /** Opcode 0x0f 0x5e - vdivps Vps, Hps, Wps */ 2501 FNIEMOP_STUB(iemOp_vdivps_Vps_Hps_Wps); 2502 /** Opcode 0x66 0x0f 0x5e - vdivpd Vpd, Hpd, Wpd */ 2503 FNIEMOP_STUB(iemOp_vdivpd_Vpd_Hpd_Wpd); 2504 /** Opcode 0xf3 0x0f 0x5e - vdivss Vss, Hss, Wss */ 2505 FNIEMOP_STUB(iemOp_vdivss_Vss_Hss_Wss); 2506 /** Opcode 0xf2 0x0f 0x5e - vdivsd Vsd, Hsd, Wsd */ 2507 FNIEMOP_STUB(iemOp_vdivsd_Vsd_Hsd_Wsd); 2508 2509 /** Opcode 0x0f 0x5f - vmaxps Vps, Hps, Wps */ 2510 FNIEMOP_STUB(iemOp_vmaxps_Vps_Hps_Wps); 2511 /** Opcode 0x66 0x0f 0x5f - vmaxpd Vpd, Hpd, Wpd */ 2512 FNIEMOP_STUB(iemOp_vmaxpd_Vpd_Hpd_Wpd); 2513 /** Opcode 0xf3 0x0f 0x5f - vmaxss Vss, Hss, Wss */ 2514 FNIEMOP_STUB(iemOp_vmaxss_Vss_Hss_Wss); 2515 /** Opcode 0xf2 0x0f 0x5f - vmaxsd Vsd, Hsd, Wsd */ 2516 FNIEMOP_STUB(iemOp_vmaxsd_Vsd_Hsd_Wsd); 2517 2518 /** 2519 * Common worker for SSE2 instructions on the forms: 2520 * pxxxx xmm1, xmm2/mem128 2521 * 2522 * The 2nd operand is the first half of a register, which in the memory case 2523 * means a 64-bit fetch done with 128-bit alignment checking (the operand is 2524 * architecturally a full mem128, but only its low half is used). 2525 * 2526 * Exceptions type 4. 2527 */ 2528 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl) 2529 { 2530 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2531 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2532 { 2533 /* 2534 * Register, register. 2535 */ 2536 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2537 IEM_MC_BEGIN(2, 0); 2538 IEM_MC_ARG(PRTUINT128U, pDst, 0); 2539 IEM_MC_ARG(uint64_t const *, pSrc, 1); 2540 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2541 IEM_MC_PREPARE_SSE_USAGE(); 2542 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2543 IEM_MC_REF_XREG_U64_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2544 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 2545 IEM_MC_ADVANCE_RIP(); 2546 IEM_MC_END(); 2547 } 2548 else 2549 { 2550 /* 2551 * Register, memory.
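 * (Only 64 bits are fetched here, but IEM_MC_FETCH_MEM_U64_ALIGN_U128 below
 * still performs the 128-bit SSE alignment check, which appears to match
 * real-CPU #GP behaviour for misaligned operands.)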
2552 */ 2553 IEM_MC_BEGIN(2, 2); 2554 IEM_MC_ARG(PRTUINT128U, pDst, 0); 2555 IEM_MC_LOCAL(uint64_t, uSrc); 2556 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1); 2557 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2558 2559 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2560 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2561 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2562 IEM_MC_FETCH_MEM_U64_ALIGN_U128(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 2563 2564 IEM_MC_PREPARE_SSE_USAGE(); 2565 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2566 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 2567 2568 IEM_MC_ADVANCE_RIP(); 2569 IEM_MC_END(); 2570 } 2571 return VINF_SUCCESS; 2572 } 2573 2574 2575 /** 2576 * Common worker for SSE2 instructions on the forms: 2577 * pxxxx xmm1, xmm2/mem128 2578 * 2579 * The 2nd operand is the first half of a register, which in the memory case 2580 * means a 32-bit memory access for MMX and 128-bit aligned 64-bit or 128-bit 2581 * memory accessed for MMX. 2582 * 2583 * Exceptions type 4. 2584 */ 2585 FNIEMOP_DEF_1(iemOpCommonSse_LowLow_To_Full, PCIEMOPMEDIAF1L1, pImpl) 2586 { 2587 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2588 if (!pImpl->pfnU64) 2589 return IEMOP_RAISE_INVALID_OPCODE(); 2590 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2591 { 2592 /* 2593 * Register, register. 2594 */ 2595 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 2596 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */ 2597 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2598 IEM_MC_BEGIN(2, 0); 2599 IEM_MC_ARG(uint64_t *, pDst, 0); 2600 IEM_MC_ARG(uint32_t const *, pSrc, 1); 2601 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2602 IEM_MC_PREPARE_FPU_USAGE(); 2603 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 2604 IEM_MC_REF_MREG_U32_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 2605 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 2606 IEM_MC_ADVANCE_RIP(); 2607 IEM_MC_END(); 2608 } 2609 else 2610 { 2611 /* 2612 * Register, memory. 2613 */ 2614 IEM_MC_BEGIN(2, 2); 2615 IEM_MC_ARG(uint64_t *, pDst, 0); 2616 IEM_MC_LOCAL(uint32_t, uSrc); 2617 IEM_MC_ARG_LOCAL_REF(uint32_t const *, pSrc, uSrc, 1); 2618 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2619 2620 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2621 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2622 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2623 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 2624 2625 IEM_MC_PREPARE_FPU_USAGE(); 2626 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 2627 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 2628 2629 IEM_MC_ADVANCE_RIP(); 2630 IEM_MC_END(); 2631 } 2632 return VINF_SUCCESS; 2633 } 2634 2635 2636 /** Opcode 0x0f 0x60 - punpcklbw Pq, Qd */ 2637 FNIEMOP_DEF(iemOp_punpcklbw_Pq_Qd) 2638 { 2639 IEMOP_MNEMONIC(punpcklbw, "punpcklbw Pq, Qd"); 2640 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw); 2641 } 2642 2643 /** Opcode 0x66 0x0f 0x60 - vpunpcklbw Vx, Hx, W */ 2644 FNIEMOP_DEF(iemOp_vpunpcklbw_Vx_Hx_Wx) 2645 { 2646 IEMOP_MNEMONIC(vpunpcklbw, "vpunpcklbw Vx, Hx, Wx"); 2647 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklbw); 2648 } 2649 2650 /* Opcode 0xf3 0x0f 0x60 - invalid */ 2651 2652 2653 /** Opcode 0x0f 0x61 - punpcklwd Pq, Qd */ 2654 FNIEMOP_DEF(iemOp_punpcklwd_Pq_Qd) 2655 { 2656 IEMOP_MNEMONIC(punpcklwd, "punpcklwd Pq, Qd"); /** @todo AMD mark the MMX version as 3DNow!. 
Intel says MMX CPUID req. */ 2657 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpcklwd); 2658 } 2659 2660 /** Opcode 0x66 0x0f 0x61 - vpunpcklwd Vx, Hx, Wx */ 2661 FNIEMOP_DEF(iemOp_vpunpcklwd_Vx_Hx_Wx) 2662 { 2663 IEMOP_MNEMONIC(vpunpcklwd, "vpunpcklwd Vx, Hx, Wx"); 2664 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklwd); 2665 } 2666 2667 /* Opcode 0xf3 0x0f 0x61 - invalid */ 2668 2669 2670 /** Opcode 0x0f 0x62 - punpckldq Pq, Qd */ 2671 FNIEMOP_DEF(iemOp_punpckldq_Pq_Qd) 2672 { 2673 IEMOP_MNEMONIC(punpckldq, "punpckldq Pq, Qd"); 2674 return FNIEMOP_CALL_1(iemOpCommonMmx_LowLow_To_Full, &g_iemAImpl_punpckldq); 2675 } 2676 2677 /** Opcode 0x66 0x0f 0x62 - vpunpckldq Vx, Hx, Wx */ 2678 FNIEMOP_DEF(iemOp_vpunpckldq_Vx_Hx_Wx) 2679 { 2680 IEMOP_MNEMONIC(vpunpckldq, "vpunpckldq Vx, Hx, Wx"); 2681 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpckldq); 2682 } 2683 2684 /* Opcode 0xf3 0x0f 0x62 - invalid */ 2685 2686 2687 2688 /** Opcode 0x0f 0x63 - packsswb Pq, Qq */ 2689 FNIEMOP_STUB(iemOp_packsswb_Pq_Qq); 2690 /** Opcode 0x66 0x0f 0x63 - vpacksswb Vx, Hx, Wx */ 2691 FNIEMOP_STUB(iemOp_vpacksswb_Vx_Hx_Wx); 2692 /* Opcode 0xf3 0x0f 0x63 - invalid */ 2693 2694 /** Opcode 0x0f 0x64 - pcmpgtb Pq, Qq */ 2695 FNIEMOP_STUB(iemOp_pcmpgtb_Pq_Qq); 2696 /** Opcode 0x66 0x0f 0x64 - vpcmpgtb Vx, Hx, Wx */ 2697 FNIEMOP_STUB(iemOp_vpcmpgtb_Vx_Hx_Wx); 2698 /* Opcode 0xf3 0x0f 0x64 - invalid */ 2699 2700 /** Opcode 0x0f 0x65 - pcmpgtw Pq, Qq */ 2701 FNIEMOP_STUB(iemOp_pcmpgtw_Pq_Qq); 2702 /** Opcode 0x66 0x0f 0x65 - vpcmpgtw Vx, Hx, Wx */ 2703 FNIEMOP_STUB(iemOp_vpcmpgtw_Vx_Hx_Wx); 2704 /* Opcode 0xf3 0x0f 0x65 - invalid */ 2705 2706 /** Opcode 0x0f 0x66 - pcmpgtd Pq, Qq */ 2707 FNIEMOP_STUB(iemOp_pcmpgtd_Pq_Qq); 2708 /** Opcode 0x66 0x0f 0x66 - vpcmpgtd Vx, Hx, Wx */ 2709 FNIEMOP_STUB(iemOp_vpcmpgtd_Vx_Hx_Wx); 2710 /* Opcode 0xf3 0x0f 0x66 - invalid */ 2711 2712 /** Opcode 0x0f 0x67 - packuswb Pq, Qq */ 2713 FNIEMOP_STUB(iemOp_packuswb_Pq_Qq); 2714 /** Opcode 0x66 0x0f 0x67 - vpackuswb Vx, Hx, W */ 2715 FNIEMOP_STUB(iemOp_vpackuswb_Vx_Hx_W); 2716 /* Opcode 0xf3 0x0f 0x67 - invalid */ 2717 2718 2719 /** 2720 * Common worker for MMX instructions on the form: 2721 * pxxxx mm1, mm2/mem64 2722 * 2723 * The 2nd operand is the second half of a register, which in the memory case 2724 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access 2725 * where it may read the full 128 bits or only the upper 64 bits. 2726 * 2727 * Exceptions type 4. 2728 */ 2729 FNIEMOP_DEF_1(iemOpCommonMmx_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl) 2730 { 2731 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2732 AssertReturn(pImpl->pfnU64, IEMOP_RAISE_INVALID_OPCODE()); 2733 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2734 { 2735 /* 2736 * Register, register. 2737 */ 2738 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 2739 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? 
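 * (The code below assumes they are ignored: the MMX registers are taken
 * from the raw 3-bit ModRM fields only, without OR-ing in uRexReg/uRexB.)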
*/ 2740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2741 IEM_MC_BEGIN(2, 0); 2742 IEM_MC_ARG(uint64_t *, pDst, 0); 2743 IEM_MC_ARG(uint64_t const *, pSrc, 1); 2744 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2745 IEM_MC_PREPARE_FPU_USAGE(); 2746 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 2747 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 2748 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 2749 IEM_MC_ADVANCE_RIP(); 2750 IEM_MC_END(); 2751 } 2752 else 2753 { 2754 /* 2755 * Register, memory. 2756 */ 2757 IEM_MC_BEGIN(2, 2); 2758 IEM_MC_ARG(uint64_t *, pDst, 0); 2759 IEM_MC_LOCAL(uint64_t, uSrc); 2760 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1); 2761 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2762 2763 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2764 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2765 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2766 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 2767 2768 IEM_MC_PREPARE_FPU_USAGE(); 2769 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 2770 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 2771 2772 IEM_MC_ADVANCE_RIP(); 2773 IEM_MC_END(); 2774 } 2775 return VINF_SUCCESS; 2776 } 2777 2778 2779 /** 2780 * Common worker for SSE2 instructions on the form: 2781 * pxxxx xmm1, xmm2/mem128 2782 * 2783 * The 2nd operand is the second half of a register, which in the memory case 2784 * means a 64-bit memory access for MMX, and for SSE a 128-bit aligned access 2785 * where it may read the full 128 bits or only the upper 64 bits. 2786 * 2787 * Exceptions type 4. 2788 */ 2789 FNIEMOP_DEF_1(iemOpCommonSse_HighHigh_To_Full, PCIEMOPMEDIAF1H1, pImpl) 2790 { 2791 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2792 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2793 { 2794 /* 2795 * Register, register. 2796 */ 2797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2798 IEM_MC_BEGIN(2, 0); 2799 IEM_MC_ARG(PRTUINT128U, pDst, 0); 2800 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 2801 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2802 IEM_MC_PREPARE_SSE_USAGE(); 2803 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2804 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2805 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 2806 IEM_MC_ADVANCE_RIP(); 2807 IEM_MC_END(); 2808 } 2809 else 2810 { 2811 /* 2812 * Register, memory. 
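 * (Only the high qword of the source ends up being used by the HighHigh
 * operations, but a full aligned 128-bit fetch is done anyway; see the
 * remark on the fetch below.)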
2813 */ 2814 IEM_MC_BEGIN(2, 2); 2815 IEM_MC_ARG(PRTUINT128U, pDst, 0); 2816 IEM_MC_LOCAL(RTUINT128U, uSrc); 2817 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 2818 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2819 2820 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2821 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2822 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2823 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); /* Most CPUs probably only right high qword */ 2824 2825 IEM_MC_PREPARE_SSE_USAGE(); 2826 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2827 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 2828 2829 IEM_MC_ADVANCE_RIP(); 2830 IEM_MC_END(); 2831 } 2832 return VINF_SUCCESS; 2833 } 2834 2835 2836 /** Opcode 0x0f 0x68 - punpckhbw Pq, Qd */ 2837 FNIEMOP_DEF(iemOp_punpckhbw_Pq_Qd) 2838 { 2839 IEMOP_MNEMONIC(punpckhbw, "punpckhbw Pq, Qd"); 2840 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhbw); 2841 } 2842 2843 /** Opcode 0x66 0x0f 0x68 - vpunpckhbw Vx, Hx, Wx */ 2844 FNIEMOP_DEF(iemOp_vpunpckhbw_Vx_Hx_Wx) 2845 { 2846 IEMOP_MNEMONIC(vpunpckhbw, "vpunpckhbw Vx, Hx, Wx"); 2847 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhbw); 2848 } 2849 /* Opcode 0xf3 0x0f 0x68 - invalid */ 2850 2851 2852 /** Opcode 0x0f 0x69 - punpckhwd Pq, Qd */ 2853 FNIEMOP_DEF(iemOp_punpckhwd_Pq_Qd) 2854 { 2855 IEMOP_MNEMONIC(punpckhwd, "punpckhwd Pq, Qd"); 2856 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhwd); 2857 } 2858 2859 /** Opcode 0x66 0x0f 0x69 - vpunpckhwd Vx, Hx, Wx */ 2860 FNIEMOP_DEF(iemOp_vpunpckhwd_Vx_Hx_Wx) 2861 { 2862 IEMOP_MNEMONIC(vpunpckhwd, "vpunpckhwd Vx, Hx, Wx"); 2863 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhwd); 2864 2865 } 2866 /* Opcode 0xf3 0x0f 0x69 - invalid */ 2867 2868 2869 /** Opcode 0x0f 0x6a - punpckhdq Pq, Qd */ 2870 FNIEMOP_DEF(iemOp_punpckhdq_Pq_Qd) 2871 { 2872 IEMOP_MNEMONIC(punpckhdq, "punpckhdq Pq, Qd"); 2873 return FNIEMOP_CALL_1(iemOpCommonMmx_HighHigh_To_Full, &g_iemAImpl_punpckhdq); 2874 } 2875 2876 /** Opcode 0x66 0x0f 0x6a - vpunpckhdq Vx, Hx, W */ 2877 FNIEMOP_DEF(iemOp_vpunpckhdq_Vx_Hx_W) 2878 { 2879 IEMOP_MNEMONIC(vpunpckhdq, "vpunpckhdq Vx, Hx, W"); 2880 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhdq); 2881 } 2882 /* Opcode 0xf3 0x0f 0x6a - invalid */ 2883 2884 2885 /** Opcode 0x0f 0x6b - packssdw Pq, Qd */ 2886 FNIEMOP_STUB(iemOp_packssdw_Pq_Qd); 2887 /** Opcode 0x66 0x0f 0x6b - vpackssdw Vx, Hx, Wx */ 2888 FNIEMOP_STUB(iemOp_vpackssdw_Vx_Hx_Wx); 2889 /* Opcode 0xf3 0x0f 0x6b - invalid */ 2890 2891 2892 /* Opcode 0x0f 0x6c - invalid */ 2893 2894 /** Opcode 0x66 0x0f 0x6c - vpunpcklqdq Vx, Hx, Wx */ 2895 FNIEMOP_DEF(iemOp_vpunpcklqdq_Vx_Hx_Wx) 2896 { 2897 IEMOP_MNEMONIC(vpunpcklqdq, "vpunpcklqdq Vx, Hx, Wx"); 2898 return FNIEMOP_CALL_1(iemOpCommonSse_LowLow_To_Full, &g_iemAImpl_punpcklqdq); 2899 } 2900 2901 /* Opcode 0xf3 0x0f 0x6c - invalid */ 2902 /* Opcode 0xf2 0x0f 0x6c - invalid */ 2903 2904 2905 /* Opcode 0x0f 0x6d - invalid */ 2906 2907 /** Opcode 0x66 0x0f 0x6d - vpunpckhqdq Vx, Hx, W */ 2908 FNIEMOP_DEF(iemOp_vpunpckhqdq_Vx_Hx_W) 2909 { 2910 IEMOP_MNEMONIC(punpckhqdq, "punpckhqdq"); 2911 return FNIEMOP_CALL_1(iemOpCommonSse_HighHigh_To_Full, &g_iemAImpl_punpckhqdq); 2912 } 2913 2914 /* Opcode 0xf3 0x0f 0x6d - invalid */ 2915 2916 2917 /** Opcode 0x0f 0x6e - movd/q Pd, Ey */ 2918 FNIEMOP_DEF(iemOp_movd_q_Pd_Ey) 2919 { 2920 uint8_t bRm; 
IEM_OPCODE_GET_NEXT_U8(&bRm); 2921 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2922 IEMOP_MNEMONIC(movq_Pq_Eq, "movq Pq,Eq"); 2923 else 2924 IEMOP_MNEMONIC(movd_Pd_Ed, "movd Pd,Ed"); 2925 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2926 { 2927 /* MMX, greg */ 2928 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2929 IEM_MC_BEGIN(0, 1); 2930 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2931 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 2932 IEM_MC_LOCAL(uint64_t, u64Tmp); 2933 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2934 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2935 else 2936 IEM_MC_FETCH_GREG_U32_ZX_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2937 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 2938 IEM_MC_ADVANCE_RIP(); 2939 IEM_MC_END(); 2940 } 2941 else 2942 { 2943 /* MMX, [mem] */ 2944 IEM_MC_BEGIN(0, 2); 2945 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2946 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 2947 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2948 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2949 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 2950 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2951 { 2952 IEM_MC_LOCAL(uint64_t, u64Tmp); 2953 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 2954 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 2955 } 2956 else 2957 { 2958 IEM_MC_LOCAL(uint32_t, u32Tmp); 2959 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 2960 IEM_MC_STORE_MREG_U32_ZX_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u32Tmp); 2961 } 2962 IEM_MC_ADVANCE_RIP(); 2963 IEM_MC_END(); 2964 } 2965 return VINF_SUCCESS; 2966 } 2967 2968 /** Opcode 0x66 0x0f 0x6e - vmovd/q Vy, Ey */ 2969 FNIEMOP_DEF(iemOp_vmovd_q_Vy_Ey) 2970 { 2971 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2972 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2973 IEMOP_MNEMONIC(vmovdq_Wq_Eq, "vmovq Wq,Eq"); 2974 else 2975 IEMOP_MNEMONIC(vmovdq_Wd_Ed, "vmovd Wd,Ed"); 2976 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2977 { 2978 /* XMM, greg*/ 2979 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2980 IEM_MC_BEGIN(0, 1); 2981 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2982 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2983 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 2984 { 2985 IEM_MC_LOCAL(uint64_t, u64Tmp); 2986 IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2987 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 2988 } 2989 else 2990 { 2991 IEM_MC_LOCAL(uint32_t, u32Tmp); 2992 IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2993 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 2994 } 2995 IEM_MC_ADVANCE_RIP(); 2996 IEM_MC_END(); 2997 } 2998 else 2999 { 3000 /* XMM, [mem] */ 3001 IEM_MC_BEGIN(0, 2); 3002 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3003 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); /** @todo order */ 3004 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3005 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3006 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3007 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3008 { 3009 IEM_MC_LOCAL(uint64_t, u64Tmp); 3010 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3011 IEM_MC_STORE_XREG_U64_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Tmp); 3012
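/* The _ZX_ stores above and below zero the remaining high bits of the XMM
   register, matching the architectural zero-extension done by movd/movq. */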
} 3013 else 3014 { 3015 IEM_MC_LOCAL(uint32_t, u32Tmp); 3016 IEM_MC_FETCH_MEM_U32(u32Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3017 IEM_MC_STORE_XREG_U32_ZX_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Tmp); 3018 } 3019 IEM_MC_ADVANCE_RIP(); 3020 IEM_MC_END(); 3021 } 3022 return VINF_SUCCESS; 3023 } 3024 3025 /* Opcode 0xf3 0x0f 0x6e - invalid */ 3026 3027 3028 /** Opcode 0x0f 0x6f - movq Pq, Qq */ 3029 FNIEMOP_DEF(iemOp_movq_Pq_Qq) 3030 { 3031 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3032 IEMOP_MNEMONIC(movq_Pq_Qq, "movq Pq,Qq"); 3033 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3034 { 3035 /* 3036 * Register, register. 3037 */ 3038 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 3039 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */ 3040 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3041 IEM_MC_BEGIN(0, 1); 3042 IEM_MC_LOCAL(uint64_t, u64Tmp); 3043 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3044 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3045 IEM_MC_FETCH_MREG_U64(u64Tmp, bRm & X86_MODRM_RM_MASK); 3046 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3047 IEM_MC_ADVANCE_RIP(); 3048 IEM_MC_END(); 3049 } 3050 else 3051 { 3052 /* 3053 * Register, memory. 3054 */ 3055 IEM_MC_BEGIN(0, 2); 3056 IEM_MC_LOCAL(uint64_t, u64Tmp); 3057 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3058 3059 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3061 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3062 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3063 IEM_MC_FETCH_MEM_U64(u64Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3064 IEM_MC_STORE_MREG_U64((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK, u64Tmp); 3065 3066 IEM_MC_ADVANCE_RIP(); 3067 IEM_MC_END(); 3068 } 3069 return VINF_SUCCESS; 3070 } 3071 3072 /** Opcode 0x66 0x0f 0x6f - vmovdqa Vx, Wx */ 3073 FNIEMOP_DEF(iemOp_vmovdqa_Vx_Wx) 3074 { 3075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3076 IEMOP_MNEMONIC(movdqa_Vdq_Wdq, "movdqa Vdq,Wdq"); 3077 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3078 { 3079 /* 3080 * Register, register. 3081 */ 3082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3083 IEM_MC_BEGIN(0, 0); 3084 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3085 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3086 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 3087 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3088 IEM_MC_ADVANCE_RIP(); 3089 IEM_MC_END(); 3090 } 3091 else 3092 { 3093 /* 3094 * Register, memory. 3095 */ 3096 IEM_MC_BEGIN(0, 2); 3097 IEM_MC_LOCAL(RTUINT128U, u128Tmp); 3098 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3099 3100 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3101 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3102 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3103 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3104 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3105 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp); 3106 3107 IEM_MC_ADVANCE_RIP(); 3108 IEM_MC_END(); 3109 } 3110 return VINF_SUCCESS; 3111 } 3112 3113 /** Opcode 0xf3 0x0f 0x6f - vmovdqu Vx, Wx */ 3114 FNIEMOP_DEF(iemOp_vmovdqu_Vx_Wx) 3115 { 3116 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3117 IEMOP_MNEMONIC(movdqu_Vdq_Wdq, "movdqu Vdq,Wdq"); 3118 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3119 { 3120 /* 3121 * Register, register. 
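 * (Identical to the movdqa form above; movdqa and movdqu only differ in the
 * memory path, where movdqu skips the 16-byte alignment check.)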
3122 */ 3123 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3124 IEM_MC_BEGIN(0, 0); 3125 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3126 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3127 IEM_MC_COPY_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, 3128 (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3129 IEM_MC_ADVANCE_RIP(); 3130 IEM_MC_END(); 3131 } 3132 else 3133 { 3134 /* 3135 * Register, memory. 3136 */ 3137 IEM_MC_BEGIN(0, 2); 3138 IEM_MC_LOCAL(RTUINT128U, u128Tmp); 3139 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3140 3141 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3142 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3143 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3144 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3145 IEM_MC_FETCH_MEM_U128(u128Tmp, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3146 IEM_MC_STORE_XREG_U128(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u128Tmp); 3147 3148 IEM_MC_ADVANCE_RIP(); 3149 IEM_MC_END(); 3150 } 3151 return VINF_SUCCESS; 3152 } 3153 3154 3155 /** Opcode 0x0f 0x70 - pshufw Pq, Qq, Ib */ 3156 FNIEMOP_DEF(iemOp_pshufw_Pq_Qq_Ib) 3157 { 3158 IEMOP_MNEMONIC(pshufw_Pq_Qq, "pshufw Pq,Qq,Ib"); 3159 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3160 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3161 { 3162 /* 3163 * Register, register. 3164 */ 3165 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3166 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3167 3168 IEM_MC_BEGIN(3, 0); 3169 IEM_MC_ARG(uint64_t *, pDst, 0); 3170 IEM_MC_ARG(uint64_t const *, pSrc, 1); 3171 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3172 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); 3173 IEM_MC_PREPARE_FPU_USAGE(); 3174 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3175 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 3176 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg); 3177 IEM_MC_ADVANCE_RIP(); 3178 IEM_MC_END(); 3179 } 3180 else 3181 { 3182 /* 3183 * Register, memory. 3184 */ 3185 IEM_MC_BEGIN(3, 2); 3186 IEM_MC_ARG(uint64_t *, pDst, 0); 3187 IEM_MC_LOCAL(uint64_t, uSrc); 3188 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1); 3189 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3190 3191 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3192 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3193 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3194 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3195 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); 3196 3197 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3198 IEM_MC_PREPARE_FPU_USAGE(); 3199 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3200 IEM_MC_CALL_MMX_AIMPL_3(iemAImpl_pshufw, pDst, pSrc, bEvilArg); 3201 3202 IEM_MC_ADVANCE_RIP(); 3203 IEM_MC_END(); 3204 } 3205 return VINF_SUCCESS; 3206 } 3207 3208 /** Opcode 0x66 0x0f 0x70 - vpshufd Vx, Wx, Ib */ 3209 FNIEMOP_DEF(iemOp_vpshufd_Vx_Wx_Ib) 3210 { 3211 IEMOP_MNEMONIC(vpshufd_Vx_Wx_Ib, "vpshufd Vx,Wx,Ib"); 3212 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3213 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3214 { 3215 /* 3216 * Register, register.
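 * (An immediate byte follows the ModRM byte here; each 2-bit field of it
 * selects the source dword for the corresponding destination dword.)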
*/ 3218 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3219 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3220 3221 IEM_MC_BEGIN(3, 0); 3222 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3223 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 3224 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3225 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3226 IEM_MC_PREPARE_SSE_USAGE(); 3227 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3228 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3229 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg); 3230 IEM_MC_ADVANCE_RIP(); 3231 IEM_MC_END(); 3232 } 3233 else 3234 { 3235 /* 3236 * Register, memory. 3237 */ 3238 IEM_MC_BEGIN(3, 2); 3239 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3240 IEM_MC_LOCAL(RTUINT128U, uSrc); 3241 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 3242 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3243 3244 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3245 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3246 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3247 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3248 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3249 3250 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3251 IEM_MC_PREPARE_SSE_USAGE(); 3252 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3253 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufd, pDst, pSrc, bEvilArg); 3254 3255 IEM_MC_ADVANCE_RIP(); 3256 IEM_MC_END(); 3257 } 3258 return VINF_SUCCESS; 3259 } 3260 3261 /** Opcode 0xf3 0x0f 0x70 - vpshufhw Vx, Wx, Ib */ 3262 FNIEMOP_DEF(iemOp_vpshufhw_Vx_Wx_Ib) 3263 { 3264 IEMOP_MNEMONIC(vpshufhw_Vx_Wx_Ib, "vpshufhw Vx,Wx,Ib"); 3265 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3266 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3267 { 3268 /* 3269 * Register, register. 3270 */ 3271 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3272 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3273 3274 IEM_MC_BEGIN(3, 0); 3275 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3276 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 3277 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3278 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3279 IEM_MC_PREPARE_SSE_USAGE(); 3280 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3281 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3282 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg); 3283 IEM_MC_ADVANCE_RIP(); 3284 IEM_MC_END(); 3285 } 3286 else 3287 { 3288 /* 3289 * Register, memory.
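 * (pshufhw shuffles only the four high words according to the immediate;
 * the low qword is copied through unchanged.)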
3290 */ 3291 IEM_MC_BEGIN(3, 2); 3292 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3293 IEM_MC_LOCAL(RTUINT128U, uSrc); 3294 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 3295 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3296 3297 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3298 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3299 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3301 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3302 3303 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3304 IEM_MC_PREPARE_SSE_USAGE(); 3305 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3306 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshufhw, pDst, pSrc, bEvilArg); 3307 3308 IEM_MC_ADVANCE_RIP(); 3309 IEM_MC_END(); 3310 } 3311 return VINF_SUCCESS; 3312 } 3313 3314 /** Opcode 0xf2 0x0f 0x70 - vpshuflw Vx, Wx, Ib */ 3315 FNIEMOP_DEF(iemOp_vpshuflw_Vx_Wx_Ib) 3316 { 3317 IEMOP_MNEMONIC(vpshuflw_Vx_Wx_Ib, "vpshuflw Vx,Wx,Ib"); 3318 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3319 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3320 { 3321 /* 3322 * Register, register. 3323 */ 3324 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3325 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3326 3327 IEM_MC_BEGIN(3, 0); 3328 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3329 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 3330 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3331 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3332 IEM_MC_PREPARE_SSE_USAGE(); 3333 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3334 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3335 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg); 3336 IEM_MC_ADVANCE_RIP(); 3337 IEM_MC_END(); 3338 } 3339 else 3340 { 3341 /* 3342 * Register, memory. 3343 */ 3344 IEM_MC_BEGIN(3, 2); 3345 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3346 IEM_MC_LOCAL(RTUINT128U, uSrc); 3347 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 3348 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3349 3350 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3351 uint8_t bEvil; IEM_OPCODE_GET_NEXT_U8(&bEvil); 3352 IEM_MC_ARG_CONST(uint8_t, bEvilArg, /*=*/ bEvil, 2); 3353 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3354 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3355 3356 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3357 IEM_MC_PREPARE_SSE_USAGE(); 3358 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3359 IEM_MC_CALL_SSE_AIMPL_3(iemAImpl_pshuflw, pDst, pSrc, bEvilArg); 3360 3361 IEM_MC_ADVANCE_RIP(); 3362 IEM_MC_END(); 3363 } 3364 return VINF_SUCCESS; 3365 } 3366 3367 3368 /** Opcode 0x0f 0x71 11/2. */ 3369 FNIEMOP_STUB_1(iemOp_Grp12_psrlw_Nq_Ib, uint8_t, bRm); 3370 3371 /** Opcode 0x66 0x0f 0x71 11/2. */ 3372 FNIEMOP_STUB_1(iemOp_Grp12_vpsrlw_Hx_Ux_Ib, uint8_t, bRm); 3373 3374 /** Opcode 0x0f 0x71 11/4. */ 3375 FNIEMOP_STUB_1(iemOp_Grp12_psraw_Nq_Ib, uint8_t, bRm); 3376 3377 /** Opcode 0x66 0x0f 0x71 11/4. */ 3378 FNIEMOP_STUB_1(iemOp_Grp12_vpsraw_Hx_Ux_Ib, uint8_t, bRm); 3379 3380 /** Opcode 0x0f 0x71 11/6. */ 3381 FNIEMOP_STUB_1(iemOp_Grp12_psllw_Nq_Ib, uint8_t, bRm); 3382 3383 /** Opcode 0x66 0x0f 0x71 11/6. */ 3384 FNIEMOP_STUB_1(iemOp_Grp12_vpsllw_Hx_Ux_Ib, uint8_t, bRm); 3385 3386 3387 /** 3388 * Group 12 jump table for register variant. 
3389 */ 3390 IEM_STATIC const PFNIEMOPRM g_apfnGroup12RegReg[] = 1703 IEM_STATIC const PFNIEMOPRM g_apfnVexGroup12RegReg[] = 3391 1704 { 3392 1705 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3393 1706 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3394 /* /2 */ iemOp_Grp12_psrlw_Nq_Ib, iemOp_Grp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1707 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsrlw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3395 1708 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3396 /* /4 */ iemOp_Grp12_psraw_Nq_Ib, iemOp_Grp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1709 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsraw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3397 1710 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3398 /* /6 */ iemOp_Grp12_psllw_Nq_Ib, iemOp_Grp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1711 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp12_vpsllw_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3399 1712 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8) 3400 1713 }; 3401 AssertCompile(RT_ELEMENTS(g_apfnGroup12RegReg) == 8*4); 3402 3403 3404 /** Opcode 0x0f 0x71. */ 3405 FNIEMOP_DEF(iemOp_Grp12) 1714 AssertCompile(RT_ELEMENTS(g_apfnVexGroup12RegReg) == 8*4); 1715 1716 1717 /** Opcode VEX.0F 0x71. */ 1718 FNIEMOP_DEF(iemOp_VGrp12) 3406 1719 { 3407 1720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3408 1721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3409 1722 /* register, register */ 3410 return FNIEMOP_CALL_1(g_apfnGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 3411 + pVCpu->iem.s.idxPrefix], bRm); 1723 return FNIEMOP_CALL_1(g_apfnVexGroup12RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 1724 + pVCpu->iem.s.idxPrefix], bRm); 3412 1725 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm); 3413 1726 } 3414 1727 3415 1728 3416 /** Opcode 0x0f 0x72 11/2. */ 3417 FNIEMOP_STUB_1(iemOp_Grp13_psrld_Nq_Ib, uint8_t, bRm); 3418 3419 /** Opcode 0x66 0x0f 0x72 11/2. */ 3420 FNIEMOP_STUB_1(iemOp_Grp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm); 3421 3422 /** Opcode 0x0f 0x72 11/4. */ 3423 FNIEMOP_STUB_1(iemOp_Grp13_psrad_Nq_Ib, uint8_t, bRm); 3424 3425 /** Opcode 0x66 0x0f 0x72 11/4. */ 3426 FNIEMOP_STUB_1(iemOp_Grp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm); 3427 3428 /** Opcode 0x0f 0x72 11/6. */ 3429 FNIEMOP_STUB_1(iemOp_Grp13_pslld_Nq_Ib, uint8_t, bRm); 3430 3431 /** Opcode 0x66 0x0f 0x72 11/6. */ 3432 FNIEMOP_STUB_1(iemOp_Grp13_vpslld_Hx_Ux_Ib, uint8_t, bRm); 1729 /* Opcode VEX.0F 0x72 11/2 - invalid. */ 1730 /** Opcode VEX.66.0F 0x72 11/2. */ 1731 FNIEMOP_STUB_1(iemOp_VGrp13_vpsrld_Hx_Ux_Ib, uint8_t, bRm); 1732 1733 /* Opcode VEX.0F 0x72 11/4 - invalid. */ 1734 /** Opcode VEX.66.0F 0x72 11/4. */ 1735 FNIEMOP_STUB_1(iemOp_VGrp13_vpsrad_Hx_Ux_Ib, uint8_t, bRm); 1736 1737 /* Opcode VEX.0F 0x72 11/6 - invalid. */ 1738 /** Opcode VEX.66.0F 0x72 11/6. */ 1739 FNIEMOP_STUB_1(iemOp_VGrp13_vpslld_Hx_Ux_Ib, uint8_t, bRm); 3433 1740 3434 1741 … 3435 1742 * Group 13 jump table for register variant.
3437 1744 */ 3438 IEM_STATIC const PFNIEMOPRM g_apfnGroup13RegReg[] = 1745 IEM_STATIC const PFNIEMOPRM g_apfnVexGroup13RegReg[] = 3439 1746 { 3440 1747 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3441 1748 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3442 /* /2 */ iemOp_Grp13_psrld_Nq_Ib, iemOp_Grp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1749 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3443 1750 /* /3 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3444 /* /4 */ iemOp_Grp13_psrad_Nq_Ib, iemOp_Grp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1751 /* /4 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpsrad_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3445 1752 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3446 /* /6 */ iemOp_Grp13_pslld_Nq_Ib, iemOp_Grp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1753 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp13_vpslld_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3447 1754 /* /7 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8) 3448 1755 }; 3449 AssertCompile(RT_ELEMENTS(g_apfnGroup13RegReg) == 8*4); 3450 3451 /** Opcode 0x0f 0x72. */ 3452 FNIEMOP_DEF(iemOp_Grp13) 1756 AssertCompile(RT_ELEMENTS(g_apfnVexGroup13RegReg) == 8*4); 1757 1758 /** Opcode VEX.0F 0x72. */ 1759 FNIEMOP_DEF(iemOp_VGrp13) 3453 1760 { 3454 1761 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3455 1762 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3456 1763 /* register, register */ 3457 return FNIEMOP_CALL_1(g_apfnGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 3458 + pVCpu->iem.s.idxPrefix], bRm); 1764 return FNIEMOP_CALL_1(g_apfnVexGroup13RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 1765 + pVCpu->iem.s.idxPrefix], bRm); 3459 1766 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm); 3460 1767 } 3461 1768 3462 1769 3463 /** Opcode 0x0f 0x73 11/2. */ 3464 FNIEMOP_STUB_1(iemOp_Grp14_psrlq_Nq_Ib, uint8_t, bRm); 3465 3466 /** Opcode 0x66 0x0f 0x73 11/2. */ 3467 FNIEMOP_STUB_1(iemOp_Grp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm); 3468 3469 /** Opcode 0x66 0x0f 0x73 11/3. */ 3470 FNIEMOP_STUB_1(iemOp_Grp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT 3471 3472 /** Opcode 0x0f 0x73 11/6. */ 3473 FNIEMOP_STUB_1(iemOp_Grp14_psllq_Nq_Ib, uint8_t, bRm); 3474 3475 /** Opcode 0x66 0x0f 0x73 11/6. */ 3476 FNIEMOP_STUB_1(iemOp_Grp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm); 3477 3478 /** Opcode 0x66 0x0f 0x73 11/7. */ 3479 FNIEMOP_STUB_1(iemOp_Grp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); //NEXT 1770 /* Opcode VEX.0F 0x73 11/2 - invalid. */ 1771 /** Opcode VEX.66.0F 0x73 11/2. */ 1772 FNIEMOP_STUB_1(iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, uint8_t, bRm); 1773 1774 /** Opcode VEX.66.0F 0x73 11/3. */ 1775 FNIEMOP_STUB_1(iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, uint8_t, bRm); 1776 1777 /* Opcode VEX.0F 0x73 11/6 - invalid. */ 1778 /** Opcode VEX.66.0F 0x73 11/6. */ 1779 FNIEMOP_STUB_1(iemOp_VGrp14_vpsllq_Hx_Ux_Ib, uint8_t, bRm); 1780 1781 /** Opcode VEX.66.0F 0x73 11/7. */ 1782 FNIEMOP_STUB_1(iemOp_VGrp14_vpslldq_Hx_Ux_Ib, uint8_t, bRm); 1783 3480 1784 3481 /** 3482 * Group 14 jump table for register variant.
3483 1786 */ 3484 IEM_STATIC const PFNIEMOPRM g_apfnGroup14RegReg[] = 1787 IEM_STATIC const PFNIEMOPRM g_apfnVexGroup14RegReg[] = 3485 1788 { 3486 1789 /* /0 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3487 1790 /* /1 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3488 /* /2 */ iemOp_Grp14_psrlq_Nq_Ib, iemOp_Grp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1791 /* /2 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrlq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3489 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1792 /* /3 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsrldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3490 1793 /* /4 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3491 1794 /* /5 */ IEMOP_X4(iemOp_InvalidWithRMNeedImm8), 3492 /* /6 */ iemOp_Grp14_psllq_Nq_Ib, iemOp_Grp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1795 /* /6 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpsllq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3493 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_Grp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 1796 /* /7 */ iemOp_InvalidWithRMNeedImm8, iemOp_VGrp14_vpslldq_Hx_Ux_Ib, iemOp_InvalidWithRMNeedImm8, iemOp_InvalidWithRMNeedImm8, 3494 1797 }; 3495 AssertCompile(RT_ELEMENTS(g_apfnGroup14RegReg) == 8*4); 3496 3497 3498 /** Opcode 0x0f 0x73. */ 3499 FNIEMOP_DEF(iemOp_Grp14) 1798 AssertCompile(RT_ELEMENTS(g_apfnVexGroup14RegReg) == 8*4); 1799 1800 1801 /** Opcode VEX.0F 0x73. */ 1802 FNIEMOP_DEF(iemOp_VGrp14) 3500 1803 { 3501 1804 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3502 1805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3503 1806 /* register, register */ 3504 return FNIEMOP_CALL_1(g_apfnGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 3505 + pVCpu->iem.s.idxPrefix], bRm); 1807 return FNIEMOP_CALL_1(g_apfnVexGroup14RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 1808 + pVCpu->iem.s.idxPrefix], bRm); 3506 1809 return FNIEMOP_CALL_1(iemOp_InvalidWithRMNeedImm8, bRm); 3507 1810 } 3508 1811 3509 1812 1813 ///** 1814 // * Common worker for SSE2 instructions on the forms: 1815 // * pxxx xmm1, xmm2/mem128 1816 // * 1817 // * Proper alignment of the 128-bit operand is enforced. 1818 // * Exceptions type 4. SSE2 cpuid checks. 1819 // */ 1820 //FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl) 1821 //{ 1822 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1823 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1824 // { 1825 // /* 1826 // * Register, register. 1827 // */ 1828 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1829 // IEM_MC_BEGIN(2, 0); 1830 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1831 // IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 1832 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1833 // IEM_MC_PREPARE_SSE_USAGE(); 1834 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1835 // IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 1836 // IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 1837 // IEM_MC_ADVANCE_RIP(); 1838 // IEM_MC_END(); 1839 // } 1840 // else 1841 // { 1842 // /* 1843 // * Register, memory.
1844 // */ 1845 // IEM_MC_BEGIN(2, 2); 1846 // IEM_MC_ARG(PRTUINT128U, pDst, 0); 1847 // IEM_MC_LOCAL(RTUINT128U, uSrc); 1848 // IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 1849 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1850 // 1851 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 1852 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1853 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1854 // IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 1855 // 1856 // IEM_MC_PREPARE_SSE_USAGE(); 1857 // IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1858 // IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 1859 // 1860 // IEM_MC_ADVANCE_RIP(); 1861 // IEM_MC_END(); 1862 // } 1863 // return VINF_SUCCESS; 1864 //} 1865 1866 1867 /* Opcode VEX.0F 0x74 - invalid */ 1868 1869 /** Opcode VEX.66.0F 0x74 - vpcmpeqb Vx, Hx, Wx */ 1870 FNIEMOP_STUB(iemOp_vpcmpeqb_Vx_Hx_Wx); 1871 //FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx) 1872 //{ 1873 // IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb"); 1874 // return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb); 1875 //} 1876 1877 /* Opcode VEX.F3.0F 0x74 - invalid */ 1878 /* Opcode VEX.F2.0F 0x74 - invalid */ 1879 1880 1881 /* Opcode VEX.0F 0x75 - invalid */ 1882 1883 /** Opcode VEX.66.0F 0x75 - vpcmpeqw Vx, Hx, Wx */ 1884 FNIEMOP_STUB(iemOp_vpcmpeqw_Vx_Hx_Wx); 1885 //FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx) 1886 //{ 1887 // IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw"); 1888 // return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw); 1889 //} 1890 1891 /* Opcode VEX.F3.0F 0x75 - invalid */ 1892 /* Opcode VEX.F2.0F 0x75 - invalid */ 1893 1894 1895 /* Opcode VEX.0F 0x76 - invalid */ 1896 1897 /** Opcode VEX.66.0F 0x76 - vpcmpeqd Vx, Hx, Wx */ 1898 FNIEMOP_STUB(iemOp_vpcmpeqd_Vx_Hx_Wx); 1899 //FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx) 1900 //{ 1901 // IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd"); 1902 // return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd); 1903 //} 1904 1905 /* Opcode VEX.F3.0F 0x76 - invalid */ 1906 /* Opcode VEX.F2.0F 0x76 - invalid */ 1907 1908 1909 /** Opcode VEX.0F 0x77 - vzeroupperv vzeroallv */ 1910 FNIEMOP_STUB(iemOp_vzeroupperv__vzeroallv); 1911 /* Opcode VEX.66.0F 0x77 - invalid */ 1912 /* Opcode VEX.F3.0F 0x77 - invalid */ 1913 /* Opcode VEX.F2.0F 0x77 - invalid */ 1914 1915 /* Opcode VEX.0F 0x78 - invalid */ 1916 /* Opcode VEX.66.0F 0x78 - invalid */ 1917 /* Opcode VEX.F3.0F 0x78 - invalid */ 1918 /* Opcode VEX.F2.0F 0x78 - invalid */ 1919 1920 /* Opcode VEX.0F 0x79 - invalid */ 1921 /* Opcode VEX.66.0F 0x79 - invalid */ 1922 /* Opcode VEX.F3.0F 0x79 - invalid */ 1923 /* Opcode VEX.F2.0F 0x79 - invalid */ 1924 1925 /* Opcode VEX.0F 0x7a - invalid */ 1926 /* Opcode VEX.66.0F 0x7a - invalid */ 1927 /* Opcode VEX.F3.0F 0x7a - invalid */ 1928 /* Opcode VEX.F2.0F 0x7a - invalid */ 1929 1930 /* Opcode VEX.0F 0x7b - invalid */ 1931 /* Opcode VEX.66.0F 0x7b - invalid */ 1932 /* Opcode VEX.F3.0F 0x7b - invalid */ 1933 /* Opcode VEX.F2.0F 0x7b - invalid */ 1934 1935 /* Opcode VEX.0F 0x7c - invalid */ 1936 /** Opcode VEX.66.0F 0x7c - vhaddpd Vpd, Hpd, Wpd */ 1937 FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd); 1938 /* Opcode VEX.F3.0F 0x7c - invalid */ 1939 /** Opcode VEX.F2.0F 0x7c - vhaddps Vps, Hps, Wps */ 1940 FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps); 1941 1942 /* Opcode VEX.0F 0x7d - invalid */ 1943 /** Opcode VEX.66.0F 0x7d - vhsubpd Vpd, Hpd, Wpd */ 1944 FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd); 1945 /* Opcode VEX.F3.0F 0x7d - invalid */ 1946 /** Opcode 
VEX.F2.0F 0x7d - vhsubps Vps, Hps, Wps */ 1947 FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps); 1948 1949 1950 /* Opcode VEX.0F 0x7e - invalid */ 1951 1952 /** Opcode VEX.66.0F 0x7e - vmovd_q Ey, Vy */ 1953 FNIEMOP_STUB(iemOp_vmovd_q_Ey_Vy); 1954 //FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy) 1955 //{ 1956 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 1957 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1958 // IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq"); 1959 // else 1960 // IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd"); 1961 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 1962 // { 1963 // /* greg, XMM */ 1964 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1965 // IEM_MC_BEGIN(0, 1); 1966 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1967 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1968 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1969 // { 1970 // IEM_MC_LOCAL(uint64_t, u64Tmp); 1971 // IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1972 // IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 1973 // } 1974 // else 1975 // { 1976 // IEM_MC_LOCAL(uint32_t, u32Tmp); 1977 // IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1978 // IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 1979 // } 1980 // IEM_MC_ADVANCE_RIP(); 1981 // IEM_MC_END(); 1982 // } 1983 // else 1984 // { 1985 // /* [mem], XMM */ 1986 // IEM_MC_BEGIN(0, 2); 1987 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 1988 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 1989 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 1990 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 1991 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 1992 // if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 1993 // { 1994 // IEM_MC_LOCAL(uint64_t, u64Tmp); 1995 // IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 1996 // IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 1997 // } 1998 // else 1999 // { 2000 // IEM_MC_LOCAL(uint32_t, u32Tmp); 2001 // IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2002 // IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 2003 // } 2004 // IEM_MC_ADVANCE_RIP(); 2005 // IEM_MC_END(); 2006 // } 2007 // return VINF_SUCCESS; 2008 //} 2009 2010 /** Opcode VEX.F3.0F 0x7e - vmovq Vq, Wq */ 2011 FNIEMOP_STUB(iemOp_vmovq_Vq_Wq); 2012 /* Opcode VEX.F2.0F 0x7e - invalid */ 2013 2014 2015 /* Opcode VEX.0F 0x7f - invalid */ 2016 2017 /** Opcode VEX.66.0F 0x7f - vmovdqa Wx,Vx */ 2018 FNIEMOP_STUB(iemOp_vmovdqa_Wx_Vx); 2019 //FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx) 2020 //{ 2021 // IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx"); 2022 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2023 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2024 // { 2025 // /* 2026 // * Register, register. 2027 // */ 2028 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2029 // IEM_MC_BEGIN(0, 0); 2030 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2031 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2032 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2033 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2034 // IEM_MC_ADVANCE_RIP(); 2035 // IEM_MC_END(); 2036 // } 2037 // else 2038 // { 2039 // /* 2040 // * Register, memory. 
2041 // */ 2042 // IEM_MC_BEGIN(0, 2); 2043 // IEM_MC_LOCAL(RTUINT128U, u128Tmp); 2044 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2045 // 2046 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2047 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2048 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2049 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2050 // 2051 // IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2052 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); 2053 // 2054 // IEM_MC_ADVANCE_RIP(); 2055 // IEM_MC_END(); 2056 // } 2057 // return VINF_SUCCESS; 2058 //} 2059 2060 /** Opcode VEX.F3.0F 0x7f - vmovdqu Wx,Vx */ 2061 FNIEMOP_STUB(iemOp_vmovdqu_Wx_Vx); 2062 //FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx) 2063 //{ 2064 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2065 // IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx"); 2066 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2067 // { 2068 // /* 2069 // * Register, register. 2070 // */ 2071 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2072 // IEM_MC_BEGIN(0, 0); 2073 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2074 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2075 // IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 2076 // ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2077 // IEM_MC_ADVANCE_RIP(); 2078 // IEM_MC_END(); 2079 // } 2080 // else 2081 // { 2082 // /* 2083 // * Register, memory. 2084 // */ 2085 // IEM_MC_BEGIN(0, 2); 2086 // IEM_MC_LOCAL(RTUINT128U, u128Tmp); 2087 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2088 // 2089 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2090 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2091 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2092 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2093 // 2094 // IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2095 // IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); 2096 // 2097 // IEM_MC_ADVANCE_RIP(); 2098 // IEM_MC_END(); 2099 // } 2100 // return VINF_SUCCESS; 2101 //} 2102 2103 /* Opcode VEX.F2.0F 0x7f - invalid */ 2104 2105 2106 /* Opcode VEX.0F 0x80 - invalid */ 2107 /* Opcode VEX.0F 0x81 - invalid */ 2108 /* Opcode VEX.0F 0x82 - invalid */ 2109 /* Opcode VEX.0F 0x83 - invalid */ 2110 /* Opcode VEX.0F 0x84 - invalid */ 2111 /* Opcode VEX.0F 0x85 - invalid */ 2112 /* Opcode VEX.0F 0x86 - invalid */ 2113 /* Opcode VEX.0F 0x87 - invalid */ 2114 /* Opcode VEX.0F 0x88 - invalid */ 2115 /* Opcode VEX.0F 0x89 - invalid */ 2116 /* Opcode VEX.0F 0x8a - invalid */ 2117 /* Opcode VEX.0F 0x8b - invalid */ 2118 /* Opcode VEX.0F 0x8c - invalid */ 2119 /* Opcode VEX.0F 0x8d - invalid */ 2120 /* Opcode VEX.0F 0x8e - invalid */ 2121 /* Opcode VEX.0F 0x8f - invalid */ 2122 /* Opcode VEX.0F 0x90 - invalid */ 2123 /* Opcode VEX.0F 0x91 - invalid */ 2124 /* Opcode VEX.0F 0x92 - invalid */ 2125 /* Opcode VEX.0F 0x93 - invalid */ 2126 /* Opcode VEX.0F 0x94 - invalid */ 2127 /* Opcode VEX.0F 0x95 - invalid */ 2128 /* Opcode VEX.0F 0x96 - invalid */ 2129 /* Opcode VEX.0F 0x97 - invalid */ 2130 /* Opcode VEX.0F 0x98 - invalid */ 2131 /* Opcode VEX.0F 0x99 - invalid */ 2132 /* Opcode VEX.0F 0x9a - invalid */ 2133 /* Opcode VEX.0F 0x9b - invalid */ 2134 /* Opcode VEX.0F 0x9c - invalid */ 2135 /* Opcode VEX.0F 0x9d - invalid */ 2136 /* Opcode VEX.0F 0x9e - invalid */ 2137 /* Opcode VEX.0F 0x9f - invalid */ 2138 /* Opcode VEX.0F 0xa0 - invalid */ 2139 /* Opcode VEX.0F 0xa1 - 
invalid */ 2140 /* Opcode VEX.0F 0xa2 - invalid */ 2141 /* Opcode VEX.0F 0xa3 - invalid */ 2142 /* Opcode VEX.0F 0xa4 - invalid */ 2143 /* Opcode VEX.0F 0xa5 - invalid */ 2144 /* Opcode VEX.0F 0xa6 - invalid */ 2145 /* Opcode VEX.0F 0xa7 - invalid */ 2146 /* Opcode VEX.0F 0xa8 - invalid */ 2147 /* Opcode VEX.0F 0xa9 - invalid */ 2148 /* Opcode VEX.0F 0xaa - invalid */ 2149 /* Opcode VEX.0F 0xab - invalid */ 2150 /* Opcode VEX.0F 0xac - invalid */ 2151 /* Opcode VEX.0F 0xad - invalid */ 2152 2153 2154 /* Opcode VEX.0F 0xae mem/0 - invalid. */ 2155 /* Opcode VEX.0F 0xae mem/1 - invalid. */ 2156 3510 2157 /** 3511 * Common worker for MMX instructions on the form: 3512 * pxxx mm1, mm2/mem64 2158 * @opmaps grp15 2159 * @opcode !11/2 2160 * @oppfx none 2161 * @opcpuid sse 2162 * @opgroup og_sse_mxcsrsm 2163 * @opxcpttype 5 2164 * @optest op1=0 -> mxcsr=0 2165 * @optest op1=0x2083 -> mxcsr=0x2083 2166 * @optest op1=0xfffffffe -> value.xcpt=0xd 2167 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7 2168 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6 2169 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083 2170 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6 2171 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6 2172 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6 2173 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6 2174 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6 3513 2175 */ 3514 FNIEMOP_DEF_1(iemOpCommonMmx_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl) 3515 { 3516 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3517 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3518 { 3519 /* 3520 * Register, register. 3521 */ 3522 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 3523 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */ 3524 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3525 IEM_MC_BEGIN(2, 0); 3526 IEM_MC_ARG(uint64_t *, pDst, 0); 3527 IEM_MC_ARG(uint64_t const *, pSrc, 1); 3528 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3529 IEM_MC_PREPARE_FPU_USAGE(); 3530 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3531 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 3532 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 3533 IEM_MC_ADVANCE_RIP(); 3534 IEM_MC_END(); 3535 } 3536 else 3537 { 3538 /* 3539 * Register, memory. 3540 */ 3541 IEM_MC_BEGIN(2, 2); 3542 IEM_MC_ARG(uint64_t *, pDst, 0); 3543 IEM_MC_LOCAL(uint64_t, uSrc); 3544 IEM_MC_ARG_LOCAL_REF(uint64_t const *, pSrc, uSrc, 1); 3545 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3546 3547 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3548 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3549 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3550 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3551 3552 IEM_MC_PREPARE_FPU_USAGE(); 3553 IEM_MC_REF_MREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3554 IEM_MC_CALL_MMX_AIMPL_2(pImpl->pfnU64, pDst, pSrc); 3555 3556 IEM_MC_ADVANCE_RIP(); 3557 IEM_MC_END(); 3558 } 3559 return VINF_SUCCESS; 3560 } 3561 3562 3563 /** 3564 * Common worker for SSE2 instructions on the forms: 3565 * pxxx xmm1, xmm2/mem128 3566 * 3567 * Proper alignment of the 128-bit operand is enforced. 3568 * Exceptions type 4. SSE2 cpuid checks. 3569 */ 3570 FNIEMOP_DEF_1(iemOpCommonSse2_FullFull_To_Full, PCIEMOPMEDIAF2, pImpl) 3571 { 3572 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3573 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3574 { 3575 /* 3576 * Register, register. 
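 *
 * (Hedged sketch, ours and not IEM code: for the pcmpeq family dispatched
 * through this worker, the pImpl->pfnU128 callback amounts to a lane-wise
 * compare, e.g. for pcmpeqb:
 *
 *     static void sketchPCmpEqB(PRTUINT128U puDst, PCRTUINT128U puSrc)
 *     {
 *         for (unsigned i = 0; i < 16; i++)
 *             puDst->au8[i] = puDst->au8[i] == puSrc->au8[i] ? 0xff : 0x00;
 *     }
 *
 * pcmpeqw and pcmpeqd are the same loop over au16[8] and au32[4].)
 *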
3577 */ 3578 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3579 IEM_MC_BEGIN(2, 0); 3580 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3581 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 3582 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3583 IEM_MC_PREPARE_SSE_USAGE(); 3584 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3585 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 3586 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 3587 IEM_MC_ADVANCE_RIP(); 3588 IEM_MC_END(); 3589 } 3590 else 3591 { 3592 /* 3593 * Register, memory. 3594 */ 3595 IEM_MC_BEGIN(2, 2); 3596 IEM_MC_ARG(PRTUINT128U, pDst, 0); 3597 IEM_MC_LOCAL(RTUINT128U, uSrc); 3598 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, pSrc, uSrc, 1); 3599 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3600 3601 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3602 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3603 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3604 IEM_MC_FETCH_MEM_U128_ALIGN_SSE(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 3605 3606 IEM_MC_PREPARE_SSE_USAGE(); 3607 IEM_MC_REF_XREG_U128(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3608 IEM_MC_CALL_SSE_AIMPL_2(pImpl->pfnU128, pDst, pSrc); 3609 3610 IEM_MC_ADVANCE_RIP(); 3611 IEM_MC_END(); 3612 } 3613 return VINF_SUCCESS; 3614 } 3615 3616 3617 /** Opcode 0x0f 0x74 - pcmpeqb Pq, Qq */ 3618 FNIEMOP_DEF(iemOp_pcmpeqb_Pq_Qq) 3619 { 3620 IEMOP_MNEMONIC(pcmpeqb, "pcmpeqb"); 3621 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqb); 3622 } 3623 3624 /** Opcode 0x66 0x0f 0x74 - vpcmpeqb Vx, Hx, Wx */ 3625 FNIEMOP_DEF(iemOp_vpcmpeqb_Vx_Hx_Wx) 3626 { 3627 IEMOP_MNEMONIC(vpcmpeqb, "vpcmpeqb"); 3628 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqb); 3629 } 3630 3631 /* Opcode 0xf3 0x0f 0x74 - invalid */ 3632 /* Opcode 0xf2 0x0f 0x74 - invalid */ 3633 3634 3635 /** Opcode 0x0f 0x75 - pcmpeqw Pq, Qq */ 3636 FNIEMOP_DEF(iemOp_pcmpeqw_Pq_Qq) 3637 { 3638 IEMOP_MNEMONIC(pcmpeqw, "pcmpeqw"); 3639 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqw); 3640 } 3641 3642 /** Opcode 0x66 0x0f 0x75 - vpcmpeqw Vx, Hx, Wx */ 3643 FNIEMOP_DEF(iemOp_vpcmpeqw_Vx_Hx_Wx) 3644 { 3645 IEMOP_MNEMONIC(vpcmpeqw, "vpcmpeqw"); 3646 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqw); 3647 } 3648 3649 /* Opcode 0xf3 0x0f 0x75 - invalid */ 3650 /* Opcode 0xf2 0x0f 0x75 - invalid */ 3651 3652 3653 /** Opcode 0x0f 0x76 - pcmpeqd Pq, Qq */ 3654 FNIEMOP_DEF(iemOp_pcmpeqd_Pq_Qq) 3655 { 3656 IEMOP_MNEMONIC(pcmpeqd, "pcmpeqd"); 3657 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pcmpeqd); 3658 } 3659 3660 /** Opcode 0x66 0x0f 0x76 - vpcmpeqd Vx, Hx, Wx */ 3661 FNIEMOP_DEF(iemOp_vpcmpeqd_Vx_Hx_Wx) 3662 { 3663 IEMOP_MNEMONIC(vpcmpeqd, "vpcmpeqd"); 3664 return FNIEMOP_CALL_1(iemOpCommonSse2_FullFull_To_Full, &g_iemAImpl_pcmpeqd); 3665 } 3666 3667 /* Opcode 0xf3 0x0f 0x76 - invalid */ 3668 /* Opcode 0xf2 0x0f 0x76 - invalid */ 3669 3670 3671 /** Opcode 0x0f 0x77 - emms vzeroupperv vzeroallv */ 3672 FNIEMOP_STUB(iemOp_emms__vzeroupperv__vzeroallv); 3673 /* Opcode 0x66 0x0f 0x77 - invalid */ 3674 /* Opcode 0xf3 0x0f 0x77 - invalid */ 3675 /* Opcode 0xf2 0x0f 0x77 - invalid */ 3676 3677 /** Opcode 0x0f 0x78 - VMREAD Ey, Gy */ 3678 FNIEMOP_STUB(iemOp_vmread_Ey_Gy); 3679 /* Opcode 0x66 0x0f 0x78 - AMD Group 17 */ 3680 FNIEMOP_STUB(iemOp_AmdGrp17); 3681 /* Opcode 0xf3 0x0f 0x78 - invalid */ 3682 /* Opcode 0xf2 0x0f 0x78 - invalid */ 3683 3684 /** Opcode 
0x0f 0x79 - VMWRITE Gy, Ey */ 3685 FNIEMOP_STUB(iemOp_vmwrite_Gy_Ey); 3686 /* Opcode 0x66 0x0f 0x79 - invalid */ 3687 /* Opcode 0xf3 0x0f 0x79 - invalid */ 3688 /* Opcode 0xf2 0x0f 0x79 - invalid */ 3689 3690 /* Opcode 0x0f 0x7a - invalid */ 3691 /* Opcode 0x66 0x0f 0x7a - invalid */ 3692 /* Opcode 0xf3 0x0f 0x7a - invalid */ 3693 /* Opcode 0xf2 0x0f 0x7a - invalid */ 3694 3695 /* Opcode 0x0f 0x7b - invalid */ 3696 /* Opcode 0x66 0x0f 0x7b - invalid */ 3697 /* Opcode 0xf3 0x0f 0x7b - invalid */ 3698 /* Opcode 0xf2 0x0f 0x7b - invalid */ 3699 3700 /* Opcode 0x0f 0x7c - invalid */ 3701 /** Opcode 0x66 0x0f 0x7c - vhaddpd Vpd, Hpd, Wpd */ 3702 FNIEMOP_STUB(iemOp_vhaddpd_Vpd_Hpd_Wpd); 3703 /* Opcode 0xf3 0x0f 0x7c - invalid */ 3704 /** Opcode 0xf2 0x0f 0x7c - vhaddps Vps, Hps, Wps */ 3705 FNIEMOP_STUB(iemOp_vhaddps_Vps_Hps_Wps); 3706 3707 /* Opcode 0x0f 0x7d - invalid */ 3708 /** Opcode 0x66 0x0f 0x7d - vhsubpd Vpd, Hpd, Wpd */ 3709 FNIEMOP_STUB(iemOp_vhsubpd_Vpd_Hpd_Wpd); 3710 /* Opcode 0xf3 0x0f 0x7d - invalid */ 3711 /** Opcode 0xf2 0x0f 0x7d - vhsubps Vps, Hps, Wps */ 3712 FNIEMOP_STUB(iemOp_vhsubps_Vps_Hps_Wps); 3713 3714 3715 /** Opcode 0x0f 0x7e - movd_q Ey, Pd */ 3716 FNIEMOP_DEF(iemOp_movd_q_Ey_Pd) 3717 { 3718 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3719 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3720 IEMOP_MNEMONIC(movq_Eq_Pq, "movq Eq,Pq"); 3721 else 3722 IEMOP_MNEMONIC(movd_Ed_Pd, "movd Ed,Pd"); 3723 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3724 { 3725 /* greg, MMX */ 3726 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3727 IEM_MC_BEGIN(0, 1); 3728 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3729 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 3730 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3731 { 3732 IEM_MC_LOCAL(uint64_t, u64Tmp); 3733 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3734 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 3735 } 3736 else 3737 { 3738 IEM_MC_LOCAL(uint32_t, u32Tmp); 3739 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3740 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 3741 } 3742 IEM_MC_ADVANCE_RIP(); 3743 IEM_MC_END(); 3744 } 3745 else 3746 { 3747 /* [mem], MMX */ 3748 IEM_MC_BEGIN(0, 2); 3749 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3750 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3751 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3752 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3753 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 3754 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3755 { 3756 IEM_MC_LOCAL(uint64_t, u64Tmp); 3757 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3758 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 3759 } 3760 else 3761 { 3762 IEM_MC_LOCAL(uint32_t, u32Tmp); 3763 IEM_MC_FETCH_MREG_U32(u32Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3764 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 3765 } 3766 IEM_MC_ADVANCE_RIP(); 3767 IEM_MC_END(); 3768 } 3769 return VINF_SUCCESS; 3770 } 3771 3772 /** Opcode 0x66 0x0f 0x7e - vmovd_q Ey, Vy */ 3773 FNIEMOP_DEF(iemOp_vmovd_q_Ey_Vy) 3774 { 3775 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3776 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3777 IEMOP_MNEMONIC(vmovq_Eq_Wq, "vmovq Eq,Wq"); 3778 else 3779 IEMOP_MNEMONIC(vmovd_Ed_Wd, "vmovd Ed,Wd"); 3780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3781 { 3782 /* greg, XMM */ 3783 
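/*
 * Hedged sketch, not IEM code: the REX.W checks below pick between the
 * movq and movd forms, i.e. whether the low 64 or the low 32 bits of the
 * XMM register reach the destination, the latter zero-extended:
 *
 *     uint64_t sketchMovDQ(PCRTUINT128U puSrc, bool fRexW)
 *     {
 *         return fRexW ? puSrc->au64[0] : (uint64_t)puSrc->au32[0];
 *     }
 */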
IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3784 IEM_MC_BEGIN(0, 1); 3785 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3786 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3787 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3788 { 3789 IEM_MC_LOCAL(uint64_t, u64Tmp); 3790 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3791 IEM_MC_STORE_GREG_U64((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u64Tmp); 3792 } 3793 else 3794 { 3795 IEM_MC_LOCAL(uint32_t, u32Tmp); 3796 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3797 IEM_MC_STORE_GREG_U32((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, u32Tmp); 3798 } 3799 IEM_MC_ADVANCE_RIP(); 3800 IEM_MC_END(); 3801 } 3802 else 3803 { 3804 /* [mem], XMM */ 3805 IEM_MC_BEGIN(0, 2); 3806 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3807 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3808 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 3809 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3811 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 3812 { 3813 IEM_MC_LOCAL(uint64_t, u64Tmp); 3814 IEM_MC_FETCH_XREG_U64(u64Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3815 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 3816 } 3817 else 3818 { 3819 IEM_MC_LOCAL(uint32_t, u32Tmp); 3820 IEM_MC_FETCH_XREG_U32(u32Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3821 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 3822 } 3823 IEM_MC_ADVANCE_RIP(); 3824 IEM_MC_END(); 3825 } 3826 return VINF_SUCCESS; 3827 } 3828 3829 /** Opcode 0xf3 0x0f 0x7e - vmovq Vq, Wq */ 3830 FNIEMOP_STUB(iemOp_vmovq_Vq_Wq); 3831 /* Opcode 0xf2 0x0f 0x7e - invalid */ 3832 3833 3834 /** Opcode 0x0f 0x7f - movq Qq, Pq */ 3835 FNIEMOP_DEF(iemOp_movq_Qq_Pq) 3836 { 3837 IEMOP_MNEMONIC(movq_Qq_Pq, "movq Qq,Pq"); 3838 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3839 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3840 { 3841 /* 3842 * Register, register. 3843 */ 3844 /** @todo testcase: REX.B / REX.R and MMX register indexing. Ignored? */ 3845 /** @todo testcase: REX.B / REX.R and segment register indexing. Ignored? */ 3846 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3847 IEM_MC_BEGIN(0, 1); 3848 IEM_MC_LOCAL(uint64_t, u64Tmp); 3849 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3850 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 3851 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3852 IEM_MC_STORE_MREG_U64(bRm & X86_MODRM_RM_MASK, u64Tmp); 3853 IEM_MC_ADVANCE_RIP(); 3854 IEM_MC_END(); 3855 } 3856 else 3857 { 3858 /* 3859 * Register, memory. 
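 *
 * (Decode note, ours: the mod/reg/rm tests used above and throughout this
 * file are just the three ModRM bit fields, which the X86_MODRM_*_MASK and
 * *_SHIFT constants express:
 *
 *     unsigned iMod = bRm >> 6;        // 3 = register operand, else memory
 *     unsigned iReg = (bRm >> 3) & 7;  // register / opcode extension field
 *     unsigned iRm  = bRm & 7;         // r/m field
 *
 * in 64-bit code the REX.R/REX.B bits widen iReg and iRm to four bits.)
 *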
3860 */ 3861 IEM_MC_BEGIN(0, 2); 3862 IEM_MC_LOCAL(uint64_t, u64Tmp); 3863 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3864 3865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3867 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 3868 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 3869 3870 IEM_MC_FETCH_MREG_U64(u64Tmp, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 3871 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 3872 3873 IEM_MC_ADVANCE_RIP(); 3874 IEM_MC_END(); 3875 } 3876 return VINF_SUCCESS; 3877 } 3878 3879 /** Opcode 0x66 0x0f 0x7f - vmovdqa Wx,Vx */ 3880 FNIEMOP_DEF(iemOp_vmovdqa_Wx_Vx) 3881 { 3882 IEMOP_MNEMONIC(vmovdqa_Wdq_Vdq, "vmovdqa Wx,Vx"); 3883 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3884 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3885 { 3886 /* 3887 * Register, register. 3888 */ 3889 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3890 IEM_MC_BEGIN(0, 0); 3891 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3892 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3893 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 3894 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3895 IEM_MC_ADVANCE_RIP(); 3896 IEM_MC_END(); 3897 } 3898 else 3899 { 3900 /* 3901 * Register, memory. 3902 */ 3903 IEM_MC_BEGIN(0, 2); 3904 IEM_MC_LOCAL(RTUINT128U, u128Tmp); 3905 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3906 3907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3909 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3910 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3911 3912 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3913 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); 3914 3915 IEM_MC_ADVANCE_RIP(); 3916 IEM_MC_END(); 3917 } 3918 return VINF_SUCCESS; 3919 } 3920 3921 /** Opcode 0xf3 0x0f 0x7f - vmovdqu Wx,Vx */ 3922 FNIEMOP_DEF(iemOp_vmovdqu_Wx_Vx) 3923 { 3924 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 3925 IEMOP_MNEMONIC(vmovdqu_Wdq_Vdq, "vmovdqu Wx,Vx"); 3926 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 3927 { 3928 /* 3929 * Register, register. 3930 */ 3931 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3932 IEM_MC_BEGIN(0, 0); 3933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 3935 IEM_MC_COPY_XREG_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 3936 ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3937 IEM_MC_ADVANCE_RIP(); 3938 IEM_MC_END(); 3939 } 3940 else 3941 { 3942 /* 3943 * Register, memory. 3944 */ 3945 IEM_MC_BEGIN(0, 2); 3946 IEM_MC_LOCAL(RTUINT128U, u128Tmp); 3947 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 3948 3949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 3950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3951 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 3952 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3953 3954 IEM_MC_FETCH_XREG_U128(u128Tmp, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 3955 IEM_MC_STORE_MEM_U128(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u128Tmp); 3956 3957 IEM_MC_ADVANCE_RIP(); 3958 IEM_MC_END(); 3959 } 3960 return VINF_SUCCESS; 3961 } 3962 3963 /* Opcode 0xf2 0x0f 0x7f - invalid */ 3964 3965 3966 3967 /** Opcode 0x0f 0x80. 
*/ 3968 FNIEMOP_DEF(iemOp_jo_Jv) 3969 { 3970 IEMOP_MNEMONIC(jo_Jv, "jo Jv"); 3971 IEMOP_HLP_MIN_386(); 3972 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 3973 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 3974 { 3975 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 3976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3977 3978 IEM_MC_BEGIN(0, 0); 3979 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 3980 IEM_MC_REL_JMP_S16(i16Imm); 3981 } IEM_MC_ELSE() { 3982 IEM_MC_ADVANCE_RIP(); 3983 } IEM_MC_ENDIF(); 3984 IEM_MC_END(); 3985 } 3986 else 3987 { 3988 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 3989 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 3990 3991 IEM_MC_BEGIN(0, 0); 3992 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 3993 IEM_MC_REL_JMP_S32(i32Imm); 3994 } IEM_MC_ELSE() { 3995 IEM_MC_ADVANCE_RIP(); 3996 } IEM_MC_ENDIF(); 3997 IEM_MC_END(); 3998 } 3999 return VINF_SUCCESS; 4000 } 4001 4002 4003 /** Opcode 0x0f 0x81. */ 4004 FNIEMOP_DEF(iemOp_jno_Jv) 4005 { 4006 IEMOP_MNEMONIC(jno_Jv, "jno Jv"); 4007 IEMOP_HLP_MIN_386(); 4008 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4009 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4010 { 4011 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4013 4014 IEM_MC_BEGIN(0, 0); 4015 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4016 IEM_MC_ADVANCE_RIP(); 4017 } IEM_MC_ELSE() { 4018 IEM_MC_REL_JMP_S16(i16Imm); 4019 } IEM_MC_ENDIF(); 4020 IEM_MC_END(); 4021 } 4022 else 4023 { 4024 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4025 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4026 4027 IEM_MC_BEGIN(0, 0); 4028 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4029 IEM_MC_ADVANCE_RIP(); 4030 } IEM_MC_ELSE() { 4031 IEM_MC_REL_JMP_S32(i32Imm); 4032 } IEM_MC_ENDIF(); 4033 IEM_MC_END(); 4034 } 4035 return VINF_SUCCESS; 4036 } 4037 4038 4039 /** Opcode 0x0f 0x82. */ 4040 FNIEMOP_DEF(iemOp_jc_Jv) 4041 { 4042 IEMOP_MNEMONIC(jc_Jv, "jc/jb/jnae Jv"); 4043 IEMOP_HLP_MIN_386(); 4044 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4045 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4046 { 4047 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4048 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4049 4050 IEM_MC_BEGIN(0, 0); 4051 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4052 IEM_MC_REL_JMP_S16(i16Imm); 4053 } IEM_MC_ELSE() { 4054 IEM_MC_ADVANCE_RIP(); 4055 } IEM_MC_ENDIF(); 4056 IEM_MC_END(); 4057 } 4058 else 4059 { 4060 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4061 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4062 4063 IEM_MC_BEGIN(0, 0); 4064 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4065 IEM_MC_REL_JMP_S32(i32Imm); 4066 } IEM_MC_ELSE() { 4067 IEM_MC_ADVANCE_RIP(); 4068 } IEM_MC_ENDIF(); 4069 IEM_MC_END(); 4070 } 4071 return VINF_SUCCESS; 4072 } 4073 4074 4075 /** Opcode 0x0f 0x83. 
*/ 4076 FNIEMOP_DEF(iemOp_jnc_Jv) 4077 { 4078 IEMOP_MNEMONIC(jnc_Jv, "jnc/jnb/jae Jv"); 4079 IEMOP_HLP_MIN_386(); 4080 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4081 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4082 { 4083 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4084 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4085 4086 IEM_MC_BEGIN(0, 0); 4087 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4088 IEM_MC_ADVANCE_RIP(); 4089 } IEM_MC_ELSE() { 4090 IEM_MC_REL_JMP_S16(i16Imm); 4091 } IEM_MC_ENDIF(); 4092 IEM_MC_END(); 4093 } 4094 else 4095 { 4096 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4097 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4098 4099 IEM_MC_BEGIN(0, 0); 4100 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4101 IEM_MC_ADVANCE_RIP(); 4102 } IEM_MC_ELSE() { 4103 IEM_MC_REL_JMP_S32(i32Imm); 4104 } IEM_MC_ENDIF(); 4105 IEM_MC_END(); 4106 } 4107 return VINF_SUCCESS; 4108 } 4109 4110 4111 /** Opcode 0x0f 0x84. */ 4112 FNIEMOP_DEF(iemOp_je_Jv) 4113 { 4114 IEMOP_MNEMONIC(je_Jv, "je/jz Jv"); 4115 IEMOP_HLP_MIN_386(); 4116 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4117 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4118 { 4119 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4120 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4121 4122 IEM_MC_BEGIN(0, 0); 4123 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4124 IEM_MC_REL_JMP_S16(i16Imm); 4125 } IEM_MC_ELSE() { 4126 IEM_MC_ADVANCE_RIP(); 4127 } IEM_MC_ENDIF(); 4128 IEM_MC_END(); 4129 } 4130 else 4131 { 4132 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4133 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4134 4135 IEM_MC_BEGIN(0, 0); 4136 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4137 IEM_MC_REL_JMP_S32(i32Imm); 4138 } IEM_MC_ELSE() { 4139 IEM_MC_ADVANCE_RIP(); 4140 } IEM_MC_ENDIF(); 4141 IEM_MC_END(); 4142 } 4143 return VINF_SUCCESS; 4144 } 4145 4146 4147 /** Opcode 0x0f 0x85. */ 4148 FNIEMOP_DEF(iemOp_jne_Jv) 4149 { 4150 IEMOP_MNEMONIC(jne_Jv, "jne/jnz Jv"); 4151 IEMOP_HLP_MIN_386(); 4152 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4153 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4154 { 4155 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4156 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4157 4158 IEM_MC_BEGIN(0, 0); 4159 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4160 IEM_MC_ADVANCE_RIP(); 4161 } IEM_MC_ELSE() { 4162 IEM_MC_REL_JMP_S16(i16Imm); 4163 } IEM_MC_ENDIF(); 4164 IEM_MC_END(); 4165 } 4166 else 4167 { 4168 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4169 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4170 4171 IEM_MC_BEGIN(0, 0); 4172 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4173 IEM_MC_ADVANCE_RIP(); 4174 } IEM_MC_ELSE() { 4175 IEM_MC_REL_JMP_S32(i32Imm); 4176 } IEM_MC_ENDIF(); 4177 IEM_MC_END(); 4178 } 4179 return VINF_SUCCESS; 4180 } 4181 4182 4183 /** Opcode 0x0f 0x86. 
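 *
 * jbe/jnbe test the unsigned ordering a cmp leaves behind: CF is the
 * borrow, ZF the equality.  Worked example, ours: comparing 3 with 5
 * borrows, so CF=1 and jbe is taken; equal operands set ZF with the same
 * outcome.  The predicate below thus reduces to:
 *
 *     fTaken = (fEFlags & (X86_EFL_CF | X86_EFL_ZF)) != 0;
 *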
*/ 4184 FNIEMOP_DEF(iemOp_jbe_Jv) 4185 { 4186 IEMOP_MNEMONIC(jbe_Jv, "jbe/jna Jv"); 4187 IEMOP_HLP_MIN_386(); 4188 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4189 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4190 { 4191 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4192 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4193 4194 IEM_MC_BEGIN(0, 0); 4195 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4196 IEM_MC_REL_JMP_S16(i16Imm); 4197 } IEM_MC_ELSE() { 4198 IEM_MC_ADVANCE_RIP(); 4199 } IEM_MC_ENDIF(); 4200 IEM_MC_END(); 4201 } 4202 else 4203 { 4204 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4206 4207 IEM_MC_BEGIN(0, 0); 4208 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4209 IEM_MC_REL_JMP_S32(i32Imm); 4210 } IEM_MC_ELSE() { 4211 IEM_MC_ADVANCE_RIP(); 4212 } IEM_MC_ENDIF(); 4213 IEM_MC_END(); 4214 } 4215 return VINF_SUCCESS; 4216 } 4217 4218 4219 /** Opcode 0x0f 0x87. */ 4220 FNIEMOP_DEF(iemOp_jnbe_Jv) 4221 { 4222 IEMOP_MNEMONIC(ja_Jv, "jnbe/ja Jv"); 4223 IEMOP_HLP_MIN_386(); 4224 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4225 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4226 { 4227 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4228 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4229 4230 IEM_MC_BEGIN(0, 0); 4231 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4232 IEM_MC_ADVANCE_RIP(); 4233 } IEM_MC_ELSE() { 4234 IEM_MC_REL_JMP_S16(i16Imm); 4235 } IEM_MC_ENDIF(); 4236 IEM_MC_END(); 4237 } 4238 else 4239 { 4240 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4241 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4242 4243 IEM_MC_BEGIN(0, 0); 4244 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4245 IEM_MC_ADVANCE_RIP(); 4246 } IEM_MC_ELSE() { 4247 IEM_MC_REL_JMP_S32(i32Imm); 4248 } IEM_MC_ENDIF(); 4249 IEM_MC_END(); 4250 } 4251 return VINF_SUCCESS; 4252 } 4253 4254 4255 /** Opcode 0x0f 0x88. */ 4256 FNIEMOP_DEF(iemOp_js_Jv) 4257 { 4258 IEMOP_MNEMONIC(js_Jv, "js Jv"); 4259 IEMOP_HLP_MIN_386(); 4260 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4261 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4262 { 4263 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4264 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4265 4266 IEM_MC_BEGIN(0, 0); 4267 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4268 IEM_MC_REL_JMP_S16(i16Imm); 4269 } IEM_MC_ELSE() { 4270 IEM_MC_ADVANCE_RIP(); 4271 } IEM_MC_ENDIF(); 4272 IEM_MC_END(); 4273 } 4274 else 4275 { 4276 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4277 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4278 4279 IEM_MC_BEGIN(0, 0); 4280 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4281 IEM_MC_REL_JMP_S32(i32Imm); 4282 } IEM_MC_ELSE() { 4283 IEM_MC_ADVANCE_RIP(); 4284 } IEM_MC_ENDIF(); 4285 IEM_MC_END(); 4286 } 4287 return VINF_SUCCESS; 4288 } 4289 4290 4291 /** Opcode 0x0f 0x89. 
*/ 4292 FNIEMOP_DEF(iemOp_jns_Jv) 4293 { 4294 IEMOP_MNEMONIC(jns_Jv, "jns Jv"); 4295 IEMOP_HLP_MIN_386(); 4296 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4297 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4298 { 4299 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4301 4302 IEM_MC_BEGIN(0, 0); 4303 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4304 IEM_MC_ADVANCE_RIP(); 4305 } IEM_MC_ELSE() { 4306 IEM_MC_REL_JMP_S16(i16Imm); 4307 } IEM_MC_ENDIF(); 4308 IEM_MC_END(); 4309 } 4310 else 4311 { 4312 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4313 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4314 4315 IEM_MC_BEGIN(0, 0); 4316 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4317 IEM_MC_ADVANCE_RIP(); 4318 } IEM_MC_ELSE() { 4319 IEM_MC_REL_JMP_S32(i32Imm); 4320 } IEM_MC_ENDIF(); 4321 IEM_MC_END(); 4322 } 4323 return VINF_SUCCESS; 4324 } 4325 4326 4327 /** Opcode 0x0f 0x8a. */ 4328 FNIEMOP_DEF(iemOp_jp_Jv) 4329 { 4330 IEMOP_MNEMONIC(jp_Jv, "jp Jv"); 4331 IEMOP_HLP_MIN_386(); 4332 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4333 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4334 { 4335 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4336 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4337 4338 IEM_MC_BEGIN(0, 0); 4339 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4340 IEM_MC_REL_JMP_S16(i16Imm); 4341 } IEM_MC_ELSE() { 4342 IEM_MC_ADVANCE_RIP(); 4343 } IEM_MC_ENDIF(); 4344 IEM_MC_END(); 4345 } 4346 else 4347 { 4348 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4349 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4350 4351 IEM_MC_BEGIN(0, 0); 4352 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4353 IEM_MC_REL_JMP_S32(i32Imm); 4354 } IEM_MC_ELSE() { 4355 IEM_MC_ADVANCE_RIP(); 4356 } IEM_MC_ENDIF(); 4357 IEM_MC_END(); 4358 } 4359 return VINF_SUCCESS; 4360 } 4361 4362 4363 /** Opcode 0x0f 0x8b. */ 4364 FNIEMOP_DEF(iemOp_jnp_Jv) 4365 { 4366 IEMOP_MNEMONIC(jnp_Jv, "jnp Jv"); 4367 IEMOP_HLP_MIN_386(); 4368 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4369 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4370 { 4371 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4372 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4373 4374 IEM_MC_BEGIN(0, 0); 4375 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4376 IEM_MC_ADVANCE_RIP(); 4377 } IEM_MC_ELSE() { 4378 IEM_MC_REL_JMP_S16(i16Imm); 4379 } IEM_MC_ENDIF(); 4380 IEM_MC_END(); 4381 } 4382 else 4383 { 4384 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4385 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4386 4387 IEM_MC_BEGIN(0, 0); 4388 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4389 IEM_MC_ADVANCE_RIP(); 4390 } IEM_MC_ELSE() { 4391 IEM_MC_REL_JMP_S32(i32Imm); 4392 } IEM_MC_ENDIF(); 4393 IEM_MC_END(); 4394 } 4395 return VINF_SUCCESS; 4396 } 4397 4398 4399 /** Opcode 0x0f 0x8c. 
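 *
 * From here on the signed conditions pair SF with OF: "less" holds when
 * the sign of the (possibly overflowed) result disagrees with OF.  Worked
 * example, ours: an 8-bit cmp of -128 against 1 overflows to 0x7f, leaving
 * SF=0 and OF=1, so SF != OF and jl is correctly taken.  As a predicate:
 *
 *     fTaken = !!(fEFlags & X86_EFL_SF) != !!(fEFlags & X86_EFL_OF);
 *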
*/ 4400 FNIEMOP_DEF(iemOp_jl_Jv) 4401 { 4402 IEMOP_MNEMONIC(jl_Jv, "jl/jnge Jv"); 4403 IEMOP_HLP_MIN_386(); 4404 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4405 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4406 { 4407 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4408 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4409 4410 IEM_MC_BEGIN(0, 0); 4411 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 4412 IEM_MC_REL_JMP_S16(i16Imm); 4413 } IEM_MC_ELSE() { 4414 IEM_MC_ADVANCE_RIP(); 4415 } IEM_MC_ENDIF(); 4416 IEM_MC_END(); 4417 } 4418 else 4419 { 4420 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4421 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4422 4423 IEM_MC_BEGIN(0, 0); 4424 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 4425 IEM_MC_REL_JMP_S32(i32Imm); 4426 } IEM_MC_ELSE() { 4427 IEM_MC_ADVANCE_RIP(); 4428 } IEM_MC_ENDIF(); 4429 IEM_MC_END(); 4430 } 4431 return VINF_SUCCESS; 4432 } 4433 4434 4435 /** Opcode 0x0f 0x8d. */ 4436 FNIEMOP_DEF(iemOp_jnl_Jv) 4437 { 4438 IEMOP_MNEMONIC(jge_Jv, "jnl/jge Jv"); 4439 IEMOP_HLP_MIN_386(); 4440 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4441 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4442 { 4443 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4444 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4445 4446 IEM_MC_BEGIN(0, 0); 4447 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 4448 IEM_MC_ADVANCE_RIP(); 4449 } IEM_MC_ELSE() { 4450 IEM_MC_REL_JMP_S16(i16Imm); 4451 } IEM_MC_ENDIF(); 4452 IEM_MC_END(); 4453 } 4454 else 4455 { 4456 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4457 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4458 4459 IEM_MC_BEGIN(0, 0); 4460 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 4461 IEM_MC_ADVANCE_RIP(); 4462 } IEM_MC_ELSE() { 4463 IEM_MC_REL_JMP_S32(i32Imm); 4464 } IEM_MC_ENDIF(); 4465 IEM_MC_END(); 4466 } 4467 return VINF_SUCCESS; 4468 } 4469 4470 4471 /** Opcode 0x0f 0x8e. */ 4472 FNIEMOP_DEF(iemOp_jle_Jv) 4473 { 4474 IEMOP_MNEMONIC(jle_Jv, "jle/jng Jv"); 4475 IEMOP_HLP_MIN_386(); 4476 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4477 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4478 { 4479 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4480 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4481 4482 IEM_MC_BEGIN(0, 0); 4483 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 4484 IEM_MC_REL_JMP_S16(i16Imm); 4485 } IEM_MC_ELSE() { 4486 IEM_MC_ADVANCE_RIP(); 4487 } IEM_MC_ENDIF(); 4488 IEM_MC_END(); 4489 } 4490 else 4491 { 4492 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4494 4495 IEM_MC_BEGIN(0, 0); 4496 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 4497 IEM_MC_REL_JMP_S32(i32Imm); 4498 } IEM_MC_ELSE() { 4499 IEM_MC_ADVANCE_RIP(); 4500 } IEM_MC_ENDIF(); 4501 IEM_MC_END(); 4502 } 4503 return VINF_SUCCESS; 4504 } 4505 4506 4507 /** Opcode 0x0f 0x8f. 
*/ 4508 FNIEMOP_DEF(iemOp_jnle_Jv) 4509 { 4510 IEMOP_MNEMONIC(jg_Jv, "jnle/jg Jv"); 4511 IEMOP_HLP_MIN_386(); 4512 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 4513 if (pVCpu->iem.s.enmEffOpSize == IEMMODE_16BIT) 4514 { 4515 int16_t i16Imm; IEM_OPCODE_GET_NEXT_S16(&i16Imm); 4516 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4517 4518 IEM_MC_BEGIN(0, 0); 4519 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 4520 IEM_MC_ADVANCE_RIP(); 4521 } IEM_MC_ELSE() { 4522 IEM_MC_REL_JMP_S16(i16Imm); 4523 } IEM_MC_ENDIF(); 4524 IEM_MC_END(); 4525 } 4526 else 4527 { 4528 int32_t i32Imm; IEM_OPCODE_GET_NEXT_S32(&i32Imm); 4529 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4530 4531 IEM_MC_BEGIN(0, 0); 4532 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 4533 IEM_MC_ADVANCE_RIP(); 4534 } IEM_MC_ELSE() { 4535 IEM_MC_REL_JMP_S32(i32Imm); 4536 } IEM_MC_ENDIF(); 4537 IEM_MC_END(); 4538 } 4539 return VINF_SUCCESS; 4540 } 4541 4542 4543 /** Opcode 0x0f 0x90. */ 4544 FNIEMOP_DEF(iemOp_seto_Eb) 4545 { 4546 IEMOP_MNEMONIC(seto_Eb, "seto Eb"); 4547 IEMOP_HLP_MIN_386(); 4548 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4549 4550 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4551 * any way. AMD says it's "unused", whatever that means. We're 4552 * ignoring for now. */ 4553 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4554 { 4555 /* register target */ 4556 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4557 IEM_MC_BEGIN(0, 0); 4558 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4559 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4560 } IEM_MC_ELSE() { 4561 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4562 } IEM_MC_ENDIF(); 4563 IEM_MC_ADVANCE_RIP(); 4564 IEM_MC_END(); 4565 } 4566 else 4567 { 4568 /* memory target */ 4569 IEM_MC_BEGIN(0, 1); 4570 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4571 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4572 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4573 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4574 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4575 } IEM_MC_ELSE() { 4576 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4577 } IEM_MC_ENDIF(); 4578 IEM_MC_ADVANCE_RIP(); 4579 IEM_MC_END(); 4580 } 4581 return VINF_SUCCESS; 4582 } 4583 4584 4585 /** Opcode 0x0f 0x91. */ 4586 FNIEMOP_DEF(iemOp_setno_Eb) 4587 { 4588 IEMOP_MNEMONIC(setno_Eb, "setno Eb"); 4589 IEMOP_HLP_MIN_386(); 4590 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4591 4592 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4593 * any way. AMD says it's "unused", whatever that means. We're 4594 * ignoring for now. 
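 *
 * (Shape note, ours: every 0x0f 0x90..0x9f setcc decoder in this range
 * evaluates its condition and stores a single byte, 1 or 0, to the Eb
 * operand, register or memory; a one-line sketch with an invented helper:
 *
 *     uint8_t bValue = sketchCondHolds(fEFlags) ? 1 : 0;
 *
 * only the EFLAGS predicate differs from decoder to decoder.)
 *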
*/ 4595 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4596 { 4597 /* register target */ 4598 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4599 IEM_MC_BEGIN(0, 0); 4600 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4601 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4602 } IEM_MC_ELSE() { 4603 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4604 } IEM_MC_ENDIF(); 4605 IEM_MC_ADVANCE_RIP(); 4606 IEM_MC_END(); 4607 } 4608 else 4609 { 4610 /* memory target */ 4611 IEM_MC_BEGIN(0, 1); 4612 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4613 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4614 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4615 IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF) { 4616 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4617 } IEM_MC_ELSE() { 4618 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4619 } IEM_MC_ENDIF(); 4620 IEM_MC_ADVANCE_RIP(); 4621 IEM_MC_END(); 4622 } 4623 return VINF_SUCCESS; 4624 } 4625 4626 4627 /** Opcode 0x0f 0x92. */ 4628 FNIEMOP_DEF(iemOp_setc_Eb) 4629 { 4630 IEMOP_MNEMONIC(setc_Eb, "setc Eb"); 4631 IEMOP_HLP_MIN_386(); 4632 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4633 4634 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4635 * any way. AMD says it's "unused", whatever that means. We're 4636 * ignoring for now. */ 4637 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4638 { 4639 /* register target */ 4640 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4641 IEM_MC_BEGIN(0, 0); 4642 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4643 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4644 } IEM_MC_ELSE() { 4645 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4646 } IEM_MC_ENDIF(); 4647 IEM_MC_ADVANCE_RIP(); 4648 IEM_MC_END(); 4649 } 4650 else 4651 { 4652 /* memory target */ 4653 IEM_MC_BEGIN(0, 1); 4654 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4655 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4656 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4657 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4658 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4659 } IEM_MC_ELSE() { 4660 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4661 } IEM_MC_ENDIF(); 4662 IEM_MC_ADVANCE_RIP(); 4663 IEM_MC_END(); 4664 } 4665 return VINF_SUCCESS; 4666 } 4667 4668 4669 /** Opcode 0x0f 0x93. */ 4670 FNIEMOP_DEF(iemOp_setnc_Eb) 4671 { 4672 IEMOP_MNEMONIC(setnc_Eb, "setnc Eb"); 4673 IEMOP_HLP_MIN_386(); 4674 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4675 4676 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4677 * any way. AMD says it's "unused", whatever that means. We're 4678 * ignoring for now. 
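 *
 * (To make that concrete, example encodings of ours: for setnc both
 * 0f 93 c0 and 0f 93 f8 encode mod=3 and rm=0, i.e. AL, differing only
 * in reg=0 versus reg=7; the testcase would check whether real CPUs
 * decode the two identically.)
 *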
*/ 4679 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4680 { 4681 /* register target */ 4682 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4683 IEM_MC_BEGIN(0, 0); 4684 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4685 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4686 } IEM_MC_ELSE() { 4687 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4688 } IEM_MC_ENDIF(); 4689 IEM_MC_ADVANCE_RIP(); 4690 IEM_MC_END(); 4691 } 4692 else 4693 { 4694 /* memory target */ 4695 IEM_MC_BEGIN(0, 1); 4696 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4697 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4698 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4699 IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF) { 4700 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4701 } IEM_MC_ELSE() { 4702 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4703 } IEM_MC_ENDIF(); 4704 IEM_MC_ADVANCE_RIP(); 4705 IEM_MC_END(); 4706 } 4707 return VINF_SUCCESS; 4708 } 4709 4710 4711 /** Opcode 0x0f 0x94. */ 4712 FNIEMOP_DEF(iemOp_sete_Eb) 4713 { 4714 IEMOP_MNEMONIC(sete_Eb, "sete Eb"); 4715 IEMOP_HLP_MIN_386(); 4716 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4717 4718 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4719 * any way. AMD says it's "unused", whatever that means. We're 4720 * ignoring for now. */ 4721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4722 { 4723 /* register target */ 4724 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4725 IEM_MC_BEGIN(0, 0); 4726 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4727 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4728 } IEM_MC_ELSE() { 4729 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4730 } IEM_MC_ENDIF(); 4731 IEM_MC_ADVANCE_RIP(); 4732 IEM_MC_END(); 4733 } 4734 else 4735 { 4736 /* memory target */ 4737 IEM_MC_BEGIN(0, 1); 4738 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4739 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4740 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4741 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4742 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4743 } IEM_MC_ELSE() { 4744 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4745 } IEM_MC_ENDIF(); 4746 IEM_MC_ADVANCE_RIP(); 4747 IEM_MC_END(); 4748 } 4749 return VINF_SUCCESS; 4750 } 4751 4752 4753 /** Opcode 0x0f 0x95. */ 4754 FNIEMOP_DEF(iemOp_setne_Eb) 4755 { 4756 IEMOP_MNEMONIC(setne_Eb, "setne Eb"); 4757 IEMOP_HLP_MIN_386(); 4758 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4759 4760 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4761 * any way. AMD says it's "unused", whatever that means. We're 4762 * ignoring for now. 
*/ 4763 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4764 { 4765 /* register target */ 4766 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4767 IEM_MC_BEGIN(0, 0); 4768 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4769 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4770 } IEM_MC_ELSE() { 4771 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4772 } IEM_MC_ENDIF(); 4773 IEM_MC_ADVANCE_RIP(); 4774 IEM_MC_END(); 4775 } 4776 else 4777 { 4778 /* memory target */ 4779 IEM_MC_BEGIN(0, 1); 4780 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4781 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4783 IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) { 4784 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4785 } IEM_MC_ELSE() { 4786 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4787 } IEM_MC_ENDIF(); 4788 IEM_MC_ADVANCE_RIP(); 4789 IEM_MC_END(); 4790 } 4791 return VINF_SUCCESS; 4792 } 4793 4794 4795 /** Opcode 0x0f 0x96. */ 4796 FNIEMOP_DEF(iemOp_setbe_Eb) 4797 { 4798 IEMOP_MNEMONIC(setbe_Eb, "setbe Eb"); 4799 IEMOP_HLP_MIN_386(); 4800 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4801 4802 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4803 * any way. AMD says it's "unused", whatever that means. We're 4804 * ignoring for now. */ 4805 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4806 { 4807 /* register target */ 4808 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4809 IEM_MC_BEGIN(0, 0); 4810 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4811 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4812 } IEM_MC_ELSE() { 4813 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4814 } IEM_MC_ENDIF(); 4815 IEM_MC_ADVANCE_RIP(); 4816 IEM_MC_END(); 4817 } 4818 else 4819 { 4820 /* memory target */ 4821 IEM_MC_BEGIN(0, 1); 4822 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4823 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4824 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4825 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4826 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4827 } IEM_MC_ELSE() { 4828 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4829 } IEM_MC_ENDIF(); 4830 IEM_MC_ADVANCE_RIP(); 4831 IEM_MC_END(); 4832 } 4833 return VINF_SUCCESS; 4834 } 4835 4836 4837 /** Opcode 0x0f 0x97. */ 4838 FNIEMOP_DEF(iemOp_setnbe_Eb) 4839 { 4840 IEMOP_MNEMONIC(setnbe_Eb, "setnbe Eb"); 4841 IEMOP_HLP_MIN_386(); 4842 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4843 4844 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4845 * any way. AMD says it's "unused", whatever that means. We're 4846 * ignoring for now. 
*/ 4847 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4848 { 4849 /* register target */ 4850 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4851 IEM_MC_BEGIN(0, 0); 4852 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4853 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4854 } IEM_MC_ELSE() { 4855 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4856 } IEM_MC_ENDIF(); 4857 IEM_MC_ADVANCE_RIP(); 4858 IEM_MC_END(); 4859 } 4860 else 4861 { 4862 /* memory target */ 4863 IEM_MC_BEGIN(0, 1); 4864 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4865 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4866 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4867 IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { 4868 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4869 } IEM_MC_ELSE() { 4870 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4871 } IEM_MC_ENDIF(); 4872 IEM_MC_ADVANCE_RIP(); 4873 IEM_MC_END(); 4874 } 4875 return VINF_SUCCESS; 4876 } 4877 4878 4879 /** Opcode 0x0f 0x98. */ 4880 FNIEMOP_DEF(iemOp_sets_Eb) 4881 { 4882 IEMOP_MNEMONIC(sets_Eb, "sets Eb"); 4883 IEMOP_HLP_MIN_386(); 4884 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4885 4886 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4887 * any way. AMD says it's "unused", whatever that means. We're 4888 * ignoring for now. */ 4889 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4890 { 4891 /* register target */ 4892 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4893 IEM_MC_BEGIN(0, 0); 4894 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4895 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4896 } IEM_MC_ELSE() { 4897 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4898 } IEM_MC_ENDIF(); 4899 IEM_MC_ADVANCE_RIP(); 4900 IEM_MC_END(); 4901 } 4902 else 4903 { 4904 /* memory target */ 4905 IEM_MC_BEGIN(0, 1); 4906 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4907 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4908 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4909 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4910 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4911 } IEM_MC_ELSE() { 4912 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4913 } IEM_MC_ENDIF(); 4914 IEM_MC_ADVANCE_RIP(); 4915 IEM_MC_END(); 4916 } 4917 return VINF_SUCCESS; 4918 } 4919 4920 4921 /** Opcode 0x0f 0x99. */ 4922 FNIEMOP_DEF(iemOp_setns_Eb) 4923 { 4924 IEMOP_MNEMONIC(setns_Eb, "setns Eb"); 4925 IEMOP_HLP_MIN_386(); 4926 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4927 4928 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4929 * any way. AMD says it's "unused", whatever that means. We're 4930 * ignoring for now. 
*/ 4931 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4932 { 4933 /* register target */ 4934 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4935 IEM_MC_BEGIN(0, 0); 4936 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4937 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4938 } IEM_MC_ELSE() { 4939 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4940 } IEM_MC_ENDIF(); 4941 IEM_MC_ADVANCE_RIP(); 4942 IEM_MC_END(); 4943 } 4944 else 4945 { 4946 /* memory target */ 4947 IEM_MC_BEGIN(0, 1); 4948 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4949 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4950 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4951 IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF) { 4952 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4953 } IEM_MC_ELSE() { 4954 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4955 } IEM_MC_ENDIF(); 4956 IEM_MC_ADVANCE_RIP(); 4957 IEM_MC_END(); 4958 } 4959 return VINF_SUCCESS; 4960 } 4961 4962 4963 /** Opcode 0x0f 0x9a. */ 4964 FNIEMOP_DEF(iemOp_setp_Eb) 4965 { 4966 IEMOP_MNEMONIC(setp_Eb, "setp Eb"); 4967 IEMOP_HLP_MIN_386(); 4968 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 4969 4970 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 4971 * any way. AMD says it's "unused", whatever that means. We're 4972 * ignoring for now. */ 4973 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 4974 { 4975 /* register target */ 4976 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4977 IEM_MC_BEGIN(0, 0); 4978 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4979 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 4980 } IEM_MC_ELSE() { 4981 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 4982 } IEM_MC_ENDIF(); 4983 IEM_MC_ADVANCE_RIP(); 4984 IEM_MC_END(); 4985 } 4986 else 4987 { 4988 /* memory target */ 4989 IEM_MC_BEGIN(0, 1); 4990 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 4991 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 4992 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 4993 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 4994 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 4995 } IEM_MC_ELSE() { 4996 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 4997 } IEM_MC_ENDIF(); 4998 IEM_MC_ADVANCE_RIP(); 4999 IEM_MC_END(); 5000 } 5001 return VINF_SUCCESS; 5002 } 5003 5004 5005 /** Opcode 0x0f 0x9b. */ 5006 FNIEMOP_DEF(iemOp_setnp_Eb) 5007 { 5008 IEMOP_MNEMONIC(setnp_Eb, "setnp Eb"); 5009 IEMOP_HLP_MIN_386(); 5010 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5011 5012 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 5013 * any way. AMD says it's "unused", whatever that means. We're 5014 * ignoring for now. 
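 *
 * (Flag note, ours: PF reflects only the low byte of a result and is set
 * when that byte has an even number of 1 bits.  Worked example: a result
 * byte of 0x03 has two bits set, so PF=1, setp stores 1, setnp stores 0.)
 *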
*/ 5015 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5016 { 5017 /* register target */ 5018 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5019 IEM_MC_BEGIN(0, 0); 5020 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 5021 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 5022 } IEM_MC_ELSE() { 5023 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 5024 } IEM_MC_ENDIF(); 5025 IEM_MC_ADVANCE_RIP(); 5026 IEM_MC_END(); 5027 } 5028 else 5029 { 5030 /* memory target */ 5031 IEM_MC_BEGIN(0, 1); 5032 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5033 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5034 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5035 IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF) { 5036 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5037 } IEM_MC_ELSE() { 5038 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 5039 } IEM_MC_ENDIF(); 5040 IEM_MC_ADVANCE_RIP(); 5041 IEM_MC_END(); 5042 } 5043 return VINF_SUCCESS; 5044 } 5045 5046 5047 /** Opcode 0x0f 0x9c. */ 5048 FNIEMOP_DEF(iemOp_setl_Eb) 5049 { 5050 IEMOP_MNEMONIC(setl_Eb, "setl Eb"); 5051 IEMOP_HLP_MIN_386(); 5052 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5053 5054 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 5055 * any way. AMD says it's "unused", whatever that means. We're 5056 * ignoring for now. */ 5057 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5058 { 5059 /* register target */ 5060 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5061 IEM_MC_BEGIN(0, 0); 5062 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 5063 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 5064 } IEM_MC_ELSE() { 5065 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 5066 } IEM_MC_ENDIF(); 5067 IEM_MC_ADVANCE_RIP(); 5068 IEM_MC_END(); 5069 } 5070 else 5071 { 5072 /* memory target */ 5073 IEM_MC_BEGIN(0, 1); 5074 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5075 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5076 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5077 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 5078 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 5079 } IEM_MC_ELSE() { 5080 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5081 } IEM_MC_ENDIF(); 5082 IEM_MC_ADVANCE_RIP(); 5083 IEM_MC_END(); 5084 } 5085 return VINF_SUCCESS; 5086 } 5087 5088 5089 /** Opcode 0x0f 0x9d. */ 5090 FNIEMOP_DEF(iemOp_setnl_Eb) 5091 { 5092 IEMOP_MNEMONIC(setnl_Eb, "setnl Eb"); 5093 IEMOP_HLP_MIN_386(); 5094 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5095 5096 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 5097 * any way. AMD says it's "unused", whatever that means. We're 5098 * ignoring for now. 
*/ 5099 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5100 { 5101 /* register target */ 5102 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5103 IEM_MC_BEGIN(0, 0); 5104 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 5105 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 5106 } IEM_MC_ELSE() { 5107 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 5108 } IEM_MC_ENDIF(); 5109 IEM_MC_ADVANCE_RIP(); 5110 IEM_MC_END(); 5111 } 5112 else 5113 { 5114 /* memory target */ 5115 IEM_MC_BEGIN(0, 1); 5116 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5117 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5118 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5119 IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF) { 5120 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5121 } IEM_MC_ELSE() { 5122 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 5123 } IEM_MC_ENDIF(); 5124 IEM_MC_ADVANCE_RIP(); 5125 IEM_MC_END(); 5126 } 5127 return VINF_SUCCESS; 5128 } 5129 5130 5131 /** Opcode 0x0f 0x9e. */ 5132 FNIEMOP_DEF(iemOp_setle_Eb) 5133 { 5134 IEMOP_MNEMONIC(setle_Eb, "setle Eb"); 5135 IEMOP_HLP_MIN_386(); 5136 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5137 5138 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 5139 * any way. AMD says it's "unused", whatever that means. We're 5140 * ignoring for now. */ 5141 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5142 { 5143 /* register target */ 5144 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5145 IEM_MC_BEGIN(0, 0); 5146 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 5147 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 5148 } IEM_MC_ELSE() { 5149 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 5150 } IEM_MC_ENDIF(); 5151 IEM_MC_ADVANCE_RIP(); 5152 IEM_MC_END(); 5153 } 5154 else 5155 { 5156 /* memory target */ 5157 IEM_MC_BEGIN(0, 1); 5158 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5159 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5160 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5161 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 5162 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 5163 } IEM_MC_ELSE() { 5164 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5165 } IEM_MC_ENDIF(); 5166 IEM_MC_ADVANCE_RIP(); 5167 IEM_MC_END(); 5168 } 5169 return VINF_SUCCESS; 5170 } 5171 5172 5173 /** Opcode 0x0f 0x9f. */ 5174 FNIEMOP_DEF(iemOp_setnle_Eb) 5175 { 5176 IEMOP_MNEMONIC(setnle_Eb, "setnle Eb"); 5177 IEMOP_HLP_MIN_386(); 5178 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5179 5180 /** @todo Encoding test: Check if the 'reg' field is ignored or decoded in 5181 * any way. AMD says it's "unused", whatever that means. We're 5182 * ignoring for now. 
*/ 5183 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5184 { 5185 /* register target */ 5186 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5187 IEM_MC_BEGIN(0, 0); 5188 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 5189 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 0); 5190 } IEM_MC_ELSE() { 5191 IEM_MC_STORE_GREG_U8_CONST((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, 1); 5192 } IEM_MC_ENDIF(); 5193 IEM_MC_ADVANCE_RIP(); 5194 IEM_MC_END(); 5195 } 5196 else 5197 { 5198 /* memory target */ 5199 IEM_MC_BEGIN(0, 1); 5200 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5201 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5202 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5203 IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF) { 5204 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5205 } IEM_MC_ELSE() { 5206 IEM_MC_STORE_MEM_U8_CONST(pVCpu->iem.s.iEffSeg, GCPtrEffDst, 1); 5207 } IEM_MC_ENDIF(); 5208 IEM_MC_ADVANCE_RIP(); 5209 IEM_MC_END(); 5210 } 5211 return VINF_SUCCESS; 5212 } 5213 5214 5215 /** 5216 * Common 'push segment-register' helper. 5217 */ 5218 FNIEMOP_DEF_1(iemOpCommonPushSReg, uint8_t, iReg) 5219 { 5220 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5221 Assert(iReg < X86_SREG_FS || pVCpu->iem.s.enmCpuMode != IEMMODE_64BIT); 5222 IEMOP_HLP_DEFAULT_64BIT_OP_SIZE(); 5223 5224 switch (pVCpu->iem.s.enmEffOpSize) 5225 { 5226 case IEMMODE_16BIT: 5227 IEM_MC_BEGIN(0, 1); 5228 IEM_MC_LOCAL(uint16_t, u16Value); 5229 IEM_MC_FETCH_SREG_U16(u16Value, iReg); 5230 IEM_MC_PUSH_U16(u16Value); 5231 IEM_MC_ADVANCE_RIP(); 5232 IEM_MC_END(); 5233 break; 5234 5235 case IEMMODE_32BIT: 5236 IEM_MC_BEGIN(0, 1); 5237 IEM_MC_LOCAL(uint32_t, u32Value); 5238 IEM_MC_FETCH_SREG_ZX_U32(u32Value, iReg); 5239 IEM_MC_PUSH_U32_SREG(u32Value); 5240 IEM_MC_ADVANCE_RIP(); 5241 IEM_MC_END(); 5242 break; 5243 5244 case IEMMODE_64BIT: 5245 IEM_MC_BEGIN(0, 1); 5246 IEM_MC_LOCAL(uint64_t, u64Value); 5247 IEM_MC_FETCH_SREG_ZX_U64(u64Value, iReg); 5248 IEM_MC_PUSH_U64(u64Value); 5249 IEM_MC_ADVANCE_RIP(); 5250 IEM_MC_END(); 5251 break; 5252 } 5253 5254 return VINF_SUCCESS; 5255 } 5256 5257 5258 /** Opcode 0x0f 0xa0. */ 5259 FNIEMOP_DEF(iemOp_push_fs) 5260 { 5261 IEMOP_MNEMONIC(push_fs, "push fs"); 5262 IEMOP_HLP_MIN_386(); 5263 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5264 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_FS); 5265 } 5266 5267 5268 /** Opcode 0x0f 0xa1. */ 5269 FNIEMOP_DEF(iemOp_pop_fs) 5270 { 5271 IEMOP_MNEMONIC(pop_fs, "pop fs"); 5272 IEMOP_HLP_MIN_386(); 5273 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5274 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_FS, pVCpu->iem.s.enmEffOpSize); 5275 } 5276 5277 5278 /** Opcode 0x0f 0xa2. */ 5279 FNIEMOP_DEF(iemOp_cpuid) 5280 { 5281 IEMOP_MNEMONIC(cpuid, "cpuid"); 5282 IEMOP_HLP_MIN_486(); /* not all 486es. */ 5283 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5284 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_cpuid); 5285 } 5286 5287 5288 /** 5289 * Common worker for iemOp_bt_Ev_Gv, iemOp_btc_Ev_Gv, iemOp_btr_Ev_Gv and 5290 * iemOp_bts_Ev_Gv. 5291 */ 5292 FNIEMOP_DEF_1(iemOpCommonBit_Ev_Gv, PCIEMOPBINSIZES, pImpl) 5293 { 5294 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5295 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 5296 5297 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5298 { 5299 /* register destination. 
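 *
 * Editorial note: for a register destination the bit offset simply wraps
 * modulo the operand width, which is what the IEM_MC_AND_LOCAL_U16/U32/U64
 * statements below implement. Hedged one-liner (illustrative only):
 *
 *     uBitNo = uSrc & (cOpBits - 1);   // cOpBits = 16, 32 or 64
 *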
*/ 5300 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5301 switch (pVCpu->iem.s.enmEffOpSize) 5302 { 5303 case IEMMODE_16BIT: 5304 IEM_MC_BEGIN(3, 0); 5305 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5306 IEM_MC_ARG(uint16_t, u16Src, 1); 5307 IEM_MC_ARG(uint32_t *, pEFlags, 2); 5308 5309 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5310 IEM_MC_AND_LOCAL_U16(u16Src, 0xf); 5311 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5312 IEM_MC_REF_EFLAGS(pEFlags); 5313 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 5314 5315 IEM_MC_ADVANCE_RIP(); 5316 IEM_MC_END(); 5317 return VINF_SUCCESS; 5318 5319 case IEMMODE_32BIT: 5320 IEM_MC_BEGIN(3, 0); 5321 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 5322 IEM_MC_ARG(uint32_t, u32Src, 1); 5323 IEM_MC_ARG(uint32_t *, pEFlags, 2); 5324 5325 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5326 IEM_MC_AND_LOCAL_U32(u32Src, 0x1f); 5327 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5328 IEM_MC_REF_EFLAGS(pEFlags); 5329 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 5330 5331 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 5332 IEM_MC_ADVANCE_RIP(); 5333 IEM_MC_END(); 5334 return VINF_SUCCESS; 5335 5336 case IEMMODE_64BIT: 5337 IEM_MC_BEGIN(3, 0); 5338 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5339 IEM_MC_ARG(uint64_t, u64Src, 1); 5340 IEM_MC_ARG(uint32_t *, pEFlags, 2); 5341 5342 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5343 IEM_MC_AND_LOCAL_U64(u64Src, 0x3f); 5344 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5345 IEM_MC_REF_EFLAGS(pEFlags); 5346 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 5347 5348 IEM_MC_ADVANCE_RIP(); 5349 IEM_MC_END(); 5350 return VINF_SUCCESS; 5351 5352 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5353 } 5354 } 5355 else 5356 { 5357 /* memory destination. */ 5358 5359 uint32_t fAccess; 5360 if (pImpl->pfnLockedU16) 5361 fAccess = IEM_ACCESS_DATA_RW; 5362 else /* BT */ 5363 fAccess = IEM_ACCESS_DATA_R; 5364 5365 /** @todo test negative bit offsets! 
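 *
 * Editorial note on the adjustment below: the Gv operand is treated as a
 * signed bit offset; the arithmetic right shift extracts a (possibly
 * negative) element index and the left shift scales it back to bytes.
 * Hedged sketch of the 16-bit case (plain C, illustrative only):
 *
 *     int16_t  const i16Adj   = ((int16_t)u16Src >> 4) << 1; // word index * 2 bytes
 *     GCPtrEffDst            += i16Adj;                      // may step backwards
 *     uint16_t const u16BitNo = u16Src & 0x0f;               // bit within that word
 *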
*/ 5366 switch (pVCpu->iem.s.enmEffOpSize) 5367 { 5368 case IEMMODE_16BIT: 5369 IEM_MC_BEGIN(3, 2); 5370 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5371 IEM_MC_ARG(uint16_t, u16Src, 1); 5372 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 5373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5374 IEM_MC_LOCAL(int16_t, i16AddrAdj); 5375 5376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5377 if (pImpl->pfnLockedU16) 5378 IEMOP_HLP_DONE_DECODING(); 5379 else 5380 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5381 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5382 IEM_MC_ASSIGN(i16AddrAdj, u16Src); 5383 IEM_MC_AND_ARG_U16(u16Src, 0x0f); 5384 IEM_MC_SAR_LOCAL_S16(i16AddrAdj, 4); 5385 IEM_MC_SHL_LOCAL_S16(i16AddrAdj, 1); 5386 IEM_MC_ADD_LOCAL_S16_TO_EFF_ADDR(GCPtrEffDst, i16AddrAdj); 5387 IEM_MC_FETCH_EFLAGS(EFlags); 5388 5389 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5390 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 5391 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 5392 else 5393 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 5394 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess); 5395 5396 IEM_MC_COMMIT_EFLAGS(EFlags); 5397 IEM_MC_ADVANCE_RIP(); 5398 IEM_MC_END(); 5399 return VINF_SUCCESS; 5400 5401 case IEMMODE_32BIT: 5402 IEM_MC_BEGIN(3, 2); 5403 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 5404 IEM_MC_ARG(uint32_t, u32Src, 1); 5405 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 5406 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5407 IEM_MC_LOCAL(int32_t, i32AddrAdj); 5408 5409 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5410 if (pImpl->pfnLockedU16) 5411 IEMOP_HLP_DONE_DECODING(); 5412 else 5413 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5414 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5415 IEM_MC_ASSIGN(i32AddrAdj, u32Src); 5416 IEM_MC_AND_ARG_U32(u32Src, 0x1f); 5417 IEM_MC_SAR_LOCAL_S32(i32AddrAdj, 5); 5418 IEM_MC_SHL_LOCAL_S32(i32AddrAdj, 2); 5419 IEM_MC_ADD_LOCAL_S32_TO_EFF_ADDR(GCPtrEffDst, i32AddrAdj); 5420 IEM_MC_FETCH_EFLAGS(EFlags); 5421 5422 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5423 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 5424 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 5425 else 5426 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 5427 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess); 5428 5429 IEM_MC_COMMIT_EFLAGS(EFlags); 5430 IEM_MC_ADVANCE_RIP(); 5431 IEM_MC_END(); 5432 return VINF_SUCCESS; 5433 5434 case IEMMODE_64BIT: 5435 IEM_MC_BEGIN(3, 2); 5436 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5437 IEM_MC_ARG(uint64_t, u64Src, 1); 5438 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 5439 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5440 IEM_MC_LOCAL(int64_t, i64AddrAdj); 5441 5442 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5443 if (pImpl->pfnLockedU16) 5444 IEMOP_HLP_DONE_DECODING(); 5445 else 5446 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5447 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5448 IEM_MC_ASSIGN(i64AddrAdj, u64Src); 5449 IEM_MC_AND_ARG_U64(u64Src, 0x3f); 5450 IEM_MC_SAR_LOCAL_S64(i64AddrAdj, 6); 5451 IEM_MC_SHL_LOCAL_S64(i64AddrAdj, 3); 5452 IEM_MC_ADD_LOCAL_S64_TO_EFF_ADDR(GCPtrEffDst, i64AddrAdj); 5453 IEM_MC_FETCH_EFLAGS(EFlags); 5454 5455 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5456 if (!(pVCpu->iem.s.fPrefixes & 
IEM_OP_PRF_LOCK)) 5457 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 5458 else 5459 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 5460 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess); 5461 5462 IEM_MC_COMMIT_EFLAGS(EFlags); 5463 IEM_MC_ADVANCE_RIP(); 5464 IEM_MC_END(); 5465 return VINF_SUCCESS; 5466 5467 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5468 } 5469 } 5470 } 5471 5472 5473 /** Opcode 0x0f 0xa3. */ 5474 FNIEMOP_DEF(iemOp_bt_Ev_Gv) 5475 { 5476 IEMOP_MNEMONIC(bt_Ev_Gv, "bt Ev,Gv"); 5477 IEMOP_HLP_MIN_386(); 5478 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bt); 5479 } 5480 5481 5482 /** 5483 * Common worker for iemOp_shrd_Ev_Gv_Ib and iemOp_shld_Ev_Gv_Ib. 5484 */ 5485 FNIEMOP_DEF_1(iemOpCommonShldShrd_Ib, PCIEMOPSHIFTDBLSIZES, pImpl) 5486 { 5487 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5488 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); 5489 5490 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5491 { 5492 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); 5493 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5494 5495 switch (pVCpu->iem.s.enmEffOpSize) 5496 { 5497 case IEMMODE_16BIT: 5498 IEM_MC_BEGIN(4, 0); 5499 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5500 IEM_MC_ARG(uint16_t, u16Src, 1); 5501 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); 5502 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5503 5504 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5505 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5506 IEM_MC_REF_EFLAGS(pEFlags); 5507 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); 5508 5509 IEM_MC_ADVANCE_RIP(); 5510 IEM_MC_END(); 5511 return VINF_SUCCESS; 5512 5513 case IEMMODE_32BIT: 5514 IEM_MC_BEGIN(4, 0); 5515 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 5516 IEM_MC_ARG(uint32_t, u32Src, 1); 5517 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); 5518 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5519 5520 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5521 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5522 IEM_MC_REF_EFLAGS(pEFlags); 5523 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); 5524 5525 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 5526 IEM_MC_ADVANCE_RIP(); 5527 IEM_MC_END(); 5528 return VINF_SUCCESS; 5529 5530 case IEMMODE_64BIT: 5531 IEM_MC_BEGIN(4, 0); 5532 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5533 IEM_MC_ARG(uint64_t, u64Src, 1); 5534 IEM_MC_ARG_CONST(uint8_t, cShiftArg, /*=*/cShift, 2); 5535 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5536 5537 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5538 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5539 IEM_MC_REF_EFLAGS(pEFlags); 5540 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); 5541 5542 IEM_MC_ADVANCE_RIP(); 5543 IEM_MC_END(); 5544 return VINF_SUCCESS; 5545 5546 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5547 } 5548 } 5549 else 5550 { 5551 switch (pVCpu->iem.s.enmEffOpSize) 5552 { 5553 case IEMMODE_16BIT: 5554 IEM_MC_BEGIN(4, 2); 5555 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5556 IEM_MC_ARG(uint16_t, u16Src, 1); 5557 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5558 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5559 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5560 5561 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 5562 
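/* Editorial note (hedged): the trailing 1 passed to IEM_MC_CALC_RM_EFF_ADDR
   above announces that one immediate byte -- the Ib shift count fetched just
   below -- still follows the ModRM/displacement bytes; as far as I can tell
   this matters for RIP-relative addressing in 64-bit mode, where disp32 is
   relative to the end of the whole instruction. Illustrative sketch only:

       GCPtrEff = uRipOfNextInstr + (int64_t)(int32_t)u32Disp; // uRipOfNextInstr already counts the Ib byte
*/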
uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); 5563 IEM_MC_ASSIGN(cShiftArg, cShift); 5564 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5565 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5566 IEM_MC_FETCH_EFLAGS(EFlags); 5567 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5568 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); 5569 5570 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 5571 IEM_MC_COMMIT_EFLAGS(EFlags); 5572 IEM_MC_ADVANCE_RIP(); 5573 IEM_MC_END(); 5574 return VINF_SUCCESS; 5575 5576 case IEMMODE_32BIT: 5577 IEM_MC_BEGIN(4, 2); 5578 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 5579 IEM_MC_ARG(uint32_t, u32Src, 1); 5580 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5581 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5582 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5583 5584 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 5585 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); 5586 IEM_MC_ASSIGN(cShiftArg, cShift); 5587 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5588 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5589 IEM_MC_FETCH_EFLAGS(EFlags); 5590 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5591 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); 5592 5593 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 5594 IEM_MC_COMMIT_EFLAGS(EFlags); 5595 IEM_MC_ADVANCE_RIP(); 5596 IEM_MC_END(); 5597 return VINF_SUCCESS; 5598 5599 case IEMMODE_64BIT: 5600 IEM_MC_BEGIN(4, 2); 5601 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5602 IEM_MC_ARG(uint64_t, u64Src, 1); 5603 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5604 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5605 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5606 5607 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 5608 uint8_t cShift; IEM_OPCODE_GET_NEXT_U8(&cShift); 5609 IEM_MC_ASSIGN(cShiftArg, cShift); 5610 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5611 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5612 IEM_MC_FETCH_EFLAGS(EFlags); 5613 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5614 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); 5615 5616 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 5617 IEM_MC_COMMIT_EFLAGS(EFlags); 5618 IEM_MC_ADVANCE_RIP(); 5619 IEM_MC_END(); 5620 return VINF_SUCCESS; 5621 5622 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5623 } 5624 } 5625 } 5626 5627 5628 /** 5629 * Common worker for iemOp_shrd_Ev_Gv_CL and iemOp_shld_Ev_Gv_CL. 
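 *
 * For reference, the double-precision shift handed to the assembly workers
 * computes, for 0 < cShift < cOpBits (hedged plain-C sketch, illustrative
 * only):
 *
 *     uDst = (uDst << cShift) | (uSrc >> (cOpBits - cShift));   // SHLD
 *     uDst = (uDst >> cShift) | (uSrc << (cOpBits - cShift));   // SHRD
 *
 * The CL count is assumed here to be masked with 31 (63 for 64-bit operands)
 * by the AIMPL workers, like the plain shift instructions.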
5630 */ 5631 FNIEMOP_DEF_1(iemOpCommonShldShrd_CL, PCIEMOPSHIFTDBLSIZES, pImpl) 5632 { 5633 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 5634 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_OF); 5635 5636 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 5637 { 5638 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5639 5640 switch (pVCpu->iem.s.enmEffOpSize) 5641 { 5642 case IEMMODE_16BIT: 5643 IEM_MC_BEGIN(4, 0); 5644 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5645 IEM_MC_ARG(uint16_t, u16Src, 1); 5646 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5647 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5648 5649 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5650 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5651 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5652 IEM_MC_REF_EFLAGS(pEFlags); 5653 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); 5654 5655 IEM_MC_ADVANCE_RIP(); 5656 IEM_MC_END(); 5657 return VINF_SUCCESS; 5658 5659 case IEMMODE_32BIT: 5660 IEM_MC_BEGIN(4, 0); 5661 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 5662 IEM_MC_ARG(uint32_t, u32Src, 1); 5663 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5664 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5665 5666 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5667 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5668 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5669 IEM_MC_REF_EFLAGS(pEFlags); 5670 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); 5671 5672 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 5673 IEM_MC_ADVANCE_RIP(); 5674 IEM_MC_END(); 5675 return VINF_SUCCESS; 5676 5677 case IEMMODE_64BIT: 5678 IEM_MC_BEGIN(4, 0); 5679 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5680 IEM_MC_ARG(uint64_t, u64Src, 1); 5681 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5682 IEM_MC_ARG(uint32_t *, pEFlags, 3); 5683 5684 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5685 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 5686 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5687 IEM_MC_REF_EFLAGS(pEFlags); 5688 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); 5689 5690 IEM_MC_ADVANCE_RIP(); 5691 IEM_MC_END(); 5692 return VINF_SUCCESS; 5693 5694 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5695 } 5696 } 5697 else 5698 { 5699 switch (pVCpu->iem.s.enmEffOpSize) 5700 { 5701 case IEMMODE_16BIT: 5702 IEM_MC_BEGIN(4, 2); 5703 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 5704 IEM_MC_ARG(uint16_t, u16Src, 1); 5705 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5706 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5707 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5708 5709 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5710 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5711 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5712 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5713 IEM_MC_FETCH_EFLAGS(EFlags); 5714 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5715 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU16, pu16Dst, u16Src, cShiftArg, pEFlags); 5716 5717 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 5718 IEM_MC_COMMIT_EFLAGS(EFlags); 5719 IEM_MC_ADVANCE_RIP(); 5720 IEM_MC_END(); 5721 return VINF_SUCCESS; 5722 5723 case IEMMODE_32BIT: 5724 IEM_MC_BEGIN(4, 2); 5725 IEM_MC_ARG(uint32_t *, 
pu32Dst, 0); 5726 IEM_MC_ARG(uint32_t, u32Src, 1); 5727 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5728 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5729 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5730 5731 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5732 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5733 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5734 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5735 IEM_MC_FETCH_EFLAGS(EFlags); 5736 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5737 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU32, pu32Dst, u32Src, cShiftArg, pEFlags); 5738 5739 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 5740 IEM_MC_COMMIT_EFLAGS(EFlags); 5741 IEM_MC_ADVANCE_RIP(); 5742 IEM_MC_END(); 5743 return VINF_SUCCESS; 5744 5745 case IEMMODE_64BIT: 5746 IEM_MC_BEGIN(4, 2); 5747 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 5748 IEM_MC_ARG(uint64_t, u64Src, 1); 5749 IEM_MC_ARG(uint8_t, cShiftArg, 2); 5750 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 5751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 5752 5753 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 5754 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5755 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 5756 IEM_MC_FETCH_GREG_U8(cShiftArg, X86_GREG_xCX); 5757 IEM_MC_FETCH_EFLAGS(EFlags); 5758 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 5759 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnNormalU64, pu64Dst, u64Src, cShiftArg, pEFlags); 5760 5761 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 5762 IEM_MC_COMMIT_EFLAGS(EFlags); 5763 IEM_MC_ADVANCE_RIP(); 5764 IEM_MC_END(); 5765 return VINF_SUCCESS; 5766 5767 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 5768 } 5769 } 5770 } 5771 5772 5773 5774 /** Opcode 0x0f 0xa4. */ 5775 FNIEMOP_DEF(iemOp_shld_Ev_Gv_Ib) 5776 { 5777 IEMOP_MNEMONIC(shld_Ev_Gv_Ib, "shld Ev,Gv,Ib"); 5778 IEMOP_HLP_MIN_386(); 5779 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shld); 5780 } 5781 5782 5783 /** Opcode 0x0f 0xa5. */ 5784 FNIEMOP_DEF(iemOp_shld_Ev_Gv_CL) 5785 { 5786 IEMOP_MNEMONIC(shld_Ev_Gv_CL, "shld Ev,Gv,CL"); 5787 IEMOP_HLP_MIN_386(); 5788 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shld); 5789 } 5790 5791 5792 /** Opcode 0x0f 0xa8. */ 5793 FNIEMOP_DEF(iemOp_push_gs) 5794 { 5795 IEMOP_MNEMONIC(push_gs, "push gs"); 5796 IEMOP_HLP_MIN_386(); 5797 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5798 return FNIEMOP_CALL_1(iemOpCommonPushSReg, X86_SREG_GS); 5799 } 5800 5801 5802 /** Opcode 0x0f 0xa9. */ 5803 FNIEMOP_DEF(iemOp_pop_gs) 5804 { 5805 IEMOP_MNEMONIC(pop_gs, "pop gs"); 5806 IEMOP_HLP_MIN_386(); 5807 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5808 return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_pop_Sreg, X86_SREG_GS, pVCpu->iem.s.enmEffOpSize); 5809 } 5810 5811 5812 /** Opcode 0x0f 0xaa. */ 5813 FNIEMOP_STUB(iemOp_rsm); 5814 //IEMOP_HLP_MIN_386(); 5815 5816 5817 /** Opcode 0x0f 0xab. */ 5818 FNIEMOP_DEF(iemOp_bts_Ev_Gv) 5819 { 5820 IEMOP_MNEMONIC(bts_Ev_Gv, "bts Ev,Gv"); 5821 IEMOP_HLP_MIN_386(); 5822 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_bts); 5823 } 5824 5825 5826 /** Opcode 0x0f 0xac. */ 5827 FNIEMOP_DEF(iemOp_shrd_Ev_Gv_Ib) 5828 { 5829 IEMOP_MNEMONIC(shrd_Ev_Gv_Ib, "shrd Ev,Gv,Ib"); 5830 IEMOP_HLP_MIN_386(); 5831 return FNIEMOP_CALL_1(iemOpCommonShldShrd_Ib, &g_iemAImpl_shrd); 5832 } 5833 5834 5835 /** Opcode 0x0f 0xad. 
*/ 5836 FNIEMOP_DEF(iemOp_shrd_Ev_Gv_CL) 5837 { 5838 IEMOP_MNEMONIC(shrd_Ev_Gv_CL, "shrd Ev,Gv,CL"); 5839 IEMOP_HLP_MIN_386(); 5840 return FNIEMOP_CALL_1(iemOpCommonShldShrd_CL, &g_iemAImpl_shrd); 5841 } 5842 5843 5844 /** Opcode 0x0f 0xae mem/0. */ 5845 FNIEMOP_DEF_1(iemOp_Grp15_fxsave, uint8_t, bRm) 5846 { 5847 IEMOP_MNEMONIC(fxsave, "fxsave m512"); 5848 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor) 5849 return IEMOP_RAISE_INVALID_OPCODE(); 5850 5851 IEM_MC_BEGIN(3, 1); 5852 IEM_MC_ARG(uint8_t, iEffSeg, 0); 5853 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 5854 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2); 5855 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 5856 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5857 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 5858 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 5859 IEM_MC_CALL_CIMPL_3(iemCImpl_fxsave, iEffSeg, GCPtrEff, enmEffOpSize); 5860 IEM_MC_END(); 5861 return VINF_SUCCESS; 5862 } 5863 5864 5865 /** Opcode 0x0f 0xae mem/1. */ 5866 FNIEMOP_DEF_1(iemOp_Grp15_fxrstor, uint8_t, bRm) 5867 { 5868 IEMOP_MNEMONIC(fxrstor, "fxrstor m512"); 5869 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fFxSaveRstor) 5870 return IEMOP_RAISE_INVALID_OPCODE(); 5871 5872 IEM_MC_BEGIN(3, 1); 5873 IEM_MC_ARG(uint8_t, iEffSeg, 0); 5874 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 5875 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2); 5876 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 5877 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5878 IEM_MC_ACTUALIZE_FPU_STATE_FOR_CHANGE(); 5879 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 5880 IEM_MC_CALL_CIMPL_3(iemCImpl_fxrstor, iEffSeg, GCPtrEff, enmEffOpSize); 5881 IEM_MC_END(); 5882 return VINF_SUCCESS; 5883 } 5884 5885 5886 /** 5887 * @opmaps grp15 5888 * @opcode !11/2 5889 * @oppfx none 5890 * @opcpuid sse 5891 * @opgroup og_sse_mxcsrsm 5892 * @opxcpttype 5 5893 * @optest op1=0 -> mxcsr=0 5894 * @optest op1=0x2083 -> mxcsr=0x2083 5895 * @optest op1=0xfffffffe -> value.xcpt=0xd 5896 * @optest op1=0x2083 cr0|=ts -> value.xcpt=0x7 5897 * @optest op1=0x2083 cr0|=em -> value.xcpt=0x6 5898 * @optest op1=0x2083 cr0|=mp -> mxcsr=0x2083 5899 * @optest op1=0x2083 cr4&~=osfxsr -> value.xcpt=0x6 5900 * @optest op1=0x2083 cr0|=ts,em -> value.xcpt=0x6 5901 * @optest op1=0x2083 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6 5902 * @optest op1=0x2083 cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6 5903 * @optest op1=0x2083 cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6 5904 */ 5905 FNIEMOP_DEF_1(iemOp_Grp15_ldmxcsr, uint8_t, bRm) 5906 { 5907 IEMOP_MNEMONIC1(M_MEM, LDMXCSR, ldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 5908 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) 5909 return IEMOP_RAISE_INVALID_OPCODE(); 5910 5911 IEM_MC_BEGIN(2, 0); 5912 IEM_MC_ARG(uint8_t, iEffSeg, 0); 5913 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 5914 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 5915 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5916 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 5917 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 5918 IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff); 5919 IEM_MC_END(); 5920 return VINF_SUCCESS; 5921 } 5922 5923 5924 /** 5925 * @opmaps grp15 5926 * @opcode !11/3 5927 * @oppfx none 5928 * @opcpuid sse 5929 * @opgroup og_sse_mxcsrsm 5930 * @opxcpttype 5 5931 * @optest mxcsr=0 -> op1=0 5932 * @optest mxcsr=0x2083 -> op1=0x2083 5933 * @optest mxcsr=0x2084 cr0|=ts -> value.xcpt=0x7 5934 * @optest mxcsr=0x2085 cr0|=em -> value.xcpt=0x6 5935 * @optest mxcsr=0x2086 cr0|=mp -> op1=0x2086 5936 * @optest mxcsr=0x2087 
cr4&~=osfxsr -> value.xcpt=0x6 5937 * @optest mxcsr=0x2088 cr0|=ts,em -> value.xcpt=0x6 5938 * @optest mxcsr=0x2089 cr0|=em cr4&~=osfxsr -> value.xcpt=0x6 5939 * @optest mxcsr=0x208a cr0|=ts,em cr4&~=osfxsr -> value.xcpt=0x6 5940 * @optest mxcsr=0x208b cr0|=ts,em,mp cr4&~=osfxsr -> value.xcpt=0x6 5941 */ 5942 FNIEMOP_DEF_1(iemOp_Grp15_stmxcsr, uint8_t, bRm) 5943 { 5944 IEMOP_MNEMONIC1(M_MEM, STMXCSR, stmxcsr, MdWO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 5945 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) 5946 return IEMOP_RAISE_INVALID_OPCODE(); 5947 5948 IEM_MC_BEGIN(2, 0); 5949 IEM_MC_ARG(uint8_t, iEffSeg, 0); 5950 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 5951 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 5952 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 5953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 5954 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 5955 IEM_MC_CALL_CIMPL_2(iemCImpl_stmxcsr, iEffSeg, GCPtrEff); 5956 IEM_MC_END(); 5957 return VINF_SUCCESS; 5958 } 2176 FNIEMOP_STUB_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm); 2177 //FNIEMOP_DEF_1(iemOp_VGrp15_vldmxcsr, uint8_t, bRm) 2178 //{ 2179 // IEMOP_MNEMONIC1(M_MEM, VLDMXCSR, vldmxcsr, MdRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 2180 // if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse) 2181 // return IEMOP_RAISE_INVALID_OPCODE(); 2182 // 2183 // IEM_MC_BEGIN(2, 0); 2184 // IEM_MC_ARG(uint8_t, iEffSeg, 0); 2185 // IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 2186 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 2187 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2188 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2189 // IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 2190 // IEM_MC_CALL_CIMPL_2(iemCImpl_ldmxcsr, iEffSeg, GCPtrEff); 2191 // IEM_MC_END(); 2192 // return VINF_SUCCESS; 2193 //} 5959 2194 5960 2195 … … 6015 2250 } 6016 2251 6017 6018 /** 6019 * @opmaps vexgrp15 6020 * @opcode !11/4 6021 * @oppfx none 6022 * @opcpuid xsave 6023 * @opgroup og_system 6024 * @opxcpttype none 6025 */ 6026 FNIEMOP_DEF_1(iemOp_Grp15_xsave, uint8_t, bRm) 6027 { 6028 IEMOP_MNEMONIC1(M_MEM, XSAVE, xsave, MRW, DISOPTYPE_HARMLESS, 0); 6029 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor) 6030 return IEMOP_RAISE_INVALID_OPCODE(); 6031 6032 IEM_MC_BEGIN(3, 0); 6033 IEM_MC_ARG(uint8_t, iEffSeg, 0); 6034 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 6035 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2); 6036 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6037 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6038 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 6039 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 6040 IEM_MC_CALL_CIMPL_3(iemCImpl_xsave, iEffSeg, GCPtrEff, enmEffOpSize); 6041 IEM_MC_END(); 6042 return VINF_SUCCESS; 6043 } 6044 6045 6046 /** 6047 * @opmaps vexgrp15 6048 * @opcode !11/5 6049 * @oppfx none 6050 * @opcpuid xsave 6051 * @opgroup og_system 6052 * @opxcpttype none 6053 */ 6054 FNIEMOP_DEF_1(iemOp_Grp15_xrstor, uint8_t, bRm) 6055 { 6056 IEMOP_MNEMONIC1(M_MEM, XRSTOR, xrstor, MRO, DISOPTYPE_HARMLESS, 0); 6057 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fXSaveRstor) 6058 return IEMOP_RAISE_INVALID_OPCODE(); 6059 6060 IEM_MC_BEGIN(3, 0); 6061 IEM_MC_ARG(uint8_t, iEffSeg, 0); 6062 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 6063 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 2); 6064 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6065 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6066 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 6067 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 6068 IEM_MC_CALL_CIMPL_3(iemCImpl_xrstor, iEffSeg, GCPtrEff, enmEffOpSize); 6069 IEM_MC_END(); 
6070 return VINF_SUCCESS; 6071 } 6072 6073 /** Opcode 0x0f 0xae mem/6. */ 6074 FNIEMOP_UD_STUB_1(iemOp_Grp15_xsaveopt, uint8_t, bRm); 6075 6076 /** 6077 * @opmaps grp15 6078 * @opcode !11/7 6079 * @oppfx none 6080 * @opcpuid clfsh 6081 * @opgroup og_cachectl 6082 * @optest op1=1 -> 6083 */ 6084 FNIEMOP_DEF_1(iemOp_Grp15_clflush, uint8_t, bRm) 6085 { 6086 IEMOP_MNEMONIC1(M_MEM, CLFLUSH, clflush, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 6087 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlush) 6088 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm); 6089 6090 IEM_MC_BEGIN(2, 0); 6091 IEM_MC_ARG(uint8_t, iEffSeg, 0); 6092 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 6093 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6094 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6095 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 6096 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff); 6097 IEM_MC_END(); 6098 return VINF_SUCCESS; 6099 } 6100 6101 /** 6102 * @opmaps grp15 6103 * @opcode !11/7 6104 * @oppfx 0x66 6105 * @opcpuid clflushopt 6106 * @opgroup og_cachectl 6107 * @optest op1=1 -> 6108 */ 6109 FNIEMOP_DEF_1(iemOp_Grp15_clflushopt, uint8_t, bRm) 6110 { 6111 IEMOP_MNEMONIC1(M_MEM, CLFLUSHOPT, clflushopt, MbRO, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 6112 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fClFlushOpt) 6113 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeeded, bRm); 6114 6115 IEM_MC_BEGIN(2, 0); 6116 IEM_MC_ARG(uint8_t, iEffSeg, 0); 6117 IEM_MC_ARG(RTGCPTR, GCPtrEff, 1); 6118 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6119 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6120 IEM_MC_ASSIGN(iEffSeg, pVCpu->iem.s.iEffSeg); 6121 IEM_MC_CALL_CIMPL_2(iemCImpl_clflush_clflushopt, iEffSeg, GCPtrEff); 6122 IEM_MC_END(); 6123 return VINF_SUCCESS; 6124 } 6125 6126 6127 /** Opcode 0x0f 0xae 11b/5. */ 6128 FNIEMOP_DEF_1(iemOp_Grp15_lfence, uint8_t, bRm) 6129 { 6130 RT_NOREF_PV(bRm); 6131 IEMOP_MNEMONIC(lfence, "lfence"); 6132 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6133 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2) 6134 return IEMOP_RAISE_INVALID_OPCODE(); 6135 6136 IEM_MC_BEGIN(0, 0); 6137 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2) 6138 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_lfence); 6139 else 6140 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence); 6141 IEM_MC_ADVANCE_RIP(); 6142 IEM_MC_END(); 6143 return VINF_SUCCESS; 6144 } 6145 6146 6147 /** Opcode 0x0f 0xae 11b/6. */ 6148 FNIEMOP_DEF_1(iemOp_Grp15_mfence, uint8_t, bRm) 6149 { 6150 RT_NOREF_PV(bRm); 6151 IEMOP_MNEMONIC(mfence, "mfence"); 6152 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6153 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2) 6154 return IEMOP_RAISE_INVALID_OPCODE(); 6155 6156 IEM_MC_BEGIN(0, 0); 6157 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2) 6158 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_mfence); 6159 else 6160 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence); 6161 IEM_MC_ADVANCE_RIP(); 6162 IEM_MC_END(); 6163 return VINF_SUCCESS; 6164 } 6165 6166 6167 /** Opcode 0x0f 0xae 11b/7. 
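 *
 * Like the lfence/mfence handlers above, the sfence handler following this
 * comment only emits the real fence when the host CPU has SSE2 and otherwise
 * calls iemAImpl_alt_mem_fence, which I read (hedged) as a locked
 * read-modify-write standing in as a full barrier, along the lines of:
 *
 *     ASMAtomicAddU32(&u32Dummy, 0);   // u32Dummy is hypothetical; any locked RMW serialises memory accesses
 *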
*/ 6168 FNIEMOP_DEF_1(iemOp_Grp15_sfence, uint8_t, bRm) 6169 { 6170 RT_NOREF_PV(bRm); 6171 IEMOP_MNEMONIC(sfence, "sfence"); 6172 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6173 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2) 6174 return IEMOP_RAISE_INVALID_OPCODE(); 6175 6176 IEM_MC_BEGIN(0, 0); 6177 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fSse2) 6178 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_sfence); 6179 else 6180 IEM_MC_CALL_VOID_AIMPL_0(iemAImpl_alt_mem_fence); 6181 IEM_MC_ADVANCE_RIP(); 6182 IEM_MC_END(); 6183 return VINF_SUCCESS; 6184 } 6185 6186 6187 /** Opcode 0xf3 0x0f 0xae 11b/0. */ 6188 FNIEMOP_UD_STUB_1(iemOp_Grp15_rdfsbase, uint8_t, bRm); 6189 6190 /** Opcode 0xf3 0x0f 0xae 11b/1. */ 6191 FNIEMOP_UD_STUB_1(iemOp_Grp15_rdgsbase, uint8_t, bRm); 6192 6193 /** Opcode 0xf3 0x0f 0xae 11b/2. */ 6194 FNIEMOP_UD_STUB_1(iemOp_Grp15_wrfsbase, uint8_t, bRm); 6195 6196 /** Opcode 0xf3 0x0f 0xae 11b/3. */ 6197 FNIEMOP_UD_STUB_1(iemOp_Grp15_wrgsbase, uint8_t, bRm); 6198 6199 6200 /** 6201 * Group 15 jump table for register variant. 6202 */ 6203 IEM_STATIC const PFNIEMOPRM g_apfnGroup15RegReg[] = 6204 { /* pfx: none, 066h, 0f3h, 0f2h */ 6205 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM, 6206 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM, 6207 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM, 6208 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM, 6209 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM), 6210 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6211 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6212 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6213 }; 6214 AssertCompile(RT_ELEMENTS(g_apfnGroup15RegReg) == 8*4); 6215 6216 6217 /** 6218 * Group 15 jump table for memory variant. 6219 */ 6220 IEM_STATIC const PFNIEMOPRM g_apfnGroup15MemReg[] = 6221 { /* pfx: none, 066h, 0f3h, 0f2h */ 6222 /* /0 */ iemOp_Grp15_fxsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6223 /* /1 */ iemOp_Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6224 /* /2 */ iemOp_Grp15_ldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6225 /* /3 */ iemOp_Grp15_stmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6226 /* /4 */ iemOp_Grp15_xsave, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6227 /* /5 */ iemOp_Grp15_xrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6228 /* /6 */ iemOp_Grp15_xsaveopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6229 /* /7 */ iemOp_Grp15_clflush, iemOp_Grp15_clflushopt, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6230 }; 6231 AssertCompile(RT_ELEMENTS(g_apfnGroup15MemReg) == 8*4); 6232 6233 6234 /** Opcode 0x0f 0xae. */ 6235 FNIEMOP_DEF(iemOp_Grp15) 6236 { 6237 IEMOP_HLP_MIN_586(); /* Not entirely accurate nor needed, but useful for debugging 286 code. 
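 *
 * Dispatch note (editorial): both group 15 jump tables below are indexed by
 * ModRM.reg and the SIMD prefix index, i.e. (hedged sketch using the same
 * masks as the code):
 *
 *     unsigned const idx = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4
 *                        + pVCpu->iem.s.idxPrefix;   // 0=none, 1=0x66, 2=0xf3, 3=0xf2
 *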
*/ 6238 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6239 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6240 /* register, register */ 6241 return FNIEMOP_CALL_1(g_apfnGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 6242 + pVCpu->iem.s.idxPrefix], bRm); 6243 /* memory, register */ 6244 return FNIEMOP_CALL_1(g_apfnGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 6245 + pVCpu->iem.s.idxPrefix], bRm); 6246 } 6247 6248 6249 /** 6250 * Vex group 15 jump table for register variant. 6251 * @todo work in progress 6252 */ 6253 IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15RegReg[] = 6254 { /* pfx: none, 066h, 0f3h, 0f2h */ 6255 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdfsbase, iemOp_InvalidWithRM, 6256 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_rdgsbase, iemOp_InvalidWithRM, 6257 /* /2 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrfsbase, iemOp_InvalidWithRM, 6258 /* /3 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_Grp15_wrgsbase, iemOp_InvalidWithRM, 6259 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM), 6260 /* /5 */ iemOp_Grp15_lfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6261 /* /6 */ iemOp_Grp15_mfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6262 /* /7 */ iemOp_Grp15_sfence, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6263 }; 6264 AssertCompile(RT_ELEMENTS(g_apfnVexGroup15RegReg) == 8*4); 6265 2252 /* Opcode VEX.0F 0xae mem/4 - invalid. */ 2253 /* Opcode VEX.0F 0xae mem/5 - invalid. */ 2254 /* Opcode VEX.0F 0xae mem/6 - invalid. */ 2255 /* Opcode VEX.0F 0xae mem/7 - invalid. */ 2256 2257 /* Opcode VEX.0F 0xae 11b/0 - invalid. */ 2258 /* Opcode VEX.0F 0xae 11b/1 - invalid. */ 2259 /* Opcode VEX.0F 0xae 11b/2 - invalid. */ 2260 /* Opcode VEX.0F 0xae 11b/3 - invalid. */ 2261 /* Opcode VEX.0F 0xae 11b/4 - invalid. */ 2262 /* Opcode VEX.0F 0xae 11b/5 - invalid. */ 2263 /* Opcode VEX.0F 0xae 11b/6 - invalid. */ 2264 /* Opcode VEX.0F 0xae 11b/7 - invalid. */ 6266 2265 6267 2266 /** 6268 2267 * Vex group 15 jump table for memory variant. 
6269 * @todo work in progress6270 2268 */ 6271 2269 IEM_STATIC const PFNIEMOPRM g_apfnVexGroup15MemReg[] = 6272 2270 { /* pfx: none, 066h, 0f3h, 0f2h */ 6273 /* /0 */ iemOp_ Grp15_fxsave,iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,6274 /* /1 */ iemOp_ Grp15_fxrstor, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,6275 /* /2 */ iemOp_ Grp15_ldmxcsr,iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,2271 /* /0 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 2272 /* /1 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 2273 /* /2 */ iemOp_VGrp15_vldmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6276 2274 /* /3 */ iemOp_VGrp15_vstmxcsr, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6277 /* /4 */ iemOp_ Grp15_xsave,iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,6278 /* /5 */ iemOp_ Grp15_xrstor,iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,6279 /* /6 */ iemOp_ Grp15_xsaveopt,iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM,6280 /* /7 */ iemOp_ Grp15_clflush, iemOp_Grp15_clflushopt,iemOp_InvalidWithRM, iemOp_InvalidWithRM,2275 /* /4 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 2276 /* /5 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 2277 /* /6 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 2278 /* /7 */ iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 6281 2279 }; 6282 2280 AssertCompile(RT_ELEMENTS(g_apfnVexGroup15MemReg) == 8*4); … … 6289 2287 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6290 2288 /* register, register */ 6291 return FNIEMOP_CALL_1( g_apfnVexGroup15RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 46292 + pVCpu->iem.s.idxPrefix], bRm); 2289 return FNIEMOP_CALL_1(iemOp_InvalidWithRM, bRm); 2290 6293 2291 /* memory, register */ 6294 2292 return FNIEMOP_CALL_1(g_apfnVexGroup15MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 … … 6297 2295 6298 2296 6299 /** Opcode 0x0f 0xaf. */ 6300 FNIEMOP_DEF(iemOp_imul_Gv_Ev) 6301 { 6302 IEMOP_MNEMONIC(imul_Gv_Ev, "imul Gv,Ev"); 6303 IEMOP_HLP_MIN_386(); 6304 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 6305 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_imul_two); 6306 } 6307 6308 6309 /** Opcode 0x0f 0xb0. 
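 *
 * For reference, the architectural behaviour the iemAImpl_cmpxchg_u8 worker
 * is expected to provide (hedged plain-C sketch, illustrative only; uAl is
 * the accumulator value):
 *
 *     if (uAl == *pu8Dst) { *pu8Dst = u8Src;   fEFlags |=  X86_EFL_ZF; }   // success
 *     else                { uAl     = *pu8Dst; fEFlags &= ~X86_EFL_ZF; }   // failure: AL gets the old value
 *     // the remaining arithmetic flags come from the implicit compare
 *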
*/ 6310 FNIEMOP_DEF(iemOp_cmpxchg_Eb_Gb) 6311 { 6312 IEMOP_MNEMONIC(cmpxchg_Eb_Gb, "cmpxchg Eb,Gb"); 6313 IEMOP_HLP_MIN_486(); 6314 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6315 6316 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6317 { 6318 IEMOP_HLP_DONE_DECODING(); 6319 IEM_MC_BEGIN(4, 0); 6320 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 6321 IEM_MC_ARG(uint8_t *, pu8Al, 1); 6322 IEM_MC_ARG(uint8_t, u8Src, 2); 6323 IEM_MC_ARG(uint32_t *, pEFlags, 3); 6324 6325 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6326 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6327 IEM_MC_REF_GREG_U8(pu8Al, X86_GREG_xAX); 6328 IEM_MC_REF_EFLAGS(pEFlags); 6329 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6330 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags); 6331 else 6332 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags); 6333 6334 IEM_MC_ADVANCE_RIP(); 6335 IEM_MC_END(); 6336 } 6337 else 6338 { 6339 IEM_MC_BEGIN(4, 3); 6340 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 6341 IEM_MC_ARG(uint8_t *, pu8Al, 1); 6342 IEM_MC_ARG(uint8_t, u8Src, 2); 6343 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 6344 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6345 IEM_MC_LOCAL(uint8_t, u8Al); 6346 6347 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6348 IEMOP_HLP_DONE_DECODING(); 6349 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6350 IEM_MC_FETCH_GREG_U8(u8Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6351 IEM_MC_FETCH_GREG_U8(u8Al, X86_GREG_xAX); 6352 IEM_MC_FETCH_EFLAGS(EFlags); 6353 IEM_MC_REF_LOCAL(pu8Al, u8Al); 6354 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6355 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8, pu8Dst, pu8Al, u8Src, pEFlags); 6356 else 6357 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u8_locked, pu8Dst, pu8Al, u8Src, pEFlags); 6358 6359 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); 6360 IEM_MC_COMMIT_EFLAGS(EFlags); 6361 IEM_MC_STORE_GREG_U8(X86_GREG_xAX, u8Al); 6362 IEM_MC_ADVANCE_RIP(); 6363 IEM_MC_END(); 6364 } 6365 return VINF_SUCCESS; 6366 } 6367 6368 /** Opcode 0x0f 0xb1. 
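 *
 * Editorial note: in the Ev,Gv handler below the 64-bit source operand is
 * passed by pointer on 32-bit hosts (the RT_ARCH_X86 paths), presumably
 * because a 64-bit value does not fit a single argument register there;
 * 64-bit hosts pass it by value. Hedged illustration of the two shapes
 * (prototypes are illustrative, not the real AIMPL declarations):
 *
 *     void cmpxchg_u64(uint64_t *pu64Dst, uint64_t *pu64Rax, uint64_t  u64Src,  uint32_t *pEFlags);  // 64-bit host
 *     void cmpxchg_u64(uint64_t *pu64Dst, uint64_t *pu64Rax, uint64_t *pu64Src, uint32_t *pEFlags);  // 32-bit host
 *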
*/ 6369 FNIEMOP_DEF(iemOp_cmpxchg_Ev_Gv) 6370 { 6371 IEMOP_MNEMONIC(cmpxchg_Ev_Gv, "cmpxchg Ev,Gv"); 6372 IEMOP_HLP_MIN_486(); 6373 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6374 6375 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6376 { 6377 IEMOP_HLP_DONE_DECODING(); 6378 switch (pVCpu->iem.s.enmEffOpSize) 6379 { 6380 case IEMMODE_16BIT: 6381 IEM_MC_BEGIN(4, 0); 6382 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 6383 IEM_MC_ARG(uint16_t *, pu16Ax, 1); 6384 IEM_MC_ARG(uint16_t, u16Src, 2); 6385 IEM_MC_ARG(uint32_t *, pEFlags, 3); 6386 6387 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6388 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6389 IEM_MC_REF_GREG_U16(pu16Ax, X86_GREG_xAX); 6390 IEM_MC_REF_EFLAGS(pEFlags); 6391 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6392 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags); 6393 else 6394 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags); 6395 6396 IEM_MC_ADVANCE_RIP(); 6397 IEM_MC_END(); 6398 return VINF_SUCCESS; 6399 6400 case IEMMODE_32BIT: 6401 IEM_MC_BEGIN(4, 0); 6402 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 6403 IEM_MC_ARG(uint32_t *, pu32Eax, 1); 6404 IEM_MC_ARG(uint32_t, u32Src, 2); 6405 IEM_MC_ARG(uint32_t *, pEFlags, 3); 6406 6407 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6408 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6409 IEM_MC_REF_GREG_U32(pu32Eax, X86_GREG_xAX); 6410 IEM_MC_REF_EFLAGS(pEFlags); 6411 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6412 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags); 6413 else 6414 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags); 6415 6416 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Eax); 6417 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 6418 IEM_MC_ADVANCE_RIP(); 6419 IEM_MC_END(); 6420 return VINF_SUCCESS; 6421 6422 case IEMMODE_64BIT: 6423 IEM_MC_BEGIN(4, 0); 6424 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 6425 IEM_MC_ARG(uint64_t *, pu64Rax, 1); 6426 #ifdef RT_ARCH_X86 6427 IEM_MC_ARG(uint64_t *, pu64Src, 2); 6428 #else 6429 IEM_MC_ARG(uint64_t, u64Src, 2); 6430 #endif 6431 IEM_MC_ARG(uint32_t *, pEFlags, 3); 6432 6433 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6434 IEM_MC_REF_GREG_U64(pu64Rax, X86_GREG_xAX); 6435 IEM_MC_REF_EFLAGS(pEFlags); 6436 #ifdef RT_ARCH_X86 6437 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6438 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6439 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags); 6440 else 6441 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags); 6442 #else 6443 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6444 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6445 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags); 6446 else 6447 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags); 6448 #endif 6449 6450 IEM_MC_ADVANCE_RIP(); 6451 IEM_MC_END(); 6452 return VINF_SUCCESS; 6453 6454 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6455 } 6456 } 6457 else 6458 { 6459 switch (pVCpu->iem.s.enmEffOpSize) 6460 { 6461 case IEMMODE_16BIT: 6462 
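/* Editorial note: the memory forms below map the destination read/write and
   commit it on both paths, matching architectural CMPXCHG, which -- as I
   understand it, hedged -- performs a write to the destination even when the
   compare fails. Plain-C sketch (illustrative only):

       uint16_t const uOld = *pu16Dst;
       if (u16Ax == uOld)  *pu16Dst = u16Src;   // success: store the source
       else                *pu16Dst = uOld;     // failure: rewrite the old value
*/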
IEM_MC_BEGIN(4, 3); 6463 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 6464 IEM_MC_ARG(uint16_t *, pu16Ax, 1); 6465 IEM_MC_ARG(uint16_t, u16Src, 2); 6466 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 6467 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6468 IEM_MC_LOCAL(uint16_t, u16Ax); 6469 6470 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6471 IEMOP_HLP_DONE_DECODING(); 6472 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6473 IEM_MC_FETCH_GREG_U16(u16Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6474 IEM_MC_FETCH_GREG_U16(u16Ax, X86_GREG_xAX); 6475 IEM_MC_FETCH_EFLAGS(EFlags); 6476 IEM_MC_REF_LOCAL(pu16Ax, u16Ax); 6477 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6478 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16, pu16Dst, pu16Ax, u16Src, pEFlags); 6479 else 6480 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u16_locked, pu16Dst, pu16Ax, u16Src, pEFlags); 6481 6482 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 6483 IEM_MC_COMMIT_EFLAGS(EFlags); 6484 IEM_MC_STORE_GREG_U16(X86_GREG_xAX, u16Ax); 6485 IEM_MC_ADVANCE_RIP(); 6486 IEM_MC_END(); 6487 return VINF_SUCCESS; 6488 6489 case IEMMODE_32BIT: 6490 IEM_MC_BEGIN(4, 3); 6491 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 6492 IEM_MC_ARG(uint32_t *, pu32Eax, 1); 6493 IEM_MC_ARG(uint32_t, u32Src, 2); 6494 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 6495 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6496 IEM_MC_LOCAL(uint32_t, u32Eax); 6497 6498 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6499 IEMOP_HLP_DONE_DECODING(); 6500 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6501 IEM_MC_FETCH_GREG_U32(u32Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6502 IEM_MC_FETCH_GREG_U32(u32Eax, X86_GREG_xAX); 6503 IEM_MC_FETCH_EFLAGS(EFlags); 6504 IEM_MC_REF_LOCAL(pu32Eax, u32Eax); 6505 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6506 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32, pu32Dst, pu32Eax, u32Src, pEFlags); 6507 else 6508 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u32_locked, pu32Dst, pu32Eax, u32Src, pEFlags); 6509 6510 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 6511 IEM_MC_COMMIT_EFLAGS(EFlags); 6512 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u32Eax); 6513 IEM_MC_ADVANCE_RIP(); 6514 IEM_MC_END(); 6515 return VINF_SUCCESS; 6516 6517 case IEMMODE_64BIT: 6518 IEM_MC_BEGIN(4, 3); 6519 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 6520 IEM_MC_ARG(uint64_t *, pu64Rax, 1); 6521 #ifdef RT_ARCH_X86 6522 IEM_MC_ARG(uint64_t *, pu64Src, 2); 6523 #else 6524 IEM_MC_ARG(uint64_t, u64Src, 2); 6525 #endif 6526 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 3); 6527 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6528 IEM_MC_LOCAL(uint64_t, u64Rax); 6529 6530 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6531 IEMOP_HLP_DONE_DECODING(); 6532 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6533 IEM_MC_FETCH_GREG_U64(u64Rax, X86_GREG_xAX); 6534 IEM_MC_FETCH_EFLAGS(EFlags); 6535 IEM_MC_REF_LOCAL(pu64Rax, u64Rax); 6536 #ifdef RT_ARCH_X86 6537 IEM_MC_REF_GREG_U64(pu64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6538 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6539 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, pu64Src, pEFlags); 6540 else 6541 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, pu64Src, pEFlags); 6542 #else 6543 IEM_MC_FETCH_GREG_U64(u64Src, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 6544 if 
(!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6545 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64, pu64Dst, pu64Rax, u64Src, pEFlags); 6546 else 6547 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg_u64_locked, pu64Dst, pu64Rax, u64Src, pEFlags); 6548 #endif 6549 6550 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 6551 IEM_MC_COMMIT_EFLAGS(EFlags); 6552 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u64Rax); 6553 IEM_MC_ADVANCE_RIP(); 6554 IEM_MC_END(); 6555 return VINF_SUCCESS; 6556 6557 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6558 } 6559 } 6560 } 6561 6562 6563 FNIEMOP_DEF_2(iemOpCommonLoadSRegAndGreg, uint8_t, iSegReg, uint8_t, bRm) 6564 { 6565 Assert((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)); /* Caller checks this */ 6566 uint8_t const iGReg = ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg; 6567 6568 switch (pVCpu->iem.s.enmEffOpSize) 6569 { 6570 case IEMMODE_16BIT: 6571 IEM_MC_BEGIN(5, 1); 6572 IEM_MC_ARG(uint16_t, uSel, 0); 6573 IEM_MC_ARG(uint16_t, offSeg, 1); 6574 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2); 6575 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3); 6576 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4); 6577 IEM_MC_LOCAL(RTGCPTR, GCPtrEff); 6578 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6579 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6580 IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff); 6581 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 2); 6582 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize); 6583 IEM_MC_END(); 6584 return VINF_SUCCESS; 6585 6586 case IEMMODE_32BIT: 6587 IEM_MC_BEGIN(5, 1); 6588 IEM_MC_ARG(uint16_t, uSel, 0); 6589 IEM_MC_ARG(uint32_t, offSeg, 1); 6590 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2); 6591 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3); 6592 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4); 6593 IEM_MC_LOCAL(RTGCPTR, GCPtrEff); 6594 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6595 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6596 IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff); 6597 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 4); 6598 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize); 6599 IEM_MC_END(); 6600 return VINF_SUCCESS; 6601 6602 case IEMMODE_64BIT: 6603 IEM_MC_BEGIN(5, 1); 6604 IEM_MC_ARG(uint16_t, uSel, 0); 6605 IEM_MC_ARG(uint64_t, offSeg, 1); 6606 IEM_MC_ARG_CONST(uint8_t, iSegRegArg,/*=*/iSegReg, 2); 6607 IEM_MC_ARG_CONST(uint8_t, iGRegArg, /*=*/iGReg, 3); 6608 IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize,/*=*/pVCpu->iem.s.enmEffOpSize, 4); 6609 IEM_MC_LOCAL(RTGCPTR, GCPtrEff); 6610 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEff, bRm, 0); 6611 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6612 if (IEM_IS_GUEST_CPU_AMD(pVCpu)) /** @todo testcase: rev 3.15 of the amd manuals claims it only loads a 32-bit greg. */ 6613 IEM_MC_FETCH_MEM_U32_SX_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff); 6614 else 6615 IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEff); 6616 IEM_MC_FETCH_MEM_U16_DISP(uSel, pVCpu->iem.s.iEffSeg, GCPtrEff, 8); 6617 IEM_MC_CALL_CIMPL_5(iemCImpl_load_SReg_Greg, uSel, offSeg, iSegRegArg, iGRegArg, enmEffOpSize); 6618 IEM_MC_END(); 6619 return VINF_SUCCESS; 6620 6621 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6622 } 6623 } 6624 6625 6626 /** Opcode 0x0f 0xb2. 
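 *
 * LSS (and LFS/LGS below) load a far pointer; the common worker above fetches
 * the offset first and then the 16-bit selector stored right behind it.
 * Hedged sketch of the m16:16/m16:32/m16:64 layout (read/read_u16 are
 * hypothetical helpers, illustrative only):
 *
 *     offSeg = read(GCPtrEff);                  // 2, 4 or 8 bytes, per operand size
 *     uSel   = read_u16(GCPtrEff + cbOffSeg);   // selector immediately follows
 *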
*/ 6627 FNIEMOP_DEF(iemOp_lss_Gv_Mp) 6628 { 6629 IEMOP_MNEMONIC(lss_Gv_Mp, "lss Gv,Mp"); 6630 IEMOP_HLP_MIN_386(); 6631 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6632 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6633 return IEMOP_RAISE_INVALID_OPCODE(); 6634 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_SS, bRm); 6635 } 6636 6637 6638 /** Opcode 0x0f 0xb3. */ 6639 FNIEMOP_DEF(iemOp_btr_Ev_Gv) 6640 { 6641 IEMOP_MNEMONIC(btr_Ev_Gv, "btr Ev,Gv"); 6642 IEMOP_HLP_MIN_386(); 6643 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btr); 6644 } 6645 6646 6647 /** Opcode 0x0f 0xb4. */ 6648 FNIEMOP_DEF(iemOp_lfs_Gv_Mp) 6649 { 6650 IEMOP_MNEMONIC(lfs_Gv_Mp, "lfs Gv,Mp"); 6651 IEMOP_HLP_MIN_386(); 6652 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6653 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6654 return IEMOP_RAISE_INVALID_OPCODE(); 6655 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_FS, bRm); 6656 } 6657 6658 6659 /** Opcode 0x0f 0xb5. */ 6660 FNIEMOP_DEF(iemOp_lgs_Gv_Mp) 6661 { 6662 IEMOP_MNEMONIC(lgs_Gv_Mp, "lgs Gv,Mp"); 6663 IEMOP_HLP_MIN_386(); 6664 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6665 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6666 return IEMOP_RAISE_INVALID_OPCODE(); 6667 return FNIEMOP_CALL_2(iemOpCommonLoadSRegAndGreg, X86_SREG_GS, bRm); 6668 } 6669 6670 6671 /** Opcode 0x0f 0xb6. */ 6672 FNIEMOP_DEF(iemOp_movzx_Gv_Eb) 6673 { 6674 IEMOP_MNEMONIC(movzx_Gv_Eb, "movzx Gv,Eb"); 6675 IEMOP_HLP_MIN_386(); 6676 6677 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6678 6679 /* 6680 * If rm is denoting a register, no more instruction bytes. 6681 */ 6682 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6683 { 6684 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6685 switch (pVCpu->iem.s.enmEffOpSize) 6686 { 6687 case IEMMODE_16BIT: 6688 IEM_MC_BEGIN(0, 1); 6689 IEM_MC_LOCAL(uint16_t, u16Value); 6690 IEM_MC_FETCH_GREG_U8_ZX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6691 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value); 6692 IEM_MC_ADVANCE_RIP(); 6693 IEM_MC_END(); 6694 return VINF_SUCCESS; 6695 6696 case IEMMODE_32BIT: 6697 IEM_MC_BEGIN(0, 1); 6698 IEM_MC_LOCAL(uint32_t, u32Value); 6699 IEM_MC_FETCH_GREG_U8_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6700 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 6701 IEM_MC_ADVANCE_RIP(); 6702 IEM_MC_END(); 6703 return VINF_SUCCESS; 6704 6705 case IEMMODE_64BIT: 6706 IEM_MC_BEGIN(0, 1); 6707 IEM_MC_LOCAL(uint64_t, u64Value); 6708 IEM_MC_FETCH_GREG_U8_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6709 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 6710 IEM_MC_ADVANCE_RIP(); 6711 IEM_MC_END(); 6712 return VINF_SUCCESS; 6713 6714 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6715 } 6716 } 6717 else 6718 { 6719 /* 6720 * We're loading a register from memory. 
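 *
 * Editorial note: both the register forms above and the memory forms below
 * lean on the 64-bit mode rule that a 32-bit GPR store zero-extends into
 * bits 63:32 (the same rule other paths in this file make explicit via
 * IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF). Hedged one-liner (illustrative only):
 *
 *     uDst = (uint64_t)(uint32_t)u8Value;   // movzx r32, r/m8 also clears the high half
 *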
6721 */ 6722 switch (pVCpu->iem.s.enmEffOpSize) 6723 { 6724 case IEMMODE_16BIT: 6725 IEM_MC_BEGIN(0, 2); 6726 IEM_MC_LOCAL(uint16_t, u16Value); 6727 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6728 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6729 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6730 IEM_MC_FETCH_MEM_U8_ZX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 6731 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value); 6732 IEM_MC_ADVANCE_RIP(); 6733 IEM_MC_END(); 6734 return VINF_SUCCESS; 6735 6736 case IEMMODE_32BIT: 6737 IEM_MC_BEGIN(0, 2); 6738 IEM_MC_LOCAL(uint32_t, u32Value); 6739 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6740 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6741 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6742 IEM_MC_FETCH_MEM_U8_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 6743 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 6744 IEM_MC_ADVANCE_RIP(); 6745 IEM_MC_END(); 6746 return VINF_SUCCESS; 6747 6748 case IEMMODE_64BIT: 6749 IEM_MC_BEGIN(0, 2); 6750 IEM_MC_LOCAL(uint64_t, u64Value); 6751 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6752 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6753 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6754 IEM_MC_FETCH_MEM_U8_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 6755 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 6756 IEM_MC_ADVANCE_RIP(); 6757 IEM_MC_END(); 6758 return VINF_SUCCESS; 6759 6760 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6761 } 6762 } 6763 } 6764 6765 6766 /** Opcode 0x0f 0xb7. */ 6767 FNIEMOP_DEF(iemOp_movzx_Gv_Ew) 6768 { 6769 IEMOP_MNEMONIC(movzx_Gv_Ew, "movzx Gv,Ew"); 6770 IEMOP_HLP_MIN_386(); 6771 6772 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6773 6774 /** @todo Not entirely sure how the operand size prefix is handled here, 6775 * assuming that it will be ignored. Would be nice to have a few 6776 * test for this. */ 6777 /* 6778 * If rm is denoting a register, no more instruction bytes. 6779 */ 6780 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6781 { 6782 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6783 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT) 6784 { 6785 IEM_MC_BEGIN(0, 1); 6786 IEM_MC_LOCAL(uint32_t, u32Value); 6787 IEM_MC_FETCH_GREG_U16_ZX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6788 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 6789 IEM_MC_ADVANCE_RIP(); 6790 IEM_MC_END(); 6791 } 6792 else 6793 { 6794 IEM_MC_BEGIN(0, 1); 6795 IEM_MC_LOCAL(uint64_t, u64Value); 6796 IEM_MC_FETCH_GREG_U16_ZX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6797 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 6798 IEM_MC_ADVANCE_RIP(); 6799 IEM_MC_END(); 6800 } 6801 } 6802 else 6803 { 6804 /* 6805 * We're loading a register from memory. 
6806 */ 6807 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT) 6808 { 6809 IEM_MC_BEGIN(0, 2); 6810 IEM_MC_LOCAL(uint32_t, u32Value); 6811 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6812 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6813 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6814 IEM_MC_FETCH_MEM_U16_ZX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 6815 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 6816 IEM_MC_ADVANCE_RIP(); 6817 IEM_MC_END(); 6818 } 6819 else 6820 { 6821 IEM_MC_BEGIN(0, 2); 6822 IEM_MC_LOCAL(uint64_t, u64Value); 6823 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6824 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 6825 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6826 IEM_MC_FETCH_MEM_U16_ZX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 6827 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 6828 IEM_MC_ADVANCE_RIP(); 6829 IEM_MC_END(); 6830 } 6831 } 6832 return VINF_SUCCESS; 6833 } 6834 6835 6836 /** Opcode 0x0f 0xb8 - JMPE (reserved for emulator on IPF) */ 6837 FNIEMOP_UD_STUB(iemOp_jmpe); 6838 /** Opcode 0xf3 0x0f 0xb8 - POPCNT Gv, Ev */ 6839 FNIEMOP_STUB(iemOp_popcnt_Gv_Ev); 6840 2297 /* Opcode VEX.0F 0xaf - invalid. */ 2298 2299 /* Opcode VEX.0F 0xb0 - invalid. */ 2300 /* Opcode VEX.0F 0xb1 - invalid. */ 2301 /* Opcode VEX.0F 0xb2 - invalid. */ 2302 /* Opcode VEX.0F 0xb2 - invalid. */ 2303 /* Opcode VEX.0F 0xb3 - invalid. */ 2304 /* Opcode VEX.0F 0xb4 - invalid. */ 2305 /* Opcode VEX.0F 0xb5 - invalid. */ 2306 /* Opcode VEX.0F 0xb6 - invalid. */ 2307 /* Opcode VEX.0F 0xb7 - invalid. */ 2308 /* Opcode VEX.0F 0xb8 - invalid. */ 2309 /* Opcode VEX.0F 0xb9 - invalid. */ 2310 /* Opcode VEX.0F 0xba - invalid. */ 2311 /* Opcode VEX.0F 0xbb - invalid. */ 2312 /* Opcode VEX.0F 0xbc - invalid. */ 2313 /* Opcode VEX.0F 0xbd - invalid. */ 2314 /* Opcode VEX.0F 0xbe - invalid. */ 2315 /* Opcode VEX.0F 0xbf - invalid. */ 2316 2317 /* Opcode VEX.0F 0xc0 - invalid. */ 2318 /* Opcode VEX.66.0F 0xc0 - invalid. */ 2319 /* Opcode VEX.F3.0F 0xc0 - invalid. */ 2320 /* Opcode VEX.F2.0F 0xc0 - invalid. */ 2321 2322 /* Opcode VEX.0F 0xc1 - invalid. */ 2323 /* Opcode VEX.66.0F 0xc1 - invalid. */ 2324 /* Opcode VEX.F3.0F 0xc1 - invalid. */ 2325 /* Opcode VEX.F2.0F 0xc1 - invalid. 
*/ 2326 2327 /** Opcode VEX.0F 0xc2 - vcmpps Vps,Hps,Wps,Ib */ 2328 FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib); 2329 /** Opcode VEX.66.0F 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */ 2330 FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib); 2331 /** Opcode VEX.F3.0F 0xc2 - vcmpss Vss,Hss,Wss,Ib */ 2332 FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib); 2333 /** Opcode VEX.F2.0F 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */ 2334 FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib); 2335 2336 /* Opcode VEX.0F 0xc3 - invalid */ 2337 /* Opcode VEX.66.0F 0xc3 - invalid */ 2338 /* Opcode VEX.F3.0F 0xc3 - invalid */ 2339 /* Opcode VEX.F2.0F 0xc3 - invalid */ 2340 2341 /* Opcode VEX.0F 0xc4 - invalid */ 2342 /** Opcode VEX.66.0F 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */ 2343 FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib); 2344 /* Opcode VEX.F3.0F 0xc4 - invalid */ 2345 /* Opcode VEX.F2.0F 0xc4 - invalid */ 2346 2347 /* Opcode VEX.0F 0xc5 - invalid */ 2348 /** Opcode VEX.66.0F 0xc5 - vpextrw Gd, Udq, Ib */ 2349 FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib); 2350 /* Opcode VEX.F3.0F 0xc5 - invalid */ 2351 /* Opcode VEX.F2.0F 0xc5 - invalid */ 2352 2353 /** Opcode VEX.0F 0xc6 - vshufps Vps,Hps,Wps,Ib */ 2354 FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib); 2355 /** Opcode VEX.66.0F 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */ 2356 FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib); 2357 /* Opcode VEX.F3.0F 0xc6 - invalid */ 2358 /* Opcode VEX.F2.0F 0xc6 - invalid */ 2359 2360 /* Opcode VEX.0F 0xc7 - invalid */ 2361 /* Opcode VEX.66.0F 0xc7 - invalid */ 2362 /* Opcode VEX.F3.0F 0xc7 - invalid */ 2363 /* Opcode VEX.F2.0F 0xc7 - invalid */ 2364 2365 /* Opcode VEX.0F 0xc8 - invalid */ 2366 /* Opcode VEX.0F 0xc9 - invalid */ 2367 /* Opcode VEX.0F 0xca - invalid */ 2368 /* Opcode VEX.0F 0xcb - invalid */ 2369 /* Opcode VEX.0F 0xcc - invalid */ 2370 /* Opcode VEX.0F 0xcd - invalid */ 2371 /* Opcode VEX.0F 0xce - invalid */ 2372 /* Opcode VEX.0F 0xcf - invalid */ 2373 2374 2375 /* Opcode VEX.0F 0xd0 - invalid */ 2376 /** Opcode VEX.66.0F 0xd0 - vaddsubpd Vpd, Hpd, Wpd */ 2377 FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd); 2378 /* Opcode VEX.F3.0F 0xd0 - invalid */ 2379 /** Opcode VEX.F2.0F 0xd0 - vaddsubps Vps, Hps, Wps */ 2380 FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps); 2381 2382 /* Opcode VEX.0F 0xd1 - invalid */ 2383 /** Opcode VEX.66.0F 0xd1 - vpsrlw Vx, Hx, W */ 2384 FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W); 2385 /* Opcode VEX.F3.0F 0xd1 - invalid */ 2386 /* Opcode VEX.F2.0F 0xd1 - invalid */ 2387 2388 /* Opcode VEX.0F 0xd2 - invalid */ 2389 /** Opcode VEX.66.0F 0xd2 - vpsrld Vx, Hx, Wx */ 2390 FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx); 2391 /* Opcode VEX.F3.0F 0xd2 - invalid */ 2392 /* Opcode VEX.F2.0F 0xd2 - invalid */ 2393 2394 /* Opcode VEX.0F 0xd3 - invalid */ 2395 /** Opcode VEX.66.0F 0xd3 - vpsrlq Vx, Hx, Wx */ 2396 FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx); 2397 /* Opcode VEX.F3.0F 0xd3 - invalid */ 2398 /* Opcode VEX.F2.0F 0xd3 - invalid */ 2399 2400 /* Opcode VEX.0F 0xd4 - invalid */ 2401 /** Opcode VEX.66.0F 0xd4 - vpaddq Vx, Hx, W */ 2402 FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W); 2403 /* Opcode VEX.F3.0F 0xd4 - invalid */ 2404 /* Opcode VEX.F2.0F 0xd4 - invalid */ 2405 2406 /* Opcode VEX.0F 0xd5 - invalid */ 2407 /** Opcode VEX.66.0F 0xd5 - vpmullw Vx, Hx, Wx */ 2408 FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx); 2409 /* Opcode VEX.F3.0F 0xd5 - invalid */ 2410 /* Opcode VEX.F2.0F 0xd5 - invalid */ 2411 2412 /* Opcode VEX.0F 0xd6 - invalid */ 6841 2413 6842 2414 /** 6843 * @opcode 0xb9 6844 * @opinvalid intel-modrm 6845 * @optest -> 2415 * @opcode 0xd6 2416 * @oppfx 0x66 2417 * @opcpuid sse2 2418 * @opgroup
og_sse2_pcksclr_datamove 2419 * @opxcpttype none 2420 * @optest op1=-1 op2=2 -> op1=2 2421 * @optest op1=0 op2=-42 -> op1=-42 6846 2422 */ 6847 FNIEMOP_DEF(iemOp_Grp10) 6848 { 6849 /* 6850 * AMD does not decode beyond the 0xb9 whereas intel does the modr/m bit 6851 * too. See bs3-cpu-decoder-1.c32. So, we can forward to iemOp_InvalidNeedRM. 6852 */ 6853 Log(("iemOp_Grp10 aka UD1 -> #UD\n")); 6854 IEMOP_MNEMONIC2EX(ud1, "ud1", RM, UD1, ud1, Gb, Eb, DISOPTYPE_INVALID, IEMOPHINT_IGNORES_OP_SIZE); /* just picked Gb,Eb here. */ 6855 return FNIEMOP_CALL(iemOp_InvalidNeedRM); 6856 } 6857 6858 6859 /** Opcode 0x0f 0xba. */ 6860 FNIEMOP_DEF(iemOp_Grp8) 6861 { 6862 IEMOP_HLP_MIN_386(); 6863 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 6864 PCIEMOPBINSIZES pImpl; 6865 switch ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) 6866 { 6867 case 0: case 1: case 2: case 3: 6868 /* Both AMD and Intel want full modr/m decoding and imm8. */ 6869 return FNIEMOP_CALL_1(iemOp_InvalidWithRMAllNeedImm8, bRm); 6870 case 4: pImpl = &g_iemAImpl_bt; IEMOP_MNEMONIC(bt_Ev_Ib, "bt Ev,Ib"); break; 6871 case 5: pImpl = &g_iemAImpl_bts; IEMOP_MNEMONIC(bts_Ev_Ib, "bts Ev,Ib"); break; 6872 case 6: pImpl = &g_iemAImpl_btr; IEMOP_MNEMONIC(btr_Ev_Ib, "btr Ev,Ib"); break; 6873 case 7: pImpl = &g_iemAImpl_btc; IEMOP_MNEMONIC(btc_Ev_Ib, "btc Ev,Ib"); break; 6874 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6875 } 6876 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_AF | X86_EFL_PF); 6877 6878 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 6879 { 6880 /* register destination. */ 6881 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit); 6882 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6883 6884 switch (pVCpu->iem.s.enmEffOpSize) 6885 { 6886 case IEMMODE_16BIT: 6887 IEM_MC_BEGIN(3, 0); 6888 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 6889 IEM_MC_ARG_CONST(uint16_t, u16Src, /*=*/ u8Bit & 0x0f, 1); 6890 IEM_MC_ARG(uint32_t *, pEFlags, 2); 6891 6892 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6893 IEM_MC_REF_EFLAGS(pEFlags); 6894 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 6895 6896 IEM_MC_ADVANCE_RIP(); 6897 IEM_MC_END(); 6898 return VINF_SUCCESS; 6899 6900 case IEMMODE_32BIT: 6901 IEM_MC_BEGIN(3, 0); 6902 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 6903 IEM_MC_ARG_CONST(uint32_t, u32Src, /*=*/ u8Bit & 0x1f, 1); 6904 IEM_MC_ARG(uint32_t *, pEFlags, 2); 6905 6906 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6907 IEM_MC_REF_EFLAGS(pEFlags); 6908 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 6909 6910 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 6911 IEM_MC_ADVANCE_RIP(); 6912 IEM_MC_END(); 6913 return VINF_SUCCESS; 6914 6915 case IEMMODE_64BIT: 6916 IEM_MC_BEGIN(3, 0); 6917 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 6918 IEM_MC_ARG_CONST(uint64_t, u64Src, /*=*/ u8Bit & 0x3f, 1); 6919 IEM_MC_ARG(uint32_t *, pEFlags, 2); 6920 6921 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 6922 IEM_MC_REF_EFLAGS(pEFlags); 6923 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 6924 6925 IEM_MC_ADVANCE_RIP(); 6926 IEM_MC_END(); 6927 return VINF_SUCCESS; 6928 6929 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 6930 } 6931 } 6932 else 6933 { 6934 /* memory destination. */ 6935 6936 uint32_t fAccess; 6937 if (pImpl->pfnLockedU16) 6938 fAccess = IEM_ACCESS_DATA_RW; 6939 else /* BT */ 6940 fAccess = IEM_ACCESS_DATA_R; 6941 6942 /** @todo test negative bit offsets!
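* Both forms mask the immediate to the operand width, mirroring the u8Bit & 0x0f / 0x1f / 0x3f seen above and below, so as a sketch: bt ax, 0x11 tests bit (0x11 & 0x0f) = 1, and an imm8 of 0xff on a 16-bit operand hits bit 15 rather than memory beyond the operand.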
*/ 6943 switch (pVCpu->iem.s.enmEffOpSize) 6944 { 6945 case IEMMODE_16BIT: 6946 IEM_MC_BEGIN(3, 1); 6947 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 6948 IEM_MC_ARG(uint16_t, u16Src, 1); 6949 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 6950 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6951 6952 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 6953 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit); 6954 IEM_MC_ASSIGN(u16Src, u8Bit & 0x0f); 6955 if (pImpl->pfnLockedU16) 6956 IEMOP_HLP_DONE_DECODING(); 6957 else 6958 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6959 IEM_MC_FETCH_EFLAGS(EFlags); 6960 IEM_MC_MEM_MAP(pu16Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6961 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6962 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU16, pu16Dst, u16Src, pEFlags); 6963 else 6964 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU16, pu16Dst, u16Src, pEFlags); 6965 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, fAccess); 6966 6967 IEM_MC_COMMIT_EFLAGS(EFlags); 6968 IEM_MC_ADVANCE_RIP(); 6969 IEM_MC_END(); 6970 return VINF_SUCCESS; 6971 6972 case IEMMODE_32BIT: 6973 IEM_MC_BEGIN(3, 1); 6974 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 6975 IEM_MC_ARG(uint32_t, u32Src, 1); 6976 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 6977 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 6978 6979 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 6980 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit); 6981 IEM_MC_ASSIGN(u32Src, u8Bit & 0x1f); 6982 if (pImpl->pfnLockedU16) 6983 IEMOP_HLP_DONE_DECODING(); 6984 else 6985 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 6986 IEM_MC_FETCH_EFLAGS(EFlags); 6987 IEM_MC_MEM_MAP(pu32Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 6988 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 6989 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU32, pu32Dst, u32Src, pEFlags); 6990 else 6991 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU32, pu32Dst, u32Src, pEFlags); 6992 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, fAccess); 6993 6994 IEM_MC_COMMIT_EFLAGS(EFlags); 6995 IEM_MC_ADVANCE_RIP(); 6996 IEM_MC_END(); 6997 return VINF_SUCCESS; 6998 6999 case IEMMODE_64BIT: 7000 IEM_MC_BEGIN(3, 1); 7001 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 7002 IEM_MC_ARG(uint64_t, u64Src, 1); 7003 IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2); 7004 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7005 7006 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1); 7007 uint8_t u8Bit; IEM_OPCODE_GET_NEXT_U8(&u8Bit); 7008 IEM_MC_ASSIGN(u64Src, u8Bit & 0x3f); 7009 if (pImpl->pfnLockedU16) 7010 IEMOP_HLP_DONE_DECODING(); 7011 else 7012 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7013 IEM_MC_FETCH_EFLAGS(EFlags); 7014 IEM_MC_MEM_MAP(pu64Dst, fAccess, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0); 7015 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7016 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnNormalU64, pu64Dst, u64Src, pEFlags); 7017 else 7018 IEM_MC_CALL_VOID_AIMPL_3(pImpl->pfnLockedU64, pu64Dst, u64Src, pEFlags); 7019 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, fAccess); 7020 7021 IEM_MC_COMMIT_EFLAGS(EFlags); 7022 IEM_MC_ADVANCE_RIP(); 7023 IEM_MC_END(); 7024 return VINF_SUCCESS; 7025 7026 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7027 } 7028 } 7029 } 7030 7031 7032 /** Opcode 0x0f 0xbb. */ 7033 FNIEMOP_DEF(iemOp_btc_Ev_Gv) 7034 { 7035 IEMOP_MNEMONIC(btc_Ev_Gv, "btc Ev,Gv"); 7036 IEMOP_HLP_MIN_386(); 7037 return FNIEMOP_CALL_1(iemOpCommonBit_Ev_Gv, &g_iemAImpl_btc); 7038 } 7039 7040 7041 /** Opcode 0x0f 0xbc. 
*/ 7042 FNIEMOP_DEF(iemOp_bsf_Gv_Ev) 7043 { 7044 IEMOP_MNEMONIC(bsf_Gv_Ev, "bsf Gv,Ev"); 7045 IEMOP_HLP_MIN_386(); 7046 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); 7047 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsf); 7048 } 7049 7050 7051 /** Opcode 0xf3 0x0f 0xbc - TZCNT Gv, Ev */ 7052 FNIEMOP_STUB(iemOp_tzcnt_Gv_Ev); 7053 7054 7055 /** Opcode 0x0f 0xbd. */ 7056 FNIEMOP_DEF(iemOp_bsr_Gv_Ev) 7057 { 7058 IEMOP_MNEMONIC(bsr_Gv_Ev, "bsr Gv,Ev"); 7059 IEMOP_HLP_MIN_386(); 7060 IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_OF | X86_EFL_SF | X86_EFL_AF | X86_EFL_PF | X86_EFL_CF); 7061 return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rv_rm, &g_iemAImpl_bsr); 7062 } 7063 7064 7065 /** Opcode 0xf3 0x0f 0xbd - LZCNT Gv, Ev */ 7066 FNIEMOP_STUB(iemOp_lzcnt_Gv_Ev); 7067 7068 7069 /** Opcode 0x0f 0xbe. */ 7070 FNIEMOP_DEF(iemOp_movsx_Gv_Eb) 7071 { 7072 IEMOP_MNEMONIC(movsx_Gv_Eb, "movsx Gv,Eb"); 7073 IEMOP_HLP_MIN_386(); 7074 7075 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7076 7077 /* 7078 * If rm is denoting a register, no more instruction bytes. 7079 */ 7080 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7081 { 7082 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7083 switch (pVCpu->iem.s.enmEffOpSize) 7084 { 7085 case IEMMODE_16BIT: 7086 IEM_MC_BEGIN(0, 1); 7087 IEM_MC_LOCAL(uint16_t, u16Value); 7088 IEM_MC_FETCH_GREG_U8_SX_U16(u16Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7089 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value); 7090 IEM_MC_ADVANCE_RIP(); 7091 IEM_MC_END(); 7092 return VINF_SUCCESS; 7093 7094 case IEMMODE_32BIT: 7095 IEM_MC_BEGIN(0, 1); 7096 IEM_MC_LOCAL(uint32_t, u32Value); 7097 IEM_MC_FETCH_GREG_U8_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7098 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 7099 IEM_MC_ADVANCE_RIP(); 7100 IEM_MC_END(); 7101 return VINF_SUCCESS; 7102 7103 case IEMMODE_64BIT: 7104 IEM_MC_BEGIN(0, 1); 7105 IEM_MC_LOCAL(uint64_t, u64Value); 7106 IEM_MC_FETCH_GREG_U8_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7107 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 7108 IEM_MC_ADVANCE_RIP(); 7109 IEM_MC_END(); 7110 return VINF_SUCCESS; 7111 7112 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7113 } 7114 } 7115 else 7116 { 7117 /* 7118 * We're loading a register from memory. 
7119 */ 7120 switch (pVCpu->iem.s.enmEffOpSize) 7121 { 7122 case IEMMODE_16BIT: 7123 IEM_MC_BEGIN(0, 2); 7124 IEM_MC_LOCAL(uint16_t, u16Value); 7125 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7126 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7127 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7128 IEM_MC_FETCH_MEM_U8_SX_U16(u16Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 7129 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16Value); 7130 IEM_MC_ADVANCE_RIP(); 7131 IEM_MC_END(); 7132 return VINF_SUCCESS; 7133 7134 case IEMMODE_32BIT: 7135 IEM_MC_BEGIN(0, 2); 7136 IEM_MC_LOCAL(uint32_t, u32Value); 7137 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7138 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7139 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7140 IEM_MC_FETCH_MEM_U8_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 7141 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 7142 IEM_MC_ADVANCE_RIP(); 7143 IEM_MC_END(); 7144 return VINF_SUCCESS; 7145 7146 case IEMMODE_64BIT: 7147 IEM_MC_BEGIN(0, 2); 7148 IEM_MC_LOCAL(uint64_t, u64Value); 7149 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7150 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7151 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7152 IEM_MC_FETCH_MEM_U8_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 7153 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 7154 IEM_MC_ADVANCE_RIP(); 7155 IEM_MC_END(); 7156 return VINF_SUCCESS; 7157 7158 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7159 } 7160 } 7161 } 7162 7163 7164 /** Opcode 0x0f 0xbf. */ 7165 FNIEMOP_DEF(iemOp_movsx_Gv_Ew) 7166 { 7167 IEMOP_MNEMONIC(movsx_Gv_Ew, "movsx Gv,Ew"); 7168 IEMOP_HLP_MIN_386(); 7169 7170 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7171 7172 /** @todo Not entirely sure how the operand size prefix is handled here, 7173 * assuming that it will be ignored. Would be nice to have a few 7174 * tests for this. */ 7175 /* 7176 * If rm is denoting a register, no more instruction bytes. 7177 */ 7178 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7179 { 7180 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7181 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT) 7182 { 7183 IEM_MC_BEGIN(0, 1); 7184 IEM_MC_LOCAL(uint32_t, u32Value); 7185 IEM_MC_FETCH_GREG_U16_SX_U32(u32Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7186 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 7187 IEM_MC_ADVANCE_RIP(); 7188 IEM_MC_END(); 7189 } 7190 else 7191 { 7192 IEM_MC_BEGIN(0, 1); 7193 IEM_MC_LOCAL(uint64_t, u64Value); 7194 IEM_MC_FETCH_GREG_U16_SX_U64(u64Value, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7195 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 7196 IEM_MC_ADVANCE_RIP(); 7197 IEM_MC_END(); 7198 } 7199 } 7200 else 7201 { 7202 /* 7203 * We're loading a register from memory.
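* (Again a hedged illustration of the sign-extension assumed, not of the MC macros: fetching the word 0x8000 yields u32Value = 0xFFFF8000, i.e. int32_t i32Value = (int16_t)0x8000; while 0x7fff stays 0x00007fff.)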
7204 */ 7205 if (pVCpu->iem.s.enmEffOpSize != IEMMODE_64BIT) 7206 { 7207 IEM_MC_BEGIN(0, 2); 7208 IEM_MC_LOCAL(uint32_t, u32Value); 7209 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7210 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7211 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7212 IEM_MC_FETCH_MEM_U16_SX_U32(u32Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 7213 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32Value); 7214 IEM_MC_ADVANCE_RIP(); 7215 IEM_MC_END(); 7216 } 7217 else 7218 { 7219 IEM_MC_BEGIN(0, 2); 7220 IEM_MC_LOCAL(uint64_t, u64Value); 7221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7222 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7223 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7224 IEM_MC_FETCH_MEM_U16_SX_U64(u64Value, pVCpu->iem.s.iEffSeg, GCPtrEffDst); 7225 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64Value); 7226 IEM_MC_ADVANCE_RIP(); 7227 IEM_MC_END(); 7228 } 7229 } 7230 return VINF_SUCCESS; 7231 } 7232 7233 7234 /** Opcode 0x0f 0xc0. */ 7235 FNIEMOP_DEF(iemOp_xadd_Eb_Gb) 7236 { 7237 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7238 IEMOP_HLP_MIN_486(); 7239 IEMOP_MNEMONIC(xadd_Eb_Gb, "xadd Eb,Gb"); 7240 7241 /* 7242 * If rm is denoting a register, no more instruction bytes. 7243 */ 7244 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7245 { 7246 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7247 7248 IEM_MC_BEGIN(3, 0); 7249 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 7250 IEM_MC_ARG(uint8_t *, pu8Reg, 1); 7251 IEM_MC_ARG(uint32_t *, pEFlags, 2); 7252 7253 IEM_MC_REF_GREG_U8(pu8Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7254 IEM_MC_REF_GREG_U8(pu8Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7255 IEM_MC_REF_EFLAGS(pEFlags); 7256 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags); 7257 7258 IEM_MC_ADVANCE_RIP(); 7259 IEM_MC_END(); 7260 } 7261 else 7262 { 7263 /* 7264 * We're accessing memory. 7265 */ 7266 IEM_MC_BEGIN(3, 3); 7267 IEM_MC_ARG(uint8_t *, pu8Dst, 0); 7268 IEM_MC_ARG(uint8_t *, pu8Reg, 1); 7269 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); 7270 IEM_MC_LOCAL(uint8_t, u8RegCopy); 7271 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7272 7273 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7274 IEM_MC_MEM_MAP(pu8Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7275 IEM_MC_FETCH_GREG_U8(u8RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7276 IEM_MC_REF_LOCAL(pu8Reg, u8RegCopy); 7277 IEM_MC_FETCH_EFLAGS(EFlags); 7278 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7279 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8, pu8Dst, pu8Reg, pEFlags); 7280 else 7281 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u8_locked, pu8Dst, pu8Reg, pEFlags); 7282 7283 IEM_MC_MEM_COMMIT_AND_UNMAP(pu8Dst, IEM_ACCESS_DATA_RW); 7284 IEM_MC_COMMIT_EFLAGS(EFlags); 7285 IEM_MC_STORE_GREG_U8(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u8RegCopy); 7286 IEM_MC_ADVANCE_RIP(); 7287 IEM_MC_END(); 7288 return VINF_SUCCESS; 7289 } 7290 return VINF_SUCCESS; 7291 } 7292 7293 7294 /** Opcode 0x0f 0xc1. */ 7295 FNIEMOP_DEF(iemOp_xadd_Ev_Gv) 7296 { 7297 IEMOP_MNEMONIC(xadd_Ev_Gv, "xadd Ev,Gv"); 7298 IEMOP_HLP_MIN_486(); 7299 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7300 7301 /* 7302 * If rm is denoting a register, no more instruction bytes. 
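* (XADD in essence, as a sketch rather than the MC code that follows: uTmp = uDst; uDst = uDst + uReg; uReg = uTmp; with EFLAGS set as for ADD - the exchange half is why the memory form further down keeps a copy of the register in a local before committing.)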
7303 */ 7304 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7305 { 7306 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7307 7308 switch (pVCpu->iem.s.enmEffOpSize) 7309 { 7310 case IEMMODE_16BIT: 7311 IEM_MC_BEGIN(3, 0); 7312 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 7313 IEM_MC_ARG(uint16_t *, pu16Reg, 1); 7314 IEM_MC_ARG(uint32_t *, pEFlags, 2); 7315 7316 IEM_MC_REF_GREG_U16(pu16Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7317 IEM_MC_REF_GREG_U16(pu16Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7318 IEM_MC_REF_EFLAGS(pEFlags); 7319 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags); 7320 7321 IEM_MC_ADVANCE_RIP(); 7322 IEM_MC_END(); 7323 return VINF_SUCCESS; 7324 7325 case IEMMODE_32BIT: 7326 IEM_MC_BEGIN(3, 0); 7327 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 7328 IEM_MC_ARG(uint32_t *, pu32Reg, 1); 7329 IEM_MC_ARG(uint32_t *, pEFlags, 2); 7330 7331 IEM_MC_REF_GREG_U32(pu32Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7332 IEM_MC_REF_GREG_U32(pu32Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7333 IEM_MC_REF_EFLAGS(pEFlags); 7334 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags); 7335 7336 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 7337 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Reg); 7338 IEM_MC_ADVANCE_RIP(); 7339 IEM_MC_END(); 7340 return VINF_SUCCESS; 7341 7342 case IEMMODE_64BIT: 7343 IEM_MC_BEGIN(3, 0); 7344 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 7345 IEM_MC_ARG(uint64_t *, pu64Reg, 1); 7346 IEM_MC_ARG(uint32_t *, pEFlags, 2); 7347 7348 IEM_MC_REF_GREG_U64(pu64Dst, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7349 IEM_MC_REF_GREG_U64(pu64Reg, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7350 IEM_MC_REF_EFLAGS(pEFlags); 7351 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags); 7352 7353 IEM_MC_ADVANCE_RIP(); 7354 IEM_MC_END(); 7355 return VINF_SUCCESS; 7356 7357 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7358 } 7359 } 7360 else 7361 { 7362 /* 7363 * We're accessing memory. 
7364 */ 7365 switch (pVCpu->iem.s.enmEffOpSize) 7366 { 7367 case IEMMODE_16BIT: 7368 IEM_MC_BEGIN(3, 3); 7369 IEM_MC_ARG(uint16_t *, pu16Dst, 0); 7370 IEM_MC_ARG(uint16_t *, pu16Reg, 1); 7371 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); 7372 IEM_MC_LOCAL(uint16_t, u16RegCopy); 7373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7374 7375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7376 IEM_MC_MEM_MAP(pu16Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7377 IEM_MC_FETCH_GREG_U16(u16RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7378 IEM_MC_REF_LOCAL(pu16Reg, u16RegCopy); 7379 IEM_MC_FETCH_EFLAGS(EFlags); 7380 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7381 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16, pu16Dst, pu16Reg, pEFlags); 7382 else 7383 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u16_locked, pu16Dst, pu16Reg, pEFlags); 7384 7385 IEM_MC_MEM_COMMIT_AND_UNMAP(pu16Dst, IEM_ACCESS_DATA_RW); 7386 IEM_MC_COMMIT_EFLAGS(EFlags); 7387 IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u16RegCopy); 7388 IEM_MC_ADVANCE_RIP(); 7389 IEM_MC_END(); 7390 return VINF_SUCCESS; 7391 7392 case IEMMODE_32BIT: 7393 IEM_MC_BEGIN(3, 3); 7394 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 7395 IEM_MC_ARG(uint32_t *, pu32Reg, 1); 7396 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); 7397 IEM_MC_LOCAL(uint32_t, u32RegCopy); 7398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7399 7400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7401 IEM_MC_MEM_MAP(pu32Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7402 IEM_MC_FETCH_GREG_U32(u32RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7403 IEM_MC_REF_LOCAL(pu32Reg, u32RegCopy); 7404 IEM_MC_FETCH_EFLAGS(EFlags); 7405 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7406 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32, pu32Dst, pu32Reg, pEFlags); 7407 else 7408 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u32_locked, pu32Dst, pu32Reg, pEFlags); 7409 7410 IEM_MC_MEM_COMMIT_AND_UNMAP(pu32Dst, IEM_ACCESS_DATA_RW); 7411 IEM_MC_COMMIT_EFLAGS(EFlags); 7412 IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u32RegCopy); 7413 IEM_MC_ADVANCE_RIP(); 7414 IEM_MC_END(); 7415 return VINF_SUCCESS; 7416 7417 case IEMMODE_64BIT: 7418 IEM_MC_BEGIN(3, 3); 7419 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 7420 IEM_MC_ARG(uint64_t *, pu64Reg, 1); 7421 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 2); 7422 IEM_MC_LOCAL(uint64_t, u64RegCopy); 7423 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7424 7425 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7426 IEM_MC_MEM_MAP(pu64Dst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7427 IEM_MC_FETCH_GREG_U64(u64RegCopy, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7428 IEM_MC_REF_LOCAL(pu64Reg, u64RegCopy); 7429 IEM_MC_FETCH_EFLAGS(EFlags); 7430 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7431 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64, pu64Dst, pu64Reg, pEFlags); 7432 else 7433 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_xadd_u64_locked, pu64Dst, pu64Reg, pEFlags); 7434 7435 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64Dst, IEM_ACCESS_DATA_RW); 7436 IEM_MC_COMMIT_EFLAGS(EFlags); 7437 IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg, u64RegCopy); 7438 IEM_MC_ADVANCE_RIP(); 7439 IEM_MC_END(); 7440 return VINF_SUCCESS; 7441 7442 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7443 } 7444 } 7445 } 7446 7447 7448 /** Opcode 0x0f 0xc2 - 
vcmpps Vps,Hps,Wps,Ib */ 7449 FNIEMOP_STUB(iemOp_vcmpps_Vps_Hps_Wps_Ib); 7450 /** Opcode 0x66 0x0f 0xc2 - vcmppd Vpd,Hpd,Wpd,Ib */ 7451 FNIEMOP_STUB(iemOp_vcmppd_Vpd_Hpd_Wpd_Ib); 7452 /** Opcode 0xf3 0x0f 0xc2 - vcmpss Vss,Hss,Wss,Ib */ 7453 FNIEMOP_STUB(iemOp_vcmpss_Vss_Hss_Wss_Ib); 7454 /** Opcode 0xf2 0x0f 0xc2 - vcmpsd Vsd,Hsd,Wsd,Ib */ 7455 FNIEMOP_STUB(iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib); 7456 7457 7458 /** Opcode 0x0f 0xc3. */ 7459 FNIEMOP_DEF(iemOp_movnti_My_Gy) 7460 { 7461 IEMOP_MNEMONIC(movnti_My_Gy, "movnti My,Gy"); 7462 7463 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7464 7465 /* Only the register -> memory form makes sense, assuming #UD for the other form. */ 7466 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 7467 { 7468 switch (pVCpu->iem.s.enmEffOpSize) 7469 { 7470 case IEMMODE_32BIT: 7471 IEM_MC_BEGIN(0, 2); 7472 IEM_MC_LOCAL(uint32_t, u32Value); 7473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7474 7475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7476 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7477 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2) 7478 return IEMOP_RAISE_INVALID_OPCODE(); 7479 7480 IEM_MC_FETCH_GREG_U32(u32Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7481 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u32Value); 7482 IEM_MC_ADVANCE_RIP(); 7483 IEM_MC_END(); 7484 break; 7485 7486 case IEMMODE_64BIT: 7487 IEM_MC_BEGIN(0, 2); 7488 IEM_MC_LOCAL(uint64_t, u64Value); 7489 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7490 7491 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7492 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7493 if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse2) 7494 return IEMOP_RAISE_INVALID_OPCODE(); 7495 7496 IEM_MC_FETCH_GREG_U64(u64Value, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7497 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffDst, u64Value); 7498 IEM_MC_ADVANCE_RIP(); 7499 IEM_MC_END(); 7500 break; 7501 7502 case IEMMODE_16BIT: 7503 /** @todo check this form. */ 7504 return IEMOP_RAISE_INVALID_OPCODE(); 7505 } 7506 } 7507 else 7508 return IEMOP_RAISE_INVALID_OPCODE(); 7509 return VINF_SUCCESS; 7510 } 7511 /* Opcode 0x66 0x0f 0xc3 - invalid */ 7512 /* Opcode 0xf3 0x0f 0xc3 - invalid */ 7513 /* Opcode 0xf2 0x0f 0xc3 - invalid */ 7514 7515 /** Opcode 0x0f 0xc4 - pinsrw Pq,Ry/Mw,Ib */ 7516 FNIEMOP_STUB(iemOp_pinsrw_Pq_RyMw_Ib); 7517 /** Opcode 0x66 0x0f 0xc4 - vpinsrw Vdq,Hdq,Ry/Mw,Ib */ 7518 FNIEMOP_STUB(iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib); 7519 /* Opcode 0xf3 0x0f 0xc4 - invalid */ 7520 /* Opcode 0xf2 0x0f 0xc4 - invalid */ 7521 7522 /** Opcode 0x0f 0xc5 - pextrw Gd, Nq, Ib */ 7523 FNIEMOP_STUB(iemOp_pextrw_Gd_Nq_Ib); 7524 /** Opcode 0x66 0x0f 0xc5 - vpextrw Gd, Udq, Ib */ 7525 FNIEMOP_STUB(iemOp_vpextrw_Gd_Udq_Ib); 7526 /* Opcode 0xf3 0x0f 0xc5 - invalid */ 7527 /* Opcode 0xf2 0x0f 0xc5 - invalid */ 7528 7529 /** Opcode 0x0f 0xc6 - vshufps Vps,Hps,Wps,Ib */ 7530 FNIEMOP_STUB(iemOp_vshufps_Vps_Hps_Wps_Ib); 7531 /** Opcode 0x66 0x0f 0xc6 - vshufpd Vpd,Hpd,Wpd,Ib */ 7532 FNIEMOP_STUB(iemOp_vshufpd_Vpd_Hpd_Wpd_Ib); 7533 /* Opcode 0xf3 0x0f 0xc6 - invalid */ 7534 /* Opcode 0xf2 0x0f 0xc6 - invalid */ 7535 7536 7537 /** Opcode 0x0f 0xc7 !11/1. 
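* In essence, as a sketch of the SDM behaviour rather than the MC code below: if (u64Mem == EDX:EAX) { u64Mem = ECX:EBX; ZF = 1; } else { EDX:EAX = u64Mem; ZF = 0; } - always decoded as a read-modify-write access, locked when a LOCK prefix is present.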
*/ 7538 FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8b_Mq, uint8_t, bRm) 7539 { 7540 IEMOP_MNEMONIC(cmpxchg8b, "cmpxchg8b Mq"); 7541 7542 IEM_MC_BEGIN(4, 3); 7543 IEM_MC_ARG(uint64_t *, pu64MemDst, 0); 7544 IEM_MC_ARG(PRTUINT64U, pu64EaxEdx, 1); 7545 IEM_MC_ARG(PRTUINT64U, pu64EbxEcx, 2); 7546 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); 7547 IEM_MC_LOCAL(RTUINT64U, u64EaxEdx); 7548 IEM_MC_LOCAL(RTUINT64U, u64EbxEcx); 7549 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7550 7551 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7552 IEMOP_HLP_DONE_DECODING(); 7553 IEM_MC_MEM_MAP(pu64MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7554 7555 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX); 7556 IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX); 7557 IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx); 7558 7559 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX); 7560 IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX); 7561 IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx); 7562 7563 IEM_MC_FETCH_EFLAGS(EFlags); 7564 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7565 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); 7566 else 7567 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg8b_locked, pu64MemDst, pu64EaxEdx, pu64EbxEcx, pEFlags); 7568 7569 IEM_MC_MEM_COMMIT_AND_UNMAP(pu64MemDst, IEM_ACCESS_DATA_RW); 7570 IEM_MC_COMMIT_EFLAGS(EFlags); 7571 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) 7572 /** @todo Testcase: Check effect of cmpxchg8b on bits 63:32 in rax and rdx. */ 7573 IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo); 7574 IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi); 7575 IEM_MC_ENDIF(); 7576 IEM_MC_ADVANCE_RIP(); 7577 7578 IEM_MC_END(); 7579 return VINF_SUCCESS; 7580 } 7581 7582 7583 /** Opcode REX.W 0x0f 0xc7 !11/1. */ 7584 FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg16b_Mdq, uint8_t, bRm) 7585 { 7586 IEMOP_MNEMONIC(cmpxchg16b, "cmpxchg16b Mdq"); 7587 if (IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b) 7588 { 7589 #if 0 7590 RT_NOREF(bRm); 7591 IEMOP_BITCH_ABOUT_STUB(); 7592 return VERR_IEM_INSTR_NOT_IMPLEMENTED; 7593 #else 7594 IEM_MC_BEGIN(4, 3); 7595 IEM_MC_ARG(PRTUINT128U, pu128MemDst, 0); 7596 IEM_MC_ARG(PRTUINT128U, pu128RaxRdx, 1); 7597 IEM_MC_ARG(PRTUINT128U, pu128RbxRcx, 2); 7598 IEM_MC_ARG_LOCAL_EFLAGS(pEFlags, EFlags, 3); 7599 IEM_MC_LOCAL(RTUINT128U, u128RaxRdx); 7600 IEM_MC_LOCAL(RTUINT128U, u128RbxRcx); 7601 IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst); 7602 7603 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 0); 7604 IEMOP_HLP_DONE_DECODING(); 7605 IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED(GCPtrEffDst, 16); 7606 IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); 7607 7608 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX); 7609 IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX); 7610 IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx); 7611 7612 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX); 7613 IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX); 7614 IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx); 7615 7616 IEM_MC_FETCH_EFLAGS(EFlags); 7617 # ifdef RT_ARCH_AMD64 7618 if (IEM_GET_HOST_CPU_FEATURES(pVCpu)->fMovCmpXchg16b) 7619 { 7620 if (!(pVCpu->iem.s.fPrefixes & IEM_OP_PRF_LOCK)) 7621 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags); 7622 else 7623 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_locked, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags); 7624 } 7625 else 7626 # endif 7627 { 7628 /* Note! 
The fallback for 32-bit systems and systems without CX16 is multiple 7629 accesses and not at all atomic, which works fine in a UNI CPU guest 7630 configuration (ignoring DMA). If guest SMP is active we have no choice 7631 but to use a rendezvous callback here. Sigh. */ 7632 if (pVCpu->CTX_SUFF(pVM)->cCpus == 1) 7633 IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_cmpxchg16b_fallback, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags); 7634 else 7635 { 7636 IEM_MC_CALL_CIMPL_4(iemCImpl_cmpxchg16b_fallback_rendezvous, pu128MemDst, pu128RaxRdx, pu128RbxRcx, pEFlags); 7637 /* Does not get here, tail code is duplicated in iemCImpl_cmpxchg16b_fallback_rendezvous. */ 7638 } 7639 } 7640 7641 IEM_MC_MEM_COMMIT_AND_UNMAP(pu128MemDst, IEM_ACCESS_DATA_RW); 7642 IEM_MC_COMMIT_EFLAGS(EFlags); 7643 IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) 7644 IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo); 7645 IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi); 7646 IEM_MC_ENDIF(); 7647 IEM_MC_ADVANCE_RIP(); 7648 7649 IEM_MC_END(); 7650 return VINF_SUCCESS; 7651 #endif 7652 } 7653 Log(("cmpxchg16b -> #UD\n")); 7654 return IEMOP_RAISE_INVALID_OPCODE(); 7655 } 7656 7657 FNIEMOP_DEF_1(iemOp_Grp9_cmpxchg8bOr16b, uint8_t, bRm) 7658 { 7659 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 7660 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg16b_Mdq, bRm); 7661 return FNIEMOP_CALL_1(iemOp_Grp9_cmpxchg8b_Mq, bRm); 7662 } 7663 7664 /** Opcode 0x0f 0xc7 11/6. */ 7665 FNIEMOP_UD_STUB_1(iemOp_Grp9_rdrand_Rv, uint8_t, bRm); 7666 7667 /** Opcode 0x0f 0xc7 !11/6. */ 7668 FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrld_Mq, uint8_t, bRm); 7669 7670 /** Opcode 0x66 0x0f 0xc7 !11/6. */ 7671 FNIEMOP_UD_STUB_1(iemOp_Grp9_vmclear_Mq, uint8_t, bRm); 7672 7673 /** Opcode 0xf3 0x0f 0xc7 !11/6. */ 7674 FNIEMOP_UD_STUB_1(iemOp_Grp9_vmxon_Mq, uint8_t, bRm); 7675 7676 /** Opcode [0xf3] 0x0f 0xc7 !11/7. */ 7677 FNIEMOP_UD_STUB_1(iemOp_Grp9_vmptrst_Mq, uint8_t, bRm); 7678 7679 /** Opcode 0x0f 0xc7 11/7. */ 7680 FNIEMOP_UD_STUB_1(iemOp_Grp9_rdseed_Rv, uint8_t, bRm); 7681 7682 7683 /** 7684 * Group 9 jump table for register variant. 7685 */ 7686 IEM_STATIC const PFNIEMOPRM g_apfnGroup9RegReg[] = 7687 { /* pfx: none, 066h, 0f3h, 0f2h */ 7688 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM), 7689 /* /1 */ IEMOP_X4(iemOp_InvalidWithRM), 7690 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM), 7691 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM), 7692 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM), 7693 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM), 7694 /* /6 */ iemOp_Grp9_rdrand_Rv, iemOp_Grp9_rdrand_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 7695 /* /7 */ iemOp_Grp9_rdseed_Rv, iemOp_Grp9_rdseed_Rv, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 7696 }; 7697 AssertCompile(RT_ELEMENTS(g_apfnGroup9RegReg) == 8*4); 7698 7699 7700 /** 7701 * Group 9 jump table for memory variant.
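* Indexed like the register table above, modrm reg field times four plus the prefix index, i.e. roughly: pfn = g_apfnGroup9MemReg[((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 + pVCpu->iem.s.idxPrefix]; (a sketch, but it matches what iemOp_Grp9 below does).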
*/ 7703 IEM_STATIC const PFNIEMOPRM g_apfnGroup9MemReg[] = 7704 { /* pfx: none, 066h, 0f3h, 0f2h */ 7705 /* /0 */ IEMOP_X4(iemOp_InvalidWithRM), 7706 /* /1 */ iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, iemOp_Grp9_cmpxchg8bOr16b, /* see bs3-cpu-decoding-1 */ 7707 /* /2 */ IEMOP_X4(iemOp_InvalidWithRM), 7708 /* /3 */ IEMOP_X4(iemOp_InvalidWithRM), 7709 /* /4 */ IEMOP_X4(iemOp_InvalidWithRM), 7710 /* /5 */ IEMOP_X4(iemOp_InvalidWithRM), 7711 /* /6 */ iemOp_Grp9_vmptrld_Mq, iemOp_Grp9_vmclear_Mq, iemOp_Grp9_vmxon_Mq, iemOp_InvalidWithRM, 7712 /* /7 */ iemOp_Grp9_vmptrst_Mq, iemOp_InvalidWithRM, iemOp_InvalidWithRM, iemOp_InvalidWithRM, 7713 }; 7714 AssertCompile(RT_ELEMENTS(g_apfnGroup9MemReg) == 8*4); 7715 7716 7717 /** Opcode 0x0f 0xc7. */ 7718 FNIEMOP_DEF(iemOp_Grp9) 7719 { 7720 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7721 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7722 /* register, register */ 7723 return FNIEMOP_CALL_1(g_apfnGroup9RegReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 7724 + pVCpu->iem.s.idxPrefix], bRm); 7725 /* memory, register */ 7726 return FNIEMOP_CALL_1(g_apfnGroup9MemReg[ ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) * 4 7727 + pVCpu->iem.s.idxPrefix], bRm); 7728 } 7729 7730 7731 /** 7732 * Common 'bswap register' helper. 7733 */ 7734 FNIEMOP_DEF_1(iemOpCommonBswapGReg, uint8_t, iReg) 7735 { 7736 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7737 switch (pVCpu->iem.s.enmEffOpSize) 7738 { 7739 case IEMMODE_16BIT: 7740 IEM_MC_BEGIN(1, 0); 7741 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 7742 IEM_MC_REF_GREG_U32(pu32Dst, iReg); /* Don't clear the high dword! */ 7743 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u16, pu32Dst); 7744 IEM_MC_ADVANCE_RIP(); 7745 IEM_MC_END(); 7746 return VINF_SUCCESS; 7747 7748 case IEMMODE_32BIT: 7749 IEM_MC_BEGIN(1, 0); 7750 IEM_MC_ARG(uint32_t *, pu32Dst, 0); 7751 IEM_MC_REF_GREG_U32(pu32Dst, iReg); 7752 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pu32Dst); 7753 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u32, pu32Dst); 7754 IEM_MC_ADVANCE_RIP(); 7755 IEM_MC_END(); 7756 return VINF_SUCCESS; 7757 7758 case IEMMODE_64BIT: 7759 IEM_MC_BEGIN(1, 0); 7760 IEM_MC_ARG(uint64_t *, pu64Dst, 0); 7761 IEM_MC_REF_GREG_U64(pu64Dst, iReg); 7762 IEM_MC_CALL_VOID_AIMPL_1(iemAImpl_bswap_u64, pu64Dst); 7763 IEM_MC_ADVANCE_RIP(); 7764 IEM_MC_END(); 7765 return VINF_SUCCESS; 7766 7767 IEM_NOT_REACHED_DEFAULT_CASE_RET(); 7768 } 7769 } 7770 7771 7772 /** Opcode 0x0f 0xc8. */ 7773 FNIEMOP_DEF(iemOp_bswap_rAX_r8) 7774 { 7775 IEMOP_MNEMONIC(bswap_rAX_r8, "bswap rAX/r8"); 7776 /* Note! Intel manuals state that R8-R15 can be accessed by using a REX.X 7777 prefix. REX.B is the correct prefix it appears. For a parallel 7778 case, see iemOp_mov_AL_Ib and iemOp_mov_eAX_Iv. */ 7779 IEMOP_HLP_MIN_486(); 7780 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xAX | pVCpu->iem.s.uRexB); 7781 } 7782 7783 7784 /** Opcode 0x0f 0xc9. */ 7785 FNIEMOP_DEF(iemOp_bswap_rCX_r9) 7786 { 7787 IEMOP_MNEMONIC(bswap_rCX_r9, "bswap rCX/r9"); 7788 IEMOP_HLP_MIN_486(); 7789 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xCX | pVCpu->iem.s.uRexB); 7790 } 7791 7792 7793 /** Opcode 0x0f 0xca. */ 7794 FNIEMOP_DEF(iemOp_bswap_rDX_r10) 7795 { 7796 IEMOP_MNEMONIC(bswap_rDX_r10, "bswap rDX/r10"); 7797 IEMOP_HLP_MIN_486(); 7798 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDX | pVCpu->iem.s.uRexB); 7799 } 7800 7801 7802 /** Opcode 0x0f 0xcb.
*/ 7803 FNIEMOP_DEF(iemOp_bswap_rBX_r11) 7804 { 7805 IEMOP_MNEMONIC(bswap_rBX_r11, "bswap rBX/r11"); 7806 IEMOP_HLP_MIN_486(); 7807 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBX | pVCpu->iem.s.uRexB); 7808 } 7809 7810 7811 /** Opcode 0x0f 0xcc. */ 7812 FNIEMOP_DEF(iemOp_bswap_rSP_r12) 7813 { 7814 IEMOP_MNEMONIC(bswap_rSP_r12, "bswap rSP/r12"); 7815 IEMOP_HLP_MIN_486(); 7816 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSP | pVCpu->iem.s.uRexB); 7817 } 7818 7819 7820 /** Opcode 0x0f 0xcd. */ 7821 FNIEMOP_DEF(iemOp_bswap_rBP_r13) 7822 { 7823 IEMOP_MNEMONIC(bswap_rBP_r13, "bswap rBP/r13"); 7824 IEMOP_HLP_MIN_486(); 7825 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xBP | pVCpu->iem.s.uRexB); 7826 } 7827 7828 7829 /** Opcode 0x0f 0xce. */ 7830 FNIEMOP_DEF(iemOp_bswap_rSI_r14) 7831 { 7832 IEMOP_MNEMONIC(bswap_rSI_r14, "bswap rSI/r14"); 7833 IEMOP_HLP_MIN_486(); 7834 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xSI | pVCpu->iem.s.uRexB); 7835 } 7836 7837 7838 /** Opcode 0x0f 0xcf. */ 7839 FNIEMOP_DEF(iemOp_bswap_rDI_r15) 7840 { 7841 IEMOP_MNEMONIC(bswap_rDI_r15, "bswap rDI/r15"); 7842 IEMOP_HLP_MIN_486(); 7843 return FNIEMOP_CALL_1(iemOpCommonBswapGReg, X86_GREG_xDI | pVCpu->iem.s.uRexB); 7844 } 7845 7846 7847 /* Opcode 0x0f 0xd0 - invalid */ 7848 /** Opcode 0x66 0x0f 0xd0 - vaddsubpd Vpd, Hpd, Wpd */ 7849 FNIEMOP_STUB(iemOp_vaddsubpd_Vpd_Hpd_Wpd); 7850 /* Opcode 0xf3 0x0f 0xd0 - invalid */ 7851 /** Opcode 0xf2 0x0f 0xd0 - vaddsubps Vps, Hps, Wps */ 7852 FNIEMOP_STUB(iemOp_vaddsubps_Vps_Hps_Wps); 7853 7854 /** Opcode 0x0f 0xd1 - psrlw Pq, Qq */ 7855 FNIEMOP_STUB(iemOp_psrlw_Pq_Qq); 7856 /** Opcode 0x66 0x0f 0xd1 - vpsrlw Vx, Hx, W */ 7857 FNIEMOP_STUB(iemOp_vpsrlw_Vx_Hx_W); 7858 /* Opcode 0xf3 0x0f 0xd1 - invalid */ 7859 /* Opcode 0xf2 0x0f 0xd1 - invalid */ 7860 7861 /** Opcode 0x0f 0xd2 - psrld Pq, Qq */ 7862 FNIEMOP_STUB(iemOp_psrld_Pq_Qq); 7863 /** Opcode 0x66 0x0f 0xd2 - vpsrld Vx, Hx, Wx */ 7864 FNIEMOP_STUB(iemOp_vpsrld_Vx_Hx_Wx); 7865 /* Opcode 0xf3 0x0f 0xd2 - invalid */ 7866 /* Opcode 0xf2 0x0f 0xd2 - invalid */ 7867 7868 /** Opcode 0x0f 0xd3 - psrlq Pq, Qq */ 7869 FNIEMOP_STUB(iemOp_psrlq_Pq_Qq); 7870 /** Opcode 0x66 0x0f 0xd3 - vpsrlq Vx, Hx, Wx */ 7871 FNIEMOP_STUB(iemOp_vpsrlq_Vx_Hx_Wx); 7872 /* Opcode 0xf3 0x0f 0xd3 - invalid */ 7873 /* Opcode 0xf2 0x0f 0xd3 - invalid */ 7874 7875 /** Opcode 0x0f 0xd4 - paddq Pq, Qq */ 7876 FNIEMOP_STUB(iemOp_paddq_Pq_Qq); 7877 /** Opcode 0x66 0x0f 0xd4 - vpaddq Vx, Hx, W */ 7878 FNIEMOP_STUB(iemOp_vpaddq_Vx_Hx_W); 7879 /* Opcode 0xf3 0x0f 0xd4 - invalid */ 7880 /* Opcode 0xf2 0x0f 0xd4 - invalid */ 7881 7882 /** Opcode 0x0f 0xd5 - pmullw Pq, Qq */ 7883 FNIEMOP_STUB(iemOp_pmullw_Pq_Qq); 7884 /** Opcode 0x66 0x0f 0xd5 - vpmullw Vx, Hx, Wx */ 7885 FNIEMOP_STUB(iemOp_vpmullw_Vx_Hx_Wx); 7886 /* Opcode 0xf3 0x0f 0xd5 - invalid */ 7887 /* Opcode 0xf2 0x0f 0xd5 - invalid */ 7888 7889 /* Opcode 0x0f 0xd6 - invalid */ 7890 7891 /** 7892 * @opcode 0xd6 7893 * @oppfx 0x66 7894 * @opcpuid sse2 7895 * @opgroup og_sse2_pcksclr_datamove 7896 * @opxcpttype none 7897 * @optest op1=-1 op2=2 -> op1=2 7898 * @optest op1=0 op2=-42 -> op1=-42 7899 */ 7900 FNIEMOP_DEF(iemOp_vmovq_Wq_Vq) 7901 { 7902 IEMOP_MNEMONIC2(MR, MOVQ, movq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 7903 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7904 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 7905 { 7906 /* 7907 * Register, register.
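* (A sketch of the SDM semantics assumed here: the low quadword of the source XMM register is copied and zero-extended into the full 128-bit destination, hence the U64 fetch and the _ZX_U128 store below.)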
7908 */ 7909 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7910 IEM_MC_BEGIN(0, 2); 7911 IEM_MC_LOCAL(uint64_t, uSrc); 7912 7913 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 7914 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 7915 7916 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7917 IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 7918 7919 IEM_MC_ADVANCE_RIP(); 7920 IEM_MC_END(); 7921 } 7922 else 7923 { 7924 /* 7925 * Memory, register. 7926 */ 7927 IEM_MC_BEGIN(0, 2); 7928 IEM_MC_LOCAL(uint64_t, uSrc); 7929 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 7930 7931 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 7932 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 7933 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 7934 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 7935 7936 IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7937 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 7938 7939 IEM_MC_ADVANCE_RIP(); 7940 IEM_MC_END(); 7941 } 7942 return VINF_SUCCESS; 7943 } 7944 7945 7946 /** Opcode 0xf3 0x0f 0xd6 - movq2dq Vdq, Nq */ 7947 FNIEMOP_STUB(iemOp_movq2dq_Vdq_Nq); 7948 /** Opcode 0xf2 0x0f 0xd6 - movdq2q Pq, Uq */ 7949 FNIEMOP_STUB(iemOp_movdq2q_Pq_Uq); 7950 #if 0 7951 FNIEMOP_DEF(iemOp_movq_Wq_Vq__movq2dq_Vdq_Nq__movdq2q_Pq_Uq) 7952 { 7953 /* Docs says register only. */ 7954 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 7955 7956 switch (pVCpu->iem.s.fPrefixes & (IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REPZ)) 7957 { 7958 case IEM_OP_PRF_SIZE_OP: /* SSE */ 7959 IEMOP_MNEMONIC(movq_Wq_Vq, "movq Wq,Vq"); 7960 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS); 7961 IEM_MC_BEGIN(2, 0); 7962 IEM_MC_ARG(uint64_t *, pDst, 0); 7963 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 7964 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 7965 IEM_MC_PREPARE_SSE_USAGE(); 7966 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 7967 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 7968 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc); 7969 IEM_MC_ADVANCE_RIP(); 7970 IEM_MC_END(); 7971 return VINF_SUCCESS; 7972 7973 case 0: /* MMX */ 7974 IEMOP_MNEMONIC(pmovmskb_Gd_Udq, "pmovmskb Gd,Udq"); 7975 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS); 7976 IEM_MC_BEGIN(2, 0); 7977 IEM_MC_ARG(uint64_t *, pDst, 0); 7978 IEM_MC_ARG(uint64_t const *, pSrc, 1); 7979 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); 7980 IEM_MC_PREPARE_FPU_USAGE(); 7981 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 7982 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 7983 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc); 7984 IEM_MC_ADVANCE_RIP(); 7985 IEM_MC_END(); 7986 return VINF_SUCCESS; 7987 7988 default: 7989 return IEMOP_RAISE_INVALID_OPCODE(); 7990 } 7991 } 7992 #endif 7993 7994 7995 /** Opcode 0x0f 0xd7 - pmovmskb Gd, Nq */ 7996 FNIEMOP_DEF(iemOp_pmovmskb_Gd_Nq) 7997 { 7998 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */ 7999 /** @todo testcase: Check that the instruction implicitly clears the high 8000 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256 8001 * and opcode modifications are made to work with the whole width (not 8002 * just 128).
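* As a sketch of the operation itself (plain SDM semantics, nothing IEM-specific): each of the 8 (MMX) or 16 (SSE) source bytes contributes its most significant bit, roughly for (i = 0; i < cBytes; i++) uDst |= (uint64_t)(abSrc[i] >> 7) << i; so source bytes 0xff, 0x01, 0x80 produce mask bits 1, 0, 1 (cBytes and abSrc being illustrative names only).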
*/ 8003 IEMOP_MNEMONIC(pmovmskb_Gd_Nq, "pmovmskb Gd,Nq"); 8004 /* Docs says register only. */ 8005 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 8006 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */ 8007 { 8008 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_MMX | DISOPTYPE_HARMLESS); 8009 IEM_MC_BEGIN(2, 0); 8010 IEM_MC_ARG(uint64_t *, pDst, 0); 8011 IEM_MC_ARG(uint64_t const *, pSrc, 1); 8012 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT_CHECK_SSE_OR_MMXEXT(); 8013 IEM_MC_PREPARE_FPU_USAGE(); 8014 IEM_MC_REF_GREG_U64(pDst, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 8015 IEM_MC_REF_MREG_U64_CONST(pSrc, bRm & X86_MODRM_RM_MASK); 8016 IEM_MC_CALL_MMX_AIMPL_2(iemAImpl_pmovmskb_u64, pDst, pSrc); 8017 IEM_MC_ADVANCE_RIP(); 8018 IEM_MC_END(); 8019 return VINF_SUCCESS; 8020 } 8021 return IEMOP_RAISE_INVALID_OPCODE(); 8022 } 8023 8024 /** Opcode 0x66 0x0f 0xd7 - */ 8025 FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux) 8026 { 8027 /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */ 8028 /** @todo testcase: Check that the instruction implicitly clears the high 8029 * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256 8030 * and opcode modifications are made to work with the whole width (not 8031 * just 128). */ 8032 IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux"); 8033 /* Docs says register only. */ 8034 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 8035 if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */ 8036 { 8037 IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS); 8038 IEM_MC_BEGIN(2, 0); 8039 IEM_MC_ARG(uint64_t *, pDst, 0); 8040 IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 8041 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 8042 IEM_MC_PREPARE_SSE_USAGE(); 8043 IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 8044 IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 8045 IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc); 8046 IEM_MC_ADVANCE_RIP(); 8047 IEM_MC_END(); 8048 return VINF_SUCCESS; 8049 } 8050 return IEMOP_RAISE_INVALID_OPCODE(); 8051 } 8052 8053 /* Opcode 0xf3 0x0f 0xd7 - invalid */ 8054 /* Opcode 0xf2 0x0f 0xd7 - invalid */ 8055 8056 8057 /** Opcode 0x0f 0xd8 - psubusb Pq, Qq */ 8058 FNIEMOP_STUB(iemOp_psubusb_Pq_Qq); 8059 /** Opcode 0x66 0x0f 0xd8 - vpsubusb Vx, Hx, W */ 2423 FNIEMOP_STUB(iemOp_vmovq_Wq_Vq); 2424 //FNIEMOP_DEF(iemOp_vmovq_Wq_Vq) 2425 //{ 2426 // IEMOP_MNEMONIC2(MR, VMOVQ, vmovq, WqZxReg, Vq, DISOPTYPE_HARMLESS, IEMOPHINT_IGNORES_OP_SIZE); 2427 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2428 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) 2429 // { 2430 // /* 2431 // * Register, register. 2432 // */ 2433 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2434 // IEM_MC_BEGIN(0, 2); 2435 // IEM_MC_LOCAL(uint64_t, uSrc); 2436 // 2437 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2438 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2439 // 2440 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2441 // IEM_MC_STORE_XREG_U64_ZX_U128((bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB, uSrc); 2442 // 2443 // IEM_MC_ADVANCE_RIP(); 2444 // IEM_MC_END(); 2445 // } 2446 // else 2447 // { 2448 // /* 2449 // * Memory, register.
2450 // */ 2451 // IEM_MC_BEGIN(0, 2); 2452 // IEM_MC_LOCAL(uint64_t, uSrc); 2453 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2454 // 2455 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2456 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2457 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2458 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2459 // 2460 // IEM_MC_FETCH_XREG_U64(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2461 // IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2462 // 2463 // IEM_MC_ADVANCE_RIP(); 2464 // IEM_MC_END(); 2465 // } 2466 // return VINF_SUCCESS; 2467 //} 2468 2469 /* Opcode VEX.F3.0F 0xd6 - invalid */ 2470 /* Opcode VEX.F2.0F 0xd6 - invalid */ 2471 2472 2473 /* Opcode VEX.0F 0xd7 - invalid */ 2474 2475 /** Opcode VEX.66.0F 0xd7 - */ 2476 FNIEMOP_STUB(iemOp_vpmovmskb_Gd_Ux); 2477 //FNIEMOP_DEF(iemOp_vpmovmskb_Gd_Ux) 2478 //{ 2479 // /* Note! Taking the lazy approach here wrt the high 32-bits of the GREG. */ 2480 // /** @todo testcase: Check that the instruction implicitly clears the high 2481 // * bits in 64-bit mode. The REX.W is first necessary when VLMAX > 256 2482 // * and opcode modifications are made to work with the whole width (not 2483 // * just 128). */ 2484 // IEMOP_MNEMONIC(vpmovmskb_Gd_Ux, "vpmovmskb Gd, Ux"); 2485 // /* Docs says register only. */ 2486 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2487 // if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) /** @todo test that this is registers only. */ 2488 // { 2489 // IEMOP_HLP_DECODED_NL_2(OP_PMOVMSKB, IEMOPFORM_RM_REG, OP_PARM_Gd, OP_PARM_Vdq, DISOPTYPE_SSE | DISOPTYPE_HARMLESS); 2490 // IEM_MC_BEGIN(2, 0); 2491 // IEM_MC_ARG(uint64_t *, pDst, 0); 2492 // IEM_MC_ARG(PCRTUINT128U, pSrc, 1); 2493 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2494 // IEM_MC_PREPARE_SSE_USAGE(); 2495 // IEM_MC_REF_GREG_U64(pDst, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2496 // IEM_MC_REF_XREG_U128_CONST(pSrc, (bRm & X86_MODRM_RM_MASK) | pVCpu->iem.s.uRexB); 2497 // IEM_MC_CALL_SSE_AIMPL_2(iemAImpl_pmovmskb_u128, pDst, pSrc); 2498 // IEM_MC_ADVANCE_RIP(); 2499 // IEM_MC_END(); 2500 // return VINF_SUCCESS; 2501 // } 2502 // return IEMOP_RAISE_INVALID_OPCODE(); 2503 //} 2504 2505 /* Opcode VEX.F3.0F 0xd7 - invalid */ 2506 /* Opcode VEX.F2.0F 0xd7 - invalid */ 2507 2508 2509 /* Opcode VEX.0F 0xd8 - invalid */ 2510 /** Opcode VEX.66.0F 0xd8 - vpsubusb Vx, Hx, W */ 8060 2511 FNIEMOP_STUB(iemOp_vpsubusb_Vx_Hx_W); 8061 /* Opcode 0xf3 0x0f 0xd8 - invalid */ 8062 /* Opcode 0xf2 0x0f 0xd8 - invalid */ 8063 8064 /** Opcode 0x0f 0xd9 - psubusw Pq, Qq */ 8065 FNIEMOP_STUB(iemOp_psubusw_Pq_Qq); 8066 /** Opcode 0x66 0x0f 0xd9 - vpsubusw Vx, Hx, Wx */ 2512 /* Opcode VEX.F3.0F 0xd8 - invalid */ 2513 /* Opcode VEX.F2.0F 0xd8 - invalid */ 2514 2515 /* Opcode VEX.0F 0xd9 - invalid */ 2516 /** Opcode VEX.66.0F 0xd9 - vpsubusw Vx, Hx, Wx */ 8067 2517 FNIEMOP_STUB(iemOp_vpsubusw_Vx_Hx_Wx); 8068 /* Opcode 0xf3 0x0f 0xd9 - invalid */ 8069 /* Opcode 0xf2 0x0f 0xd9 - invalid */ 8070 8071 /** Opcode 0x0f 0xda - pminub Pq, Qq */ 8072 FNIEMOP_STUB(iemOp_pminub_Pq_Qq); 8073 /** Opcode 0x66 0x0f 0xda - vpminub Vx, Hx, Wx */ 2518 /* Opcode VEX.F3.0F 0xda - invalid */ 2519 /* Opcode VEX.F2.0F 0xda - invalid */ 2520 2521 /* Opcode VEX.0F 0xda - invalid */ 2522 /** Opcode VEX.66.0F 0xda - vpminub Vx, Hx, Wx */ 8074 2523 FNIEMOP_STUB(iemOp_vpminub_Vx_Hx_Wx); 8075 /* Opcode 0xf3 0x0f 0xda - invalid */ 8076 /* Opcode 0xf2 0x0f 0xda - invalid */ 8077 8078 /** Opcode 0x0f
0xdb - pand Pq, Qq */ 8079 FNIEMOP_STUB(iemOp_pand_Pq_Qq); 8080 /** Opcode 0x66 0x0f 0xdb - vpand Vx, Hx, W */ 2524 /* Opcode VEX.F3.0F 0xda - invalid */ 2525 /* Opcode VEX.F2.0F 0xda - invalid */ 2526 2527 /* Opcode VEX.0F 0xdb - invalid */ 2528 /** Opcode VEX.66.0F 0xdb - vpand Vx, Hx, W */ 8081 2529 FNIEMOP_STUB(iemOp_vpand_Vx_Hx_W); 8082 /* Opcode 0xf3 0x0f 0xdb - invalid */ 8083 /* Opcode 0xf2 0x0f 0xdb - invalid */ 8084 8085 /** Opcode 0x0f 0xdc - paddusb Pq, Qq */ 8086 FNIEMOP_STUB(iemOp_paddusb_Pq_Qq); 8087 /** Opcode 0x66 0x0f 0xdc - vpaddusb Vx, Hx, Wx */ 2530 /* Opcode VEX.F3.0F 0xdb - invalid */ 2531 /* Opcode VEX.F2.0F 0xdb - invalid */ 2532 2533 /* Opcode VEX.0F 0xdc - invalid */ 2534 /** Opcode VEX.66.0F 0xdc - vpaddusb Vx, Hx, Wx */ 8088 2535 FNIEMOP_STUB(iemOp_vpaddusb_Vx_Hx_Wx); 8089 /* Opcode 0xf3 0x0f 0xdc - invalid */ 8090 /* Opcode 0xf2 0x0f 0xdc - invalid */ 8091 8092 /** Opcode 0x0f 0xdd - paddusw Pq, Qq */ 8093 FNIEMOP_STUB(iemOp_paddusw_Pq_Qq); 8094 /** Opcode 0x66 0x0f 0xdd - vpaddusw Vx, Hx, Wx */ 2536 /* Opcode VEX.F3.0F 0xdc - invalid */ 2537 /* Opcode VEX.F2.0F 0xdc - invalid */ 2538 2539 /* Opcode VEX.0F 0xdd - invalid */ 2540 /** Opcode VEX.66.0F 0xdd - vpaddusw Vx, Hx, Wx */ 8095 2541 FNIEMOP_STUB(iemOp_vpaddusw_Vx_Hx_Wx); 8096 /* Opcode 0xf3 0x0f 0xdd - invalid */ 8097 /* Opcode 0xf2 0x0f 0xdd - invalid */ 8098 8099 /** Opcode 0x0f 0xde - pmaxub Pq, Qq */ 8100 FNIEMOP_STUB(iemOp_pmaxub_Pq_Qq); 8101 /** Opcode 0x66 0x0f 0xde - vpmaxub Vx, Hx, W */ 2542 /* Opcode VEX.F3.0F 0xdd - invalid */ 2543 /* Opcode VEX.F2.0F 0xdd - invalid */ 2544 2545 /* Opcode VEX.0F 0xde - invalid */ 2546 /** Opcode VEX.66.0F 0xde - vpmaxub Vx, Hx, W */ 8102 2547 FNIEMOP_STUB(iemOp_vpmaxub_Vx_Hx_W); 8103 /* Opcode 0xf3 0x0f 0xde - invalid */ 8104 /* Opcode 0xf2 0x0f 0xde - invalid */ 8105 8106 /** Opcode 0x0f 0xdf - pandn Pq, Qq */ 8107 FNIEMOP_STUB(iemOp_pandn_Pq_Qq); 8108 /** Opcode 0x66 0x0f 0xdf - vpandn Vx, Hx, Wx */ 2548 /* Opcode VEX.F3.0F 0xde - invalid */ 2549 /* Opcode VEX.F2.0F 0xde - invalid */ 2550 2551 /* Opcode VEX.0F 0xdf - invalid */ 2552 /** Opcode VEX.66.0F 0xdf - vpandn Vx, Hx, Wx */ 8109 2553 FNIEMOP_STUB(iemOp_vpandn_Vx_Hx_Wx); 8110 /* Opcode 0xf3 0x0f 0xdf - invalid */ 8111 /* Opcode 0xf2 0x0f 0xdf - invalid */ 8112 8113 /** Opcode 0x0f 0xe0 - pavgb Pq, Qq */ 8114 FNIEMOP_STUB(iemOp_pavgb_Pq_Qq); 8115 /** Opcode 0x66 0x0f 0xe0 - vpavgb Vx, Hx, Wx */ 2554 /* Opcode VEX.F3.0F 0xdf - invalid */ 2555 /* Opcode VEX.F2.0F 0xdf - invalid */ 2556 2557 /* Opcode VEX.0F 0xe0 - invalid */ 2558 /** Opcode VEX.66.0F 0xe0 - vpavgb Vx, Hx, Wx */ 8116 2559 FNIEMOP_STUB(iemOp_vpavgb_Vx_Hx_Wx); 8117 /* Opcode 0xf3 0x0f 0xe0 - invalid */ 8118 /* Opcode 0xf2 0x0f 0xe0 - invalid */ 8119 8120 /** Opcode 0x0f 0xe1 - psraw Pq, Qq */ 8121 FNIEMOP_STUB(iemOp_psraw_Pq_Qq); 8122 /** Opcode 0x66 0x0f 0xe1 - vpsraw Vx, Hx, W */ 2560 /* Opcode VEX.F3.0F 0xe0 - invalid */ 2561 /* Opcode VEX.F2.0F 0xe0 - invalid */ 2562 2563 /* Opcode VEX.0F 0xe1 - invalid */ 2564 /** Opcode VEX.66.0F 0xe1 - vpsraw Vx, Hx, W */ 8123 2565 FNIEMOP_STUB(iemOp_vpsraw_Vx_Hx_W); 8124 /* Opcode 0xf3 0x0f 0xe1 - invalid */ 8125 /* Opcode 0xf2 0x0f 0xe1 - invalid */ 8126 8127 /** Opcode 0x0f 0xe2 - psrad Pq, Qq */ 8128 FNIEMOP_STUB(iemOp_psrad_Pq_Qq); 8129 /** Opcode 0x66 0x0f 0xe2 - vpsrad Vx, Hx, Wx */ 2566 /* Opcode VEX.F3.0F 0xe1 - invalid */ 2567 /* Opcode VEX.F2.0F 0xe1 - invalid */ 2568 2569 /* Opcode VEX.0F 0xe2 - invalid */ 2570 /** Opcode VEX.66.0F 0xe2 - vpsrad Vx, Hx, Wx */ 8130 2571 
FNIEMOP_STUB(iemOp_vpsrad_Vx_Hx_Wx); 8131 /* Opcode 0xf3 0x0f 0xe2 - invalid */ 8132 /* Opcode 0xf2 0x0f 0xe2 - invalid */ 8133 8134 /** Opcode 0x0f 0xe3 - pavgw Pq, Qq */ 8135 FNIEMOP_STUB(iemOp_pavgw_Pq_Qq); 8136 /** Opcode 0x66 0x0f 0xe3 - vpavgw Vx, Hx, Wx */ 2572 /* Opcode VEX.F3.0F 0xe2 - invalid */ 2573 /* Opcode VEX.F2.0F 0xe2 - invalid */ 2574 2575 /* Opcode VEX.0F 0xe3 - invalid */ 2576 /** Opcode VEX.66.0F 0xe3 - vpavgw Vx, Hx, Wx */ 8137 2577 FNIEMOP_STUB(iemOp_vpavgw_Vx_Hx_Wx); 8138 /* Opcode 0xf3 0x0f 0xe3 - invalid */ 8139 /* Opcode 0xf2 0x0f 0xe3 - invalid */ 8140 8141 /** Opcode 0x0f 0xe4 - pmulhuw Pq, Qq */ 8142 FNIEMOP_STUB(iemOp_pmulhuw_Pq_Qq); 8143 /** Opcode 0x66 0x0f 0xe4 - vpmulhuw Vx, Hx, W */ 2578 /* Opcode VEX.F3.0F 0xe3 - invalid */ 2579 /* Opcode VEX.F2.0F 0xe3 - invalid */ 2580 2581 /* Opcode VEX.0F 0xe4 - invalid */ 2582 /** Opcode VEX.66.0F 0xe4 - vpmulhuw Vx, Hx, W */ 8144 2583 FNIEMOP_STUB(iemOp_vpmulhuw_Vx_Hx_W); 8145 /* Opcode 0xf3 0x0f 0xe4 - invalid */ 8146 /* Opcode 0xf2 0x0f 0xe4 - invalid */ 8147 8148 /** Opcode 0x0f 0xe5 - pmulhw Pq, Qq */ 8149 FNIEMOP_STUB(iemOp_pmulhw_Pq_Qq); 8150 /** Opcode 0x66 0x0f 0xe5 - vpmulhw Vx, Hx, Wx */ 2584 /* Opcode VEX.F3.0F 0xe4 - invalid */ 2585 /* Opcode VEX.F2.0F 0xe4 - invalid */ 2586 2587 /* Opcode VEX.0F 0xe5 - invalid */ 2588 /** Opcode VEX.66.0F 0xe5 - vpmulhw Vx, Hx, Wx */ 8151 2589 FNIEMOP_STUB(iemOp_vpmulhw_Vx_Hx_Wx); 8152 /* Opcode 0xf3 0x0f 0xe5 - invalid */ 8153 /* Opcode 0xf2 0x0f 0xe5 - invalid */ 8154 8155 /* Opcode 0x0f 0xe6 - invalid */ 8156 /** Opcode 0x66 0x0f 0xe6 - vcvttpd2dq Vx, Wpd */ 2590 /* Opcode VEX.F3.0F 0xe5 - invalid */ 2591 /* Opcode VEX.F2.0F 0xe5 - invalid */ 2592 2593 /* Opcode VEX.0F 0xe6 - invalid */ 2594 /** Opcode VEX.66.0F 0xe6 - vcvttpd2dq Vx, Wpd */ 8157 2595 FNIEMOP_STUB(iemOp_vcvttpd2dq_Vx_Wpd); 8158 /** Opcode 0xf3 0x0f 0xe6 - vcvtdq2pd Vx, Wpd */ 2596 /** Opcode VEX.F3.0F 0xe6 - vcvtdq2pd Vx, Wpd */ 8159 2597 FNIEMOP_STUB(iemOp_vcvtdq2pd_Vx_Wpd); 8160 /** Opcode 0xf2 0x0f 0xe6 - vcvtpd2dq Vx, Wpd */ 2598 /** Opcode VEX.F2.0F 0xe6 - vcvtpd2dq Vx, Wpd */ 8161 2599 FNIEMOP_STUB(iemOp_vcvtpd2dq_Vx_Wpd); 8162 2600 8163 2601 8164 /** Opcode 0x0f 0xe7 - movntq Mq, Pq */ 8165 FNIEMOP_DEF(iemOp_movntq_Mq_Pq) 8166 { 8167 IEMOP_MNEMONIC(movntq_Mq_Pq, "movntq Mq,Pq"); 8168 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 8169 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 8170 { 8171 /* Register, memory. */ 8172 IEM_MC_BEGIN(0, 2); 8173 IEM_MC_LOCAL(uint64_t, uSrc); 8174 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 8175 8176 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 8177 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 8178 IEM_MC_MAYBE_RAISE_MMX_RELATED_XCPT(); 8179 IEM_MC_ACTUALIZE_FPU_STATE_FOR_READ(); 8180 8181 IEM_MC_FETCH_MREG_U64(uSrc, (bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK); 8182 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 8183 8184 IEM_MC_ADVANCE_RIP(); 8185 IEM_MC_END(); 8186 return VINF_SUCCESS; 8187 } 8188 /* The register, register encoding is invalid. */ 8189 return IEMOP_RAISE_INVALID_OPCODE(); 8190 } 8191 8192 /** Opcode 0x66 0x0f 0xe7 - vmovntdq Mx, Vx */ 8193 FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx) 8194 { 8195 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 8196 if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 8197 { 8198 /* Register, memory. 
*/ 8199 IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx"); 8200 IEM_MC_BEGIN(0, 2); 8201 IEM_MC_LOCAL(RTUINT128U, uSrc); 8202 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 8203 8204 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 8205 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 8206 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 8207 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 8208 8209 IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 8210 IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 8211 8212 IEM_MC_ADVANCE_RIP(); 8213 IEM_MC_END(); 8214 return VINF_SUCCESS; 8215 } 8216 8217 /* The register, register encoding is invalid. */ 8218 return IEMOP_RAISE_INVALID_OPCODE(); 8219 } 8220 8221 /* Opcode 0xf3 0x0f 0xe7 - invalid */ 8222 /* Opcode 0xf2 0x0f 0xe7 - invalid */ 8223 8224 8225 /** Opcode 0x0f 0xe8 - psubsb Pq, Qq */ 8226 FNIEMOP_STUB(iemOp_psubsb_Pq_Qq); 8227 /** Opcode 0x66 0x0f 0xe8 - vpsubsb Vx, Hx, W */ 2602 /* Opcode VEX.0F 0xe7 - invalid */ 2603 2604 /** Opcode VEX.66.0F 0xe7 - vmovntdq Mx, Vx */ 2605 FNIEMOP_STUB(iemOp_vmovntdq_Mx_Vx); 2606 //FNIEMOP_DEF(iemOp_vmovntdq_Mx_Vx) 2607 //{ 2608 // uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 2609 // if ((bRm & X86_MODRM_MOD_MASK) != (3 << X86_MODRM_MOD_SHIFT)) 2610 // { 2611 // /* Register, memory. */ 2612 // IEMOP_MNEMONIC(vmovntdq_Mx_Vx, "vmovntdq Mx,Vx"); 2613 // IEM_MC_BEGIN(0, 2); 2614 // IEM_MC_LOCAL(RTUINT128U, uSrc); 2615 // IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 2616 // 2617 // IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 2618 // IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 2619 // IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2620 // IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2621 // 2622 // IEM_MC_FETCH_XREG_U128(uSrc, ((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pVCpu->iem.s.uRexReg); 2623 // IEM_MC_STORE_MEM_U128_ALIGN_SSE(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2624 // 2625 // IEM_MC_ADVANCE_RIP(); 2626 // IEM_MC_END(); 2627 // return VINF_SUCCESS; 2628 // } 2629 // 2630 // /* The register, register encoding is invalid. 
*/ 2631 // return IEMOP_RAISE_INVALID_OPCODE(); 2632 //} 2633 2634 /* Opcode VEX.F3.0F 0xe7 - invalid */ 2635 /* Opcode VEX.F2.0F 0xe7 - invalid */ 2636 2637 2638 /* Opcode VEX.0F 0xe8 - invalid */ 2639 /** Opcode VEX.66.0F 0xe8 - vpsubsb Vx, Hx, W */ 8228 2640 FNIEMOP_STUB(iemOp_vpsubsb_Vx_Hx_W); 8229 /* Opcode 0xf3 0x0f 0xe8 - invalid */ 8230 /* Opcode 0xf2 0x0f 0xe8 - invalid */ 8231 8232 /** Opcode 0x0f 0xe9 - psubsw Pq, Qq */ 8233 FNIEMOP_STUB(iemOp_psubsw_Pq_Qq); 8234 /** Opcode 0x66 0x0f 0xe9 - vpsubsw Vx, Hx, Wx */ 2641 /* Opcode VEX.F3.0F 0xe8 - invalid */ 2642 /* Opcode VEX.F2.0F 0xe8 - invalid */ 2643 2644 /* Opcode VEX.0F 0xe9 - invalid */ 2645 /** Opcode VEX.66.0F 0xe9 - vpsubsw Vx, Hx, Wx */ 8235 2646 FNIEMOP_STUB(iemOp_vpsubsw_Vx_Hx_Wx); 8236 /* Opcode 0xf3 0x0f 0xe9 - invalid */ 8237 /* Opcode 0xf2 0x0f 0xe9 - invalid */ 8238 8239 /** Opcode 0x0f 0xea - pminsw Pq, Qq */ 8240 FNIEMOP_STUB(iemOp_pminsw_Pq_Qq); 8241 /** Opcode 0x66 0x0f 0xea - vpminsw Vx, Hx, Wx */ 2647 /* Opcode VEX.F3.0F 0xe9 - invalid */ 2648 /* Opcode VEX.F2.0F 0xe9 - invalid */ 2649 2650 /* Opcode VEX.0F 0xea - invalid */ 2651 /** Opcode VEX.66.0F 0xea - vpminsw Vx, Hx, Wx */ 8242 2652 FNIEMOP_STUB(iemOp_vpminsw_Vx_Hx_Wx); 8243 /* Opcode 0xf3 0x0f 0xea - invalid */ 8244 /* Opcode 0xf2 0x0f 0xea - invalid */ 8245 8246 /** Opcode 0x0f 0xeb - por Pq, Qq */ 8247 FNIEMOP_STUB(iemOp_por_Pq_Qq); 8248 /** Opcode 0x66 0x0f 0xeb - vpor Vx, Hx, W */ 2653 /* Opcode VEX.F3.0F 0xea - invalid */ 2654 /* Opcode VEX.F2.0F 0xea - invalid */ 2655 2656 /* Opcode VEX.0F 0xeb - invalid */ 2657 /** Opcode VEX.66.0F 0xeb - vpor Vx, Hx, W */ 8249 2658 FNIEMOP_STUB(iemOp_vpor_Vx_Hx_W); 8250 /* Opcode 0xf3 0x0f 0xeb - invalid */ 8251 /* Opcode 0xf2 0x0f 0xeb - invalid */ 8252 8253 /** Opcode 0x0f 0xec - paddsb Pq, Qq */ 8254 FNIEMOP_STUB(iemOp_paddsb_Pq_Qq); 8255 /** Opcode 0x66 0x0f 0xec - vpaddsb Vx, Hx, Wx */ 2659 /* Opcode VEX.F3.0F 0xeb - invalid */ 2660 /* Opcode VEX.F2.0F 0xeb - invalid */ 2661 2662 /* Opcode VEX.0F 0xec - invalid */ 2663 /** Opcode VEX.66.0F 0xec - vpaddsb Vx, Hx, Wx */ 8256 2664 FNIEMOP_STUB(iemOp_vpaddsb_Vx_Hx_Wx); 8257 /* Opcode 0xf3 0x0f 0xec - invalid */ 8258 /* Opcode 0xf2 0x0f 0xec - invalid */ 8259 8260 /** Opcode 0x0f 0xed - paddsw Pq, Qq */ 8261 FNIEMOP_STUB(iemOp_paddsw_Pq_Qq); 8262 /** Opcode 0x66 0x0f 0xed - vpaddsw Vx, Hx, Wx */ 2665 /* Opcode VEX.F3.0F 0xec - invalid */ 2666 /* Opcode VEX.F2.0F 0xec - invalid */ 2667 2668 /* Opcode VEX.0F 0xed - invalid */ 2669 /** Opcode VEX.66.0F 0xed - vpaddsw Vx, Hx, Wx */ 8263 2670 FNIEMOP_STUB(iemOp_vpaddsw_Vx_Hx_Wx); 8264 /* Opcode 0xf3 0x0f 0xed - invalid */ 8265 /* Opcode 0xf2 0x0f 0xed - invalid */ 8266 8267 /** Opcode 0x0f 0xee - pmaxsw Pq, Qq */ 8268 FNIEMOP_STUB(iemOp_pmaxsw_Pq_Qq); 8269 /** Opcode 0x66 0x0f 0xee - vpmaxsw Vx, Hx, W */ 2671 /* Opcode VEX.F3.0F 0xed - invalid */ 2672 /* Opcode VEX.F2.0F 0xed - invalid */ 2673 2674 /* Opcode VEX.0F 0xee - invalid */ 2675 /** Opcode VEX.66.0F 0xee - vpmaxsw Vx, Hx, W */ 8270 2676 FNIEMOP_STUB(iemOp_vpmaxsw_Vx_Hx_W); 8271 /* Opcode 0xf3 0x0f 0xee - invalid */ 8272 /* Opcode 0xf2 0x0f 0xee - invalid */ 8273 8274 8275 /** Opcode 0x0f 0xef - pxor Pq, Qq */ 8276 FNIEMOP_DEF(iemOp_pxor_Pq_Qq) 8277 { 8278 IEMOP_MNEMONIC(pxor, "pxor"); 8279 return FNIEMOP_CALL_1(iemOpCommonMmx_FullFull_To_Full, &g_iemAImpl_pxor); 8280 } 8281 8282 /** Opcode 0x66 0x0f 0xef - vpxor Vx, Hx, Wx */ 2677 /* Opcode VEX.F3.0F 0xee - invalid */ 2678 /* Opcode VEX.F2.0F 0xee - invalid */ 2679 2680 2681 /* Opcode VEX.0F 0xef - invalid 
*/ 2682 2683 /** Opcode VEX.66.0F 0xef - vpxor Vx, Hx, Wx */ 8283 2684 FNIEMOP_DEF(iemOp_vpxor_Vx_Hx_Wx) 8284 2685 { … 8287 2688 } 8288 2689 8289 /* Opcode 0xf3 0x0f 0xef - invalid */ 8290 /* Opcode 0xf2 0x0f 0xef - invalid */ 8291 8292 /* Opcode 0x0f 0xf0 - invalid */ 8293 /* Opcode 0x66 0x0f 0xf0 - invalid */ 8294 /** Opcode 0xf2 0x0f 0xf0 - vlddqu Vx, Mx */ 2690 /* Opcode VEX.F3.0F 0xef - invalid */ 2691 /* Opcode VEX.F2.0F 0xef - invalid */ 2692 2693 /* Opcode VEX.0F 0xf0 - invalid */ 2694 /* Opcode VEX.66.0F 0xf0 - invalid */ 2695 /** Opcode VEX.F2.0F 0xf0 - vlddqu Vx, Mx */ 8295 2696 FNIEMOP_STUB(iemOp_vlddqu_Vx_Mx); 8296 2697 8297 /** Opcode 0x0f 0xf1 - psllw Pq, Qq */ 8298 FNIEMOP_STUB(iemOp_psllw_Pq_Qq); 8299 /** Opcode 0x66 0x0f 0xf1 - vpsllw Vx, Hx, W */ 2698 /* Opcode VEX.0F 0xf1 - invalid */ 2699 /** Opcode VEX.66.0F 0xf1 - vpsllw Vx, Hx, W */ 8300 2700 FNIEMOP_STUB(iemOp_vpsllw_Vx_Hx_W); 8301 /* Opcode 0xf2 0x0f 0xf1 - invalid */ 8302 8303 /** Opcode 0x0f 0xf2 - pslld Pq, Qq */ 8304 FNIEMOP_STUB(iemOp_pslld_Pq_Qq); 8305 /** Opcode 0x66 0x0f 0xf2 - vpslld Vx, Hx, Wx */ 2701 /* Opcode VEX.F2.0F 0xf1 - invalid */ 2702 2703 /* Opcode VEX.0F 0xf2 - invalid */ 2704 /** Opcode VEX.66.0F 0xf2 - vpslld Vx, Hx, Wx */ 8306 2705 FNIEMOP_STUB(iemOp_vpslld_Vx_Hx_Wx); 8307 /* Opcode 0xf2 0x0f 0xf2 - invalid */ 8308 8309 /** Opcode 0x0f 0xf3 - psllq Pq, Qq */ 8310 FNIEMOP_STUB(iemOp_psllq_Pq_Qq); 8311 /** Opcode 0x66 0x0f 0xf3 - vpsllq Vx, Hx, Wx */ 2706 /* Opcode VEX.F2.0F 0xf2 - invalid */ 2707 2708 /* Opcode VEX.0F 0xf3 - invalid */ 2709 /** Opcode VEX.66.0F 0xf3 - vpsllq Vx, Hx, Wx */ 8312 2710 FNIEMOP_STUB(iemOp_vpsllq_Vx_Hx_Wx); 8313 /* Opcode 0xf2 0x0f 0xf3 - invalid */ 8314 8315 /** Opcode 0x0f 0xf4 - pmuludq Pq, Qq */ 8316 FNIEMOP_STUB(iemOp_pmuludq_Pq_Qq); 8317 /** Opcode 0x66 0x0f 0xf4 - vpmuludq Vx, Hx, W */ 2711 /* Opcode VEX.F2.0F 0xf3 - invalid */ 2712 2713 /* Opcode VEX.0F 0xf4 - invalid */ 2714 /** Opcode VEX.66.0F 0xf4 - vpmuludq Vx, Hx, W */ 8318 2715 FNIEMOP_STUB(iemOp_vpmuludq_Vx_Hx_W); 8319 /* Opcode 0xf2 0x0f 0xf4 - invalid */ 8320 8321 /** Opcode 0x0f 0xf5 - pmaddwd Pq, Qq */ 8322 FNIEMOP_STUB(iemOp_pmaddwd_Pq_Qq); 8323 /** Opcode 0x66 0x0f 0xf5 - vpmaddwd Vx, Hx, Wx */ 2716 /* Opcode VEX.F2.0F 0xf4 - invalid */ 2717 2718 /* Opcode VEX.0F 0xf5 - invalid */ 2719 /** Opcode VEX.66.0F 0xf5 - vpmaddwd Vx, Hx, Wx */ 8324 2720 FNIEMOP_STUB(iemOp_vpmaddwd_Vx_Hx_Wx); 8325 /* Opcode 0xf2 0x0f 0xf5 - invalid */ 8326 8327 /** Opcode 0x0f 0xf6 - psadbw Pq, Qq */ 8328 FNIEMOP_STUB(iemOp_psadbw_Pq_Qq); 8329 /** Opcode 0x66 0x0f 0xf6 - vpsadbw Vx, Hx, Wx */ 2721 /* Opcode VEX.F2.0F 0xf5 - invalid */ 2722 2723 /* Opcode VEX.0F 0xf6 - invalid */ 2724 /** Opcode VEX.66.0F 0xf6 - vpsadbw Vx, Hx, Wx */ 8330 2725 FNIEMOP_STUB(iemOp_vpsadbw_Vx_Hx_Wx); 8331 /* Opcode 0xf2 0x0f 0xf6 - invalid */ 8332 8333 /** Opcode 0x0f 0xf7 - maskmovq Pq, Nq */ 8334 FNIEMOP_STUB(iemOp_maskmovq_Pq_Nq); 8335 /** Opcode 0x66 0x0f 0xf7 - vmaskmovdqu Vdq, Udq */ 2726 /* Opcode VEX.F2.0F 0xf6 - invalid */ 2727 2728 /* Opcode VEX.0F 0xf7 - invalid */ 2729 /** Opcode VEX.66.0F 0xf7 - vmaskmovdqu Vdq, Udq */ 8336 2730 FNIEMOP_STUB(iemOp_vmaskmovdqu_Vdq_Udq); 8337 /* Opcode 0xf2 0x0f 0xf7 - invalid */ 8338 8339 /** Opcode 0x0f 0xf8 - psubb Pq, Qq */ 8340 FNIEMOP_STUB(iemOp_psubb_Pq_Qq); 8341 /** Opcode 0x66 0x0f 0xf8 - vpsubb Vx, Hx, W */ 2731 /* Opcode VEX.F2.0F 0xf7 - invalid */ 2732 2733 /* Opcode VEX.0F 0xf8 - invalid */ 2734 /** Opcode VEX.66.0F 0xf8 - vpsubb Vx, Hx, W */ 8342 2735 FNIEMOP_STUB(iemOp_vpsubb_Vx_Hx_W); 
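The handlers in this map, including the commented-out vpmovmskb and vmovntdq bodies above, share one decode step: fetch the ModRM byte and branch on its mod field, where mod == 3 selects the register,register encoding and anything else is a memory operand (vmovntdq accepts only the memory form, vpmovmskb only the register form, with uRexReg ORed in to extend the reg field). The following standalone C sketch illustrates that bit layout; the MODRM_* macros are hypothetical stand-ins mirroring the X86_MODRM_* masks and shifts used above, not VBox definitions.

#include <stdio.h>
#include <stdint.h>

#define MODRM_MOD(b)  (((b) >> 6) & 0x3)  /* bits 7:6 - addressing mode, 3 = register direct */
#define MODRM_REG(b)  (((b) >> 3) & 0x7)  /* bits 5:3 - register operand */
#define MODRM_RM(b)   ((b) & 0x7)         /* bits 2:0 - register or memory operand */

int main(void)
{
    uint8_t bRm = 0xd1; /* 11 010 001b: mod=3, reg=2, rm=1 */
    if (MODRM_MOD(bRm) == 3)
        printf("register form: reg=%d rm=%d\n", MODRM_REG(bRm), MODRM_RM(bRm));
    else
        printf("memory form: reg=%d, r/m plus SIB/displacement give the address\n", MODRM_REG(bRm));
    return 0;
}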
8343 /* Opcode 0xf2 0x0f 0xf8 - invalid */ 8344 8345 /** Opcode 0x0f 0xf9 - psubw Pq, Qq */ 8346 FNIEMOP_STUB(iemOp_psubw_Pq_Qq); 8347 /** Opcode 0x66 0x0f 0xf9 - vpsubw Vx, Hx, Wx */ 2736 /* Opcode VEX.F2.0F 0xf8 - invalid */ 2737 2738 /* Opcode VEX.0F 0xf9 - invalid */ 2739 /** Opcode VEX.66.0F 0xf9 - vpsubw Vx, Hx, Wx */ 8348 2740 FNIEMOP_STUB(iemOp_vpsubw_Vx_Hx_Wx); 8349 /* Opcode 0xf2 0x0f 0xf9 - invalid */ 8350 8351 /** Opcode 0x0f 0xfa - psubd Pq, Qq */ 8352 FNIEMOP_STUB(iemOp_psubd_Pq_Qq); 8353 /** Opcode 0x66 0x0f 0xfa - vpsubd Vx, Hx, Wx */ 2741 /* Opcode VEX.F2.0F 0xf9 - invalid */ 2742 2743 /* Opcode VEX.0F 0xfa - invalid */ 2744 /** Opcode VEX.66.0F 0xfa - vpsubd Vx, Hx, Wx */ 8354 2745 FNIEMOP_STUB(iemOp_vpsubd_Vx_Hx_Wx); 8355 /* Opcode 0xf2 0x0f 0xfa - invalid */ 8356 8357 /** Opcode 0x0f 0xfb - psubq Pq, Qq */ 8358 FNIEMOP_STUB(iemOp_psubq_Pq_Qq); 8359 /** Opcode 0x66 0x0f 0xfb - vpsubq Vx, Hx, W */ 2746 /* Opcode VEX.F2.0F 0xfa - invalid */ 2747 2748 /* Opcode VEX.0F 0xfb - invalid */ 2749 /** Opcode VEX.66.0F 0xfb - vpsubq Vx, Hx, W */ 8360 2750 FNIEMOP_STUB(iemOp_vpsubq_Vx_Hx_W); 8361 /* Opcode 0xf2 0x0f 0xfb - invalid */ 8362 8363 /** Opcode 0x0f 0xfc - paddb Pq, Qq */ 8364 FNIEMOP_STUB(iemOp_paddb_Pq_Qq); 8365 /** Opcode 0x66 0x0f 0xfc - vpaddb Vx, Hx, Wx */ 2751 /* Opcode VEX.F2.0F 0xfb - invalid */ 2752 2753 /* Opcode VEX.0F 0xfc - invalid */ 2754 /** Opcode VEX.66.0F 0xfc - vpaddb Vx, Hx, Wx */ 8366 2755 FNIEMOP_STUB(iemOp_vpaddb_Vx_Hx_Wx); 8367 /* Opcode 0xf2 0x0f 0xfc - invalid */ 8368 8369 /** Opcode 0x0f 0xfd - paddw Pq, Qq */ 8370 FNIEMOP_STUB(iemOp_paddw_Pq_Qq); 8371 /** Opcode 0x66 0x0f 0xfd - vpaddw Vx, Hx, Wx */ 2756 /* Opcode VEX.F2.0F 0xfc - invalid */ 2757 2758 /* Opcode VEX.0F 0xfd - invalid */ 2759 /** Opcode VEX.66.0F 0xfd - vpaddw Vx, Hx, Wx */ 8372 2760 FNIEMOP_STUB(iemOp_vpaddw_Vx_Hx_Wx); 8373 /* Opcode 0xf2 0x0f 0xfd - invalid */ 8374 8375 /** Opcode 0x0f 0xfe - paddd Pq, Qq */ 8376 FNIEMOP_STUB(iemOp_paddd_Pq_Qq); 8377 /** Opcode 0x66 0x0f 0xfe - vpaddd Vx, Hx, W */ 2761 /* Opcode VEX.F2.0F 0xfd - invalid */ 2762 2763 /* Opcode VEX.0F 0xfe - invalid */ 2764 /** Opcode VEX.66.0F 0xfe - vpaddd Vx, Hx, W */ 8378 2765 FNIEMOP_STUB(iemOp_vpaddd_Vx_Hx_W); 8379 /* Opcode 0xf2 0x0f 0xfe - invalid */ 2766 /* Opcode VEX.F2.0F 0xfe - invalid */ 8380 2767 8381 2768 8382 2769 /** Opcode **** 0x0f 0xff - UD0 */ 8383 FNIEMOP_DEF(iemOp_ud0) 2770 FNIEMOP_DEF(iemOp_vud0) 8384 2771 { 8385 IEMOP_MNEMONIC(ud0, "ud0"); 2772 IEMOP_MNEMONIC(vud0, "vud0"); 8386 2773 if (pVCpu->iem.s.enmCpuVendor == CPUMCPUVENDOR_INTEL) 8387 2774 { … 8398 2785 } 8399 2786 8400 8401 8402 /** 8403 * Two byte opcode map, first byte 0x0f. 8404 * 8405 * @remarks The g_apfnVexMap1 table is currently a subset of this one, so please 8406 * check if it needs updating as well when making changes. 8407 */ 8408 IEM_STATIC const PFNIEMOP g_apfnTwoByteMap[] = 8409 { 8410 /* no prefix, 066h prefix f3h prefix, f2h prefix */ 8411 /* 0x00 */ IEMOP_X4(iemOp_Grp6), 8412 /* 0x01 */ IEMOP_X4(iemOp_Grp7), 8413 /* 0x02 */ IEMOP_X4(iemOp_lar_Gv_Ew), 8414 /* 0x03 */ IEMOP_X4(iemOp_lsl_Gv_Ew), 8415 /* 0x04 */ IEMOP_X4(iemOp_Invalid), 8416 /* 0x05 */ IEMOP_X4(iemOp_syscall), 8417 /* 0x06 */ IEMOP_X4(iemOp_clts), 8418 /* 0x07 */ IEMOP_X4(iemOp_sysret), 8419 /* 0x08 */ IEMOP_X4(iemOp_invd), 8420 /* 0x09 */ IEMOP_X4(iemOp_wbinvd), 8421 /* 0x0a */ IEMOP_X4(iemOp_Invalid), 8422 /* 0x0b */ IEMOP_X4(iemOp_ud2), 8423 /* 0x0c */ IEMOP_X4(iemOp_Invalid), 8424 /* 0x0d */ IEMOP_X4(iemOp_nop_Ev_GrpP), 8425 /* 0x0e */ IEMOP_X4(iemOp_femms), 8426 /* 0x0f 
*/ IEMOP_X4(iemOp_3Dnow), 8427 8428 /* 0x10 */ iemOp_vmovups_Vps_Wps, iemOp_vmovupd_Vpd_Wpd, iemOp_movss_Vss_Wss, iemOp_vmovsd_Vx_Hx_Wsd, 8429 /* 0x11 */ iemOp_vmovups_Wps_Vps, iemOp_vmovupd_Wpd_Vpd, iemOp_vmovss_Wss_Hx_Vss, iemOp_vmovsd_Wsd_Hx_Vsd, 8430 /* 0x12 */ iemOp_vmovlps_Vq_Hq_Mq__vmovhlps, iemOp_vmovlpd_Vq_Hq_Mq, iemOp_vmovsldup_Vx_Wx, iemOp_vmovddup_Vx_Wx, 8431 /* 0x13 */ iemOp_vmovlps_Mq_Vq, iemOp_vmovlpd_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8432 /* 0x14 */ iemOp_vunpcklps_Vx_Hx_Wx, iemOp_vunpcklpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8433 /* 0x15 */ iemOp_vunpckhps_Vx_Hx_Wx, iemOp_vunpckhpd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8434 /* 0x16 */ iemOp_vmovhpsv1_Vdq_Hq_Mq__vmovlhps_Vdq_Hq_Uq, iemOp_vmovhpdv1_Vdq_Hq_Mq, iemOp_vmovshdup_Vx_Wx, iemOp_InvalidNeedRM, 8435 /* 0x17 */ iemOp_vmovhpsv1_Mq_Vq, iemOp_vmovhpdv1_Mq_Vq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8436 /* 0x18 */ IEMOP_X4(iemOp_prefetch_Grp16), 8437 /* 0x19 */ IEMOP_X4(iemOp_nop_Ev), 8438 /* 0x1a */ IEMOP_X4(iemOp_nop_Ev), 8439 /* 0x1b */ IEMOP_X4(iemOp_nop_Ev), 8440 /* 0x1c */ IEMOP_X4(iemOp_nop_Ev), 8441 /* 0x1d */ IEMOP_X4(iemOp_nop_Ev), 8442 /* 0x1e */ IEMOP_X4(iemOp_nop_Ev), 8443 /* 0x1f */ IEMOP_X4(iemOp_nop_Ev), 8444 8445 /* 0x20 */ iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, iemOp_mov_Rd_Cd, 8446 /* 0x21 */ iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, iemOp_mov_Rd_Dd, 8447 /* 0x22 */ iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, iemOp_mov_Cd_Rd, 8448 /* 0x23 */ iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, iemOp_mov_Dd_Rd, 8449 /* 0x24 */ iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, iemOp_mov_Rd_Td, 8450 /* 0x25 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, 8451 /* 0x26 */ iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, iemOp_mov_Td_Rd, 8452 /* 0x27 */ iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, iemOp_Invalid, 8453 /* 0x28 */ iemOp_vmovaps_Vps_Wps, iemOp_vmovapd_Vpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8454 /* 0x29 */ iemOp_vmovaps_Wps_Vps, iemOp_vmovapd_Wpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8455 /* 0x2a */ iemOp_cvtpi2ps_Vps_Qpi, iemOp_cvtpi2pd_Vpd_Qpi, iemOp_vcvtsi2ss_Vss_Hss_Ey, iemOp_vcvtsi2sd_Vsd_Hsd_Ey, 8456 /* 0x2b */ iemOp_vmovntps_Mps_Vps, iemOp_vmovntpd_Mpd_Vpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8457 /* 0x2c */ iemOp_cvttps2pi_Ppi_Wps, iemOp_cvttpd2pi_Ppi_Wpd, iemOp_vcvttss2si_Gy_Wss, iemOp_vcvttsd2si_Gy_Wsd, 8458 /* 0x2d */ iemOp_cvtps2pi_Ppi_Wps, iemOp_cvtpd2pi_Qpi_Wpd, iemOp_vcvtss2si_Gy_Wss, iemOp_vcvtsd2si_Gy_Wsd, 8459 /* 0x2e */ iemOp_vucomiss_Vss_Wss, iemOp_vucomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8460 /* 0x2f */ iemOp_vcomiss_Vss_Wss, iemOp_vcomisd_Vsd_Wsd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8461 8462 /* 0x30 */ IEMOP_X4(iemOp_wrmsr), 8463 /* 0x31 */ IEMOP_X4(iemOp_rdtsc), 8464 /* 0x32 */ IEMOP_X4(iemOp_rdmsr), 8465 /* 0x33 */ IEMOP_X4(iemOp_rdpmc), 8466 /* 0x34 */ IEMOP_X4(iemOp_sysenter), 8467 /* 0x35 */ IEMOP_X4(iemOp_sysexit), 8468 /* 0x36 */ IEMOP_X4(iemOp_Invalid), 8469 /* 0x37 */ IEMOP_X4(iemOp_getsec), 8470 /* 0x38 */ IEMOP_X4(iemOp_3byte_Esc_A4), 8471 /* 0x39 */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM), 8472 /* 0x3a */ IEMOP_X4(iemOp_3byte_Esc_A5), 8473 /* 0x3b */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8), 8474 /* 0x3c */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM), 8475 /* 0x3d */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRM), 8476 /* 0x3e */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8), 8477 /* 0x3f */ IEMOP_X4(iemOp_InvalidNeed3ByteEscRMImm8), 8478 8479 /* 0x40 */ IEMOP_X4(iemOp_cmovo_Gv_Ev), 8480 /* 
0x41 */ IEMOP_X4(iemOp_cmovno_Gv_Ev), 8481 /* 0x42 */ IEMOP_X4(iemOp_cmovc_Gv_Ev), 8482 /* 0x43 */ IEMOP_X4(iemOp_cmovnc_Gv_Ev), 8483 /* 0x44 */ IEMOP_X4(iemOp_cmove_Gv_Ev), 8484 /* 0x45 */ IEMOP_X4(iemOp_cmovne_Gv_Ev), 8485 /* 0x46 */ IEMOP_X4(iemOp_cmovbe_Gv_Ev), 8486 /* 0x47 */ IEMOP_X4(iemOp_cmovnbe_Gv_Ev), 8487 /* 0x48 */ IEMOP_X4(iemOp_cmovs_Gv_Ev), 8488 /* 0x49 */ IEMOP_X4(iemOp_cmovns_Gv_Ev), 8489 /* 0x4a */ IEMOP_X4(iemOp_cmovp_Gv_Ev), 8490 /* 0x4b */ IEMOP_X4(iemOp_cmovnp_Gv_Ev), 8491 /* 0x4c */ IEMOP_X4(iemOp_cmovl_Gv_Ev), 8492 /* 0x4d */ IEMOP_X4(iemOp_cmovnl_Gv_Ev), 8493 /* 0x4e */ IEMOP_X4(iemOp_cmovle_Gv_Ev), 8494 /* 0x4f */ IEMOP_X4(iemOp_cmovnle_Gv_Ev), 8495 8496 /* 0x50 */ iemOp_vmovmskps_Gy_Ups, iemOp_vmovmskpd_Gy_Upd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8497 /* 0x51 */ iemOp_vsqrtps_Vps_Wps, iemOp_vsqrtpd_Vpd_Wpd, iemOp_vsqrtss_Vss_Hss_Wss, iemOp_vsqrtsd_Vsd_Hsd_Wsd, 8498 /* 0x52 */ iemOp_vrsqrtps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrsqrtss_Vss_Hss_Wss, iemOp_InvalidNeedRM, 8499 /* 0x53 */ iemOp_vrcpps_Vps_Wps, iemOp_InvalidNeedRM, iemOp_vrcpss_Vss_Hss_Wss, iemOp_InvalidNeedRM, 8500 /* 0x54 */ iemOp_vandps_Vps_Hps_Wps, iemOp_vandpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8501 /* 0x55 */ iemOp_vandnps_Vps_Hps_Wps, iemOp_vandnpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8502 /* 0x56 */ iemOp_vorps_Vps_Hps_Wps, iemOp_vorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8503 /* 0x57 */ iemOp_vxorps_Vps_Hps_Wps, iemOp_vxorpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8504 /* 0x58 */ iemOp_vaddps_Vps_Hps_Wps, iemOp_vaddpd_Vpd_Hpd_Wpd, iemOp_vaddss_Vss_Hss_Wss, iemOp_vaddsd_Vsd_Hsd_Wsd, 8505 /* 0x59 */ iemOp_vmulps_Vps_Hps_Wps, iemOp_vmulpd_Vpd_Hpd_Wpd, iemOp_vmulss_Vss_Hss_Wss, iemOp_vmulsd_Vsd_Hsd_Wsd, 8506 /* 0x5a */ iemOp_vcvtps2pd_Vpd_Wps, iemOp_vcvtpd2ps_Vps_Wpd, iemOp_vcvtss2sd_Vsd_Hx_Wss, iemOp_vcvtsd2ss_Vss_Hx_Wsd, 8507 /* 0x5b */ iemOp_vcvtdq2ps_Vps_Wdq, iemOp_vcvtps2dq_Vdq_Wps, iemOp_vcvttps2dq_Vdq_Wps, iemOp_InvalidNeedRM, 8508 /* 0x5c */ iemOp_vsubps_Vps_Hps_Wps, iemOp_vsubpd_Vpd_Hpd_Wpd, iemOp_vsubss_Vss_Hss_Wss, iemOp_vsubsd_Vsd_Hsd_Wsd, 8509 /* 0x5d */ iemOp_vminps_Vps_Hps_Wps, iemOp_vminpd_Vpd_Hpd_Wpd, iemOp_vminss_Vss_Hss_Wss, iemOp_vminsd_Vsd_Hsd_Wsd, 8510 /* 0x5e */ iemOp_vdivps_Vps_Hps_Wps, iemOp_vdivpd_Vpd_Hpd_Wpd, iemOp_vdivss_Vss_Hss_Wss, iemOp_vdivsd_Vsd_Hsd_Wsd, 8511 /* 0x5f */ iemOp_vmaxps_Vps_Hps_Wps, iemOp_vmaxpd_Vpd_Hpd_Wpd, iemOp_vmaxss_Vss_Hss_Wss, iemOp_vmaxsd_Vsd_Hsd_Wsd, 8512 8513 /* 0x60 */ iemOp_punpcklbw_Pq_Qd, iemOp_vpunpcklbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8514 /* 0x61 */ iemOp_punpcklwd_Pq_Qd, iemOp_vpunpcklwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8515 /* 0x62 */ iemOp_punpckldq_Pq_Qd, iemOp_vpunpckldq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8516 /* 0x63 */ iemOp_packsswb_Pq_Qq, iemOp_vpacksswb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8517 /* 0x64 */ iemOp_pcmpgtb_Pq_Qq, iemOp_vpcmpgtb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8518 /* 0x65 */ iemOp_pcmpgtw_Pq_Qq, iemOp_vpcmpgtw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8519 /* 0x66 */ iemOp_pcmpgtd_Pq_Qq, iemOp_vpcmpgtd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8520 /* 0x67 */ iemOp_packuswb_Pq_Qq, iemOp_vpackuswb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8521 /* 0x68 */ iemOp_punpckhbw_Pq_Qd, iemOp_vpunpckhbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8522 /* 0x69 */ iemOp_punpckhwd_Pq_Qd, iemOp_vpunpckhwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8523 /* 
0x6a */ iemOp_punpckhdq_Pq_Qd, iemOp_vpunpckhdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8524 /* 0x6b */ iemOp_packssdw_Pq_Qd, iemOp_vpackssdw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8525 /* 0x6c */ iemOp_InvalidNeedRM, iemOp_vpunpcklqdq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8526 /* 0x6d */ iemOp_InvalidNeedRM, iemOp_vpunpckhqdq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8527 /* 0x6e */ iemOp_movd_q_Pd_Ey, iemOp_vmovd_q_Vy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8528 /* 0x6f */ iemOp_movq_Pq_Qq, iemOp_vmovdqa_Vx_Wx, iemOp_vmovdqu_Vx_Wx, iemOp_InvalidNeedRM, 8529 8530 /* 0x70 */ iemOp_pshufw_Pq_Qq_Ib, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib, 8531 /* 0x71 */ IEMOP_X4(iemOp_Grp12), 8532 /* 0x72 */ IEMOP_X4(iemOp_Grp13), 8533 /* 0x73 */ IEMOP_X4(iemOp_Grp14), 8534 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8535 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8536 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8537 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8538 8539 /* 0x78 */ iemOp_vmread_Ey_Gy, iemOp_AmdGrp17, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8540 /* 0x79 */ iemOp_vmwrite_Gy_Ey, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8541 /* 0x7a */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8542 /* 0x7b */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8543 /* 0x7c */ iemOp_InvalidNeedRM, iemOp_vhaddpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhaddps_Vps_Hps_Wps, 8544 /* 0x7d */ iemOp_InvalidNeedRM, iemOp_vhsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vhsubps_Vps_Hps_Wps, 8545 /* 0x7e */ iemOp_movd_q_Ey_Pd, iemOp_vmovd_q_Ey_Vy, iemOp_vmovq_Vq_Wq, iemOp_InvalidNeedRM, 8546 /* 0x7f */ iemOp_movq_Qq_Pq, iemOp_vmovdqa_Wx_Vx, iemOp_vmovdqu_Wx_Vx, iemOp_InvalidNeedRM, 8547 8548 /* 0x80 */ IEMOP_X4(iemOp_jo_Jv), 8549 /* 0x81 */ IEMOP_X4(iemOp_jno_Jv), 8550 /* 0x82 */ IEMOP_X4(iemOp_jc_Jv), 8551 /* 0x83 */ IEMOP_X4(iemOp_jnc_Jv), 8552 /* 0x84 */ IEMOP_X4(iemOp_je_Jv), 8553 /* 0x85 */ IEMOP_X4(iemOp_jne_Jv), 8554 /* 0x86 */ IEMOP_X4(iemOp_jbe_Jv), 8555 /* 0x87 */ IEMOP_X4(iemOp_jnbe_Jv), 8556 /* 0x88 */ IEMOP_X4(iemOp_js_Jv), 8557 /* 0x89 */ IEMOP_X4(iemOp_jns_Jv), 8558 /* 0x8a */ IEMOP_X4(iemOp_jp_Jv), 8559 /* 0x8b */ IEMOP_X4(iemOp_jnp_Jv), 8560 /* 0x8c */ IEMOP_X4(iemOp_jl_Jv), 8561 /* 0x8d */ IEMOP_X4(iemOp_jnl_Jv), 8562 /* 0x8e */ IEMOP_X4(iemOp_jle_Jv), 8563 /* 0x8f */ IEMOP_X4(iemOp_jnle_Jv), 8564 8565 /* 0x90 */ IEMOP_X4(iemOp_seto_Eb), 8566 /* 0x91 */ IEMOP_X4(iemOp_setno_Eb), 8567 /* 0x92 */ IEMOP_X4(iemOp_setc_Eb), 8568 /* 0x93 */ IEMOP_X4(iemOp_setnc_Eb), 8569 /* 0x94 */ IEMOP_X4(iemOp_sete_Eb), 8570 /* 0x95 */ IEMOP_X4(iemOp_setne_Eb), 8571 /* 0x96 */ IEMOP_X4(iemOp_setbe_Eb), 8572 /* 0x97 */ IEMOP_X4(iemOp_setnbe_Eb), 8573 /* 0x98 */ IEMOP_X4(iemOp_sets_Eb), 8574 /* 0x99 */ IEMOP_X4(iemOp_setns_Eb), 8575 /* 0x9a */ IEMOP_X4(iemOp_setp_Eb), 8576 /* 0x9b */ IEMOP_X4(iemOp_setnp_Eb), 8577 /* 0x9c */ IEMOP_X4(iemOp_setl_Eb), 8578 /* 0x9d */ IEMOP_X4(iemOp_setnl_Eb), 8579 /* 0x9e */ IEMOP_X4(iemOp_setle_Eb), 8580 /* 0x9f */ IEMOP_X4(iemOp_setnle_Eb), 8581 8582 /* 0xa0 */ IEMOP_X4(iemOp_push_fs), 8583 /* 0xa1 */ IEMOP_X4(iemOp_pop_fs), 8584 /* 0xa2 */ IEMOP_X4(iemOp_cpuid), 8585 /* 0xa3 */ IEMOP_X4(iemOp_bt_Ev_Gv), 8586 /* 0xa4 */ 
IEMOP_X4(iemOp_shld_Ev_Gv_Ib), 8587 /* 0xa5 */ IEMOP_X4(iemOp_shld_Ev_Gv_CL), 8588 /* 0xa6 */ IEMOP_X4(iemOp_InvalidNeedRM), 8589 /* 0xa7 */ IEMOP_X4(iemOp_InvalidNeedRM), 8590 /* 0xa8 */ IEMOP_X4(iemOp_push_gs), 8591 /* 0xa9 */ IEMOP_X4(iemOp_pop_gs), 8592 /* 0xaa */ IEMOP_X4(iemOp_rsm), 8593 /* 0xab */ IEMOP_X4(iemOp_bts_Ev_Gv), 8594 /* 0xac */ IEMOP_X4(iemOp_shrd_Ev_Gv_Ib), 8595 /* 0xad */ IEMOP_X4(iemOp_shrd_Ev_Gv_CL), 8596 /* 0xae */ IEMOP_X4(iemOp_Grp15), 8597 /* 0xaf */ IEMOP_X4(iemOp_imul_Gv_Ev), 8598 8599 /* 0xb0 */ IEMOP_X4(iemOp_cmpxchg_Eb_Gb), 8600 /* 0xb1 */ IEMOP_X4(iemOp_cmpxchg_Ev_Gv), 8601 /* 0xb2 */ IEMOP_X4(iemOp_lss_Gv_Mp), 8602 /* 0xb3 */ IEMOP_X4(iemOp_btr_Ev_Gv), 8603 /* 0xb4 */ IEMOP_X4(iemOp_lfs_Gv_Mp), 8604 /* 0xb5 */ IEMOP_X4(iemOp_lgs_Gv_Mp), 8605 /* 0xb6 */ IEMOP_X4(iemOp_movzx_Gv_Eb), 8606 /* 0xb7 */ IEMOP_X4(iemOp_movzx_Gv_Ew), 8607 /* 0xb8 */ iemOp_jmpe, iemOp_InvalidNeedRM, iemOp_popcnt_Gv_Ev, iemOp_InvalidNeedRM, 8608 /* 0xb9 */ IEMOP_X4(iemOp_Grp10), 8609 /* 0xba */ IEMOP_X4(iemOp_Grp8), 8610 /* 0xbb */ IEMOP_X4(iemOp_btc_Ev_Gv), // 0xf3? 8611 /* 0xbc */ iemOp_bsf_Gv_Ev, iemOp_bsf_Gv_Ev, iemOp_tzcnt_Gv_Ev, iemOp_bsf_Gv_Ev, 8612 /* 0xbd */ iemOp_bsr_Gv_Ev, iemOp_bsr_Gv_Ev, iemOp_lzcnt_Gv_Ev, iemOp_bsr_Gv_Ev, 8613 /* 0xbe */ IEMOP_X4(iemOp_movsx_Gv_Eb), 8614 /* 0xbf */ IEMOP_X4(iemOp_movsx_Gv_Ew), 8615 8616 /* 0xc0 */ IEMOP_X4(iemOp_xadd_Eb_Gb), 8617 /* 0xc1 */ IEMOP_X4(iemOp_xadd_Ev_Gv), 8618 /* 0xc2 */ iemOp_vcmpps_Vps_Hps_Wps_Ib, iemOp_vcmppd_Vpd_Hpd_Wpd_Ib, iemOp_vcmpss_Vss_Hss_Wss_Ib, iemOp_vcmpsd_Vsd_Hsd_Wsd_Ib, 8619 /* 0xc3 */ iemOp_movnti_My_Gy, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8620 /* 0xc4 */ iemOp_pinsrw_Pq_RyMw_Ib, iemOp_vpinsrw_Vdq_Hdq_RyMw_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8621 /* 0xc5 */ iemOp_pextrw_Gd_Nq_Ib, iemOp_vpextrw_Gd_Udq_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8622 /* 0xc6 */ iemOp_vshufps_Vps_Hps_Wps_Ib, iemOp_vshufpd_Vpd_Hpd_Wpd_Ib, iemOp_InvalidNeedRMImm8, iemOp_InvalidNeedRMImm8, 8623 /* 0xc7 */ IEMOP_X4(iemOp_Grp9), 8624 /* 0xc8 */ IEMOP_X4(iemOp_bswap_rAX_r8), 8625 /* 0xc9 */ IEMOP_X4(iemOp_bswap_rCX_r9), 8626 /* 0xca */ IEMOP_X4(iemOp_bswap_rDX_r10), 8627 /* 0xcb */ IEMOP_X4(iemOp_bswap_rBX_r11), 8628 /* 0xcc */ IEMOP_X4(iemOp_bswap_rSP_r12), 8629 /* 0xcd */ IEMOP_X4(iemOp_bswap_rBP_r13), 8630 /* 0xce */ IEMOP_X4(iemOp_bswap_rSI_r14), 8631 /* 0xcf */ IEMOP_X4(iemOp_bswap_rDI_r15), 8632 8633 /* 0xd0 */ iemOp_InvalidNeedRM, iemOp_vaddsubpd_Vpd_Hpd_Wpd, iemOp_InvalidNeedRM, iemOp_vaddsubps_Vps_Hps_Wps, 8634 /* 0xd1 */ iemOp_psrlw_Pq_Qq, iemOp_vpsrlw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8635 /* 0xd2 */ iemOp_psrld_Pq_Qq, iemOp_vpsrld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8636 /* 0xd3 */ iemOp_psrlq_Pq_Qq, iemOp_vpsrlq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8637 /* 0xd4 */ iemOp_paddq_Pq_Qq, iemOp_vpaddq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8638 /* 0xd5 */ iemOp_pmullw_Pq_Qq, iemOp_vpmullw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8639 /* 0xd6 */ iemOp_InvalidNeedRM, iemOp_vmovq_Wq_Vq, iemOp_movq2dq_Vdq_Nq, iemOp_movdq2q_Pq_Uq, 8640 /* 0xd7 */ iemOp_pmovmskb_Gd_Nq, iemOp_vpmovmskb_Gd_Ux, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8641 /* 0xd8 */ iemOp_psubusb_Pq_Qq, iemOp_vpsubusb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8642 /* 0xd9 */ iemOp_psubusw_Pq_Qq, iemOp_vpsubusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8643 /* 0xda */ iemOp_pminub_Pq_Qq, iemOp_vpminub_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8644 /* 0xdb */ 
iemOp_pand_Pq_Qq, iemOp_vpand_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8645 /* 0xdc */ iemOp_paddusb_Pq_Qq, iemOp_vpaddusb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8646 /* 0xdd */ iemOp_paddusw_Pq_Qq, iemOp_vpaddusw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8647 /* 0xde */ iemOp_pmaxub_Pq_Qq, iemOp_vpmaxub_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8648 /* 0xdf */ iemOp_pandn_Pq_Qq, iemOp_vpandn_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8649 8650 /* 0xe0 */ iemOp_pavgb_Pq_Qq, iemOp_vpavgb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8651 /* 0xe1 */ iemOp_psraw_Pq_Qq, iemOp_vpsraw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8652 /* 0xe2 */ iemOp_psrad_Pq_Qq, iemOp_vpsrad_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8653 /* 0xe3 */ iemOp_pavgw_Pq_Qq, iemOp_vpavgw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8654 /* 0xe4 */ iemOp_pmulhuw_Pq_Qq, iemOp_vpmulhuw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8655 /* 0xe5 */ iemOp_pmulhw_Pq_Qq, iemOp_vpmulhw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8656 /* 0xe6 */ iemOp_InvalidNeedRM, iemOp_vcvttpd2dq_Vx_Wpd, iemOp_vcvtdq2pd_Vx_Wpd, iemOp_vcvtpd2dq_Vx_Wpd, 8657 /* 0xe7 */ iemOp_movntq_Mq_Pq, iemOp_vmovntdq_Mx_Vx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8658 /* 0xe8 */ iemOp_psubsb_Pq_Qq, iemOp_vpsubsb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8659 /* 0xe9 */ iemOp_psubsw_Pq_Qq, iemOp_vpsubsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8660 /* 0xea */ iemOp_pminsw_Pq_Qq, iemOp_vpminsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8661 /* 0xeb */ iemOp_por_Pq_Qq, iemOp_vpor_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8662 /* 0xec */ iemOp_paddsb_Pq_Qq, iemOp_vpaddsb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8663 /* 0xed */ iemOp_paddsw_Pq_Qq, iemOp_vpaddsw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8664 /* 0xee */ iemOp_pmaxsw_Pq_Qq, iemOp_vpmaxsw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8665 /* 0xef */ iemOp_pxor_Pq_Qq, iemOp_vpxor_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8666 8667 /* 0xf0 */ iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_vlddqu_Vx_Mx, 8668 /* 0xf1 */ iemOp_psllw_Pq_Qq, iemOp_vpsllw_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8669 /* 0xf2 */ iemOp_pslld_Pq_Qq, iemOp_vpslld_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8670 /* 0xf3 */ iemOp_psllq_Pq_Qq, iemOp_vpsllq_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8671 /* 0xf4 */ iemOp_pmuludq_Pq_Qq, iemOp_vpmuludq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8672 /* 0xf5 */ iemOp_pmaddwd_Pq_Qq, iemOp_vpmaddwd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8673 /* 0xf6 */ iemOp_psadbw_Pq_Qq, iemOp_vpsadbw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8674 /* 0xf7 */ iemOp_maskmovq_Pq_Nq, iemOp_vmaskmovdqu_Vdq_Udq, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8675 /* 0xf8 */ iemOp_psubb_Pq_Qq, iemOp_vpsubb_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8676 /* 0xf9 */ iemOp_psubw_Pq_Qq, iemOp_vpsubw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8677 /* 0xfa */ iemOp_psubd_Pq_Qq, iemOp_vpsubd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8678 /* 0xfb */ iemOp_psubq_Pq_Qq, iemOp_vpsubq_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8679 /* 0xfc */ iemOp_paddb_Pq_Qq, iemOp_vpaddb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8680 /* 0xfd */ iemOp_paddw_Pq_Qq, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8681 /* 0xfe */ iemOp_paddd_Pq_Qq, iemOp_vpaddd_Vx_Hx_W, 
iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8682 /* 0xff */ IEMOP_X4(iemOp_ud0), 8683 }; 8684 AssertCompile(RT_ELEMENTS(g_apfnTwoByteMap) == 1024); 8685 2787 8686 2788 … 8705 2807 /* 0x09 */ IEMOP_X4(iemOp_InvalidNeedRM), 8706 2808 /* 0x0a */ IEMOP_X4(iemOp_InvalidNeedRM), 8707 /* 0x0b */ IEMOP_X4(iemOp_InvalidNeedRM), 2809 /* 0x0b */ IEMOP_X4(iemOp_vud2), /* ?? */ 8708 2810 /* 0x0c */ IEMOP_X4(iemOp_InvalidNeedRM), 8709 2811 /* 0x0d */ IEMOP_X4(iemOp_InvalidNeedRM), … 8814 2916 8815 2917 /* 0x70 */ iemOp_InvalidNeedRM, iemOp_vpshufd_Vx_Wx_Ib, iemOp_vpshufhw_Vx_Wx_Ib, iemOp_vpshuflw_Vx_Wx_Ib, 8816 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_Grp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8817 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_Grp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8818 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_Grp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8819 /* 0x74 */ iemOp_pcmpeqb_Pq_Qq, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8820 /* 0x75 */ iemOp_pcmpeqw_Pq_Qq, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8821 /* 0x76 */ iemOp_pcmpeqd_Pq_Qq, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8822 /* 0x77 */ iemOp_emms__vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2918 /* 0x71 */ iemOp_InvalidNeedRM, iemOp_VGrp12, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2919 /* 0x72 */ iemOp_InvalidNeedRM, iemOp_VGrp13, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2920 /* 0x73 */ iemOp_InvalidNeedRM, iemOp_VGrp14, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2921 /* 0x74 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqb_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2922 /* 0x75 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2923 /* 0x76 */ iemOp_InvalidNeedRM, iemOp_vpcmpeqd_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 2924 /* 0x77 */ iemOp_vzeroupperv__vzeroallv, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8823 2925 /* 0x78 */ IEMOP_X4(iemOp_InvalidNeedRM), 8824 2926 /* 0x79 */ IEMOP_X4(iemOp_InvalidNeedRM), … 8964 3066 /* 0xfd */ iemOp_InvalidNeedRM, iemOp_vpaddw_Vx_Hx_Wx, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8965 3067 /* 0xfe */ iemOp_InvalidNeedRM, iemOp_vpaddd_Vx_Hx_W, iemOp_InvalidNeedRM, iemOp_InvalidNeedRM, 8966 /* 0xff */ IEMOP_X4(iemOp_ud0), 3068 /* 0xff */ IEMOP_X4(iemOp_vud0) /* ?? */ 8967 3069 }; 8968 3070 AssertCompile(RT_ELEMENTS(g_apfnVexMap1) == 1024); -
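Both maps end on the same shape: 256 opcodes times the four prefix columns (no prefix, 0x66, 0xF3, 0xF2) gives the 1024 entries checked by the AssertCompile, and a lookup is the flat index opcode * 4 + prefix. Below is a minimal standalone C sketch of that layout, not VBox code: the handler names and the dispatch() helper are invented, and only the 0xef row is populated.

#include <stdio.h>
#include <stdint.h>

typedef int (*PFNOP)(void);

static int opInvalid(void) { printf("invalid encoding\n"); return -1; }
static int opVpxor(void)   { printf("vpxor\n"); return 0; }

/* Flat map: entry index = opcode * 4 + prefix (0 = none, 1 = 0x66, 2 = 0xf3, 3 = 0xf2). */
static PFNOP g_apfnMap[256 * 4] = {
    [0xef * 4 + 0] = opInvalid, /* no prefix           */
    [0xef * 4 + 1] = opVpxor,   /* 0x66 prefix - vpxor */
    [0xef * 4 + 2] = opInvalid, /* 0xf3 prefix         */
    [0xef * 4 + 3] = opInvalid, /* 0xf2 prefix         */
};

static int dispatch(uint8_t bOpcode, unsigned idxPrefix)
{
    PFNOP pfn = g_apfnMap[(unsigned)bOpcode * 4 + idxPrefix];
    return pfn ? pfn() : opInvalid(); /* rows not listed fall back to invalid */
}

int main(void)
{
    return dispatch(0xef, 1); /* 66-prefixed map-1 0xef dispatches to the vpxor handler */
}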
trunk/src/VBox/VMM/include/IEMInternal.h
r66465 r66469 41 41 # define IEM_STATIC static 42 42 #endif 43 44 /** @def IEM_WITH_VEX 45 * Enables the VEX decoding. */ 46 #define IEM_WITH_VEX 43 47 44 48 /** @def IEM_VERIFICATION_MODE_FULL
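The new IEM_WITH_VEX define in IEMInternal.h is what switches the VEX decoding above on: the map is only compiled in and dispatched when the macro is defined. A minimal standalone C sketch of that compile-time gating pattern follows; MY_WITH_VEX and decodeVexMap1() are made-up names standing in for the real IEM symbols, and the fallback behaviour shown is an assumption for illustration.

#include <stdio.h>

#define MY_WITH_VEX     /* comment this out to build the fallback path instead */

#ifdef MY_WITH_VEX
static int decodeVexMap1(unsigned char bOpcode)
{
    printf("decoding VEX map 1 opcode %#x\n", bOpcode);
    return 0;  /* stand-in for a successful dispatch */
}
#else
static int decodeVexMap1(unsigned char bOpcode)
{
    (void)bOpcode;
    printf("VEX decoding not compiled in\n");
    return -1; /* stand-in for an instruction-not-implemented status */
}
#endif

int main(void)
{
    return decodeVexMap1(0xef);
}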