- Timestamp: Mar 12, 2023, 1:27:21 AM
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 1 added, 6 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsCommon.cpp.h (r98910 → r98916)

Removed the six table-driven common workers for the binary ALU instructions
(ADD, OR, ADC, SBB, AND, SUB, XOR, CMP, TEST), all of which took a
PCIEMOPBINSIZES implementation table as their only argument:

- iemOpHlpBinaryOperator_rm_r8  - byte memory/register destination
- iemOpHlpBinaryOperator_rm_rv  - word/dword/qword memory/register destination
- iemOpHlpBinaryOperator_r8_rm  - byte register destination
- iemOpHlpBinaryOperator_rv_rm  - word/dword/qword register destination
- iemOpHlpBinaryOperator_AL_Ib  - AL with a byte immediate
- iemOpHlpBinaryOperator_rAX_Iz - AX/EAX/RAX with a word/dword immediate

Each worker decoded the ModR/M byte where applicable, handled both the
register and the memory forms, picked pImpl->pfnNormalUxx or
pImpl->pfnLockedUxx depending on the LOCK prefix, mapped the memory
destination with IEM_ACCESS_DATA_RW (or IEM_ACCESS_DATA_R for CMP/TEST,
which have no locked variant), committed EFLAGS after the memory commit,
and cleared the high half of a 32-bit destination register except for CMP
and TEST. The equivalent bodies are now provided as IEMOP_BODY_BINARY_*
macros (see the newly added IEMAllInstructionsCommonBodyMacros.h and the
IEMAllInstructionsOneByte.cpp.h changes below), so each opcode handler can
name the concrete iemAImpl_* functions directly instead of passing a table.
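For orientation, every removed worker implemented the same pattern: one
generic body, parameterized by a per-instruction table of normal and locked
function pointers resolved at run time. The standalone sketch below
illustrates that dispatch style in plain C; the names (BINOPSIZES,
binaryWorkerU32, addU32) are hypothetical stand-ins and the toy flag handling
is not the real EFLAGS logic, nor are the IEM_MC_* microcode macros used.

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-in for the per-instruction function table that the
     * removed iemOpHlpBinaryOperator_* workers received as 'pImpl'. */
    typedef void (*BINOPU32)(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags);

    typedef struct BINOPSIZES
    {
        BINOPU32 pfnNormalU32;  /* plain variant */
        BINOPU32 pfnLockedU32;  /* LOCK-prefixed variant (NULL for CMP/TEST) */
    } BINOPSIZES;

    static void addU32(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
    {
        *puDst   += uSrc;
        *pfEFlags = (*puDst == 0) ? 0x40 /*ZF*/ : 0;  /* toy flag handling */
    }

    static const BINOPSIZES g_AddImpl = { addU32, addU32 /* pretend-locked */ };

    /* One generic worker serves every instruction; the table decides behaviour. */
    static void binaryWorkerU32(const BINOPSIZES *pImpl, int fLockPrefix,
                                uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
    {
        if (fLockPrefix && pImpl->pfnLockedU32)
            pImpl->pfnLockedU32(puDst, uSrc, pfEFlags);
        else
            pImpl->pfnNormalU32(puDst, uSrc, pfEFlags);
    }

    int main(void)
    {
        uint32_t uDst = 5, fEFlags = 0;
        binaryWorkerU32(&g_AddImpl, 0 /*no LOCK*/, &uDst, 3, &fEFlags);
        printf("dst=%u eflags=%#x\n", (unsigned)uDst, (unsigned)fEFlags);
        return 0;
    }

The rest of the changeset removes this run-time indirection for the common
ALU opcodes: the new macro bodies are instantiated with the concrete
iemAImpl_* functions, so no table has to be passed around at decode time.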
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsInterpretOnly.cpp (r98800 → r98916)

Include the new common body macro header ahead of the shared instruction code:

    /*
     * Include common bits.
     */
    #include "IEMAllInstructionsCommonBodyMacros.h"
    #include "IEMAllInstructionsCommon.cpp.h"
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h (r98880 → r98916)

Added instruction-body macros for the binary ALU instructions, replacing the
removed common workers:

- IEMOP_BODY_BINARY_rm_r8(a_fnNormalU8, a_fRW), completed by either
  IEMOP_BODY_BINARY_rm_r8_NO_LOCK() or IEMOP_BODY_BINARY_rm_r8_LOCKED(a_fnLockedU8)
- IEMOP_BODY_BINARY_r8_rm(a_fnNormalU8)
- IEMOP_BODY_BINARY_rm_rv(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fRW),
  completed by either IEMOP_BODY_BINARY_rm_rv_NO_LOCK() or
  IEMOP_BODY_BINARY_rm_rv_LOCKED(a_fnLockedU16, a_fnLockedU32, a_fnLockedU64)
- IEMOP_BODY_BINARY_AL_Ib(a_fnNormalU8)
- IEMOP_BODY_BINARY_rAX_Iz(a_fnNormalU16, a_fnNormalU32, a_fnNormalU64, a_fModifiesDstReg)

The rm_r8 and rm_rv macros emit the register form and the non-locked memory
form, then leave an else branch open (ending in "(void)0") for the
LOCK-prefixed case; the _NO_LOCK tail rejects the LOCK prefix via
IEMOP_RAISE_INVALID_LOCK_PREFIX(), while the _LOCKED tail maps the memory
operand with IEM_ACCESS_DATA_RW and calls the locked a_fnLockedUxx
implementation. The 32-bit register paths clear the high half of the
destination only when the instruction modifies it, i.e. when
a_fRW == IEM_ACCESS_DATA_RW or a_fModifiesDstReg is set, which excludes CMP
and TEST.

All one-byte ADD, OR, ADC, SBB, AND, SUB, XOR, CMP and TEST opcode handlers
were rewritten to use these macros with the concrete iemAImpl_* functions
instead of calling the old workers with a g_iemAImpl_* table. For example,
iemOp_add_Eb_Gb changed from

        return FNIEMOP_CALL_1(iemOpHlpBinaryOperator_rm_r8, &g_iemAImpl_add);

to

        IEMOP_BODY_BINARY_rm_r8(       iemAImpl_add_u8, IEM_ACCESS_DATA_RW);
        IEMOP_BODY_BINARY_rm_r8_LOCKED(iemAImpl_add_u8_locked);

whereas CMP and TEST, which do not allow a LOCK prefix, use the _NO_LOCK tail
and IEM_ACCESS_DATA_R, e.g. iemOp_cmp_Eb_Gb:

        IEMOP_BODY_BINARY_rm_r8(iemAImpl_cmp_u8, IEM_ACCESS_DATA_R);
        IEMOP_BODY_BINARY_rm_r8_NO_LOCK();

The Gv,Ev handlers use IEMOP_BODY_BINARY_rv_rm(a_fnNormalU16, a_fnNormalU32,
a_fnNormalU64, a_fModifiesDstReg), whose definition is not part of this hunk
(presumably it lives in the newly added IEMAllInstructionsCommonBodyMacros.h).
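The _NO_LOCK/_LOCKED tails work because the opening macro deliberately leaves
an if/else block unterminated and ends in "(void)0", so the semicolon at the
invocation site completes a statement and exactly one tail macro must close
the remaining braces. Below is a compilable toy illustration of that
macro-splitting technique; the OP_BODY* names are hypothetical and unrelated
to the IEM macros.

    #include <stdio.h>

    /* Opens the body: handles the common case and leaves the else branch
     * open so the caller decides how the uncommon case is handled. */
    #define OP_BODY(a_iValue) \
        do { \
            if ((a_iValue) >= 0) \
            { \
                printf("normal path: %d\n", (a_iValue)); \
            } \
            else \
            { \
                (void)0

    /* Tail variant 1: reject the uncommon case. */
    #define OP_BODY_REJECT() \
                printf("value rejected\n"); \
            } \
        } while (0)

    /* Tail variant 2: handle the uncommon case differently. */
    #define OP_BODY_NEGATE(a_iValue) \
                printf("negated: %d\n", -(a_iValue)); \
            } \
        } while (0)

    int main(void)
    {
        OP_BODY(7);
        OP_BODY_REJECT();      /* expands to one well-formed statement */

        OP_BODY(-3);
        OP_BODY_NEGATE(-3);    /* prints "negated: 3" */
        return 0;
    }

Invoked as "OP_BODY(x);" followed by exactly one tail, the pair expands to a
single well-formed statement, which is the same contract the
IEMOP_BODY_BINARY_rm_r8 / _NO_LOCK / _LOCKED trio relies on.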
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py (r98910 → r98916)

Parser updates so the generator understands the new shared body macros:

- SimpleParser.__init__ takes an optional oInheritMacrosFrom parser and seeds
  self.dMacros / self.oReMacros from it, so macros defined in one file can be
  expanded while parsing another.
- The macro-argument scanner now raises a parse error ("expandMacros:
  Invocation of macro %s spans multiple lines!") instead of indexing past the
  end of the line when an invocation is not completed on the same line.
- The per-file statistics line guards against division by zero:
  self.cTotalStubs * 100 // max(self.cTotalInstr, 1).
- __parseFileByName parses IEMAllInstructionsCommonBodyMacros.h once (looking
  next to the source file first, then next to the script), caches the result
  in the new global g_oParsedCommonBodyMacros, verifies that the header
  contains no instructions, stubs, tags or MC blocks, and passes the parsed
  object as oInheritMacrosFrom when constructing the SimpleParser for each
  source file.
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (r98913 → r98916)

The two-operand IMUL (Gv,Ev), POPCNT, TZCNT and LZCNT handlers no longer call
the removed iemOpHlpBinaryOperator_rv_rm worker. They still select the
implementation table at run time (IEMTARGETCPU_EFL_BEHAVIOR_SELECT[_EX] or
IEM_SELECT_HOST_OR_FALLBACK) but then expand the body macro with the table's
members, e.g. for IMUL:

        const IEMOPBINSIZES * const pImpl = IEMTARGETCPU_EFL_BEHAVIOR_SELECT(g_iemAImpl_imul_two_eflags);
        IEMOP_BODY_BINARY_rv_rm(pImpl->pfnNormalU16, pImpl->pfnNormalU32, pImpl->pfnNormalU64, 1);
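Unlike the one-byte ALU handlers, these handlers still need a run-time choice
between implementation tables (EFLAGS behaviour or host-feature fallback), so
they resolve the table first and feed its members into the body macro. The
rough standalone sketch below shows that shape; OPTABLE, BODY_BINARY_U32 and
popcntNative/popcntFallback are hypothetical names, and the native variant
assumes a GCC/Clang builtin.

    #include <stdint.h>
    #include <stdio.h>

    typedef void (*BINOPU32)(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags);

    typedef struct OPTABLE
    {
        BINOPU32 pfnNormalU32;
    } OPTABLE;

    static void popcntNative(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
    {
        *puDst    = (uint32_t)__builtin_popcount(uSrc); /* assumes GCC/Clang */
        *pfEFlags = (*puDst == 0);
    }

    static void popcntFallback(uint32_t *puDst, uint32_t uSrc, uint32_t *pfEFlags)
    {
        uint32_t cBits = 0;
        while (uSrc) { cBits += uSrc & 1; uSrc >>= 1; }
        *puDst    = cBits;
        *pfEFlags = (cBits == 0);
    }

    static const OPTABLE g_Native   = { popcntNative };
    static const OPTABLE g_Fallback = { popcntFallback };

    /* The body macro takes the concrete function, not the table. */
    #define BODY_BINARY_U32(a_fnNormalU32, a_puDst, a_uSrc, a_pfEFlags) \
        do { (a_fnNormalU32)((a_puDst), (a_uSrc), (a_pfEFlags)); } while (0)

    int main(void)
    {
        int fHostHasPopCnt = 1; /* pretend feature check */
        const OPTABLE * const pImpl = fHostHasPopCnt ? &g_Native : &g_Fallback;

        uint32_t uDst = 0, fEFlags = 0;
        BODY_BINARY_U32(pImpl->pfnNormalU32, &uDst, 0xF0u, &fEFlags);
        printf("popcnt=%u zf=%u\n", (unsigned)uDst, (unsigned)fEFlags);
        return 0;
    }

The visible design choice is to keep the table indirection only where the
implementation genuinely varies per host or target CPU; everywhere else the
new macros take the function names directly.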
trunk/src/VBox/VMM/VMMAll/IEMAllThreadedPython.py (r98910 → r98916)

- The size recorded for the IEMMODE parameter type was changed from 8 to 2
  (the neighbouring entries, e.g. uintptr_t at 64 and bool at 1, give the
  width in bits).
- The parameter-count distribution debug output now de-duplicates the
  combined key list with an ordered dict comprehension instead of mixing
  list and set arithmetic:

        for cCount in sorted({cBits: True for cBits in list(dRawParamCounts.keys()) + list(dMinParamCounts.keys())}.keys()):