Changeset 102012 in vbox for trunk/src/VBox
- Timestamp: Nov 9, 2023 2:09:51 AM
- Location: trunk/src/VBox/VMM
- Files: 8 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r102011 → r102012

         # Hand it to the handler.
-        fnParser = g_dMcStmtParsers.get(sName)[0];
+        fnParser = g_dMcStmtParsers.get(sName);
         if not fnParser:
             self.raiseDecodeError(sRawCode, off, 'Unknown MC statement: %s' % (sName,));
+        fnParser = fnParser[0];
         oStmt = fnParser(self, sName, asParams);
         if not isinstance(oStmt, (list, tuple)):
…
     'IEM_MC_FETCH_GREG_U8_ZX_U32':                 (McBlock.parseMcGeneric, False, False, ),
     'IEM_MC_FETCH_GREG_U8_ZX_U64':                 (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_GREG_PAIR_U32':                  (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_GREG_PAIR_U64':                  (McBlock.parseMcGeneric, False, False, ),
     'IEM_MC_FETCH_MEM_D80':                        (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_I16':                        (McBlock.parseMcGeneric, True, False, ),
…
     'IEM_MC_FETCH_MEM_U128_ALIGN_SSE':             (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_U128_NO_AC':                 (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128':                        (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64':        (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64': (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_U16':                        (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_U16_DISP':                   (McBlock.parseMcGeneric, True, False, ),
…
     'IEM_MC_FETCH_MEM_XMM_U32':                    (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_XMM_U64':                    (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM': (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM':       (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM':       (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_YMM':                        (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_FETCH_MEM_YMM_ALIGN_AVX':              (McBlock.parseMcGeneric, True, False, ),
…
     'IEM_MC_FETCH_XREG_U8':                        (McBlock.parseMcGeneric, False, False, ),
     'IEM_MC_FETCH_XREG_XMM':                       (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_XREG_PAIR_U128':                        (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64':        (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64': (McBlock.parseMcGeneric, False, False, ),
+    'IEM_MC_FETCH_XREG_PAIR_XMM':                         (McBlock.parseMcGeneric, False, False, ),
     'IEM_MC_FETCH_YREG_2ND_U64':                   (McBlock.parseMcGeneric, False, False, ),
     'IEM_MC_FETCH_YREG_U128':                      (McBlock.parseMcGeneric, False, False, ),
…
     'IEM_MC_STORE_GREG_U8':                        (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_STORE_GREG_U8_CONST':                  (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_STORE_GREG_PAIR_U32':                  (McBlock.parseMcGeneric, True, False, ),
+    'IEM_MC_STORE_GREG_PAIR_U64':                  (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_STORE_MEM_I16_CONST_BY_REF':           (McBlock.parseMcGeneric, True, False, ),
     'IEM_MC_STORE_MEM_I32_CONST_BY_REF':           (McBlock.parseMcGeneric, True, False, ),
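The first hunk above fixes an order-of-operations bug in the MC-statement decoder: g_dMcStmtParsers.get(sName)[0] indexes the lookup result before it has been checked, so an unknown statement name raised a TypeError on None instead of ever reaching raiseDecodeError. The rewrite looks the entry up, validates it, and only then takes the handler out of the tuple. A minimal standalone Python sketch of that pattern (toy table and names, not the VirtualBox sources):

    # Each table entry is (handler, flag1, flag2), mirroring the tuples listed above.
    def parse_generic(name, params):
        return [(name, params)]

    d_stmt_parsers = {
        'IEM_MC_FETCH_GREG_PAIR_U32': (parse_generic, False, False),
        'IEM_MC_STORE_GREG_PAIR_U64': (parse_generic, True,  False),
    }

    def parse_stmt(name, params):
        entry = d_stmt_parsers.get(name)   # returns None for unknown names ...
        if not entry:
            raise ValueError('Unknown MC statement: %s' % (name,))
        fn_parser = entry[0]               # ... so only index into it after the check.
        return fn_parser(name, params)

    print(parse_stmt('IEM_MC_FETCH_GREG_PAIR_U32',
                     ['u64Tmp', 'X86_GREG_xAX', 'X86_GREG_xDX']))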
trunk/src/VBox/VMM/VMMAll/IEMAllInstThree0f3a.cpp.h
r101952 → r102012

     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+
+    IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(pfnU128, pfMxcsr, pDst, pSrc, bImmArg);
     IEM_MC_MAYBE_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT();
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse41);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_XMM_ALIGN_SSE(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-    IEM_MC_PREPARE_SSE_USAGE();
+    IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(pfnU128, pfMxcsr, pDst, pSrc, bImmArg);
…
     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+    IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundss_u128, pfMxcsr, pDst, pSrc, bImmArg);
…
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_XMM_U32(Src.uSrc2, 0 /*a_iDword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-    IEM_MC_PREPARE_SSE_USAGE();
+    IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                          0 /*a_iDword*/, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundss_u128, pfMxcsr, pDst, pSrc, bImmArg);
…
     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+    IEM_MC_FETCH_XREG_PAIR_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
…
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_XMM_U64(Src.uSrc2, 0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-    IEM_MC_PREPARE_SSE_USAGE();
+    IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                          0 /*a_iQword */, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_MXCSR(pfMxcsr);
-    IEM_MC_FETCH_XREG_XMM(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
     IEM_MC_CALL_VOID_AIMPL_4(iemAImpl_roundsd_u128, pfMxcsr, pDst, pSrc, bImmArg);
…
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
     IEM_MC_PREPARE_SSE_USAGE();
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U64(Src.u64Rdx, X86_GREG_xDX);
+    IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
     IEM_MC_REF_EFLAGS(pEFlags);
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
     IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                                        pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U64(Src.u64Rdx, X86_GREG_xDX);
-
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
     IEM_MC_PREPARE_SSE_USAGE();
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rdx, X86_GREG_xDX);
+    IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
     IEM_MC_REF_EFLAGS(pEFlags);
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
     IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                                               pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rdx, X86_GREG_xDX);
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U64(Src.u64Rdx, X86_GREG_xDX);
+    IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
     IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                                        pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U64(Src.u64Rdx, X86_GREG_xDX);
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rdx, X86_GREG_xDX);
+    IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
     IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(Src, IEM_GET_MODRM_REG(pVCpu, bRm),
+                                                               pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rax, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U32_SX_U64(Src.u64Rdx, X86_GREG_xDX);
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
     IEM_MC_PREPARE_SSE_USAGE();
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+    IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_XREG_U128(puDst, 0 /*xmm0*/);
     IEM_MC_REF_EFLAGS(pEFlags);
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-    IEM_MC_PREPARE_SSE_USAGE();
+    IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEM_MC_PREPARE_SSE_USAGE();
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
-    IEM_MC_FETCH_XREG_U128(Src.uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm));
+    IEM_MC_FETCH_XREG_PAIR_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), IEM_GET_MODRM_RM(pVCpu, bRm));
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
…
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX_EX(fSse42);
     IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT();
-    IEM_MC_FETCH_MEM_U128(Src.uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-
-    IEM_MC_PREPARE_SSE_USAGE();
+    IEM_MC_PREPARE_SSE_USAGE();
+
+    IEM_MC_FETCH_MEM_U128_AND_XREG_U128(Src, IEM_GET_MODRM_REG(pVCpu, bRm), pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
     IEM_MC_REF_GREG_U32(pu32Ecx, X86_GREG_xCX);
-    IEM_MC_FETCH_XREG_U128(Src.uSrc1, IEM_GET_MODRM_REG(pVCpu, bRm));
     IEM_MC_REF_EFLAGS(pEFlags);
     IEM_MC_CALL_VOID_AIMPL_4(IEM_SELECT_HOST_OR_FALLBACK(fSse42,
trunk/src/VBox/VMM/VMMAll/IEMAllInstTwoByte0f.cpp.h
r102011 → r102012

     IEM_MC_MEM_MAP_U64_RW(pu64MemDst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
 
-    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Lo, X86_GREG_xAX);
-    IEM_MC_FETCH_GREG_U32(u64EaxEdx.s.Hi, X86_GREG_xDX);
+    IEM_MC_FETCH_GREG_PAIR_U32(u64EaxEdx, X86_GREG_xAX, X86_GREG_xDX);
     IEM_MC_REF_LOCAL(pu64EaxEdx, u64EaxEdx);
 
-    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Lo, X86_GREG_xBX);
-    IEM_MC_FETCH_GREG_U32(u64EbxEcx.s.Hi, X86_GREG_xCX);
+    IEM_MC_FETCH_GREG_PAIR_U32(u64EbxEcx, X86_GREG_xBX, X86_GREG_xCX);
     IEM_MC_REF_LOCAL(pu64EbxEcx, u64EbxEcx);
…
     IEM_MC_COMMIT_EFLAGS(EFlags);
     IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) {
-        IEM_MC_STORE_GREG_U32(X86_GREG_xAX, u64EaxEdx.s.Lo);
-        IEM_MC_STORE_GREG_U32(X86_GREG_xDX, u64EaxEdx.s.Hi);
+        IEM_MC_STORE_GREG_PAIR_U32(X86_GREG_xAX, X86_GREG_xDX, u64EaxEdx);
     } IEM_MC_ENDIF();
     IEM_MC_ADVANCE_RIP_AND_FINISH();
…
     IEM_MC_MEM_MAP(pu128MemDst, IEM_ACCESS_DATA_RW, pVCpu->iem.s.iEffSeg, GCPtrEffDst, 0 /*arg*/); \
     \
-    IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Lo, X86_GREG_xAX); \
-    IEM_MC_FETCH_GREG_U64(u128RaxRdx.s.Hi, X86_GREG_xDX); \
+    IEM_MC_FETCH_GREG_PAIR_U64(u128RaxRdx, X86_GREG_xAX, X86_GREG_xDX); \
     IEM_MC_REF_LOCAL(pu128RaxRdx, u128RaxRdx); \
     \
-    IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Lo, X86_GREG_xBX); \
-    IEM_MC_FETCH_GREG_U64(u128RbxRcx.s.Hi, X86_GREG_xCX); \
+    IEM_MC_FETCH_GREG_PAIR_U64(u128RbxRcx, X86_GREG_xBX, X86_GREG_xCX); \
     IEM_MC_REF_LOCAL(pu128RbxRcx, u128RbxRcx); \
     \
…
     IEM_MC_COMMIT_EFLAGS(EFlags); \
     IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF) { \
-        IEM_MC_STORE_GREG_U64(X86_GREG_xAX, u128RaxRdx.s.Lo); \
-        IEM_MC_STORE_GREG_U64(X86_GREG_xDX, u128RaxRdx.s.Hi); \
+        IEM_MC_STORE_GREG_PAIR_U64(X86_GREG_xAX, X86_GREG_xDX, u128RaxRdx); \
     } IEM_MC_ENDIF(); \
     IEM_MC_ADVANCE_RIP_AND_FINISH(); \
trunk/src/VBox/VMM/VMMAll/IEMAllN8vePython.py
r102010 → r102012

     'IEM_MC_FETCH_MEM_FLAT_XMM_U32':                            (None, True, False, ),
     'IEM_MC_FETCH_MEM_FLAT_XMM_U64':                            (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128':                 (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE_AND_XREG_XMM':         (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_XMM_U32_AND_XREG_XMM':               (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_XMM_U64_AND_XREG_XMM':               (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_RAX_RDX_U64': (None, True, False, ),
+    'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64': (None, True, False, ),
     'IEM_MC_MEM_FLAT_MAP_EX':                                   (None, True, False, ),
     'IEM_MC_MEM_FLAT_MAP':                                      (None, True, False, ),
…
             g_dUnsupportedMcStmtLastOneStats[sStmt] = [oVariation,];
 
-    if (   len(dUnsupportedStmts) in (1,2)
+    if (   len(dUnsupportedStmts) == 1 #in (1,2)
         and iai.McStmt.findStmtByNames(aoStmts,
                                        { 'IEM_MC_LOCAL': 1, 'IEM_MC_LOCAL_CONST': 1, 'IEM_MC_ARG': 1, 'IEM_MC_ARG_CONST': 1,
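The second hunk tightens the statistics gate: a variation is now only singled out when exactly one unsupported MC statement remains (previously one or two were accepted). A rough Python sketch of the collect-into-a-dict bookkeeping visible above; only the names shown in the hunk are taken from the file, the surrounding structure is illustrative:

    g_dUnsupportedMcStmtLastOneStats = {}   # stmt name -> variations blocked only by it

    def note_unsupported(dUnsupportedStmts, oVariation):
        if len(dUnsupportedStmts) == 1:                 # was "in (1, 2)" before r102012
            sStmt = next(iter(dUnsupportedStmts))
            if sStmt in g_dUnsupportedMcStmtLastOneStats:
                g_dUnsupportedMcStmtLastOneStats[sStmt].append(oVariation)
            else:
                g_dUnsupportedMcStmtLastOneStats[sStmt] = [oVariation,]

    note_unsupported({'IEM_MC_FETCH_MEM_FLAT_XMM_U32_AND_XREG_XMM': 1}, 'variation-0')
    print(g_dUnsupportedMcStmtLastOneStats)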
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r102011 → r102012

 iemNativeEmitStoreGregU8Const(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGRegEx, uint8_t u8Value)
 {
+    Assert(iGRegEx < 20);
     uint8_t const idxGstTmpReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
                                                                  (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + (iGRegEx & 15)),
                                                                  kIemNativeGstRegUse_ForUpdate);
 #ifdef RT_ARCH_AMD64
     uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 12);
…
-/*
- * General purpose register manipulation (add, sub).
- */
+#if 0
+#define IEM_MC_STORE_GREG_U16(a_iGReg, a_u16Value) \
+    off = iemNativeEmitStoreGregU16Const(pReNative, off, a_iGReg, a_u16Value)
+
+/** Emits code for IEM_MC_STORE_GREG_U16. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitStoreGregU16(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGReg, uint8_t idxValueVar)
+{
+    Assert(iGReg < 16)
+    uint8_t const idxGstTmpReg = iemNativeRegAllocTmpForGuestReg(pReNative, &off,
+                                                                 (IEMNATIVEGSTREG)(kIemNativeGstReg_GprFirst + iGReg),
+                                                                 kIemNativeGstRegUse_ForUpdate);
+
+#ifdef RT_ARCH_AMD64
+    uint8_t * const pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 12);
+
+    /* To the lowest byte of the register: mov r8, imm8 */
+    if (iGRegEx < 16)
+    {
+        if (idxGstTmpReg >= 8)
+            pbCodeBuf[off++] = X86_OP_REX_B;
+        else if (idxGstTmpReg >= 4)
+            pbCodeBuf[off++] = X86_OP_REX;
+        pbCodeBuf[off++] = 0xb0 + (idxGstTmpReg & 7);
+        pbCodeBuf[off++] = u8Value;
+    }
+    /* Otherwise it's to ah, ch, dh or bh: use mov r8, imm8 if we can, otherwise, we rotate. */
+    else if (idxGstTmpReg < 4)
+    {
+        pbCodeBuf[off++] = 0xb4 + idxGstTmpReg;
+        pbCodeBuf[off++] = u8Value;
+    }
+    else
+    {
+        /* ror reg64, 8 */
+        pbCodeBuf[off++] = X86_OP_REX_W | (idxGstTmpReg < 8 ? 0 : X86_OP_REX_B);
+        pbCodeBuf[off++] = 0xc1;
+        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 1, idxGstTmpReg & 7);
+        pbCodeBuf[off++] = 8;
+
+        /* mov reg8, imm8 */
+        if (idxGstTmpReg >= 8)
+            pbCodeBuf[off++] = X86_OP_REX_B;
+        else if (idxGstTmpReg >= 4)
+            pbCodeBuf[off++] = X86_OP_REX;
+        pbCodeBuf[off++] = 0xb0 + (idxGstTmpReg & 7);
+        pbCodeBuf[off++] = u8Value;
+
+        /* rol reg64, 8 */
+        pbCodeBuf[off++] = X86_OP_REX_W | (idxGstTmpReg < 8 ? 0 : X86_OP_REX_B);
+        pbCodeBuf[off++] = 0xc1;
+        pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, 0, idxGstTmpReg & 7);
+        pbCodeBuf[off++] = 8;
+    }
+
+#elif defined(RT_ARCH_ARM64)
+    uint8_t const    idxImmReg   = iemNativeRegAllocTmpImm(pReNative, &off, u8Value);
+    uint32_t * const pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 2);
+    if (iGRegEx < 16)
+        /* bfi w1, w2, 0, 8 - moves bits 7:0 from idxImmReg to idxGstTmpReg bits 7:0. */
+        pu32CodeBuf[off++] = Armv8A64MkInstrBfi(idxGstTmpReg, idxImmReg, 0, 8);
+    else
+        /* bfi w1, w2, 8, 8 - moves bits 7:0 from idxImmReg to idxGstTmpReg bits 15:8. */
+        pu32CodeBuf[off++] = Armv8A64MkInstrBfi(idxGstTmpReg, idxImmReg, 8, 8);
+    iemNativeRegFreeTmp(pReNative, idxImmReg);
+
+#else
+# error "Port me!"
+#endif
+
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+
+    off = iemNativeEmitStoreGprToVCpuU64(pReNative, off, idxGstTmpReg, RT_UOFFSETOF_DYN(VMCPU, cpum.GstCtx.aGRegs[iGRegEx & 15]));
+
+    iemNativeRegFreeTmp(pReNative, idxGstTmpReg);
+    return off;
+}
+#endif
+
+
+
+/*********************************************************************************************************************************
+*   General purpose register manipulation (add, sub).                                                                            *
+*********************************************************************************************************************************/
 
 #define IEM_MC_SUB_GREG_U16(a_iGReg, a_u8SubtrahendConst) \
trunk/src/VBox/VMM/VMMAll/IEMAllThrdPython.py
r102011 → r102012

     'IEM_MC_FETCH_MEM_U16_SX_U64':                 ( 1, 'IEM_MC_FETCH_MEM_FLAT_U16_SX_U64' ),
     'IEM_MC_FETCH_MEM_U32_SX_U64':                 ( 1, 'IEM_MC_FETCH_MEM_FLAT_U32_SX_U64' ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128':         ( 2, 'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128' ),
+    'IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM': ( 2, 'IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE_AND_XREG_XMM' ),
+    'IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM':       ( 3, 'IEM_MC_FETCH_MEM_FLAT_XMM_U32_AND_XREG_XMM' ),
+    'IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM':       ( 3, 'IEM_MC_FETCH_MEM_FLAT_XMM_U64_AND_XREG_XMM' ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64':
+        ( 2, 'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_RAX_RDX_U64' ),
+    'IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64':
+        ( 2, 'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64' ),
     'IEM_MC_STORE_MEM_U8':                         ( 0, 'IEM_MC_STORE_MEM_FLAT_U8' ),
     'IEM_MC_STORE_MEM_U16':                        ( 0, 'IEM_MC_STORE_MEM_FLAT_U16' ),
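This table maps a regular MC statement to the address-flat variant the threaded recompiler can use when segmentation is a no-op. A hedged Python sketch of how such a rename table can be applied; the integer is read here as the index of the segment argument that the FLAT form drops (my reading of the entries above, not a statement about the real converter):

    THREADED_FLAT_RENAMES = {
        'IEM_MC_FETCH_MEM_U128_AND_XREG_U128':   (2, 'IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128'),
        'IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM': (3, 'IEM_MC_FETCH_MEM_FLAT_XMM_U32_AND_XREG_XMM'),
    }

    def to_flat(sName, asParams):
        entry = THREADED_FLAT_RENAMES.get(sName)
        if not entry:
            return sName, asParams                      # no flat variant known
        iSegArg, sFlatName = entry
        return sFlatName, asParams[:iSegArg] + asParams[iSegArg + 1:]

    print(to_flat('IEM_MC_FETCH_MEM_U128_AND_XREG_U128',
                  ['Src', 'IEM_GET_MODRM_REG(pVCpu, bRm)', 'pVCpu->iem.s.iEffSeg', 'GCPtrEffSrc']))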
trunk/src/VBox/VMM/include/IEMMc.h
r102011 → r102012

 #define IEM_MC_FETCH_GREG_U64(a_u64Dst, a_iGReg)  (a_u64Dst) = iemGRegFetchU64(pVCpu, (a_iGReg))
 #define IEM_MC_FETCH_GREG_U64_ZX_U64              IEM_MC_FETCH_GREG_U64
+#define IEM_MC_FETCH_GREG_PAIR_U32(a_u64Dst, a_iGRegLo, a_iGRegHi) do { \
+        (a_u64Dst).s.Lo = iemGRegFetchU32(pVCpu, (a_iGRegLo)); \
+        (a_u64Dst).s.Hi = iemGRegFetchU32(pVCpu, (a_iGRegHi)); \
+    } while(0)
+#define IEM_MC_FETCH_GREG_PAIR_U64(a_u128Dst, a_iGRegLo, a_iGRegHi) do { \
+        (a_u128Dst).s.Lo = iemGRegFetchU64(pVCpu, (a_iGRegLo)); \
+        (a_u128Dst).s.Hi = iemGRegFetchU64(pVCpu, (a_iGRegHi)); \
+    } while(0)
 #define IEM_MC_FETCH_SREG_U16(a_u16Dst, a_iSReg) do { \
         IEM_CTX_IMPORT_NORET(pVCpu, CPUMCTX_EXTRN_SREG_FROM_IDX(a_iSReg)); \
…
 #define IEM_MC_STORE_GREG_U32_CONST               IEM_MC_STORE_GREG_U32
 #define IEM_MC_STORE_GREG_U64_CONST               IEM_MC_STORE_GREG_U64
+#define IEM_MC_STORE_GREG_PAIR_U32(a_iGRegLo, a_iGRegHi, a_u64Value) do { \
+        *iemGRegRefU64(pVCpu, (a_iGRegLo)) = (uint32_t)(a_u64Value).s.Lo; \
+        *iemGRegRefU64(pVCpu, (a_iGRegHi)) = (uint32_t)(a_u64Value).s.Hi; \
+    } while(0)
+#define IEM_MC_STORE_GREG_PAIR_U64(a_iGRegLo, a_iGRegHi, a_u128Value) do { \
+        *iemGRegRefU64(pVCpu, (a_iGRegLo)) = (uint64_t)(a_u128Value).s.Lo; \
+        *iemGRegRefU64(pVCpu, (a_iGRegHi)) = (uint64_t)(a_u128Value).s.Hi; \
+    } while(0)
 #define IEM_MC_CLEAR_HIGH_GREG_U64(a_iGReg)       *iemGRegRefU64(pVCpu, (a_iGReg)) &= UINT32_MAX
+
 /** @todo IEM_MC_STORE_SREG_BASE_U64 & IEM_MC_STORE_SREG_BASE_U32 aren't worth it... */
 #define IEM_MC_STORE_SREG_BASE_U64(a_iSReg, a_u64Value) do { \
…
 #define IEM_MC_FETCH_XREG_U8( a_u8Value, a_iXReg, a_iByte) \
     do { (a_u8Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au16[(a_iByte)]; } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128(a_Dst, a_iXReg1, a_iXReg2) \
+    do { (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+         (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+         (a_Dst).uSrc2.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[0]; \
+         (a_Dst).uSrc2.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[1]; \
+    } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_XMM(a_Dst, a_iXReg1, a_iXReg2) \
+    do { (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+         (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+         (a_Dst).uSrc2.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[0]; \
+         (a_Dst).uSrc2.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[1]; \
+    } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_iXReg2) \
+    do { (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+         (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+         (a_Dst).uSrc2.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[0]; \
+         (a_Dst).uSrc2.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[1]; \
+         (a_Dst).u64Rax        = pVCpu->cpum.GstCtx.rax; \
+         (a_Dst).u64Rdx        = pVCpu->cpum.GstCtx.rdx; \
+    } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iXReg2) \
+    do { (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+         (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+         (a_Dst).uSrc2.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[0]; \
+         (a_Dst).uSrc2.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg2)].au64[1]; \
+         (a_Dst).u64Rax        = (int64_t)(int32_t)pVCpu->cpum.GstCtx.eax; \
+         (a_Dst).u64Rdx        = (int64_t)(int32_t)pVCpu->cpum.GstCtx.edx; \
+    } while (0)
 #define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) \
     do { pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[0] = (a_u128Value).au64[0]; \
…
 # define IEM_MC_FETCH_MEM_XMM_U64(a_XmmDst, a_iQWord, a_iSeg, a_GCPtrMem) \
     IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU64(pVCpu, &(a_XmmDst).au64[(a_iQWord)], (a_iSeg), (a_GCPtrMem)))
+
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128(a_u128Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128AlignedSse(pVCpu, &(a_Dst).uSrc2.uXmm, (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(a_Dst, a_iXReg1, a_iDWord2, a_iSeg2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[0] = 0; \
+        (a_Dst).uSrc2.uXmm.au64[1] = 0; \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU32(pVCpu, &(a_Dst).uSrc2.uXmm.au32[(a_iDWord2)], (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(a_Dst, a_iXReg1, a_iQWord2, a_iSeg2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[1] = 0; \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU64(pVCpu, &(a_Dst).uSrc2.uXmm.au64[(a_iQWord2)], (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = pVCpu->cpum.GstCtx.rax; \
+        (a_Dst).u64Rdx = pVCpu->cpum.GstCtx.rdx; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        IEM_MC_RETURN_ON_FAILURE(iemMemFetchDataU128(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2))); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = (int64_t)(int32_t)pVCpu->cpum.GstCtx.eax; \
+        (a_Dst).u64Rdx = (int64_t)(int32_t)pVCpu->cpum.GstCtx.edx; \
+    } while (0)
+
 #else
 # define IEM_MC_FETCH_MEM_U128(a_u128Dst, a_iSeg, a_GCPtrMem) \
…
 # define IEM_MC_FETCH_MEM_FLAT_XMM_U64(a_XmmDst, a_iQWord, a_GCPtrMem) \
     (a_XmmDst).au64[(a_iQWord)] = iemMemFetchDataU64Jmp(pVCpu, UINT8_MAX, (a_GCPtrMem))
+
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128(a_Dst, a_iXReg1, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, UINT8_MAX, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        iemMemFetchDataU128AlignedSseJmp(pVCpu, &(a_Dst).uSrc2.uXmm, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_FLAT_XMM_ALIGN_SSE_AND_XREG_XMM(a_Dst, a_iXReg1, a_GCPtrMem2) do { \
+        iemMemFetchDataU128AlignedSseJmp(pVCpu, &(a_Dst).uSrc2.uXmm, UINT8_MAX, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(a_Dst, a_iXReg1, a_iDWord2, a_iSeg2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[0] = 0; \
+        (a_Dst).uSrc2.uXmm.au64[1] = 0; \
+        (a_Dst).uSrc2.uXmm.au32[(a_iDWord2)] = iemMemFetchDataU32Jmp(pVCpu, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_FLAT_XMM_U32_AND_XREG_XMM(a_Dst, a_iXReg1, a_iDWord2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[0] = 0; \
+        (a_Dst).uSrc2.uXmm.au64[1] = 0; \
+        (a_Dst).uSrc2.uXmm.au32[(a_iDWord2)] = iemMemFlatFetchDataU32Jmp(pVCpu, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(a_Dst, a_iXReg1, a_iQWord2, a_iSeg2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[!(a_iQWord2)] = 0; \
+        (a_Dst).uSrc2.uXmm.au64[(a_iQWord2)]  = iemMemFetchDataU64Jmp(pVCpu, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_FLAT_XMM_U64_AND_XREG_XMM(a_Dst, a_iXReg1, a_iQWord2, a_GCPtrMem2) do { \
+        (a_Dst).uSrc2.uXmm.au64[1] = 0; \
+        (a_Dst).uSrc2.uXmm.au64[(a_iQWord2)] = iemMemFlatFetchDataU64Jmp(pVCpu, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.uXmm.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.uXmm.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = pVCpu->cpum.GstCtx.rax; \
+        (a_Dst).u64Rdx = pVCpu->cpum.GstCtx.rdx; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, (a_iSeg2), (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = (int64_t)(int32_t)pVCpu->cpum.GstCtx.eax; \
+        (a_Dst).u64Rdx = (int64_t)(int32_t)pVCpu->cpum.GstCtx.edx; \
+    } while (0)
+
+# define IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, UINT8_MAX, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = pVCpu->cpum.GstCtx.rax; \
+        (a_Dst).u64Rdx = pVCpu->cpum.GstCtx.rdx; \
+    } while (0)
+# define IEM_MC_FETCH_MEM_FLAT_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_GCPtrMem2) do { \
+        iemMemFetchDataU128Jmp(pVCpu, &(a_Dst).uSrc2, UINT8_MAX, (a_GCPtrMem2)); \
+        (a_Dst).uSrc1.au64[0] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[0]; \
+        (a_Dst).uSrc1.au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg1)].au64[1]; \
+        (a_Dst).u64Rax = (int64_t)(int32_t)pVCpu->cpum.GstCtx.eax; \
+        (a_Dst).u64Rdx = (int64_t)(int32_t)pVCpu->cpum.GstCtx.edx; \
+    } while (0)
+
 #endif
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r102011 → r102012

 #define IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT()  do { (void)fMcBegin; } while (0)
 
+#define CHK_VAR(a_Name) do { RT_CONCAT(iVarCheck_,a_Name) = 1; } while (0)
+
 #define IEM_MC_LOCAL(a_Type, a_Name) (void)fMcBegin; \
-    a_Type a_Name; NOREF(a_Name); (void)fMcBegin
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
+    a_Type a_Name; NOREF(a_Name)
 #define IEM_MC_LOCAL_CONST(a_Type, a_Name, a_Value) (void)fMcBegin; \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
     a_Type const a_Name = (a_Value); \
     NOREF(a_Name)
 #define IEM_MC_LOCAL_ASSIGN(a_Type, a_Name, a_Value) (void)fMcBegin; \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
     a_Type a_Name = (a_Value); \
     NOREF(a_Name)
…
     int RT_CONCAT3(iArgCheck_,a_iArg,a_Name); \
     AssertCompile((a_iArg) < cArgs); \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
     a_Type a_Name; \
     NOREF(a_Name)
…
     int RT_CONCAT3(iArgCheck_,a_iArg,a_Name); \
     AssertCompile((a_iArg) < cArgs); \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
     a_Type const a_Name = (a_Value); \
     NOREF(a_Name)
…
     int RT_CONCAT3(iArgCheck_,a_iArg,a_Name); \
     AssertCompile((a_iArg) < cArgs); \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
     a_Type const a_Name = &(a_Local); \
     NOREF(a_Name)
…
     int RT_CONCAT3(iArgCheck_,a_iArg,a_pName); \
     AssertCompile((a_iArg) < cArgs); \
+    int RT_CONCAT(iVarCheck_,a_Name) = 0; \
+    int RT_CONCAT(iVarCheck_,a_pName) = 0; \
     uint32_t a_Name; \
     uint32_t *a_pName = &a_Name; \
…
 #define IEM_MC_ASSIGN_TO_SMALLER(a_VarOrArg, a_CVariableOrConst) do { (a_VarOrArg) = (0); (void)fMcBegin; } while (0)
 
+#define IEM_MC_FETCH_GREG_PAIR_U32(a_u64Dst, a_iGRegLo, a_iGRegHi)  do { (a_u64Dst).s.Lo = (a_u64Dst).s.Hi = 0; CHK_TYPE(RTUINT64U, a_u64Dst); CHK_VAR(a_u64Dst); CHK_GREG_IDX(a_iGRegLo); CHK_GREG_IDX(a_iGRegHi); (void)fMcBegin; } while(0)
+#define IEM_MC_FETCH_GREG_PAIR_U64(a_u128Dst, a_iGRegLo, a_iGRegHi) do { (a_u128Dst).s.Lo = (a_u128Dst).s.Hi = 0; CHK_TYPE(RTUINT128U, a_u128Dst); CHK_VAR(a_u128Dst); CHK_GREG_IDX(a_iGRegLo); CHK_GREG_IDX(a_iGRegHi); (void)fMcBegin; } while(0)
+#define IEM_MC_STORE_GREG_PAIR_U32(a_iGRegLo, a_iGRegHi, a_u64Src)  do { uint32_t const uTmp = (a_u64Src).s.Lo ^ (a_u64Src).s.Hi; RT_NOREF(uTmp); CHK_TYPE(RTUINT64U, a_u64Src); CHK_VAR(a_u64Src); CHK_GREG_IDX(a_iGRegLo); CHK_GREG_IDX(a_iGRegHi); (void)fMcBegin; } while(0)
+#define IEM_MC_STORE_GREG_PAIR_U64(a_iGRegLo, a_iGRegHi, a_u128Src) do { uint64_t const uTmp = (a_u128Src).s.Lo ^ (a_u128Src).s.Hi; RT_NOREF(uTmp); CHK_TYPE(RTUINT128U, a_u128Src); CHK_VAR(a_u128Src); CHK_GREG_IDX(a_iGRegLo); CHK_GREG_IDX(a_iGRegHi); (void)fMcBegin; } while(0)
+
+#define IEM_MC_FETCH_GREG_U8(a_u8Dst, a_iGReg)   do { (a_u8Dst) = 0; CHK_TYPE(uint8_t, a_u8Dst); CHK_VAR(a_u8Dst); CHK_GREG_IDX(a_iGReg); (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_GREG_U64(a_u64Dst, a_iGReg) do { (a_u64Dst) = 0; CHK_TYPE(uint64_t, a_u64Dst); CHK_VAR(a_u64Dst); CHK_GREG_IDX(a_iGReg); (void)fMcBegin; } while (0)
 #define IEM_MC_FETCH_GREG_U64_ZX_U64 IEM_MC_FETCH_GREG_U64
…   [the remaining IEM_MC_FETCH_GREG_*, IEM_MC_FETCH_SREG_*, IEM_MC_FETCH_EFLAGS*/FSW/FCW, IEM_MC_STORE_GREG_*, IEM_MC_STORE_SREG_BASE_*, IEM_MC_STORE_FPUREG_R80_SRC_REF, IEM_MC_REF_GREG_*, IEM_MC_REF_EFLAGS, IEM_MC_REF_FPUREG and IEM_MC_REF_MXCSR checker macros are rewritten the same way, each gaining a CHK_VAR() check on its variable argument]
…
 #define IEM_MC_FETCH_XREG_U8( a_u8Value, a_iXReg, a_iByte) do { CHK_XREG_IDX(a_iXReg); (a_u8Value) = 0; CHK_TYPE(uint8_t, a_u8Value); (void)fSseRead; (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128(a_Dst, a_iXReg1, a_iXReg2) do { CHK_XREG_IDX(a_iXReg1); CHK_XREG_IDX(a_iXReg2); CHK_VAR(a_Dst); (a_Dst).uSrc1.au64[1] = (a_Dst).uSrc2.au64[1] = 0; CHK_TYPE(IEMPCMPISTRXSRC, a_Dst); (void)fSseRead; (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_XMM(a_Dst, a_iXReg1, a_iXReg2) do { CHK_XREG_IDX(a_iXReg1); CHK_XREG_IDX(a_iXReg2); CHK_VAR(a_Dst); (a_Dst).uSrc1.uXmm.au64[1] = (a_Dst).uSrc2.uXmm.au64[1] = 0; CHK_TYPE(IEMMEDIAF2XMMSRC, a_Dst); (void)fSseRead; (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_iXReg2) do { CHK_XREG_IDX(a_iXReg1); CHK_XREG_IDX(a_iXReg2); CHK_VAR(a_Dst); (a_Dst).uSrc1.au64[1] = (a_Dst).uSrc2.au64[1] = (a_Dst).u64Rax = (a_Dst).u64Rdx = 0; CHK_TYPE(IEMPCMPESTRXSRC, a_Dst); (void)fSseRead; (void)fMcBegin; } while (0)
+#define IEM_MC_FETCH_XREG_PAIR_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iXReg2) do { CHK_XREG_IDX(a_iXReg1); CHK_XREG_IDX(a_iXReg2); CHK_VAR(a_Dst); (a_Dst).uSrc1.au64[1] = (a_Dst).uSrc2.au64[1] = (a_Dst).u64Rax = (a_Dst).u64Rdx = 0; CHK_TYPE(IEMPCMPESTRXSRC, a_Dst); (void)fSseRead; (void)fMcBegin; } while (0)
+
…
 #define IEM_MC_FETCH_MEM_U256_NO_AC(a_u256Dst, a_iSeg, a_GCPtrMem)      do { CHK_SEG_IDX(a_iSeg); CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT256U, a_u256Dst); (void)fMcBegin; } while (0)
 #define IEM_MC_FETCH_MEM_U256_ALIGN_AVX(a_u256Dst, a_iSeg, a_GCPtrMem)  do { CHK_SEG_IDX(a_iSeg); CHK_GCPTR(a_GCPtrMem); CHK_TYPE(RTUINT256U, a_u256Dst); (void)fMcBegin; } while (0)
+
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMPCMPISTRXSRC, a_Dst); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_XMM_ALIGN_SSE_AND_XREG_XMM(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMMEDIAF2XMMSRC, a_Dst); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_XMM_U32_AND_XREG_XMM(a_Dst, a_iXReg1, a_iDWord2, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMMEDIAF2XMMSRC, a_Dst); AssertCompile((a_iDWord2) < RT_ELEMENTS((a_Dst).uSrc2.uXmm.au32)); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_XMM_U64_AND_XREG_XMM(a_Dst, a_iXReg1, a_iQWord2, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMMEDIAF2XMMSRC, a_Dst); AssertCompile((a_iQWord2) < RT_ELEMENTS((a_Dst).uSrc2.uXmm.au64)); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_RAX_RDX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMPCMPESTRXSRC, a_Dst); (void)fMcBegin; } while (0)
+# define IEM_MC_FETCH_MEM_U128_AND_XREG_U128_AND_EAX_EDX_U32_SX_U64(a_Dst, a_iXReg1, a_iSeg2, a_GCPtrMem2) \
+    do { CHK_XREG_IDX(a_iXReg1); (void)fSseRead; CHK_SEG_IDX(a_iSeg2); CHK_GCPTR(a_GCPtrMem2); CHK_TYPE(IEMPCMPESTRXSRC, a_Dst); (void)fMcBegin; } while (0)
 
 #define IEM_MC_STORE_MEM_U8(a_iSeg, a_GCPtrMem, a_u8Value) do { CHK_SEG_IDX(a_iSeg); CHK_GCPTR(a_GCPtrMem); CHK_TYPE(uint8_t, a_u8Value); CHK_SEG_IDX(a_iSeg); (void)fMcBegin; } while (0)