Changeset 97543 in vbox
- Timestamp:
- Nov 15, 2022 12:59:28 PM (2 years ago)
- svn:sync-xref-src-repo-rev:
- 154573
- Location:
- trunk/src/VBox/VMM
- Files:
-
- 7 edited
Legend:
- Unmodified
- Added
- Removed
-
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h
r97534 r97543 809 809 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); \ 810 810 IEM_MC_PREPARE_SSE_USAGE(); \ 811 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) ); \811 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword */); \ 812 812 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \ 813 813 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse41, \ -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f3a.cpp.h
r97534 r97543 333 333 /** Opcode 0x66 0x0f 0x15. */ 334 334 FNIEMOP_STUB(iemOp_pextrw_RdMw_Vdq_Ib); 335 /** Opcode 0x66 0x0f 0x16. */ 336 FNIEMOP_STUB(iemOp_pextrd_q_RdMw_Vdq_Ib); 335 336 337 FNIEMOP_DEF(iemOp_pextrd_q_RdMw_Vdq_Ib) 338 { 339 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 340 if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) 341 { 342 /** 343 * @opcode 0x16 344 * @opcodesub rex.w=1 345 * @oppfx 0x66 346 * @opcpuid sse 347 */ 348 IEMOP_MNEMONIC3(RMI, PEXTRQ, pextrq, Ev, Vq, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX); 349 if (IEM_IS_MODRM_REG_MODE(bRm)) 350 { 351 /* 352 * XMM, greg64. 353 */ 354 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 355 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 356 IEM_MC_BEGIN(0, 1); 357 IEM_MC_LOCAL(uint64_t, uSrc); 358 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); 359 IEM_MC_PREPARE_SSE_USAGE(); 360 IEM_MC_AND_LOCAL_U8(bImm, 1); 361 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm /*a_iQword*/); 362 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc); 363 IEM_MC_ADVANCE_RIP_AND_FINISH(); 364 IEM_MC_END(); 365 } 366 else 367 { 368 /* 369 * XMM, [mem64]. 
370 */ 371 IEM_MC_BEGIN(0, 2); 372 IEM_MC_LOCAL(uint64_t, uSrc); 373 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 374 375 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 376 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 377 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 378 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); 379 IEM_MC_PREPARE_SSE_USAGE(); 380 381 IEM_MC_AND_LOCAL_U8(bImm, 1); 382 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm /*a_iQword*/); 383 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 384 IEM_MC_ADVANCE_RIP_AND_FINISH(); 385 IEM_MC_END(); 386 } 387 } 388 else 389 { 390 /** 391 * @opdone 392 * @opcode 0x16 393 * @opcodesub rex.w=0 394 * @oppfx 0x66 395 * @opcpuid sse 396 */ 397 IEMOP_MNEMONIC3(RMI, PEXTRD, pextrd, Ey, Vd, Ib, DISOPTYPE_HARMLESS | DISOPTYPE_SSE, IEMOPHINT_IGNORES_OZ_PFX); 398 if (IEM_IS_MODRM_REG_MODE(bRm)) 399 { 400 /* 401 * XMM, greg32. 402 */ 403 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 404 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 405 IEM_MC_BEGIN(0, 1); 406 IEM_MC_LOCAL(uint32_t, uSrc); 407 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); 408 IEM_MC_PREPARE_SSE_USAGE(); 409 IEM_MC_AND_LOCAL_U8(bImm, 3); 410 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm /*a_iDword*/); 411 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc); 412 IEM_MC_ADVANCE_RIP_AND_FINISH(); 413 IEM_MC_END(); 414 } 415 else 416 { 417 /* 418 * XMM, [mem32]. 
419 */ 420 IEM_MC_BEGIN(0, 2); 421 IEM_MC_LOCAL(uint32_t, uSrc); 422 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 423 424 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 1); 425 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 426 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); 427 IEM_MC_MAYBE_RAISE_SSE41_RELATED_XCPT(); 428 IEM_MC_PREPARE_SSE_USAGE(); 429 IEM_MC_AND_LOCAL_U8(bImm, 3); 430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), bImm /*a_iDword*/); 431 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 432 IEM_MC_ADVANCE_RIP_AND_FINISH(); 433 IEM_MC_END(); 434 } 435 } 436 } 437 438 337 439 /** Opcode 0x66 0x0f 0x17. */ 338 440 FNIEMOP_STUB(iemOp_extractps_Ed_Vdq_Ib); -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
r97534 r97543 2222 2222 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 2223 2223 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );2224 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/ ); 2225 2225 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/, uSrc); 2226 2226 … … 2275 2275 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2276 2276 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );2277 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 2278 2278 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc); 2279 2279 … … 2428 2428 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 2429 2429 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2430 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/); 2431 2431 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /*a_iDword*/, uSrc); 2432 2432 … … 2448 2448 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2449 2449 2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2450 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/); 2451 2451 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2452 2452 … … 2481 2481 IEM_MC_MAYBE_RAISE_SSE2_RELATED_XCPT(); 2482 2482 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2483 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 2484 2484 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/, uSrc); 2485 2485 … … 2501 2501 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2502 2502 2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2503 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 2504 2504 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2505 2505 … … 2533 2533 
IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 2534 2534 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2535 IEM_MC_FETCH_XREG_ HI_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));2535 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 1 /* a_iQword*/); 2536 2536 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc); 2537 2537 … … 2706 2706 IEM_MC_PREPARE_SSE_USAGE(); 2707 2707 2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );2708 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 2709 2709 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc); 2710 2710 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); … … 2763 2763 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2764 2764 2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2765 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 2766 2766 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2767 2767 … … 2810 2810 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 2811 2811 2812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );2812 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 2813 2813 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 2814 2814 … … 2952 2952 IEM_MC_MAYBE_RAISE_SSE_RELATED_XCPT(); 2953 2953 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 2954 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );2954 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 2955 2955 IEM_MC_STORE_XREG_HI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); 2956 2956 … … 3136 3136 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3137 3137 3138 IEM_MC_FETCH_XREG_ HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));3138 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/); 3139 3139 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 3140 3140 … … 3184 3184 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 3185 3185 3186 IEM_MC_FETCH_XREG_ 
HI_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm));3186 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/); 3187 3187 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 3188 3188 … … 4110 4110 4111 4111 IEM_MC_REF_MXCSR(pfMxcsr); 4112 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm) );4112 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 4113 4113 4114 4114 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvttps2pi_u128, pfMxcsr, pu64Dst, u64Src); … … 4498 4498 4499 4499 IEM_MC_REF_MXCSR(pfMxcsr); 4500 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm) );4500 IEM_MC_FETCH_XREG_U64(u64Src, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 4501 4501 4502 4502 IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_cvtps2pi_u128, pfMxcsr, pu64Dst, u64Src); … … 7427 7427 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 7428 7428 7429 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm) );7429 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 7430 7430 IEM_MC_STORE_GREG_U64(IEM_GET_MODRM_RM(pVCpu, bRm), u64Tmp); 7431 7431 … … 7445 7445 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 7446 7446 7447 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm) );7447 IEM_MC_FETCH_XREG_U64(u64Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 7448 7448 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u64Tmp); 7449 7449 … … 7477 7477 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 7478 7478 7479 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm) );7479 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/); 7480 7480 IEM_MC_STORE_GREG_U32(IEM_GET_MODRM_RM(pVCpu, bRm), u32Tmp); 7481 7481 … … 7495 7495 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 7496 7496 7497 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm) );7497 IEM_MC_FETCH_XREG_U32(u32Tmp, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /*a_iDword*/); 7498 7498 IEM_MC_STORE_MEM_U32(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, u32Tmp); 7499 7499 … … 7529 
7529 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 7530 7530 7531 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );7531 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 7532 7532 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); 7533 7533 … … 12632 12632 IEM_MC_ACTUALIZE_SSE_STATE_FOR_CHANGE(); 12633 12633 12634 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );12634 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 12635 12635 IEM_MC_STORE_XREG_U64_ZX_U128(IEM_GET_MODRM_RM(pVCpu, bRm), uSrc); 12636 12636 … … 12652 12652 IEM_MC_ACTUALIZE_SSE_STATE_FOR_READ(); 12653 12653 12654 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm) );12654 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/); 12655 12655 IEM_MC_STORE_MEM_U64(pVCpu->iem.s.iEffSeg, GCPtrEffSrc, uSrc); 12656 12656 … … 12739 12739 IEM_MC_FPU_TO_MMX_MODE(); 12740 12740 12741 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );12741 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 12742 12742 IEM_MC_STORE_MREG_U64(IEM_GET_MODRM_REG_8(bRm), uSrc); 12743 12743 -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap1.cpp.h
r97534 r97543 1225 1225 IEM_MC_PREPARE_AVX_USAGE(); 1226 1226 1227 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) );1227 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); 1228 1228 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0 /* a_iQword*/, uSrc); 1229 1229 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1 /* a_iQword*/, uSrc); -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
r97361 r97543 394 394 IEM_MC_PREPARE_AVX_USAGE(); \ 395 395 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \ 396 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm) ); \396 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0 /* a_iQword*/); \ 397 397 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fAvx2, iemAImpl_ ## a_Instr ## _u128, \ 398 398 iemAImpl_## a_Instr ## _u128_fallback), \ -
trunk/src/VBox/VMM/include/IEMMc.h
r97534 r97543 482 482 (a_XmmValue).au64[1] = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[1]; \ 483 483 } while (0) 484 #define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg) \ 485 do { (a_u64Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[0]; } while (0) 486 #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg) \ 487 do { (a_u32Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au32[0]; } while (0) 488 #define IEM_MC_FETCH_XREG_HI_U64(a_u64Value, a_iXReg) \ 489 do { (a_u64Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[1]; } while (0) 484 #define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg, a_iQWord) \ 485 do { (a_u64Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[(a_iQWord)]; } while (0) 486 #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg, a_iDWord) \ 487 do { (a_u32Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au32[(a_iDWord)]; } while (0) 488 #define IEM_MC_FETCH_XREG_U16(a_u16Value, a_iXReg, a_iWord) \ 489 do { (a_u16Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au16[(a_iWord)]; } while (0) 490 #define IEM_MC_FETCH_XREG_U8( a_u8Value, a_iXReg, a_iByte) \ 491 do { (a_u8Value) = pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au8[(a_iByte)]; } while (0) 490 492 #define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) \ 491 493 do { pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iXReg)].au64[0] = (a_u128Value).au64[0]; \ -
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r97534 r97543 768 768 #define IEM_MC_FETCH_XREG_U128(a_u128Value, a_iXReg) do { CHK_XREG_IDX(a_iXReg); (a_u128Value) = g_u128Zero; CHK_TYPE(RTUINT128U, a_u128Value); (void)fSseRead; (void)fMcBegin; } while (0) 769 769 #define IEM_MC_FETCH_XREG_XMM(a_XmmValue, a_iXReg) do { CHK_XREG_IDX(a_iXReg); (a_XmmValue) = g_XmmZero; CHK_TYPE(X86XMMREG, a_XmmValue); (void)fSseRead; (void)fMcBegin; } while (0) 770 #define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg) do { CHK_XREG_IDX(a_iXReg); (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); (void)fSseRead; (void)fMcBegin; } while (0) 771 #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg) do { CHK_XREG_IDX(a_iXReg); (a_u32Value) = 0; CHK_TYPE(uint32_t, a_u32Value); (void)fSseRead; (void)fMcBegin; } while (0) 770 #define IEM_MC_FETCH_XREG_U64(a_u64Value, a_iXReg, a_iQWord) do { CHK_XREG_IDX(a_iXReg); (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); (void)fSseRead; (void)fMcBegin; } while (0) 771 #define IEM_MC_FETCH_XREG_U32(a_u32Value, a_iXReg, a_iDWord) do { CHK_XREG_IDX(a_iXReg); (a_u32Value) = 0; CHK_TYPE(uint32_t, a_u32Value); (void)fSseRead; (void)fMcBegin; } while (0) 772 #define IEM_MC_FETCH_XREG_U16(a_u16Value, a_iXReg, a_iWord ) do { CHK_XREG_IDX(a_iXReg); (a_u16Value) = 0; CHK_TYPE(uint16_t, a_u16Value); (void)fSseRead; (void)fMcBegin; } while (0) 773 #define IEM_MC_FETCH_XREG_U8( a_u8Value, a_iXReg, a_iByte) do { CHK_XREG_IDX(a_iXReg); (a_u8Value) = 0; CHK_TYPE(uint8_t, a_u8Value); (void)fSseRead; (void)fMcBegin; } while (0) 772 774 #define IEM_MC_STORE_XREG_U32_U128(a_iXReg, a_iDwDst, a_u128Value, a_iDwSrc) do { CHK_XREG_IDX(a_iXReg); CHK_TYPE(RTUINT128U, a_u128Value); AssertCompile((a_iDwDst) < RT_ELEMENTS((a_u128Value).au32)); AssertCompile((a_iDwSrc) < RT_ELEMENTS((a_u128Value).au32)); (void)fSseWrite; (void)fMcBegin; } while (0) 773 #define IEM_MC_FETCH_XREG_HI_U64(a_u64Value, a_iXReg) do { CHK_XREG_IDX(a_iXReg); (a_u64Value) = 0; CHK_TYPE(uint64_t, a_u64Value); (void)fSseRead; (void)fMcBegin; } while (0)774 
775 #define IEM_MC_STORE_XREG_U128(a_iXReg, a_u128Value) do { CHK_TYPE(RTUINT128U, a_u128Value); (void)fSseWrite; (void)fMcBegin; } while (0) 775 776 #define IEM_MC_STORE_XREG_XMM(a_iXReg, a_XmmValue) do { CHK_TYPE(X86XMMREG, a_XmmValue); (void)fSseWrite; (void)fMcBegin; } while (0)
Note:
See TracChangeset
for help on using the changeset viewer.