Changeset 100567 in vbox for trunk/src/VBox/VMM
Timestamp: Jul 13, 2023, 7:19:33 PM (17 months ago)
Location: trunk/src/VBox/VMM
Files: 4 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
r100138 r100567 311 311 312 312 /* Opcode VEX.0F38 0x18 - invalid */ 313 314 313 315 /** Opcode VEX.66.0F38 0x18. */ 314 FNIEMOP_STUB(iemOp_vbroadcastss_Vx_Wd); 316 FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd) 317 { 318 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0); 319 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 320 if (IEM_IS_MODRM_REG_MODE(bRm)) 321 { 322 /* 323 * Register, register. 324 */ 325 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); 326 if (pVCpu->iem.s.uVexLength) 327 { 328 IEM_MC_BEGIN(0, 1); 329 IEM_MC_LOCAL(uint32_t, uSrc); 330 331 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 332 IEM_MC_PREPARE_AVX_USAGE(); 333 334 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); 335 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 336 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 337 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 338 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 339 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 340 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 341 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 342 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 343 344 IEM_MC_ADVANCE_RIP_AND_FINISH(); 345 IEM_MC_END(); 346 } 347 else 348 { 349 IEM_MC_BEGIN(0, 1); 350 IEM_MC_LOCAL(uint32_t, uSrc); 351 352 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 353 IEM_MC_PREPARE_AVX_USAGE(); 354 IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); 355 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 356 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 357 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 358 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 359 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); 360 361 IEM_MC_ADVANCE_RIP_AND_FINISH(); 362 IEM_MC_END(); 363 } 364 } 365 else 366 { 367 /* 368 * Register, memory. 
369 */ 370 if (pVCpu->iem.s.uVexLength) 371 { 372 IEM_MC_BEGIN(0, 2); 373 IEM_MC_LOCAL(uint32_t, uSrc); 374 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 375 376 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 377 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); 378 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 379 IEM_MC_PREPARE_AVX_USAGE(); 380 381 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 382 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 383 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 384 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 385 IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 386 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 387 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 388 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 389 IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 390 391 IEM_MC_ADVANCE_RIP_AND_FINISH(); 392 IEM_MC_END(); 393 } 394 else 395 { 396 IEM_MC_BEGIN(3, 3); 397 IEM_MC_LOCAL(uint32_t, uSrc); 398 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 399 400 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 401 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx); 402 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 403 IEM_MC_PREPARE_AVX_USAGE(); 404 405 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 406 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 407 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 408 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc); 409 IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc); 410 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); 411 412 IEM_MC_ADVANCE_RIP_AND_FINISH(); 413 IEM_MC_END(); 414 } 415 } 416 } 417 418 315 419 /* Opcode VEX.0F38 0x19 - invalid */ 420 421 316 422 /** Opcode VEX.66.0F38 0x19. 
*/ 317 FNIEMOP_STUB(iemOp_vbroadcastsd_Vqq_Wq); 423 FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq) 424 { 425 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0); 426 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 427 if (IEM_IS_MODRM_REG_MODE(bRm)) 428 { 429 /* 430 * Register, register. 431 */ 432 IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2); 433 if (pVCpu->iem.s.uVexLength) 434 { 435 IEM_MC_BEGIN(0, 1); 436 IEM_MC_LOCAL(uint64_t, uSrc); 437 438 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 439 IEM_MC_PREPARE_AVX_USAGE(); 440 441 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); 442 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 443 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 444 IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 445 IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 446 447 IEM_MC_ADVANCE_RIP_AND_FINISH(); 448 IEM_MC_END(); 449 } 450 else 451 { 452 IEM_MC_BEGIN(0, 1); 453 IEM_MC_LOCAL(uint64_t, uSrc); 454 455 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 456 IEM_MC_PREPARE_AVX_USAGE(); 457 IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0); 458 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 459 IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 460 IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm)); 461 462 IEM_MC_ADVANCE_RIP_AND_FINISH(); 463 IEM_MC_END(); 464 } 465 } 466 else 467 { 468 /* 469 * Register, memory. 
470 */ 471 IEM_MC_BEGIN(0, 2); 472 IEM_MC_LOCAL(uint64_t, uSrc); 473 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 474 475 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 476 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx); 477 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 478 IEM_MC_PREPARE_AVX_USAGE(); 479 480 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 481 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 482 IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 483 IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc); 484 IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc); 485 486 IEM_MC_ADVANCE_RIP_AND_FINISH(); 487 IEM_MC_END(); 488 } 489 } 490 491 318 492 /* Opcode VEX.0F38 0x1a - invalid */ 493 494 319 495 /** Opcode VEX.66.0F38 0x1a. */ 320 FNIEMOP_STUB(iemOp_vbroadcastf128_Vqq_Mdq); 496 FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq) 497 { 498 IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0); 499 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 500 if (IEM_IS_MODRM_REG_MODE(bRm)) 501 { 502 /* 503 * No register, register. 504 */ 505 IEMOP_RAISE_INVALID_OPCODE_RET(); 506 } 507 else 508 { 509 /* 510 * Register, memory. 511 */ 512 IEM_MC_BEGIN(0, 2); 513 IEM_MC_LOCAL(RTUINT128U, uSrc); 514 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 515 516 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 517 IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx); 518 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 519 IEM_MC_PREPARE_AVX_USAGE(); 520 521 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 522 IEM_MC_STORE_YREG_BROADCAST_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc); 523 524 IEM_MC_ADVANCE_RIP_AND_FINISH(); 525 IEM_MC_END(); 526 } 527 } 528 529 321 530 /* Opcode VEX.0F38 0x1b - invalid */ 322 531 /* Opcode VEX.66.0F38 0x1b - invalid */ -
trunk/src/VBox/VMM/include/IEMMc.h
r100326 r100567 516 516 (a_u256Dst).au64[3] = pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegSrcTmp].au64[1]; \ 517 517 } while (0) 518 #define IEM_MC_STORE_YREG_U64(a_iYRegDst, a_iQword, a_u64Value) \ 519 do { pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iYRegDst)].au64[(a_iQword)] = (a_u64Value); } while (0) 520 #define IEM_MC_STORE_YREG_U32(a_iYRegDst, a_iDword, a_u32Value) \ 521 do { pVCpu->cpum.GstCtx.XState.x87.aXMM[(a_iYRegDst)].au32[(a_iDword)] = (a_u32Value); } while (0) 522 #define IEM_MC_STORE_YREGHI_U64(a_iYRegDst, a_iQword, a_u64Value) \ 523 do { pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[(a_iYRegDst)].au64[(a_iQword)] = (a_u64Value); } while (0) 524 #define IEM_MC_STORE_YREGHI_U32(a_iYRegDst, a_iDword, a_u32Value) \ 525 do { pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[(a_iYRegDst)].au32[(a_iDword)] = (a_u32Value); } while (0) 518 526 519 527 #define IEM_MC_INT_CLEAR_ZMM_256_UP(a_iXRegDst) do { /* For AVX512 and AVX1024 support. */ } while (0) … … 549 557 pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDstTmp].au64[0] = (a_u256Src).au64[2]; \ 550 558 pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDstTmp].au64[1] = (a_u256Src).au64[3]; \ 559 IEM_MC_INT_CLEAR_ZMM_256_UP(iYRegDstTmp); \ 560 } while (0) 561 562 #define IEM_MC_STORE_YREG_BROADCAST_U128_ZX_VLMAX(a_iYRegDst, a_u128Src) \ 563 do { uintptr_t const iYRegDstTmp = (a_iYRegDst); \ 564 pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegDstTmp].au64[0] = (a_u128Src).au64[0]; \ 565 pVCpu->cpum.GstCtx.XState.x87.aXMM[iYRegDstTmp].au64[1] = (a_u128Src).au64[1]; \ 566 pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDstTmp].au64[0] = (a_u128Src).au64[0]; \ 567 pVCpu->cpum.GstCtx.XState.u.YmmHi.aYmmHi[iYRegDstTmp].au64[1] = (a_u128Src).au64[1]; \ 551 568 IEM_MC_INT_CLEAR_ZMM_256_UP(iYRegDstTmp); \ 552 569 } while (0) -
trunk/src/VBox/VMM/include/IEMOpHlp.h
r100072 r100567 529 529 } while (0) 530 530 531 /** 532 * Done decoding VEX, no V, L=1. 533 * Raises \#UD exception if rex, rep, opsize or lock prefixes are present, if 534 * we're in real or v8086 mode, if VEX.V!=0xf, if VEX.L!=1, or if the a_fFeature 535 * is not present in the guest CPU. 536 */ 537 #define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) \ 538 do \ 539 { \ 540 if (RT_LIKELY( !( pVCpu->iem.s.fPrefixes \ 541 & (IEM_OP_PRF_LOCK | IEM_OP_PRF_SIZE_OP | IEM_OP_PRF_REPZ | IEM_OP_PRF_REPNZ | IEM_OP_PRF_REX)) \ 542 && pVCpu->iem.s.uVexLength == 1 \ 543 && pVCpu->iem.s.uVex3rdReg == 0 \ 544 && !IEM_IS_REAL_OR_V86_MODE(pVCpu) \ 545 && IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeature )) \ 546 { /* likely */ } \ 547 else \ 548 IEMOP_RAISE_INVALID_OPCODE_RET(); \ 549 } while (0) 550 531 551 #define IEMOP_HLP_DECODED_NL_1(a_uDisOpNo, a_fIemOpFlags, a_uDisParam0, a_fDisOpType) \ 532 552 do \ -
trunk/src/VBox/VMM/testcase/tstIEMCheckMc.cpp
r100326 r100567 169 169 #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV() do { } while (0) 170 170 #define IEMOP_HLP_DONE_VEX_DECODING_L0_AND_NO_VVVV_EX(a_fFeature) do { } while (0) 171 #define IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(a_fFeature) do { } while (0) 171 172 #define IEMOP_HLP_DONE_DECODING_NO_LOCK_REPZ_OR_REPNZ_PREFIXES() do { } while (0) 172 173 #define IEMOP_HLP_DONE_DECODING_NO_SIZE_OP_REPZ_OR_REPNZ_PREFIXES() do { } while (0) … … 782 783 #define IEM_MC_FETCH_YREG_2ND_U64(a_u64Value, a_iYRegSrc) do { CHK_YREG_IDX(a_iYRegSrc); (a_u64Value) = UINT64_MAX; CHK_TYPE(uint64_t, a_u64Value); (void)fAvxRead; (void)fMcBegin; } while (0) 783 784 #define IEM_MC_FETCH_YREG_U32(a_u32Value, a_iYRegSrc) do { CHK_YREG_IDX(a_iYRegSrc); (a_u32Value) = UINT32_MAX; CHK_TYPE(uint32_t, a_u32Value); (void)fAvxRead; (void)fMcBegin; } while (0) 785 #define IEM_MC_STORE_YREG_U64(a_iYRegDst, a_iQword, a_u64Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint64_t, a_u64Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 786 #define IEM_MC_STORE_YREG_U32(a_iYRegDst, a_iDword, a_u32Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint32_t, a_u32Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 787 #define IEM_MC_STORE_YREGHI_U64(a_iYRegDst, a_iQword, a_u64Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint64_t, a_u64Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 788 #define IEM_MC_STORE_YREGHI_U32(a_iYRegDst, a_iDword, a_u32Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint32_t, a_u32Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 784 789 #define IEM_MC_STORE_YREG_U32_ZX_VLMAX(a_iYRegDst, a_u32Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint32_t, a_u32Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 785 790 #define IEM_MC_STORE_YREG_U64_ZX_VLMAX(a_iYRegDst, a_u64Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(uint64_t, a_u64Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 786 791 #define IEM_MC_STORE_YREG_U128_ZX_VLMAX(a_iYRegDst, a_u128Value) do { 
CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(RTUINT128U, a_u128Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 787 792 #define IEM_MC_STORE_YREG_U256_ZX_VLMAX(a_iYRegDst, a_u256Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(RTUINT256U, a_u256Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 793 #define IEM_MC_STORE_YREG_BROADCAST_U128_ZX_VLMAX(a_iYRegDst, a_u128Value) do { CHK_YREG_IDX(a_iYRegDst); CHK_TYPE(RTUINT128U, a_u128Value); (void)fAvxWrite; (void)fMcBegin; } while (0) 788 794 #define IEM_MC_REF_YREG_U128(a_pu128Dst, a_iYReg) do { CHK_YREG_IDX(a_iYReg); (a_pu128Dst) = (PRTUINT128U)((uintptr_t)0); CHK_PTYPE(PRTUINT128U, a_pu128Dst); (void)fAvxWrite; (void)fMcBegin; } while (0) 789 795 #define IEM_MC_REF_YREG_U128_CONST(a_pu128Dst, a_iYReg) do { CHK_YREG_IDX(a_iYReg); (a_pu128Dst) = (PCRTUINT128U)((uintptr_t)0); CHK_PTYPE(PCRTUINT128U, a_pu128Dst); (void)fAvxWrite; (void)fMcBegin; } while (0)
Note: See TracChangeset for help on using the changeset viewer.