Changeset 100567 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Jul 13, 2023, 7:19:33 PM
- svn:sync-xref-src-repo-rev: 158393
- File: 1 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
r100138 → r100567:

 /* Opcode VEX.0F38 0x18 - invalid */
+
+
 /** Opcode VEX.66.0F38 0x18. */
-FNIEMOP_STUB(iemOp_vbroadcastss_Vx_Wd);
+FNIEMOP_DEF(iemOp_vbroadcastss_Vx_Wd)
+{
+    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSS, vbroadcastss, Vx, Wx, DISOPTYPE_HARMLESS, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
+        if (pVCpu->iem.s.uVexLength)
+        {
+            IEM_MC_BEGIN(0, 1);
+            IEM_MC_LOCAL(uint32_t, uSrc);
+
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(0, 1);
+            IEM_MC_LOCAL(uint32_t, uSrc);
+
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_FETCH_XREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        if (pVCpu->iem.s.uVexLength)
+        {
+            IEM_MC_BEGIN(0, 2);
+            IEM_MC_LOCAL(uint32_t, uSrc);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_YREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_YREGHI_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(3, 3);
+            IEM_MC_LOCAL(uint32_t, uSrc);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+            IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx);
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 2, uSrc);
+            IEM_MC_STORE_XREG_U32(IEM_GET_MODRM_REG(pVCpu, bRm), 3, uSrc);
+            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+    }
+}
+
+
 /* Opcode VEX.0F38 0x19 - invalid */
+
+
 /** Opcode VEX.66.0F38 0x19. */
-FNIEMOP_STUB(iemOp_vbroadcastsd_Vqq_Wq);
+FNIEMOP_DEF(iemOp_vbroadcastsd_Vqq_Wq)
+{
+    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTSD, vbroadcastsd, Vx, Wx, DISOPTYPE_HARMLESS, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * Register, register.
+         */
+        IEMOP_HLP_DONE_VEX_DECODING_NO_VVVV_EX(fAvx2);
+        if (pVCpu->iem.s.uVexLength)
+        {
+            IEM_MC_BEGIN(0, 1);
+            IEM_MC_LOCAL(uint64_t, uSrc);
+
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+
+            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
+            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+        else
+        {
+            IEM_MC_BEGIN(0, 1);
+            IEM_MC_LOCAL(uint64_t, uSrc);
+
+            IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+            IEM_MC_PREPARE_AVX_USAGE();
+            IEM_MC_FETCH_XREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm), 0);
+            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+            IEM_MC_STORE_XREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+            IEM_MC_CLEAR_YREG_128_UP(IEM_GET_MODRM_REG(pVCpu, bRm));
+
+            IEM_MC_ADVANCE_RIP_AND_FINISH();
+            IEM_MC_END();
+        }
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(uint64_t, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+        IEM_MC_STORE_YREG_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+        IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 0, uSrc);
+        IEM_MC_STORE_YREGHI_U64(IEM_GET_MODRM_REG(pVCpu, bRm), 1, uSrc);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+
+
 /* Opcode VEX.0F38 0x1a - invalid */
+
+
 /** Opcode VEX.66.0F38 0x1a. */
-FNIEMOP_STUB(iemOp_vbroadcastf128_Vqq_Mdq);
+FNIEMOP_DEF(iemOp_vbroadcastf128_Vqq_Mdq)
+{
+    IEMOP_MNEMONIC2(VEX_RM, VBROADCASTF128, vbroadcastf128, Vx, Wx, DISOPTYPE_HARMLESS, 0);
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+    if (IEM_IS_MODRM_REG_MODE(bRm))
+    {
+        /*
+         * No register, register.
+         */
+        IEMOP_RAISE_INVALID_OPCODE_RET();
+    }
+    else
+    {
+        /*
+         * Register, memory.
+         */
+        IEM_MC_BEGIN(0, 2);
+        IEM_MC_LOCAL(RTUINT128U, uSrc);
+        IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+
+        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+        IEMOP_HLP_DONE_VEX_DECODING_L1_AND_NO_VVVV_EX(fAvx);
+        IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT();
+        IEM_MC_PREPARE_AVX_USAGE();
+
+        IEM_MC_FETCH_MEM_U128_NO_AC(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+        IEM_MC_STORE_YREG_BROADCAST_U128_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uSrc);
+
+        IEM_MC_ADVANCE_RIP_AND_FINISH();
+        IEM_MC_END();
+    }
+}
+
+
 /* Opcode VEX.0F38 0x1b - invalid */
 /* Opcode VEX.66.0F38 0x1b - invalid */