Changeset 99324 in vbox for trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h
- Timestamp: Apr 6, 2023 11:34:00 PM
- svn:sync-xref-src-repo-rev: 156726
- File: 1 edited (trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h)
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap3.cpp.h
r98103 r99324 81 81 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); 82 82 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 83 IEM_MC_MAYBE_RAISE_AVX 2_RELATED_XCPT();83 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 84 84 IEM_MC_PREPARE_AVX_USAGE(); 85 85 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); … … 136 136 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 137 137 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 138 IEM_MC_MAYBE_RAISE_AVX2_RELATED_XCPT(); 138 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 139 IEM_MC_PREPARE_AVX_USAGE(); 140 141 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 142 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); 143 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 144 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg); 145 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); 146 147 IEM_MC_ADVANCE_RIP_AND_FINISH(); 148 IEM_MC_END(); 149 } 150 } 151 } 152 153 154 /** 155 * Common worker for AVX instructions on the forms: 156 * - vblendps/d xmm0, xmm1, xmm2/mem128, imm8 157 * - vblendps/d ymm0, ymm1, ymm2/mem256, imm8 158 * 159 * Takes function table for function w/o implicit state parameter. 160 * 161 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operation. 162 */ 163 FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, PCIEMOPMEDIAOPTF3IMM8, pImpl) 164 { 165 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 166 if (IEM_IS_MODRM_REG_MODE(bRm)) 167 { 168 /* 169 * Register, register. 170 */ 171 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 172 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 173 if (pVCpu->iem.s.uVexLength) 174 { 175 IEM_MC_BEGIN(4, 3); 176 IEM_MC_LOCAL(RTUINT256U, uDst); 177 IEM_MC_LOCAL(RTUINT256U, uSrc1); 178 IEM_MC_LOCAL(RTUINT256U, uSrc2); 179 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 180 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 181 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 182 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 183 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 184 IEM_MC_PREPARE_AVX_USAGE(); 185 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 186 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 187 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg); 188 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 189 IEM_MC_ADVANCE_RIP_AND_FINISH(); 190 IEM_MC_END(); 191 } 192 else 193 { 194 IEM_MC_BEGIN(4, 0); 195 IEM_MC_ARG(PRTUINT128U, puDst, 0); 196 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); 197 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); 198 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 199 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 200 IEM_MC_PREPARE_AVX_USAGE(); 201 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); 202 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 203 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 204 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, bImmArg); 205 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); 206 IEM_MC_ADVANCE_RIP_AND_FINISH(); 207 IEM_MC_END(); 208 } 209 } 210 else 211 { 212 /* 213 * Register, memory. 
214 */ 215 if (pVCpu->iem.s.uVexLength) 216 { 217 IEM_MC_BEGIN(4, 4); 218 IEM_MC_LOCAL(RTUINT256U, uDst); 219 IEM_MC_LOCAL(RTUINT256U, uSrc1); 220 IEM_MC_LOCAL(RTUINT256U, uSrc2); 221 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 222 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 223 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 224 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 225 226 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 227 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 228 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 229 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 230 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 231 IEM_MC_PREPARE_AVX_USAGE(); 232 233 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 234 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 235 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, bImmArg); 236 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 237 238 IEM_MC_ADVANCE_RIP_AND_FINISH(); 239 IEM_MC_END(); 240 } 241 else 242 { 243 IEM_MC_BEGIN(4, 2); 244 IEM_MC_LOCAL(RTUINT128U, uSrc2); 245 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 246 IEM_MC_ARG(PRTUINT128U, puDst, 0); 247 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); 248 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); 249 250 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 251 uint8_t bImm; IEM_OPCODE_GET_NEXT_U8(&bImm); 252 IEM_MC_ARG_CONST(uint8_t, bImmArg, /*=*/ bImm, 3); 253 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 254 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 139 255 IEM_MC_PREPARE_AVX_USAGE(); 140 256 … … 176 292 177 293 178 /** Opcode VEX.66.0F3A 0x0c. */ 294 /** Opcode VEX.66.0F3A 0x0c. 295 * AVX,AVX */ 179 296 FNIEMOP_DEF(iemOp_vblendps_Vx_Hx_Wx_Ib) 180 297 { 181 298 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPS, vblendps, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */ 182 299 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendps); 183 return FNIEMOP_CALL_1(iemOpCommonAvxAvx 2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));300 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback)); 184 301 } 185 302 186 303 187 /** Opcode VEX.66.0F3A 0x0d. */ 304 /** Opcode VEX.66.0F3A 0x0d. 305 * AVX,AVX */ 188 306 FNIEMOP_DEF(iemOp_vblendpd_Vx_Hx_Wx_Ib) 189 307 { 190 308 IEMOP_MNEMONIC3(VEX_RVM, VBLENDPD, vblendpd, Vx, Hx, Wx, DISOPTYPE_HARMLESS, 0); /* @todo */ 191 309 IEMOPMEDIAOPTF3IMM8_INIT_VARS(vblendpd); 192 return FNIEMOP_CALL_1(iemOpCommonAvxAvx 2_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback));310 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Ib_Opt, IEM_SELECT_HOST_OR_FALLBACK(fAvx, &s_Host, &s_Fallback)); 193 311 } 194 312 195 313 196 /** Opcode VEX.66.0F3A 0x0e. */ 314 /** Opcode VEX.66.0F3A 0x0e. 315 * AVX,AVX2 */ 197 316 FNIEMOP_DEF(iemOp_vpblendw_Vx_Hx_Wx_Ib) 198 317 { … … 206 325 207 326 208 /** Opcode VEX.66.0F3A 0x0f. */ 327 /** Opcode VEX.66.0F3A 0x0f. 328 * AVX,AVX2 */ 209 329 FNIEMOP_DEF(iemOp_vpalignr_Vx_Hx_Wx_Ib) 210 330 { … … 363 483 /** 364 484 * Common worker for AVX2 instructions on the forms: 485 * - vblendvps/d xmm0, xmm1, xmm2/mem128, xmm4 486 * - vblendvps/d ymm0, ymm1, ymm2/mem256, ymm4 487 * 488 * Exceptions type 4. AVX cpuid check for both 128-bit and 256-bit operations. 489 */ 490 FNIEMOP_DEF_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, PCIEMOPBLENDOP, pImpl) 491 { 492 uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); 493 if (IEM_IS_MODRM_REG_MODE(bRm)) 494 { 495 /* 496 * Register, register. 
497 */ 498 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4); 499 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 500 if (pVCpu->iem.s.uVexLength) 501 { 502 IEM_MC_BEGIN(4, 4); 503 IEM_MC_LOCAL(RTUINT256U, uDst); 504 IEM_MC_LOCAL(RTUINT256U, uSrc1); 505 IEM_MC_LOCAL(RTUINT256U, uSrc2); 506 IEM_MC_LOCAL(RTUINT256U, uSrc3); 507 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 508 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 509 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 510 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3); 511 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 512 IEM_MC_PREPARE_AVX_USAGE(); 513 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 514 IEM_MC_FETCH_YREG_U256(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 515 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */ 516 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3); 517 IEM_MC_STORE_YREG_U256_ZX_VLMAX(IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 518 IEM_MC_ADVANCE_RIP_AND_FINISH(); 519 IEM_MC_END(); 520 } 521 else 522 { 523 IEM_MC_BEGIN(4, 0); 524 IEM_MC_ARG(PRTUINT128U, puDst, 0); 525 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); 526 IEM_MC_ARG(PCRTUINT128U, puSrc2, 2); 527 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3); 528 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 529 IEM_MC_PREPARE_AVX_USAGE(); 530 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); 531 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 532 IEM_MC_REF_XREG_U128_CONST(puSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); 533 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */ 534 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3); 535 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); 536 IEM_MC_ADVANCE_RIP_AND_FINISH(); 537 IEM_MC_END(); 538 } 539 } 540 else 541 { 542 /* 543 * Register, memory. 544 */ 545 if (pVCpu->iem.s.uVexLength) 546 { 547 IEM_MC_BEGIN(4, 5); 548 IEM_MC_LOCAL(RTUINT256U, uDst); 549 IEM_MC_LOCAL(RTUINT256U, uSrc1); 550 IEM_MC_LOCAL(RTUINT256U, uSrc2); 551 IEM_MC_LOCAL(RTUINT256U, uSrc3); 552 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 553 IEM_MC_ARG_LOCAL_REF(PRTUINT256U, puDst, uDst, 0); 554 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc1, uSrc1, 1); 555 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc2, uSrc2, 2); 556 IEM_MC_ARG_LOCAL_REF(PCRTUINT256U, puSrc3, uSrc3, 3); 557 558 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 559 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4); 560 561 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 562 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 563 IEM_MC_PREPARE_AVX_USAGE(); 564 565 IEM_MC_FETCH_MEM_U256_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 566 IEM_MC_FETCH_YREG_U256(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 567 IEM_MC_FETCH_YREG_U256(uSrc3, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 568 IEM_MC_FETCH_YREG_U256(uSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. 
*/ 569 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU256, puDst, puSrc1, puSrc2, puSrc3); 570 IEM_MC_STORE_YREG_U256_ZX_VLMAX( IEM_GET_MODRM_REG(pVCpu, bRm), uDst); 571 572 IEM_MC_ADVANCE_RIP_AND_FINISH(); 573 IEM_MC_END(); 574 } 575 else 576 { 577 IEM_MC_BEGIN(4, 2); 578 IEM_MC_LOCAL(RTUINT128U, uSrc2); 579 IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); 580 IEM_MC_ARG(PRTUINT128U, puDst, 0); 581 IEM_MC_ARG(PCRTUINT128U, puSrc1, 1); 582 IEM_MC_ARG_LOCAL_REF(PCRTUINT128U, puSrc2, uSrc2, 2); 583 IEM_MC_ARG(PCRTUINT128U, puSrc3, 3); 584 585 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); 586 uint8_t bOp4; IEM_OPCODE_GET_NEXT_U8(&bOp4); 587 588 IEMOP_HLP_DONE_VEX_DECODING_EX(fAvx); 589 IEM_MC_MAYBE_RAISE_AVX_RELATED_XCPT(); 590 IEM_MC_PREPARE_AVX_USAGE(); 591 592 IEM_MC_FETCH_MEM_U128_NO_AC(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); 593 IEM_MC_REF_XREG_U128(puDst, IEM_GET_MODRM_REG(pVCpu, bRm)); 594 IEM_MC_REF_XREG_U128_CONST(puSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); 595 IEM_MC_REF_XREG_U128_CONST(puSrc3, bOp4 >> 4); /** @todo Ignore MSB in 32-bit mode. */ 596 IEM_MC_CALL_VOID_AIMPL_4(pImpl->pfnU128, puDst, puSrc1, puSrc2, puSrc3); 597 IEM_MC_CLEAR_YREG_128_UP( IEM_GET_MODRM_REG(pVCpu, bRm)); 598 599 IEM_MC_ADVANCE_RIP_AND_FINISH(); 600 IEM_MC_END(); 601 } 602 } 603 } 604 605 606 /** Opcode VEX.66.0F3A 0x4a (vex only). 607 * AVX, AVX */ 608 FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx) 609 { 610 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vpblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo 611 IEMOPBLENDOP_INIT_VARS(vblendvps); 612 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback)); 613 } 614 615 616 /** Opcode VEX.66.0F3A 0x4b (vex only). 617 * AVX, AVX */ 618 FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx) 619 { 620 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVPD, blendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo 621 IEMOPBLENDOP_INIT_VARS(vblendvpd); 622 return FNIEMOP_CALL_1(iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback)); 623 } 624 625 626 /** 627 * Common worker for AVX2 instructions on the forms: 365 628 * - vpxxx xmm0, xmm1, xmm2/mem128, xmm4 366 629 * - vpxxx ymm0, ymm1, ymm2/mem256, ymm4 … … 488 751 489 752 490 /** Opcode VEX.66.0F3A 0x4a (vex only). */ 491 FNIEMOP_DEF(iemOp_vblendvps_Vx_Hx_Wx_Lx) 492 { 493 //IEMOP_MNEMONIC4(VEX_RVM, VBLENDVPS, vpblendvps, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo 494 IEMOPBLENDOP_INIT_VARS(vblendvps); 495 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback)); 496 } 497 498 499 /** Opcode VEX.66.0F3A 0x4b (vex only). */ 500 FNIEMOP_DEF(iemOp_vblendvpd_Vx_Hx_Wx_Lx) 501 { 502 //IEMOP_MNEMONIC4(VEX_RVM, VPBLENDVPD, blendvpd, Vx, Hx, Wx, Lx, DISOPTYPE_HARMLESS, 0); @todo 503 IEMOPBLENDOP_INIT_VARS(vblendvpd); 504 return FNIEMOP_CALL_1(iemOpCommonAvxAvx2_Vx_Hx_Wx_Lx, IEM_SELECT_HOST_OR_FALLBACK(fAvx2, &s_Host, &s_Fallback)); 505 } 506 507 508 /** Opcode VEX.66.0F3A 0x4c (vex only). */ 753 /** Opcode VEX.66.0F3A 0x4c (vex only). 754 * AVX, AVX2 */ 509 755 FNIEMOP_DEF(iemOp_vpblendvb_Vx_Hx_Wx_Lx) 510 756 {
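The second half moves vblendvps/vblendvpd onto the new AVX-only worker iemOpCommonAvxAvx_Vx_Hx_Wx_Lx, whose fourth operand is a register rather than an immediate (the dispatch in the diff still selects the host implementation with fAvx2). For orientation, a hedged reference sketch of the variable-blend semantics, again with illustrative names rather than the VBox fallback:

    #include <stdint.h>

    /* VBLENDVPS reference semantics: lane i of the result comes from src2
       when the most significant (sign) bit of mask lane i is set, otherwise
       from src1.  VPBLENDVB applies the same rule per byte.
       Illustrative helper, not the VBox implementation. */
    static void RefVBlendVPsU128(uint32_t auDst[4], const uint32_t auSrc1[4],
                                 const uint32_t auSrc2[4], const uint32_t auMask[4])
    {
        for (unsigned i = 0; i < 4; i++)
            auDst[i] = (auMask[i] & UINT32_C(0x80000000)) ? auSrc2[i] : auSrc1[i];
    }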
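Both variable-blend workers pull the mask register out of a trailing immediate byte (bOp4) whose bits 7:4 name the fourth operand, the /is4 encoding; the recurring @todo in the diff notes that the top bit must be ignored outside 64-bit mode, where only XMM0-XMM7 are addressable. A small sketch of that decoding step (hypothetical helper, not VBox code):

    #include <stdint.h>

    /* /is4 operand decoding: bits 7:4 of the trailing immediate select the
       fourth register operand; outside 64-bit mode the MSB is ignored so the
       index stays within XMM0-XMM7.  Hypothetical helper for illustration. */
    static unsigned DecodeIs4Register(uint8_t bOp4, int fIn64BitMode)
    {
        unsigned iReg = bOp4 >> 4;   /* bits 7:4 */
        if (!fIn64BitMode)
            iReg &= 7;               /* MSB ignored in 16-/32-bit modes */
        return iReg;
    }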