Changeset 95345 in vbox

Timestamp:  Jun 22, 2022 4:15:23 PM (2 years ago)
Location:   trunk/src/VBox/VMM
Files:      4 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r95343 → r95345

  ;
  ; @param 1        The instruction mnemonic.
- ;
- %macro IEMIMPL_VEX_BIN_OP_NOEFL 2
+ ; @param 2        Fallback instruction if applicable.
+ ; @param 3        Whether to emit fallback or not.
+ ;
+ %macro IEMIMPL_VEX_BIN_OP_NOEFL 3
  BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u32, 12
          PROLOGUE_3_ARGS
  …
  ENDPROC iemAImpl_ %+ %1 %+ _u32

+ %if %3
  BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u32_fallback, 12
          PROLOGUE_3_ARGS
- %ifdef ASM_CALL64_GCC
+  %ifdef ASM_CALL64_GCC
          mov     cl, A2_8
          %2      A1_32, cl
          mov     [A0], A1_32
- %else
+  %else
          xchg    A2, A0
          %2      A1_32, cl
          mov     [A2], A1_32
- %endif
+  %endif
          EPILOGUE_3_ARGS
  ENDPROC iemAImpl_ %+ %1 %+ _u32_fallback
+ %endif

  %ifdef RT_ARCH_AMD64
  …
  ENDPROC iemAImpl_ %+ %1 %+ _u64

+ %if %3
  BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64_fallback, 12
          PROLOGUE_3_ARGS
- %ifdef ASM_CALL64_GCC
+  %ifdef ASM_CALL64_GCC
          mov     cl, A2_8
          %2      A1, cl
          mov     [A0], A1_32
- %else
+  %else
          xchg    A2, A0
          %2      A1, cl
          mov     [A2], A1_32
- %endif
+  %endif
          mov     [A0], A1
          EPILOGUE_3_ARGS
  ENDPROC iemAImpl_ %+ %1 %+ _u64_fallback
+ %endif
  %endif ; RT_ARCH_AMD64
  %endmacro

- ; instr, fallback instr
- IEMIMPL_VEX_BIN_OP_NOEFL sarx, sar
- IEMIMPL_VEX_BIN_OP_NOEFL shlx, shl
- IEMIMPL_VEX_BIN_OP_NOEFL shrx, shr
+ ; instr, fallback instr, emit fallback
+ IEMIMPL_VEX_BIN_OP_NOEFL sarx, sar, 1
+ IEMIMPL_VEX_BIN_OP_NOEFL shlx, shl, 1
+ IEMIMPL_VEX_BIN_OP_NOEFL shrx, shr, 1
+ IEMIMPL_VEX_BIN_OP_NOEFL pdep, nop, 0
+ IEMIMPL_VEX_BIN_OP_NOEFL pext, nop, 0
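The third macro parameter exists because pdep and pext have no single-instruction non-VEX fallback, so they pass 0 and get their fallbacks in C instead (see IEMAllAImplC.cpp below). For orientation, the BMI2 shifts this macro does emit fallbacks for mask the shift count to the operand width and leave EFLAGS untouched; a minimal C sketch of those semantics (the Ref* helper names are invented for illustration, and the sketch assumes the usual arithmetic right shift of signed values):

    #include <stdint.h>

    /* Reference semantics for SHLX/SHRX/SARX: the count is masked to
       width - 1 and EFLAGS is never modified (unlike SHL/SHR/SAR). */
    static uint64_t RefShlxU64(uint64_t uSrc, uint8_t cShift) { return uSrc << (cShift & 63); }
    static uint64_t RefShrxU64(uint64_t uSrc, uint8_t cShift) { return uSrc >> (cShift & 63); }
    static uint64_t RefSarxU64(uint64_t uSrc, uint8_t cShift) { return (uint64_t)((int64_t)uSrc >> (cShift & 63)); }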
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r95343 → r95345

  EMIT_SHLX(64, uint64_t, _fallback)
  #endif
-
  #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
  EMIT_SHLX(32, uint32_t, RT_NOTHING)
  …
  EMIT_SHRX(64, uint64_t, _fallback)
  #endif
-
  #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
  EMIT_SHRX(32, uint32_t, RT_NOTHING)
  …
  EMIT_SARX(64, uint64_t, int64_t, _fallback)
  #endif
-
  #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
  EMIT_SARX(32, uint32_t, int32_t, RT_NOTHING)
  EMIT_SARX(32, uint32_t, int32_t, _fallback)
  #endif
+
+
+ /*
+  * PDEP (BMI2)
+  */
+ #define EMIT_PDEP(a_cBitsWidth, a_uType, a_Suffix) \
+ IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_pdep_u,a_cBitsWidth,a_Suffix),(a_uType *puDst, a_uType uSrc, a_uType fMask)) \
+ { \
+     a_uType uResult = 0; \
+     for (unsigned iMaskBit = 0, iBit = 0; iMaskBit < a_cBitsWidth; iMaskBit++) \
+         if (fMask & ((a_uType)1 << iMaskBit)) \
+         { \
+             uResult |= ((uSrc >> iBit) & 1) << iMaskBit; \
+             iBit++; \
+         } \
+     *puDst = uResult; \
+ }
+
+ #if !defined(RT_ARCH_AMD64) || defined(IEM_WITHOUT_ASSEMBLY)
+ EMIT_PDEP(64, uint64_t, RT_NOTHING)
+ #endif
+ EMIT_PDEP(64, uint64_t, _fallback)
+ #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
+ EMIT_PDEP(32, uint32_t, RT_NOTHING)
+ #endif
+ EMIT_PDEP(32, uint32_t, _fallback)
+
+ /*
+  * PEXT (BMI2)
+  */
+ #define EMIT_PEXT(a_cBitsWidth, a_uType, a_Suffix) \
+ IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_pext_u,a_cBitsWidth,a_Suffix),(a_uType *puDst, a_uType uSrc, a_uType fMask)) \
+ { \
+     a_uType uResult = 0; \
+     for (unsigned iMaskBit = 0, iBit = 0; iMaskBit < a_cBitsWidth; iMaskBit++) \
+         if (fMask & ((a_uType)1 << iMaskBit)) \
+         { \
+             uResult |= ((uSrc >> iMaskBit) & 1) << iBit; \
+             iBit++; \
+         } \
+     *puDst = uResult; \
+ }
+
+ #if !defined(RT_ARCH_AMD64) || defined(IEM_WITHOUT_ASSEMBLY)
+ EMIT_PEXT(64, uint64_t, RT_NOTHING)
+ #endif
+ EMIT_PEXT(64, uint64_t, _fallback)
+ #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
+ EMIT_PEXT(32, uint32_t, RT_NOTHING)
+ #endif
+ EMIT_PEXT(32, uint32_t, _fallback)
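The EMIT_PDEP and EMIT_PEXT bodies are plain bit loops: PDEP scatters the low source bits into the positions selected by the mask, PEXT gathers the masked bits back down into the low bits of the result. A self-contained sketch of the same loops with made-up test values (the function names and the harness are illustrative, not part of the changeset):

    #include <stdint.h>
    #include <stdio.h>

    /* Same loop as EMIT_PDEP above, specialized to 32 bits. */
    static uint32_t PdepRefU32(uint32_t uSrc, uint32_t fMask)
    {
        uint32_t uResult = 0;
        for (unsigned iMaskBit = 0, iBit = 0; iMaskBit < 32; iMaskBit++)
            if (fMask & (UINT32_C(1) << iMaskBit))
            {
                uResult |= ((uSrc >> iBit) & 1) << iMaskBit;
                iBit++;
            }
        return uResult;
    }

    /* Same loop as EMIT_PEXT above, specialized to 32 bits. */
    static uint32_t PextRefU32(uint32_t uSrc, uint32_t fMask)
    {
        uint32_t uResult = 0;
        for (unsigned iMaskBit = 0, iBit = 0; iMaskBit < 32; iMaskBit++)
            if (fMask & (UINT32_C(1) << iMaskBit))
            {
                uResult |= ((uSrc >> iMaskBit) & 1) << iBit;
                iBit++;
            }
        return uResult;
    }

    int main(void)
    {
        /* The mask selects bits 0-3, 8-11 and 16-19; PDEP scatters the
           low source bits into exactly those positions... */
        printf("%#x\n", PdepRefU32(0x000000ff, 0x000f0f0f)); /* prints 0xf0f */
        /* ...and PEXT gathers them back: PEXT inverts PDEP under the
           same mask. */
        printf("%#x\n", PextRefU32(0x00000f0f, 0x000f0f0f)); /* prints 0xff */
        return 0;
    }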
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
r95343 → r95345

  FNIEMOP_DEF(iemOp_andn_Gy_By_Ey)
  {
-     IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RVM, ANDN, andn, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1)
          return iemOp_InvalidNeedRM(pVCpu);
  …
           * Register, register.
           */
-         IEMOP_HLP_DONE_VEX_DECODING();
+         IEMOP_HLP_DONE_VEX_DECODING_L0();
          if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W)
          {
  …
          IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
          IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-         IEMOP_HLP_DONE_VEX_DECODING();
+         IEMOP_HLP_DONE_VEX_DECODING_L0();
          IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
          IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
  …
          IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
          IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
-         IEMOP_HLP_DONE_VEX_DECODING();
+         IEMOP_HLP_DONE_VEX_DECODING_L0();
          IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
          IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu));
  …
  FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
  {
-     IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_By_Ey(blsr);
  }
  …
  FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
  {
-     IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_By_Ey(blsmsk);
  }
  …
  FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
  {
-     IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_By_Ey(blsi);
  }
  …
  FNIEMOP_DEF(iemOp_bzhi_Gy_Ey_By)
  {
-     IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RMV, BZHI, bzhi, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_Gy_Ey_By(bzhi, fBmi2, X86_EFL_AF | X86_EFL_PF);
  }

  /* Opcode VEX.66.0F38 0xf5 - invalid. */
+
+ /** Body for PDEP and PEXT (similar to ANDN, except no EFLAGS). */
+ #define IEMOP_BODY_Gy_By_Ey_NoEflags(a_Instr, a_fFeatureMember) \
+     if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->a_fFeatureMember) \
+         return iemOp_InvalidNeedRM(pVCpu); \
+     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm); \
+     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
+     { \
+         /* \
+          * Register, register. \
+          */ \
+         IEMOP_HLP_DONE_VEX_DECODING_L0(); \
+         if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
+         { \
+             IEM_MC_BEGIN(3, 0); \
+             IEM_MC_ARG(uint64_t *, pDst, 0); \
+             IEM_MC_ARG(uint64_t, uSrc1, 1); \
+             IEM_MC_ARG(uint64_t, uSrc2, 2); \
+             IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+             IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+             IEM_MC_FETCH_GREG_U64(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+             IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
+                                                                  iemAImpl_ ## a_Instr ## _u64, \
+                                                                  iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
+             IEM_MC_ADVANCE_RIP(); \
+             IEM_MC_END(); \
+         } \
+         else \
+         { \
+             IEM_MC_BEGIN(3, 0); \
+             IEM_MC_ARG(uint32_t *, pDst, 0); \
+             IEM_MC_ARG(uint32_t, uSrc1, 1); \
+             IEM_MC_ARG(uint32_t, uSrc2, 2); \
+             IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+             IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+             IEM_MC_FETCH_GREG_U32(uSrc2, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+             IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
+                                                                  iemAImpl_ ## a_Instr ## _u32, \
+                                                                  iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
+             IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
+             IEM_MC_ADVANCE_RIP(); \
+             IEM_MC_END(); \
+         } \
+     } \
+     else \
+     { \
+         /* \
+          * Register, memory. \
+          */ \
+         if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
+         { \
+             IEM_MC_BEGIN(3, 1); \
+             IEM_MC_ARG(uint64_t *, pDst, 0); \
+             IEM_MC_ARG(uint64_t, uSrc1, 1); \
+             IEM_MC_ARG(uint64_t, uSrc2, 2); \
+             IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
+             IEMOP_HLP_DONE_VEX_DECODING_L0(); \
+             IEM_MC_FETCH_MEM_U64(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+             IEM_MC_FETCH_GREG_U64(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+             IEM_MC_REF_GREG_U64(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+             IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
+                                                                  iemAImpl_ ## a_Instr ## _u64, \
+                                                                  iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc1, uSrc2); \
+             IEM_MC_ADVANCE_RIP(); \
+             IEM_MC_END(); \
+         } \
+         else \
+         { \
+             IEM_MC_BEGIN(3, 1); \
+             IEM_MC_ARG(uint32_t *, pDst, 0); \
+             IEM_MC_ARG(uint32_t, uSrc1, 1); \
+             IEM_MC_ARG(uint32_t, uSrc2, 2); \
+             IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
+             IEMOP_HLP_DONE_VEX_DECODING_L0(); \
+             IEM_MC_FETCH_MEM_U32(uSrc2, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+             IEM_MC_FETCH_GREG_U32(uSrc1, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+             IEM_MC_REF_GREG_U32(pDst, IEM_GET_MODRM_REG(pVCpu, bRm)); \
+             IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(a_fFeatureMember, \
+                                                                  iemAImpl_ ## a_Instr ## _u32, \
+                                                                  iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc1, uSrc2); \
+             IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
+             IEM_MC_ADVANCE_RIP(); \
+             IEM_MC_END(); \
+         } \
+     } \
+     return VINF_SUCCESS;
+
+
  /** Opcode VEX.F3.0F38 0xf5 (vex only). */
- FNIEMOP_STUB(iemOp_pext_Gy_By_Ey);
+ FNIEMOP_DEF(iemOp_pext_Gy_By_Ey)
+ {
+     IEMOP_MNEMONIC3(VEX_RVM, PEXT, pext, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
+     IEMOP_BODY_Gy_By_Ey_NoEflags(pext, fBmi2);
+ }
+
+
  /** Opcode VEX.F2.0F38 0xf5 (vex only). */
- FNIEMOP_STUB(iemOp_pdep_Gy_By_Ey);
+ FNIEMOP_DEF(iemOp_pdep_Gy_By_Ey)
+ {
+     IEMOP_MNEMONIC3(VEX_RVM, PDEP, pdep, Gy, By, Ey, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
+     IEMOP_BODY_Gy_By_Ey_NoEflags(pdep, fBmi2);
+ }

  /* Opcode VEX.0F38 0xf6 - invalid. */
  …
  FNIEMOP_DEF(iemOp_bextr_Gy_Ey_By)
  {
-     IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RMV, BEXTR, bextr, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_Gy_Ey_By(bextr, fBmi1, X86_EFL_SF | X86_EFL_AF | X86_EFL_PF);
  }
  …
  FNIEMOP_DEF(iemOp_shlx_Gy_Ey_By)
  {
-     IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RMV, SHLX, shlx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_Gy_Ey_By_NoEflags(shlx, fBmi2, 0);
  }
  …
  FNIEMOP_DEF(iemOp_sarx_Gy_Ey_By)
  {
-     IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RMV, SARX, sarx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_Gy_Ey_By_NoEflags(sarx, fBmi2, 0);
  }
  …
  FNIEMOP_DEF(iemOp_shrx_Gy_Ey_By)
  {
-     IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, 0);
+     IEMOP_MNEMONIC3(VEX_RMV, SHRX, shrx, Gy, Ey, By, DISOPTYPE_HARMLESS, IEMOPHINT_VEX_L_ZERO);
      IEMOP_BODY_Gy_Ey_By_NoEflags(shrx, fBmi2, 0);
  }
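In the decoder bodies above, IEM_SELECT_HOST_OR_FALLBACK chooses the assembly implementation when the host CPU has the feature and the C fallback otherwise, so the two must agree bit for bit. On a BMI2-capable host a quick cross-check against the compiler intrinsics is possible; a small sketch (assumes a BMI2-enabled build, e.g. gcc/clang with -mbmi2; the test values are arbitrary):

    #include <assert.h>
    #include <stdint.h>
    #include <immintrin.h> /* _pdep_u64 / _pext_u64 */

    int main(void)
    {
        uint64_t const uSrc  = UINT64_C(0x123456789abcdef0);
        uint64_t const fMask = UINT64_C(0x00ff00ff00ff00ff); /* 32 bits set */
        /* Depositing and then extracting through the same mask must give
           back the low 32 bits of the source; the new C fallbacks satisfy
           the same identity. */
        assert(_pext_u64(_pdep_u64(uSrc, fMask), fMask) == (uSrc & UINT64_C(0xffffffff)));
        return 0;
    }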
trunk/src/VBox/VMM/include/IEMInternal.h
r95343 → r95345

  typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU32NOEFL, (uint32_t *pu32Dst, uint32_t u32Src1, uint32_t u32Src2));
  typedef FNIEMAIMPLBINVEXU32NOEFL *PFNIEMAIMPLBINVEXU32NOEFL;
+ FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pdep_u32, iemAImpl_pdep_u32_fallback;
+ FNIEMAIMPLBINVEXU32NOEFL iemAImpl_pext_u32, iemAImpl_pext_u32_fallback;
  FNIEMAIMPLBINVEXU32NOEFL iemAImpl_sarx_u32, iemAImpl_sarx_u32_fallback;
  FNIEMAIMPLBINVEXU32NOEFL iemAImpl_shlx_u32, iemAImpl_shlx_u32_fallback;
  …
  typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINVEXU64NOEFL, (uint64_t *pu64Dst, uint64_t u64Src1, uint64_t u64Src2));
  typedef FNIEMAIMPLBINVEXU64NOEFL *PFNIEMAIMPLBINVEXU64NOEFL;
+ FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pdep_u64, iemAImpl_pdep_u64_fallback;
+ FNIEMAIMPLBINVEXU64NOEFL iemAImpl_pext_u64, iemAImpl_pext_u64_fallback;
  FNIEMAIMPLBINVEXU64NOEFL iemAImpl_sarx_u64, iemAImpl_sarx_u64_fallback;
  FNIEMAIMPLBINVEXU64NOEFL iemAImpl_shlx_u64, iemAImpl_shlx_u64_fallback;
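Each instruction thus gets a native declaration and a _fallback twin sharing one pointer type, which is what IEM_SELECT_HOST_OR_FALLBACK exploits at the call sites. A compact stand-alone sketch of that dispatch pattern (typedef and functions re-declared locally for illustration; the bodies are placeholders, not the real PDEP):

    #include <stdint.h>
    #include <stdio.h>

    /* Local stand-in for the FNIEMAIMPLBINVEXU64NOEFL typedef pair. */
    typedef void FNBINU64NOEFL(uint64_t *puDst, uint64_t uSrc1, uint64_t uSrc2);
    typedef FNBINU64NOEFL *PFNBINU64NOEFL;

    /* Placeholder native/fallback pair standing in for iemAImpl_pdep_u64
       and iemAImpl_pdep_u64_fallback. */
    static void PdepNative(uint64_t *puDst, uint64_t uSrc, uint64_t fMask)
    { *puDst = uSrc & fMask; puts("native path"); }
    static void PdepFallback(uint64_t *puDst, uint64_t uSrc, uint64_t fMask)
    { *puDst = uSrc & fMask; puts("fallback path"); }

    int main(void)
    {
        int const fHostHasBmi2 = 0; /* made-up capability flag */
        PFNBINU64NOEFL const pfn = fHostHasBmi2 ? PdepNative : PdepFallback;
        uint64_t uDst;
        pfn(&uDst, UINT64_C(0xff), UINT64_C(0x0f0f));
        return 0;
    }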