- Timestamp:
- Jun 22, 2022 10:37:37 AM (3 years ago)
- Location:
- trunk/src/VBox/VMM
- Files:
- 5 edited
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
r95308 → r95341: BLSR, BLSMSK and BLSI get assembly workers, generated by a new macro placed right after the existing andn/bextr instantiations:

    IEMIMPL_VEX_BIN_OP andn,  (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_CF), (X86_EFL_AF | X86_EFL_PF)
    IEMIMPL_VEX_BIN_OP bextr, (X86_EFL_OF | X86_EFL_ZF | X86_EFL_CF), (X86_EFL_SF | X86_EFL_AF | X86_EFL_PF)

    ;;
    ; Macro for implementing BLSR, BLSMSK and BLSI (fallbacks implemented in C).
    ;
    ; This will generate code for the 32 and 64 bit accesses, except on 32-bit
    ; systems where the 64-bit accesses require hand coding.
    ;
    ; All the functions take a pointer to the destination memory operand in A0,
    ; the source register operand in A1 and a pointer to eflags in A2.
    ;
    ; @param 1      The instruction mnemonic.
    ; @param 2      The modified flags.
    ; @param 3      The undefined flags.
    ;
    %macro IEMIMPL_VEX_BIN_OP_2 3
    BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u32, 12
            PROLOGUE_4_ARGS
            IEM_MAYBE_LOAD_FLAGS A2, %2, %3
            mov     T0_32, [A0]
            %1      T0_32, A1_32
            mov     [A0], T0_32
            IEM_SAVE_FLAGS A2, %2, %3
            EPILOGUE_4_ARGS
    ENDPROC iemAImpl_ %+ %1 %+ _u32

    %ifdef RT_ARCH_AMD64
    BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 12
            PROLOGUE_4_ARGS
            IEM_MAYBE_LOAD_FLAGS A2, %2, %3
            mov     T0, [A0]
            %1      T0, A1
            mov     [A0], T0
            IEM_SAVE_FLAGS A2, %2, %3
            EPILOGUE_4_ARGS
    ENDPROC iemAImpl_ %+ %1 %+ _u64
    %endif ; RT_ARCH_AMD64
    %endmacro

    ; instr, modified-flags, undefined-flags
    IEMIMPL_VEX_BIN_OP_2 blsr,   (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_CF), (X86_EFL_AF | X86_EFL_PF)
    IEMIMPL_VEX_BIN_OP_2 blsmsk, (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_CF), (X86_EFL_AF | X86_EFL_PF)
    IEMIMPL_VEX_BIN_OP_2 blsi,   (X86_EFL_OF | X86_EFL_SF | X86_EFL_ZF | X86_EFL_CF), (X86_EFL_AF | X86_EFL_PF)
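The three instantiations correspond to simple bit manipulations of the source operand: BLSR clears the lowest set bit, BLSMSK builds a mask up to and including it, and BLSI isolates it. A minimal standalone C sketch of the guest-visible results (illustration only; inside the emulator these go through the generated iemAImpl_* workers, and EFLAGS are handled separately):

    #include <stdint.h>
    #include <stdio.h>

    /* Reference semantics of the three BMI1 instructions. */
    static uint32_t blsr32(uint32_t uSrc)   { return uSrc & (uSrc - 1); }  /* reset lowest set bit */
    static uint32_t blsmsk32(uint32_t uSrc) { return uSrc ^ (uSrc - 1); }  /* mask up to lowest set bit */
    static uint32_t blsi32(uint32_t uSrc)   { return uSrc & (0U - uSrc); } /* extract lowest set bit */

    int main(void)
    {
        uint32_t const u = 0xa8; /* binary 1010 1000 */
        printf("blsr=%#x blsmsk=%#x blsi=%#x\n", blsr32(u), blsmsk32(u), blsi32(u));
        /* prints: blsr=0xa0 blsmsk=0xf blsi=0x8 */
        return 0;
    }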
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
r95308 → r95341: C fallbacks for the three instructions, added just ahead of the #if !defined(RT_ARCH_AMD64) || defined(IEM_WITHOUT_ASSEMBLY) section:

    /*
     * BLSR (BMI1 instruction)
     */
    #define EMIT_BLSR(a_cBits, a_Type, a_Suffix) \
    IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsr_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
    { \
        uint32_t fEfl1 = *pfEFlags; \
        uint32_t fEfl2 = fEfl1; \
        *puDst = uSrc; \
        iemAImpl_sub_u ## a_cBits(&uSrc, 1, &fEfl1); \
        iemAImpl_and_u ## a_cBits(puDst, uSrc, &fEfl2); \
        \
        /* AMD: The carry flag is from the SUB operation. */ \
        /* 10890xe: PF always cleared? */ \
        fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
        fEfl2 |= fEfl1 & X86_EFL_CF; \
        *pfEFlags = fEfl2; \
    }

    EMIT_BLSR(64, uint64_t, _fallback)
    EMIT_BLSR(32, uint32_t, _fallback)
    #if defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSR(64, uint64_t, RT_NOTHING)
    #endif
    #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSR(32, uint32_t, RT_NOTHING)
    #endif

    /*
     * BLSMSK (BMI1 instruction)
     */
    #define EMIT_BLSMSK(a_cBits, a_Type, a_Suffix) \
    IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsmsk_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
    { \
        uint32_t fEfl1 = *pfEFlags; \
        uint32_t fEfl2 = fEfl1; \
        *puDst = uSrc; \
        iemAImpl_sub_u ## a_cBits(&uSrc, 1, &fEfl1); \
        iemAImpl_xor_u ## a_cBits(puDst, uSrc, &fEfl2); \
        \
        /* AMD: The carry flag is from the SUB operation. */ \
        /* 10890xe: PF always cleared? */ \
        fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
        fEfl2 |= fEfl1 & X86_EFL_CF; \
        *pfEFlags = fEfl2; \
    }

    EMIT_BLSMSK(64, uint64_t, _fallback)
    EMIT_BLSMSK(32, uint32_t, _fallback)
    #if defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSMSK(64, uint64_t, RT_NOTHING)
    #endif
    #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSMSK(32, uint32_t, RT_NOTHING)
    #endif

    /*
     * BLSI (BMI1 instruction)
     */
    #define EMIT_BLSI(a_cBits, a_Type, a_Suffix) \
    IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsi_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
    { \
        uint32_t fEfl1 = *pfEFlags; \
        uint32_t fEfl2 = fEfl1; \
        *puDst = uSrc; \
        iemAImpl_neg_u ## a_cBits(&uSrc, &fEfl1); \
        iemAImpl_and_u ## a_cBits(puDst, uSrc, &fEfl2); \
        \
        /* AMD: The carry flag is from the NEG operation. */ \
        /* 10890xe: PF always cleared? */ \
        fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
        fEfl2 |= fEfl1 & X86_EFL_CF; \
        *pfEFlags = fEfl2; \
    }

    EMIT_BLSI(64, uint64_t, _fallback)
    EMIT_BLSI(32, uint32_t, _fallback)
    #if defined(RT_ARCH_X86) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSI(64, uint64_t, RT_NOTHING)
    #endif
    #if (!defined(RT_ARCH_X86) && !defined(RT_ARCH_AMD64)) || defined(IEM_WITHOUT_ASSEMBLY)
    EMIT_BLSI(32, uint32_t, RT_NOTHING)
    #endif
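Each fallback composes the existing sub/neg and and/xor helpers and then merges their flag output: CF is taken from the decrement/negation step, PF is cleared outright (the "10890xe" comment records that the CPU measured appears to always clear it), and the remaining status flags come from the logical operation. A standalone sketch of just that merge step (the flag bit values are the standard x86 EFLAGS positions, X86_EFL_* in the VBox headers):

    #include <stdint.h>

    #define EFL_CF  UINT32_C(0x0001)  /* carry */
    #define EFL_PF  UINT32_C(0x0004)  /* parity */

    /* Mirror of the merge at the end of each fallback: keep SF/ZF/OF from
     * the logical op, clear PF, and take CF from the SUB/NEG step. */
    static uint32_t bmi1MergeFlags(uint32_t fEflSubOrNeg, uint32_t fEflLogic)
    {
        fEflLogic &= ~(EFL_CF | EFL_PF);
        fEflLogic |= fEflSubOrNeg & EFL_CF;
        return fEflLogic;
    }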
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsPython.py
r95308 → r95341: three VEX_VM instruction forms are registered in the form table (operand order: vvvv destination, r/m source), after the existing VEX_MVR entries:

    'VEX_MVR_REG':  ( 'VEX.ModR/M', [ 'rm', 'vvvv', 'reg' ], '11 mr/reg',  ),
    'VEX_MVR_MEM':  ( 'VEX.ModR/M', [ 'rm', 'vvvv', 'reg' ], '!11 mr/reg', ),

    'VEX_VM':       ( 'VEX.ModR/M', [ 'vvvv', 'rm' ], '',           ),
    'VEX_VM_REG':   ( 'VEX.ModR/M', [ 'vvvv', 'rm' ], '11 mr/reg',  ),
    'VEX_VM_MEM':   ( 'VEX.ModR/M', [ 'vvvv', 'rm' ], '!11 mr/reg', ),

    'FIXED':        ( 'fixed', None, '', ),
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsVexMap2.cpp.h
r95308 → r95341: the group 17 decoder stubs are replaced with a shared body macro plus real decoder functions, and iemOp_VGrp17_f3 is hooked into the opcode dispatch table.

Removed:

    /*  Opcode VEX.F3.0F38 0xf3 /0 - invalid). */
    /*  Opcode VEX.F3.0F38 0xf3 /1). */
    FNIEMOP_STUB_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm);
    /*  Opcode VEX.F3.0F38 0xf3 /2). */
    FNIEMOP_STUB_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm);
    /*  Opcode VEX.F3.0F38 0xf3 /3). */
    FNIEMOP_STUB_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm);
    /*  Opcode VEX.F3.0F38 0xf3 /4 - invalid). */
    /*  Opcode VEX.F3.0F38 0xf3 /5 - invalid). */
    /*  Opcode VEX.F3.0F38 0xf3 /6 - invalid). */
    /*  Opcode VEX.F3.0F38 0xf3 /7 - invalid). */

Added:

    /*  Opcode VEX.F3.0F38 0xf3 /0 - invalid. */

    /** Body for the vex group 17 instructions. */
    #define IEMOP_BODY_By_Ey(a_Instr) \
        if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fBmi1) \
            return iemOp_InvalidWithRM(pVCpu, bRm); /* decode memory variant? */ \
        IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF | X86_EFL_PF); \
        if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
        { \
            /* \
             * Register, register. \
             */ \
            IEMOP_HLP_DONE_VEX_DECODING_L0(); \
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint64_t *,  pDst,    0); \
                IEM_MC_ARG(uint64_t,    uSrc,    1); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
                IEM_MC_REF_GREG_U64(pDst,   IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
                IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
                                                                     iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 0); \
                IEM_MC_ARG(uint32_t *,  pDst,    0); \
                IEM_MC_ARG(uint32_t,    uSrc,    1); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
                IEM_MC_REF_GREG_U32(pDst,   IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
                IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
                                                                     iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
            } \
        } \
        else \
        { \
            /* \
             * Register, memory. \
             */ \
            if (pVCpu->iem.s.fPrefixes & IEM_OP_PRF_SIZE_REX_W) \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint64_t *,  pDst,    0); \
                IEM_MC_ARG(uint64_t,    uSrc,    1); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_VEX_DECODING_L0(); \
                IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
                                                                     iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
            } \
            else \
            { \
                IEM_MC_BEGIN(3, 1); \
                IEM_MC_ARG(uint32_t *,  pDst,    0); \
                IEM_MC_ARG(uint32_t,    uSrc,    1); \
                IEM_MC_ARG(uint32_t *,  pEFlags, 2); \
                IEM_MC_LOCAL(RTGCPTR,   GCPtrEffSrc); \
                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
                IEMOP_HLP_DONE_VEX_DECODING_L0(); \
                IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
                IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
                IEM_MC_REF_EFLAGS(pEFlags); \
                IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
                                                                     iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
                IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(pDst); \
                IEM_MC_ADVANCE_RIP(); \
                IEM_MC_END(); \
            } \
        } \
        return VINF_SUCCESS


    /*  Opcode VEX.F3.0F38 0xf3 /1. */
    /** @opcode  /1
     *  @opmaps  vexgrp17 */
    FNIEMOP_DEF_1(iemOp_VGrp17_blsr_By_Ey, uint8_t, bRm)
    {
        IEMOP_MNEMONIC2(VEX_VM, BLSR, blsr, By, Ey, DISOPTYPE_HARMLESS, 0);
        IEMOP_BODY_By_Ey(blsr);
    }


    /*  Opcode VEX.F3.0F38 0xf3 /2. */
    /** @opcode  /2
     *  @opmaps  vexgrp17 */
    FNIEMOP_DEF_1(iemOp_VGrp17_blsmsk_By_Ey, uint8_t, bRm)
    {
        IEMOP_MNEMONIC2(VEX_VM, BLSMSK, blsmsk, By, Ey, DISOPTYPE_HARMLESS, 0);
        IEMOP_BODY_By_Ey(blsmsk);
    }


    /*  Opcode VEX.F3.0F38 0xf3 /3. */
    /** @opcode  /3
     *  @opmaps  vexgrp17 */
    FNIEMOP_DEF_1(iemOp_VGrp17_blsi_By_Ey, uint8_t, bRm)
    {
        IEMOP_MNEMONIC2(VEX_VM, BLSI, blsi, By, Ey, DISOPTYPE_HARMLESS, 0);
        IEMOP_BODY_By_Ey(blsi);
    }


    /*  Opcode VEX.F3.0F38 0xf3 /4 - invalid. */
    /*  Opcode VEX.F3.0F38 0xf3 /5 - invalid. */
    /*  Opcode VEX.F3.0F38 0xf3 /6 - invalid. */
    /*  Opcode VEX.F3.0F38 0xf3 /7 - invalid. */

Further down, in the map dispatch table (one column per mandatory prefix: none, 66, F3, F2), the 0xf3 row now routes the no-prefix encoding to the group 17 handler:

    Before:
        /* 0xf3 */  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,  iemOp_VGrp17_f3,      iemOp_InvalidNeedRM,
    After:
        /* 0xf3 */  iemOp_VGrp17_f3,      iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,  iemOp_InvalidNeedRM,
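In the VEX_VM form the destination register is named by VEX.vvvv (the By operand) and the source is the ModR/M r/m operand (Ey), which is why the body fetches uSrc via IEM_GET_MODRM_RM and references the destination via IEM_GET_EFFECTIVE_VVVV. On a BMI1-capable host the same computation is available through compiler intrinsics; a quick host-side sketch (assumes GCC/Clang built with -mbmi, or MSVC):

    #include <stdint.h>
    #include <immintrin.h>  /* _blsr_u64 and friends */

    /* blsr rax, rbx  computes  rax = rbx & (rbx - 1); on a BMI1 host the
     * intrinsic compiles to exactly that instruction. */
    uint64_t hostBlsr(uint64_t uSrc)
    {
        return _blsr_u64(uSrc);
    }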
trunk/src/VBox/VMM/include/IEMInternal.h
r95308 → r95341: the opcode-form constants gain the VEX_VM shapes, and native/fallback worker pairs are declared alongside the existing binary helpers.

    /** VEX+ModR/M: r/m, vvvv, reg (memory) */
    #define IEMOPFORM_VEX_MVR_MEM   (IEMOPFORM_VEX_MVR | IEMOPFORM_NOT_MOD3)
    /** VEX+ModR/M+/n: vvvv, r/m */
    #define IEMOPFORM_VEX_VM        12
    /** VEX+ModR/M+/n: vvvv, r/m (register) */
    #define IEMOPFORM_VEX_VM_REG    (IEMOPFORM_VEX_VM | IEMOPFORM_MOD3)
    /** VEX+ModR/M+/n: vvvv, r/m (memory) */
    #define IEMOPFORM_VEX_VM_MEM    (IEMOPFORM_VEX_VM | IEMOPFORM_NOT_MOD3)

    /** Fixed register instruction, no R/M. */

…

    FNIEMAIMPLBINU32 iemAImpl_xor_u32,    iemAImpl_xor_u32_locked;
    FNIEMAIMPLBINU32 iemAImpl_and_u32,    iemAImpl_and_u32_locked;
    FNIEMAIMPLBINU32 iemAImpl_blsi_u32,   iemAImpl_blsi_u32_fallback;
    FNIEMAIMPLBINU32 iemAImpl_blsr_u32,   iemAImpl_blsr_u32_fallback;
    FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
    /** @} */

…

    FNIEMAIMPLBINU64 iemAImpl_xor_u64,    iemAImpl_xor_u64_locked;
    FNIEMAIMPLBINU64 iemAImpl_and_u64,    iemAImpl_and_u64_locked;
    FNIEMAIMPLBINU64 iemAImpl_blsi_u64,   iemAImpl_blsi_u64_fallback;
    FNIEMAIMPLBINU64 iemAImpl_blsr_u64,   iemAImpl_blsr_u64_fallback;
    FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
    /** @} */
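Each instruction thus comes as a pair: a native worker from IEMAllAImpl.asm plus a _fallback in C, with IEM_SELECT_HOST_OR_FALLBACK choosing between them based on the host CPU's feature set. A conceptual sketch of that selection (a simplification for illustration, not the actual macro; the worker signature is the one visible in the EMIT_* definitions above):

    #include <stdint.h>

    /* Binary worker signature as used by the blsr/blsmsk/blsi helpers. */
    typedef void FNBINU64(uint64_t *puDst, uint64_t uSrc, uint32_t *pfEFlags);

    /* On a host without BMI1 the .asm worker cannot be used, since it
     * executes the very instruction being emulated; the C fallback is
     * chosen instead. */
    static FNBINU64 *pickWorker(int fHostHasBmi1, FNBINU64 *pfnNative, FNBINU64 *pfnFallback)
    {
        return fHostHasBmi1 ? pfnNative : pfnFallback;
    }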