Changeset 95460 in vbox for trunk/src/VBox
Timestamp: Jun 30, 2022 12:52:13 PM (3 years ago)
Location:  trunk/src/VBox/VMM
Files:     4 edited
Legend:
  (no prefix)  Unmodified
  +            Added
  -            Removed
trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm
(diff r95458 → r95460)

@@ line 3865 @@
  IEMIMPL_MEDIA_F3 vpaddq

+
+ ;
+ ; The SSE 4.2 crc32
+ ;
+ ; @param    1       The instruction
+ ;
+ ; @param    A1      Pointer to the 32-bit destination.
+ ; @param    A2      The source operand, sized according to the suffix.
+ ;
+
+ BEGINPROC_FASTCALL iemAImpl_crc32_u8, 8
+         PROLOGUE_2_ARGS
+
+         mov     T0_32, [A0]
+         crc32   T0_32, A1_8
+         mov     [A0], T0_32
+
+         EPILOGUE_2_ARGS
+ ENDPROC iemAImpl_crc32_u8
+
+ BEGINPROC_FASTCALL iemAImpl_crc32_u16, 8
+         PROLOGUE_2_ARGS
+
+         mov     T0_32, [A0]
+         crc32   T0_32, A1_16
+         mov     [A0], T0_32
+
+         EPILOGUE_2_ARGS
+ ENDPROC iemAImpl_crc32_u16
+
+ BEGINPROC_FASTCALL iemAImpl_crc32_u32, 8
+         PROLOGUE_2_ARGS
+
+         mov     T0_32, [A0]
+         crc32   T0_32, A1_32
+         mov     [A0], T0_32
+
+         EPILOGUE_2_ARGS
+ ENDPROC iemAImpl_crc32_u32
+
+ %ifdef RT_ARCH_AMD64
+ BEGINPROC_FASTCALL iemAImpl_crc32_u64, 8
+         PROLOGUE_2_ARGS
+
+         mov     T0_32, [A0]
+         crc32   T0, A1
+         mov     [A0], T0_32
+
+         EPILOGUE_2_ARGS
+ ENDPROC iemAImpl_crc32_u64
+ %endif
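The four workers above simply wrap the host CPU's crc32 instruction around the 32-bit accumulator that IEM keeps for the guest's destination register. Outside of the changeset, the same operations can be expressed with the SSE 4.2 intrinsics from <nmmintrin.h>; the sketch below is illustrative only, and the helper names are made up rather than taken from VirtualBox:

    /* Minimal sketch, assuming an SSE 4.2 capable host and a compiler switch
       such as -msse4.2; the helper names are hypothetical. */
    #include <stdint.h>
    #include <nmmintrin.h>

    static void crc32_u8_host(uint32_t *puDst, uint8_t uSrc)
    {
        *puDst = _mm_crc32_u8(*puDst, uSrc);        /* fold one byte into the CRC */
    }

    static void crc32_u32_host(uint32_t *puDst, uint32_t uSrc)
    {
        *puDst = _mm_crc32_u32(*puDst, uSrc);       /* fold four bytes into the CRC */
    }

    #if defined(__x86_64__) || defined(_M_X64)
    static void crc32_u64_host(uint32_t *puDst, uint64_t uSrc)
    {
        /* The 64-bit form still produces a 32-bit CRC in the low half of the result,
           so truncating mirrors the "mov [A0], T0_32" store in the assembly above. */
        *puDst = (uint32_t)_mm_crc32_u64(*puDst, uSrc);
    }
    #endif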
trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp
(diff r95458 → r95460)

@@ line 26 @@
  #include <iprt/uint128.h>
  #include <iprt/uint256.h>
+ #include <iprt/crc.h>

  RT_C_DECLS_BEGIN

@@ line 8145 @@

  #endif /* IEM_WITHOUT_ASSEMBLY */
+
+
+ /*
+  * CRC32 (SSE 4.2).
+  */
+
+ IEM_DECL_IMPL_DEF(void, iemAImpl_crc32_u8_fallback,(uint32_t *puDst, uint8_t uSrc))
+ {
+     *puDst = RTCrc32CProcess(*puDst, &uSrc, sizeof(uSrc));
+ }
+
+
+ IEM_DECL_IMPL_DEF(void, iemAImpl_crc32_u16_fallback,(uint32_t *puDst, uint16_t uSrc))
+ {
+     *puDst = RTCrc32CProcess(*puDst, &uSrc, sizeof(uSrc));
+ }
+
+ IEM_DECL_IMPL_DEF(void, iemAImpl_crc32_u32_fallback,(uint32_t *puDst, uint32_t uSrc))
+ {
+     *puDst = RTCrc32CProcess(*puDst, &uSrc, sizeof(uSrc));
+ }
+
+ IEM_DECL_IMPL_DEF(void, iemAImpl_crc32_u64_fallback,(uint32_t *puDst, uint64_t uSrc))
+ {
+     *puDst = RTCrc32CProcess(*puDst, &uSrc, sizeof(uSrc));
+ }
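The fallbacks route everything through IPRT's RTCrc32CProcess, i.e. CRC-32C with the Castagnoli polynomial used by the SSE 4.2 instruction, folding the source bytes into the running value held in the destination register. The self-contained sketch below only makes that update step concrete; it rests on the assumption that RTCrc32CProcess applies no pre- or post-inversion (the hardware instruction does not either), and it is not code from the changeset:

    /* Minimal sketch of a reflected CRC-32C update, assuming no pre/post inversion. */
    #include <stddef.h>
    #include <stdint.h>

    static uint32_t Crc32CUpdateByte(uint32_t uCrc, uint8_t bData)
    {
        uCrc ^= bData;
        for (unsigned iBit = 0; iBit < 8; iBit++)
            /* 0x82F63B78 is the bit-reflected form of the Castagnoli polynomial 0x1EDC6F41. */
            uCrc = (uCrc >> 1) ^ (UINT32_C(0x82F63B78) & (0U - (uCrc & 1U)));
        return uCrc;
    }

    static uint32_t Crc32CUpdate(uint32_t uCrc, const void *pvData, size_t cbData)
    {
        const uint8_t *pbData = (const uint8_t *)pvData;
        while (cbData-- > 0)
            uCrc = Crc32CUpdateByte(uCrc, *pbData++);   /* bytes in memory order, as the fallbacks pass &uSrc */
        return uCrc;
    }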
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsThree0f38.cpp.h
(diff r95453 → r95460)

@@ line 706 @@
  FNIEMOP_STUB(iemOp_movbe_Gw_Mw);
  /* Opcode 0xf3 0x0f 0x38 0xf0 - invalid. */
+
+
  /** Opcode 0xf2 0x0f 0x38 0xf0. */
- FNIEMOP_STUB(iemOp_crc32_Gb_Eb);
+ FNIEMOP_DEF(iemOp_crc32_Gd_Eb)
+ {
+     IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Eb, DISOPTYPE_HARMLESS, 0);
+     if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
+         return iemOp_InvalidNeedRM(pVCpu);
+
+     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+     {
+         /*
+          * Register, register.
+          */
+         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+         IEM_MC_BEGIN(2, 0);
+         IEM_MC_ARG(uint32_t *, puDst, 0);
+         IEM_MC_ARG(uint8_t,    uSrc,  1);
+         IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+         IEM_MC_FETCH_GREG_U8(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+         IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
+         IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+         IEM_MC_ADVANCE_RIP();
+         IEM_MC_END();
+     }
+     else
+     {
+         /*
+          * Register, memory.
+          */
+         IEM_MC_BEGIN(2, 1);
+         IEM_MC_ARG(uint32_t *, puDst, 0);
+         IEM_MC_ARG(uint8_t,    uSrc,  1);
+         IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
+
+         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+         IEM_MC_FETCH_MEM_U8(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+         IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+         IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u8, iemAImpl_crc32_u8_fallback), puDst, uSrc);
+         IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+
+         IEM_MC_ADVANCE_RIP();
+         IEM_MC_END();
+     }
+     return VINF_SUCCESS;
+ }
+

  /** Opcode 0x0f 0x38 0xf1. */
...
@@ line 762 @@
  FNIEMOP_STUB(iemOp_movbe_Mw_Gw);
  /* Opcode 0xf3 0x0f 0x38 0xf1 - invalid. */
+
+
  /** Opcode 0xf2 0x0f 0x38 0xf1. */
- FNIEMOP_STUB(iemOp_crc32_Gv_Ev);
+ FNIEMOP_DEF(iemOp_crc32_Gv_Ev)
+ {
+     IEMOP_MNEMONIC2(RM, CRC32, crc32, Gd, Ev, DISOPTYPE_HARMLESS, 0);
+     if (!IEM_GET_GUEST_CPU_FEATURES(pVCpu)->fSse42)
+         return iemOp_InvalidNeedRM(pVCpu);
+
+     uint8_t bRm; IEM_OPCODE_GET_NEXT_U8(&bRm);
+     if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+     {
+         /*
+          * Register, register.
+          */
+         IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+         switch (pVCpu->iem.s.enmEffOpSize)
+         {
+             case IEMMODE_16BIT:
+                 IEM_MC_BEGIN(2, 0);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint16_t,   uSrc,  1);
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_FETCH_GREG_U16(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             case IEMMODE_32BIT:
+                 IEM_MC_BEGIN(2, 0);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint32_t,   uSrc,  1);
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             case IEMMODE_64BIT:
+                 IEM_MC_BEGIN(2, 0);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint64_t,   uSrc,  1);
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             IEM_NOT_REACHED_DEFAULT_CASE_RET();
+         }
+     }
+     else
+     {
+         /*
+          * Register, memory.
+          */
+         switch (pVCpu->iem.s.enmEffOpSize)
+         {
+             case IEMMODE_16BIT:
+                 IEM_MC_BEGIN(2, 1);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint16_t,   uSrc,  1);
+                 IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
+
+                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                 IEM_MC_FETCH_MEM_U16(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             case IEMMODE_32BIT:
+                 IEM_MC_BEGIN(2, 1);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint32_t,   uSrc,  1);
+                 IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
+
+                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                 IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             case IEMMODE_64BIT:
+                 IEM_MC_BEGIN(2, 1);
+                 IEM_MC_ARG(uint32_t *, puDst, 0);
+                 IEM_MC_ARG(uint64_t,   uSrc,  1);
+                 IEM_MC_LOCAL(RTGCPTR,  GCPtrEffSrc);
+
+                 IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0);
+                 IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
+                 IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
+
+                 IEM_MC_REF_GREG_U32(puDst, IEM_GET_MODRM_REG(pVCpu, bRm));
+                 IEM_MC_CALL_VOID_AIMPL_2(IEM_SELECT_HOST_OR_FALLBACK(fSse42, iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback),
+                                          puDst, uSrc);
+                 IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF(puDst);
+
+                 IEM_MC_ADVANCE_RIP();
+                 IEM_MC_END();
+                 return VINF_SUCCESS;
+
+             IEM_NOT_REACHED_DEFAULT_CASE_RET();
+         }
+     }
+ }
+

  /* Opcode 0x0f 0x38 0xf2 - invalid (vex only). */
...
@@ line 1227 @@
  /* 0xef */ IEMOP_X4(iemOp_InvalidNeedRM),

- /* 0xf0 */ iemOp_movbe_Gy_My, iemOp_movbe_Gw_Mw, iemOp_InvalidNeedRM, iemOp_crc32_Gb_Eb,
+ /* 0xf0 */ iemOp_movbe_Gy_My, iemOp_movbe_Gw_Mw, iemOp_InvalidNeedRM, iemOp_crc32_Gd_Eb,
  /* 0xf1 */ iemOp_movbe_My_Gy, iemOp_movbe_Mw_Gw, iemOp_InvalidNeedRM, iemOp_crc32_Gv_Ev,
  /* 0xf2 */ IEMOP_X4(iemOp_InvalidNeedRM),
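Putting the decoder and the workers together: the instruction is gated on the guest's SSE 4.2 CPUID feature, the destination is always referenced as a 32-bit GPR whose upper half is cleared afterwards (IEM_MC_CLEAR_HIGH_GREG_U64_BY_REF), and the source width follows the effective operand size. A hypothetical reference model of that behaviour, reusing the Crc32CUpdateByte sketch from the previous file (an illustration, not VirtualBox code), could look like this:

    /* Hypothetical reference model for CRC32 Gv, Ev; cbSrc is 2, 4 or 8 depending
       on the effective operand size. */
    #include <stdint.h>

    uint32_t Crc32CUpdateByte(uint32_t uCrc, uint8_t bData);   /* bitwise CRC-32C step sketched earlier */

    static uint64_t RefCrc32GvEv(uint64_t uDstGpr, uint64_t uSrc, unsigned cbSrc)
    {
        uint32_t uCrc = (uint32_t)uDstGpr;          /* only the low 32 bits of the destination participate */
        for (unsigned iByte = 0; iByte < cbSrc; iByte++)
            uCrc = Crc32CUpdateByte(uCrc, (uint8_t)(uSrc >> (iByte * 8)));   /* least significant byte first */
        return uCrc;                                /* bits 63:32 of the destination end up zero */
    }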
trunk/src/VBox/VMM/include/IEMInternal.h
(diff r95458 → r95460)

@@ line 1843 @@
  IEM_DECL_IMPL_DEF(void, iemAImpl_vmovddup_256_rm,(PX86XSAVEAREA pXState, uint8_t iYRegDst, PCRTUINT256U pSrc));

+ /** @} */
+
+ /** @name Media Odds and Ends
+  * @{ */
+ typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U8,(uint32_t *puDst, uint8_t uSrc));
+ typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U16,(uint32_t *puDst, uint16_t uSrc));
+ typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U32,(uint32_t *puDst, uint32_t uSrc));
+ typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLCR32U64,(uint32_t *puDst, uint64_t uSrc));
+ FNIEMAIMPLCR32U8  iemAImpl_crc32_u8,  iemAImpl_crc32_u8_fallback;
+ FNIEMAIMPLCR32U16 iemAImpl_crc32_u16, iemAImpl_crc32_u16_fallback;
+ FNIEMAIMPLCR32U32 iemAImpl_crc32_u32, iemAImpl_crc32_u32_fallback;
+ FNIEMAIMPLCR32U64 iemAImpl_crc32_u64, iemAImpl_crc32_u64_fallback;
  /** @} */
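Each operand width thus gets a pair of workers: the assembly one from IEMAllAImpl.asm and the C fallback from IEMAllAImplC.cpp, with the decoder choosing between them via IEM_SELECT_HOST_OR_FALLBACK. A rough usage sketch of the declarations above (fHostHasSse42 is a made-up stand-in for whatever host-feature test that macro performs in the real code) might be:

    /* Sketch only: fHostHasSse42 is a hypothetical flag, not a VirtualBox symbol. */
    static uint32_t iemExampleCrc32U32(bool fHostHasSse42, uint32_t uCrc, uint32_t uVal)
    {
        FNIEMAIMPLCR32U32 *pfnWorker = fHostHasSse42
                                     ? iemAImpl_crc32_u32             /* assembly worker, host crc32 instruction */
                                     : iemAImpl_crc32_u32_fallback;   /* portable RTCrc32CProcess-based worker */
        pfnWorker(&uCrc, uVal);
        return uCrc;                                                  /* updated CRC-32C accumulator */
    }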