Changeset 104209 in vbox
- Timestamp: Apr 5, 2024 9:31:16 PM
- Location: trunk/src/VBox/VMM
- Files: 4 edited
--- trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r104208)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllAImpl.asm (r104209)
@@ -1083 +1083 @@
 ; where the 64-bit accesses requires hand coding.
 ;
-; All the functions takes a pointer to the destination memory operand in A0,
-; the source register operand in A1 and a pointer to eflags in A2.
+; All the functions takes a pointer to the destination memory operand in A1,
+; the source register operand in A2 and incoming EFLAGS in A0. Updated EFLAGS
+; are returned in EAX.
 ;
 ; @param 1 The instruction mnemonic.
@@ -1094 +1095 @@
 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u32, 12
         PROLOGUE_4_ARGS
-        IEM_MAYBE_LOAD_FLAGS_OLD    A2, %2, %3, 0 ;; @todo check if any undefined flags are passed thru
-        mov     T0_32, [A0]
-        %1      T0_32, A1_32
-        mov     [A0], T0_32
-        IEM_SAVE_FLAGS_OLD          A2, %2, %3, %4
+        IEM_MAYBE_LOAD_FLAGS        A0_32, %2, %3, 0 ;; @todo check if any undefined flags are passed thru
+        mov     T0_32, [A1]
+        %1      T0_32, A2_32
+        mov     [A1], T0_32
+        IEM_SAVE_FLAGS_RETVAL       A0_32, %2, %3, %4
         EPILOGUE_4_ARGS
 ENDPROC iemAImpl_ %+ %1 %+ _u32
@@ -1105 +1106 @@
 BEGINPROC_FASTCALL iemAImpl_ %+ %1 %+ _u64, 12
         PROLOGUE_4_ARGS
-        IEM_MAYBE_LOAD_FLAGS_OLD    A2, %2, %3, 0
-        mov     T0, [A0]
-        %1      T0, A1
-        mov     [A0], T0
-        IEM_SAVE_FLAGS_OLD          A2, %2, %3, %4
+        IEM_MAYBE_LOAD_FLAGS        A0_32, %2, %3, 0
+        mov     T0, [A1]
+        %1      T0, A2
+        mov     [A1], T0
+        IEM_SAVE_FLAGS_RETVAL       A0_32, %2, %3, %4
         EPILOGUE_4_ARGS
 ENDPROC iemAImpl_ %+ %1 %+ _u64
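Editor's note: taken together with the C changes below, the template change above amounts to a new calling convention for these binary-operator workers: the incoming EFLAGS is passed by value in A0 (the first argument register), the destination pointer and source operand move to A1 and A2, and the updated EFLAGS is returned in EAX. A rough C-level sketch of the before/after shape; the typedef names here are illustrative, the real ones appear in the IEMInternal.h hunk further down:

    #include <stdint.h>

    /* r104208 and earlier: EFLAGS read and updated through a pointer (A2). */
    typedef void     FNBINU32_OLD(uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pfEFlags);

    /* r104209: EFLAGS in by value (A0), destination/source in A1/A2,
       updated EFLAGS returned (EAX). */
    typedef uint32_t FNBINU32_NEW(uint32_t fEFlags, uint32_t *pu32Dst, uint32_t u32Src);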
--- trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r104208)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllAImplC.cpp (r104209)
@@ -1821 +1821 @@
  */
 #define EMIT_BLSR(a_cBits, a_Type, a_Suffix) \
-IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsr_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
+IEM_DECL_IMPL_DEF(uint32_t, RT_CONCAT3(iemAImpl_blsr_u,a_cBits,a_Suffix),(uint32_t fEFlags, a_Type *puDst, a_Type uSrc)) \
 { \
-    uint32_t fEfl1 = *pfEFlags; \
-    uint32_t fEfl2 = fEfl1; \
     *puDst = uSrc; \
-    fEfl1 = iemAImpl_sub_u ## a_cBits(fEfl1, &uSrc, 1); \
-    fEfl2 = iemAImpl_and_u ## a_cBits(fEfl2, puDst, uSrc); \
+    uint32_t fEfl1 = iemAImpl_sub_u ## a_cBits(fEFlags, &uSrc, 1); \
+    uint32_t fEfl2 = iemAImpl_and_u ## a_cBits(fEFlags, puDst, uSrc); \
     \
     /* AMD: The carry flag is from the SUB operation. */ \
@@ -1833 +1831 @@
     fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
     fEfl2 |= fEfl1 & X86_EFL_CF; \
-    *pfEFlags = fEfl2; \
+    return fEfl2; \
 }
@@ -1849 +1847 @@
  */
 #define EMIT_BLSMSK(a_cBits, a_Type, a_Suffix) \
-IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsmsk_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
+IEM_DECL_IMPL_DEF(uint32_t, RT_CONCAT3(iemAImpl_blsmsk_u,a_cBits,a_Suffix),(uint32_t fEFlags, a_Type *puDst, a_Type uSrc)) \
 { \
-    uint32_t fEfl1 = *pfEFlags; \
-    uint32_t fEfl2 = fEfl1; \
     *puDst = uSrc; \
-    fEfl1 = iemAImpl_sub_u ## a_cBits(fEfl1, &uSrc, 1); \
-    fEfl2 = iemAImpl_xor_u ## a_cBits(fEfl2, puDst, uSrc); \
+    uint32_t fEfl1 = iemAImpl_sub_u ## a_cBits(fEFlags, &uSrc, 1); \
+    uint32_t fEfl2 = iemAImpl_xor_u ## a_cBits(fEFlags, puDst, uSrc); \
     \
     /* AMD: The carry flag is from the SUB operation. */ \
@@ -1861 +1857 @@
     fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
     fEfl2 |= fEfl1 & X86_EFL_CF; \
-    *pfEFlags = fEfl2; \
+    return fEfl2; \
 }
@@ -1877 +1873 @@
  */
 #define EMIT_BLSI(a_cBits, a_Type, a_Suffix) \
-IEM_DECL_IMPL_DEF(void, RT_CONCAT3(iemAImpl_blsi_u,a_cBits,a_Suffix),(a_Type *puDst, a_Type uSrc, uint32_t *pfEFlags)) \
+IEM_DECL_IMPL_DEF(uint32_t, RT_CONCAT3(iemAImpl_blsi_u,a_cBits,a_Suffix),(uint32_t fEFlags, a_Type *puDst, a_Type uSrc)) \
 { \
-    uint32_t fEfl1 = *pfEFlags; \
-    uint32_t fEfl2 = fEfl1; \
+    uint32_t fEfl1 = fEFlags; \
     *puDst = uSrc; \
     iemAImpl_neg_u ## a_cBits(&uSrc, &fEfl1); \
-    fEfl2 = iemAImpl_and_u ## a_cBits(fEfl2, puDst, uSrc); \
+    uint32_t fEfl2 = iemAImpl_and_u ## a_cBits(fEFlags, puDst, uSrc); \
     \
     /* AMD: The carry flag is from the SUB operation. */ \
@@ -1889 +1884 @@
     fEfl2 &= ~(X86_EFL_CF | X86_EFL_PF); \
     fEfl2 |= fEfl1 & X86_EFL_CF; \
-    *pfEFlags = fEfl2; \
+    return fEfl2; \
 }
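Editor's note: after the rewrite each of these macros still instantiates, per operand width and vendor suffix, a worker of the new shape uint32_t iemAImpl_blsr_uNN[_fallback](uint32_t fEFlags, ..., ...), deriving the flags from the existing sub/and/xor helpers. For reference, the value semantics the three BMI1 operations implement can be written in plain C as follows (a standalone sketch, not VBox code):

    #include <stdint.h>

    /* Standalone illustration of the BMI1 operations the EMIT_BLS* macros model
       (value semantics only; the VBox code additionally merges EFLAGS from the
       underlying SUB and AND/XOR workers as shown in the diff above). */
    static inline uint32_t blsr32  (uint32_t uSrc) { return uSrc & (uSrc - 1);  } /* reset lowest set bit      */
    static inline uint32_t blsmsk32(uint32_t uSrc) { return uSrc ^ (uSrc - 1);  } /* mask up to lowest set bit */
    static inline uint32_t blsi32  (uint32_t uSrc) { return uSrc & (0u - uSrc); } /* isolate lowest set bit    */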
--- trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h (r104192)
+++ trunk/src/VBox/VMM/VMMAll/IEMAllInstVexMap2.cpp.h (r104209)
@@ -1822 +1822 @@
         IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
         IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
-        IEM_MC_ARG(uint64_t *,          pDst,    0); \
-        IEM_MC_ARG(uint64_t,            uSrc,    1); \
-        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
+        IEM_MC_ARG(uint64_t,            uSrc,    2); \
         IEM_MC_FETCH_GREG_U64(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+        IEM_MC_ARG(uint64_t *,          pDst,    1); \
         IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
-        IEM_MC_REF_EFLAGS(pEFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
-                                                             iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
+        IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
+        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
+                            IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
+                                                        iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
+        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
         IEM_MC_END(); \
@@ -1837 +1838 @@
         IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
         IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
-        IEM_MC_ARG(uint32_t *,          pDst,    0); \
-        IEM_MC_ARG(uint32_t,            uSrc,    1); \
-        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
+        IEM_MC_ARG(uint32_t,            uSrc,    2); \
         IEM_MC_FETCH_GREG_U32(uSrc, IEM_GET_MODRM_RM(pVCpu, bRm)); \
+        IEM_MC_ARG(uint32_t *,          pDst,    1); \
         IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
-        IEM_MC_REF_EFLAGS(pEFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
-                                                             iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
+        IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
+        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
+                            IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
+                                                        iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
         IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
         IEM_MC_END(); \
@@ -1858 +1860 @@
     { \
         IEM_MC_BEGIN(IEM_MC_F_64BIT, 0); \
-        IEM_MC_ARG(uint64_t *,          pDst,    0); \
-        IEM_MC_ARG(uint64_t,            uSrc,    1); \
-        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
         IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
+        \
+        IEM_MC_ARG(uint64_t,            uSrc,    2); \
         IEM_MC_FETCH_MEM_U64(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+        IEM_MC_ARG(uint64_t *,          pDst,    1); \
         IEM_MC_REF_GREG_U64(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
-        IEM_MC_REF_EFLAGS(pEFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
-                                                             iemAImpl_ ## a_Instr ## _u64_fallback), pDst, uSrc, pEFlags); \
+        IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
+        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
+                            IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u64, \
+                                                        iemAImpl_ ## a_Instr ## _u64_fallback), fEFlagsIn, pDst, uSrc); \
+        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
         IEM_MC_END(); \
@@ -1875 +1879 @@
     { \
         IEM_MC_BEGIN(IEM_MC_F_NOT_286_OR_OLDER, 0); \
-        IEM_MC_ARG(uint32_t *,          pDst,    0); \
-        IEM_MC_ARG(uint32_t,            uSrc,    1); \
-        IEM_MC_ARG(uint32_t *,          pEFlags, 2); \
         IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
         IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm, 0); \
         IEMOP_HLP_DONE_VEX_DECODING_L0_EX(fBmi1); \
+        \
+        IEM_MC_ARG(uint32_t,            uSrc,    2); \
         IEM_MC_FETCH_MEM_U32(uSrc, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
+        IEM_MC_ARG(uint32_t *,          pDst,    1); \
         IEM_MC_REF_GREG_U32(pDst, IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
-        IEM_MC_REF_EFLAGS(pEFlags); \
-        IEM_MC_CALL_VOID_AIMPL_3(IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
-                                                             iemAImpl_ ## a_Instr ## _u32_fallback), pDst, uSrc, pEFlags); \
+        IEM_MC_ARG_EFLAGS(              fEFlagsIn, 0); \
+        IEM_MC_CALL_AIMPL_3(uint32_t, fEFlagsRet, \
+                            IEM_SELECT_HOST_OR_FALLBACK(fBmi1, iemAImpl_ ## a_Instr ## _u32, \
+                                                        iemAImpl_ ## a_Instr ## _u32_fallback), fEFlagsIn, pDst, uSrc); \
         IEM_MC_CLEAR_HIGH_GREG_U64(IEM_GET_EFFECTIVE_VVVV(pVCpu)); \
+        IEM_MC_COMMIT_EFLAGS(fEFlagsRet); \
         IEM_MC_ADVANCE_RIP_AND_FINISH(); \
         IEM_MC_END(); \
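Editor's note: the decoder-level change mirrors the new worker signature. Instead of taking a reference to the guest EFLAGS (IEM_MC_REF_EFLAGS) and calling a void worker, the microcode now passes EFLAGS in by value (IEM_MC_ARG_EFLAGS), captures the worker's return value (IEM_MC_CALL_AIMPL_3) and commits it (IEM_MC_COMMIT_EFLAGS). A small standalone model of that flow for the 32-bit register form, with a toy CPU state and a toy worker standing in for the IEM machinery:

    #include <stdint.h>

    /* Toy state and worker; not VBox types, just to illustrate the data flow. */
    typedef struct { uint64_t aGRegs[16]; uint32_t fEFlags; } TOYCPU;

    static uint32_t toy_blsr_u32(uint32_t fEFlagsIn, uint32_t *puDst, uint32_t uSrc)
    {
        *puDst = uSrc & (uSrc - 1);      /* value result; flag computation omitted */
        return fEFlagsIn;
    }

    static void toy_vex_blsr_reg(TOYCPU *pCpu, unsigned iRegSrc, unsigned iRegDst)
    {
        uint32_t uSrc       = (uint32_t)pCpu->aGRegs[iRegSrc];      /* IEM_MC_FETCH_GREG_U32                       */
        uint32_t uDst       = (uint32_t)pCpu->aGRegs[iRegDst];      /* IEM_MC_REF_GREG_U32 (modeled by value here) */
        uint32_t fEFlagsIn  = pCpu->fEFlags;                        /* IEM_MC_ARG_EFLAGS                           */
        uint32_t fEFlagsRet = toy_blsr_u32(fEFlagsIn, &uDst, uSrc); /* IEM_MC_CALL_AIMPL_3                         */
        pCpu->aGRegs[iRegDst] = uDst;                               /* IEM_MC_CLEAR_HIGH_GREG_U64: zero-extend     */
        pCpu->fEFlags         = fEFlagsRet;                         /* IEM_MC_COMMIT_EFLAGS                        */
    }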
--- trunk/src/VBox/VMM/include/IEMInternal.h (r104208)
+++ trunk/src/VBox/VMM/include/IEMInternal.h (r104209)
@@ -2601 +2601 @@
 FNIEMAIMPLBINU8 iemAImpl_xor_u8, iemAImpl_xor_u8_locked;
 FNIEMAIMPLBINU8 iemAImpl_and_u8, iemAImpl_and_u8_locked;
-
-typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINTODOU8, (uint8_t *pu8Dst, uint8_t u8Src, uint32_t *pEFlags));
-typedef FNIEMAIMPLBINTODOU8 *PFNIEMAIMPLBINTODOU8;
 /** @} */
@@ -2617 +2614 @@
 FNIEMAIMPLBINU16 iemAImpl_xor_u16, iemAImpl_xor_u16_locked;
 FNIEMAIMPLBINU16 iemAImpl_and_u16, iemAImpl_and_u16_locked;
-
-typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINTODOU16, (uint16_t *pu16Dst, uint16_t u16Src, uint32_t *pEFlags));
-typedef FNIEMAIMPLBINTODOU16 *PFNIEMAIMPLBINTODOU16;
 /** @} */
@@ -2634 +2628 @@
 FNIEMAIMPLBINU32 iemAImpl_xor_u32, iemAImpl_xor_u32_locked;
 FNIEMAIMPLBINU32 iemAImpl_and_u32, iemAImpl_and_u32_locked;
-
-typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINTODOU32, (uint32_t *pu32Dst, uint32_t u32Src, uint32_t *pEFlags));
-typedef FNIEMAIMPLBINTODOU32 *PFNIEMAIMPLBINTODOU32;
-FNIEMAIMPLBINTODOU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
-FNIEMAIMPLBINTODOU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
-FNIEMAIMPLBINTODOU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
+FNIEMAIMPLBINU32 iemAImpl_blsi_u32, iemAImpl_blsi_u32_fallback;
+FNIEMAIMPLBINU32 iemAImpl_blsr_u32, iemAImpl_blsr_u32_fallback;
+FNIEMAIMPLBINU32 iemAImpl_blsmsk_u32, iemAImpl_blsmsk_u32_fallback;
 /** @} */
@@ -2653 +2644 @@
 FNIEMAIMPLBINU64 iemAImpl_xor_u64, iemAImpl_xor_u64_locked;
 FNIEMAIMPLBINU64 iemAImpl_and_u64, iemAImpl_and_u64_locked;
-
-typedef IEM_DECL_IMPL_TYPE(void, FNIEMAIMPLBINTODOU64, (uint64_t *pu64Dst, uint64_t u64Src, uint32_t *pEFlags));
-typedef FNIEMAIMPLBINTODOU64 *PFNIEMAIMPLBINTODOU64;
-FNIEMAIMPLBINTODOU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
-FNIEMAIMPLBINTODOU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
-FNIEMAIMPLBINTODOU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
+FNIEMAIMPLBINU64 iemAImpl_blsi_u64, iemAImpl_blsi_u64_fallback;
+FNIEMAIMPLBINU64 iemAImpl_blsr_u64, iemAImpl_blsr_u64_fallback;
+FNIEMAIMPLBINU64 iemAImpl_blsmsk_u64, iemAImpl_blsmsk_u64_fallback;
 /** @} */
@@ -3984 +3972 @@
 /** Pointer to a binary operator function table. */
 typedef IEMOPBINSIZES const *PCIEMOPBINSIZES;
-
-
-/**
- * Function table for a binary operator providing implementation based on
- * operand size.
- */
-typedef struct IEMOPBINTODOSIZES
-{
-    PFNIEMAIMPLBINTODOU8 pfnNormalU8, pfnLockedU8;
-    PFNIEMAIMPLBINTODOU16 pfnNormalU16, pfnLockedU16;
-    PFNIEMAIMPLBINTODOU32 pfnNormalU32, pfnLockedU32;
-    PFNIEMAIMPLBINTODOU64 pfnNormalU64, pfnLockedU64;
-} IEMOPBINTODOSIZES;
-/** Pointer to a binary operator function table. */
-typedef IEMOPBINTODOSIZES const *PCIEMOPBINTODOSIZES;
-
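Editor's note: with the dedicated FNIEMAIMPLBINTODO* typedefs and the IEMOPBINTODOSIZES table removed, the BMI1 workers are declared with the same FNIEMAIMPLBINU32/FNIEMAIMPLBINU64 types as add/and/xor, so a single function-pointer shape can store and invoke any of them. A minimal standalone sketch of that idea; the type and table names below are illustrative, not the actual IEMOPBINSIZES definition:

    #include <stdint.h>

    /* Illustrative only: one signature now covers both the classic binary ops
       and the BMI1 workers; EFLAGS in by value, updated EFLAGS returned. */
    typedef uint32_t FNBINU32(uint32_t fEFlags, uint32_t *pu32Dst, uint32_t u32Src);
    typedef FNBINU32 *PFNBINU32;

    typedef struct BINOPENTRY32
    {
        const char *pszName;     /* e.g. "blsr"                                  */
        PFNBINU32   pfnNormal;   /* e.g. iemAImpl_blsr_u32 (host-assisted)       */
        PFNBINU32   pfnFallback; /* e.g. iemAImpl_blsr_u32_fallback (portable C) */
    } BINOPENTRY32;

Invocation is then fEFlags = pEntry->pfnFallback(fEFlags, &uDst, uSrc); regardless of which operator the entry describes.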