Changeset 102850 in vbox for trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
- Timestamp: Jan 12, 2024 12:47:47 AM (11 months ago)
- File: 1 edited
Legend:
- Unmodified
- Added
- Removed
trunk/src/VBox/VMM/include/IEMN8veRecompilerEmit.h
r102803 r102850 710 710 * Emits a store of a GPR value to a 64-bit VCpu field. 711 711 */ 712 DECL_FORCE_INLINE_THROW(uint32_t) 713 iemNativeEmitStoreGprToVCpuU64Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGpr, uint32_t offVCpu, 714 uint8_t iGprTmp = UINT8_MAX) 715 { 716 #ifdef RT_ARCH_AMD64 717 /* mov mem64, reg64 */ 718 if (iGpr < 8) 719 pCodeBuf[off++] = X86_OP_REX_W; 720 else 721 pCodeBuf[off++] = X86_OP_REX_W | X86_OP_REX_R; 722 pCodeBuf[off++] = 0x89; 723 off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, iGpr, offVCpu); 724 RT_NOREF(iGprTmp); 725 726 #elif defined(RT_ARCH_ARM64) 727 off = iemNativeEmitGprByVCpuLdStEx(pReNative, off, iGpr, offVCpu, kArmv8A64InstrLdStType_St_Dword, sizeof(uint64_t), iGprTmp); 728 729 #else 730 # error "port me" 731 #endif 732 return off; 733 } 734 735 736 /** 737 * Emits a store of a GPR value to a 64-bit VCpu field. 738 */ 712 739 DECL_INLINE_THROW(uint32_t) 713 740 iemNativeEmitStoreGprToVCpuU64(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGpr, uint32_t offVCpu) 714 741 { 715 742 #ifdef RT_ARCH_AMD64 716 /* mov mem64, reg64 */ 717 uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 7); 718 if (iGpr < 8) 719 pbCodeBuf[off++] = X86_OP_REX_W; 720 else 721 pbCodeBuf[off++] = X86_OP_REX_W | X86_OP_REX_R; 722 pbCodeBuf[off++] = 0x89; 723 off = iemNativeEmitGprByVCpuDisp(pbCodeBuf,off,iGpr, offVCpu); 724 IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 725 726 #elif defined(RT_ARCH_ARM64) 727 off = iemNativeEmitGprByVCpuLdSt(pReNative, off, iGpr, offVCpu, kArmv8A64InstrLdStType_St_Dword, sizeof(uint64_t)); 728 729 #else 730 # error "port me" 731 #endif 743 off = iemNativeEmitStoreGprToVCpuU64Ex(iemNativeInstrBufEnsure(pReNative, off, 7), off, iGpr, offVCpu); 744 #elif defined(RT_ARCH_ARM64) 745 off = iemNativeEmitStoreGprToVCpuU64Ex(iemNativeInstrBufEnsure(pReNative, off, 5), off, iGpr, offVCpu, 746 IEMNATIVE_REG_FIXED_TMP0); 747 #else 748 # error "port me" 749 #endif 750 
IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off); 732 751 return off; 733 752 } … … 802 821 #elif defined(RT_ARCH_ARM64) 803 822 off = iemNativeEmitGprByVCpuLdSt(pReNative, off, iGpr, offVCpu, kArmv8A64InstrLdStType_St_Byte, sizeof(uint8_t)); 823 824 #else 825 # error "port me" 826 #endif 827 return off; 828 } 829 830 831 /** 832 * Emits a store of an immediate value to a 16-bit VCpu field. 833 * 834 * @note ARM64: A idxTmp1 is always required! The idxTmp2 depends on whehter the 835 * offset can be encoded as an immediate or not. The @a offVCpu immediate 836 * range is 0..8190 bytes from VMCPU and the same from CPUMCPU. 837 */ 838 DECL_FORCE_INLINE_THROW(uint32_t) 839 iemNativeEmitStoreImmToVCpuU16Ex(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint16_t uImm, uint32_t offVCpu, 840 uint8_t idxTmp1 = UINT8_MAX, uint8_t idxTmp2 = UINT8_MAX) 841 { 842 #ifdef RT_ARCH_AMD64 843 /* mov mem16, imm16 */ 844 pCodeBuf[off++] = X86_OP_PRF_SIZE_OP; 845 pCodeBuf[off++] = 0xc7; 846 off = iemNativeEmitGprByVCpuDisp(pCodeBuf, off, 0, offVCpu); 847 pCodeBuf[off++] = RT_BYTE1(uImm); 848 pCodeBuf[off++] = RT_BYTE2(uImm); 849 RT_NOREF(idxTmp1, idxTmp2); 850 851 #elif defined(RT_ARCH_ARM64) 852 if (idxTmp1 != UINT8_MAX) 853 { 854 pCodeBuf[off++] = Armv8A64MkInstrMovZ(idxTmp1, uImm); 855 off = iemNativeEmitGprByVCpuLdStEx(pCodeBuf, off, idxTmp1, offVCpu, kArmv8A64InstrLdStType_St_Half, 856 sizeof(uint16_t), idxTmp2); 857 } 858 else 859 # ifdef IEM_WITH_THROW_CATCH 860 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IEM_IPE_9)); 861 # else 862 AssertReleaseFailedStmt(off = UINT32_MAX); 863 # endif 804 864 805 865 #else … … 3857 3917 * and ARM64 hosts. 
3858 3918 */ 3859 DECL_ INLINE_THROW(uint32_t)3860 iemNativeEmitAndGprByGpr (PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, bool fSetFlags = false)3919 DECL_FORCE_INLINE(uint32_t) 3920 iemNativeEmitAndGprByGprEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, bool fSetFlags = false) 3861 3921 { 3862 3922 #if defined(RT_ARCH_AMD64) 3863 3923 /* and Gv, Ev */ 3864 uint8_t *pbCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 3); 3865 pbCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B); 3866 pbCodeBuf[off++] = 0x23; 3867 pbCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, iGprDst & 7, iGprSrc & 7); 3924 pCodeBuf[off++] = X86_OP_REX_W | (iGprDst < 8 ? 0 : X86_OP_REX_R) | (iGprSrc < 8 ? 0 : X86_OP_REX_B); 3925 pCodeBuf[off++] = 0x23; 3926 pCodeBuf[off++] = X86_MODRM_MAKE(X86_MOD_REG, iGprDst & 7, iGprSrc & 7); 3868 3927 RT_NOREF(fSetFlags); 3869 3928 3870 3929 #elif defined(RT_ARCH_ARM64) 3871 uint32_t *pu32CodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);3872 3930 if (!fSetFlags) 3873 pu32CodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc); 3874 else 3875 pu32CodeBuf[off++] = Armv8A64MkInstrAnds(iGprDst, iGprDst, iGprSrc); 3876 3931 pCodeBuf[off++] = Armv8A64MkInstrAnd(iGprDst, iGprDst, iGprSrc); 3932 else 3933 pCodeBuf[off++] = Armv8A64MkInstrAnds(iGprDst, iGprDst, iGprSrc); 3934 3935 #else 3936 # error "Port me" 3937 #endif 3938 return off; 3939 } 3940 3941 3942 /** 3943 * Emits code for AND'ing two 64-bit GPRs. 3944 * 3945 * @note When fSetFlags=true, JZ/JNZ jumps can be used afterwards on both AMD64 3946 * and ARM64 hosts. 
3947 */ 3948 DECL_INLINE_THROW(uint32_t) 3949 iemNativeEmitAndGprByGpr(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, bool fSetFlags = false) 3950 { 3951 #if defined(RT_ARCH_AMD64) 3952 off = iemNativeEmitAndGprByGprEx(iemNativeInstrBufEnsure(pReNative, off, 3), off, iGprDst, iGprSrc, fSetFlags); 3953 #elif defined(RT_ARCH_ARM64) 3954 off = iemNativeEmitAndGprByGprEx(iemNativeInstrBufEnsure(pReNative, off, 1), off, iGprDst, iGprSrc, fSetFlags); 3877 3955 #else 3878 3956 # error "Port me" … … 4087 4165 } 4088 4166 4167 4168 /** 4169 * Emits code for AND'ing an 64-bit GPRs with a constant. 4170 * 4171 * @note For ARM64 any complicated immediates w/o a AND/ANDS compatible 4172 * encoding will assert / throw exception if @a iGprDst and @a iGprSrc are 4173 * the same. 4174 */ 4175 DECL_FORCE_INLINE_THROW(uint32_t) 4176 iemNativeEmitGprEqGprAndImmEx(PIEMNATIVEINSTR pCodeBuf, uint32_t off, uint8_t iGprDst, uint8_t iGprSrc, uint64_t uImm, 4177 bool fSetFlags = false) 4178 { 4179 #if defined(RT_ARCH_AMD64) 4180 off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, iGprDst, uImm); 4181 off = iemNativeEmitAndGprByGprEx(pCodeBuf, off, iGprDst, iGprSrc); 4182 RT_NOREF(fSetFlags); 4183 4184 #elif defined(RT_ARCH_ARM64) 4185 uint32_t uImmR = 0; 4186 uint32_t uImmNandS = 0; 4187 if (Armv8A64ConvertMask64ToImmRImmS(uImm, &uImmNandS, &uImmR)) 4188 { 4189 if (!fSetFlags) 4190 pCodeBuf[off++] = Armv8A64MkInstrAndImm(iGprDst, iGprSrc, uImmNandS, uImmR); 4191 else 4192 pCodeBuf[off++] = Armv8A64MkInstrAndsImm(iGprDst, iGprSrc, uImmNandS, uImmR); 4193 } 4194 else if (iGprDst != iGprSrc) 4195 { 4196 off = iemNativeEmitLoadGprImmEx(pCodeBuf, off, iGprDst, uImm); 4197 off = iemNativeEmitAndGprByGprEx(pCodeBuf, off, iGprDst, iGprSrc, fSetFlags); 4198 } 4199 else 4200 # ifdef IEM_WITH_THROW_CATCH 4201 AssertFailedStmt(IEMNATIVE_DO_LONGJMP(NULL, VERR_IEM_IPE_9)); 4202 # else 4203 AssertReleaseFailedStmt(off = UINT32_MAX); 4204 # endif 4205 4206 #else 4207 # error "Port 
me" 4208 #endif 4209 return off; 4210 } 4089 4211 4090 4212 /**
Note: See TracChangeset for help on using the changeset viewer.