Changeset 103799 in vbox for trunk/src/VBox/VMM/VMMAll

Timestamp: Mar 11, 2024, 10:23:37 PM
Location:  trunk/src/VBox/VMM/VMMAll
Files:     2 edited
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h (r103797 → r103799)

@@ -7787,4 +7787,4 @@
     IEMOP_MNEMONIC(test_al_Ib, "test al,Ib");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_AL_Ib(test, 0);
+    IEMOP_BODY_BINARY_AL_Ib(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }

@@ -7799,4 +7799,4 @@
     IEMOP_MNEMONIC(test_rAX_Iz, "test rAX,Iz");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-    IEMOP_BODY_BINARY_rAX_Iz_RO(test, 0);
+    IEMOP_BODY_BINARY_rAX_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }

@@ -14410,47 +14410,8 @@
  * @todo also /1
  */
-FNIEMOP_DEF_1(iemOp_grp3_test_Eb, uint8_t, bRm)
+FNIEMOP_DEF_1(iemOp_grp3_test_Eb_Ib, uint8_t, bRm)
 {
     IEMOP_MNEMONIC(test_Eb_Ib, "test Eb,Ib");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-
-    if (IEM_IS_MODRM_REG_MODE(bRm))
-    {
-        /* register access */
-        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
-        IEM_MC_BEGIN(3, 0, 0, 0);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-        IEM_MC_ARG(uint8_t *,       pu8Dst,           0);
-        IEM_MC_ARG_CONST(uint8_t,   u8Src,/*=*/u8Imm, 1);
-        IEM_MC_ARG(uint32_t *,      pEFlags,          2);
-        IEM_MC_REF_GREG_U8(pu8Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
-        IEM_MC_REF_EFLAGS(pEFlags);
-        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
-        IEM_MC_ADVANCE_RIP_AND_FINISH();
-        IEM_MC_END();
-    }
-    else
-    {
-        /* memory access. */
-        IEM_MC_BEGIN(3, 3, 0, 0);
-        IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
-        IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 1);
-
-        uint8_t u8Imm; IEM_OPCODE_GET_NEXT_U8(&u8Imm);
-        IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-        IEM_MC_LOCAL(uint8_t, bUnmapInfo);
-        IEM_MC_ARG(uint8_t const *, pu8Dst, 0);
-        IEM_MC_MEM_MAP_U8_RO(pu8Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
-
-        IEM_MC_ARG_CONST(uint8_t, u8Src, u8Imm, 1);
-        IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
-        IEM_MC_FETCH_EFLAGS(EFlags);
-        IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u8, pu8Dst, u8Src, pEFlags);
-
-        IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
-        IEM_MC_COMMIT_EFLAGS(EFlags);
-        IEM_MC_ADVANCE_RIP_AND_FINISH();
-        IEM_MC_END();
-    }
+    IEMOP_BODY_BINARY_Eb_Ib_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }

@@ -14711,7 +14672,7 @@
     {
         case 0:
-            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
+            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
         case 1:
-            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb, bRm);
+            return FNIEMOP_CALL_1(iemOp_grp3_test_Eb_Ib, bRm);
         case 2:
             return FNIEMOP_CALL_1(iemOp_grp3_not_Eb, bRm);

@@ -14780,136 +14741,8 @@
  * @opflclass logical
  */
-FNIEMOP_DEF_1(iemOp_grp3_test_Ev, uint8_t, bRm)
+FNIEMOP_DEF_1(iemOp_grp3_test_Ev_Iz, uint8_t, bRm)
 {
     IEMOP_MNEMONIC(test_Ev_Iv, "test Ev,Iv");
     IEMOP_VERIFICATION_UNDEFINED_EFLAGS(X86_EFL_AF);
-
-    if (IEM_IS_MODRM_REG_MODE(bRm))
-    {
-        /* register access */
-        switch (pVCpu->iem.s.enmEffOpSize)
-        {
-            case IEMMODE_16BIT:
-                IEM_MC_BEGIN(3, 0, 0, 0);
-                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_ARG(uint16_t *,      pu16Dst,            0);
-                IEM_MC_ARG_CONST(uint16_t,  u16Src,/*=*/u16Imm, 1);
-                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
-                IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
-                IEM_MC_REF_EFLAGS(pEFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            case IEMMODE_32BIT:
-                IEM_MC_BEGIN(3, 0, IEM_MC_F_MIN_386, 0);
-                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_ARG(uint32_t *,      pu32Dst,            0);
-                IEM_MC_ARG_CONST(uint32_t,  u32Src,/*=*/u32Imm, 1);
-                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
-                IEM_MC_REF_GREG_U32(pu32Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
-                IEM_MC_REF_EFLAGS(pEFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
-                /* No clearing the high dword here - test doesn't write back the result. */
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            case IEMMODE_64BIT:
-                IEM_MC_BEGIN(3, 0, IEM_MC_F_64BIT, 0);
-                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-                IEM_MC_ARG(uint64_t *,      pu64Dst,            0);
-                IEM_MC_ARG_CONST(uint64_t,  u64Src,/*=*/u64Imm, 1);
-                IEM_MC_ARG(uint32_t *,      pEFlags,            2);
-                IEM_MC_REF_GREG_U64(pu64Dst, IEM_GET_MODRM_RM(pVCpu, bRm));
-                IEM_MC_REF_EFLAGS(pEFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            IEM_NOT_REACHED_DEFAULT_CASE_RET();
-        }
-    }
-    else
-    {
-        /* memory access. */
-        switch (pVCpu->iem.s.enmEffOpSize)
-        {
-            case IEMMODE_16BIT:
-                IEM_MC_BEGIN(3, 3, 0, 0);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 2);
-
-                uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
-                IEM_MC_ARG(uint16_t const *, pu16Dst, 0);
-                IEM_MC_MEM_MAP_U16_RO(pu16Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
-
-                IEM_MC_ARG_CONST(uint16_t, u16Src, u16Imm, 1);
-                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
-                IEM_MC_FETCH_EFLAGS(EFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u16, pu16Dst, u16Src, pEFlags);
-
-                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
-                IEM_MC_COMMIT_EFLAGS(EFlags);
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            case IEMMODE_32BIT:
-                IEM_MC_BEGIN(3, 3, IEM_MC_F_MIN_386, 0);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
-
-                uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
-                IEM_MC_ARG(uint32_t const *, pu32Dst, 0);
-                IEM_MC_MEM_MAP_U32_RO(pu32Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
-
-                IEM_MC_ARG_CONST(uint32_t, u32Src, u32Imm, 1);
-                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
-                IEM_MC_FETCH_EFLAGS(EFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u32, pu32Dst, u32Src, pEFlags);
-
-                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
-                IEM_MC_COMMIT_EFLAGS(EFlags);
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            case IEMMODE_64BIT:
-                IEM_MC_BEGIN(3, 3, IEM_MC_F_64BIT, 0);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffDst);
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffDst, bRm, 4);
-
-                uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
-                IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-
-                IEM_MC_ARG(uint64_t const *, pu64Dst, 0);
-                IEM_MC_LOCAL(uint8_t, bUnmapInfo);
-                IEM_MC_MEM_MAP_U64_RO(pu64Dst, bUnmapInfo, pVCpu->iem.s.iEffSeg, GCPtrEffDst);
-
-                IEM_MC_ARG_CONST(uint64_t, u64Src, u64Imm, 1);
-                IEM_MC_ARG_LOCAL_EFLAGS( pEFlags, EFlags, 2);
-                IEM_MC_FETCH_EFLAGS(EFlags);
-                IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_test_u64, pu64Dst, u64Src, pEFlags);
-
-                IEM_MC_MEM_COMMIT_AND_UNMAP_RO(bUnmapInfo);
-                IEM_MC_COMMIT_EFLAGS(EFlags);
-                IEM_MC_ADVANCE_RIP_AND_FINISH();
-                IEM_MC_END();
-                break;
-
-            IEM_NOT_REACHED_DEFAULT_CASE_RET();
-        }
-    }
+    IEMOP_BODY_BINARY_Ev_Iz_RO(test, RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64);
 }

@@ -15003,6 +14836,6 @@
     switch (IEM_GET_MODRM_REG_8(bRm))
     {
-        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
-        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev, bRm);
+        case 0: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
+        case 1: return FNIEMOP_CALL_1(iemOp_grp3_test_Ev_Iz, bRm);
         case 2: return FNIEMOP_CALL_1(iemOp_grp3_not_Ev, bRm);
        case 3: return FNIEMOP_CALL_1(iemOp_grp3_neg_Ev, bRm);
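The two large hunks above are pure consolidation: the open-coded register/memory bodies for test Eb,Ib and test Ev,Iz are folded into the shared IEMOP_BODY_BINARY_Eb_Ib_RO / IEMOP_BODY_BINARY_Ev_Iz_RO decoder macros, and the second macro argument changes from 0 to RT_ARCH_VAL_AMD64 | RT_ARCH_VAL_ARM64 (presumably flagging the host architectures that now have a native emitter, which the second file adds). The instruction semantics are untouched: TEST ANDs the two operands, sets SF/ZF/PF from the result, clears OF and CF, leaves AF undefined, and never writes the result back, which is why the memory operand is mapped read-only and the high dword is not cleared in the 32-bit case. As a rough, self-contained illustration of what an iemAImpl_test_u8-style worker has to compute (a sketch, not the actual VBox implementation; the X86_EFL_* values below simply restate the architectural bit positions):

#include <stdint.h>

#define X86_EFL_CF  UINT32_C(0x0001)
#define X86_EFL_PF  UINT32_C(0x0004)
#define X86_EFL_AF  UINT32_C(0x0010)
#define X86_EFL_ZF  UINT32_C(0x0040)
#define X86_EFL_SF  UINT32_C(0x0080)
#define X86_EFL_OF  UINT32_C(0x0800)

/* Sketch only: the EFLAGS effect of 'test *pu8Dst, u8Src'.  The AND result is
   used for the flags and then discarded; OF/CF are cleared and AF, which is
   architecturally undefined after TEST, is simply cleared here. */
static void testU8Sketch(uint8_t const *pu8Dst, uint8_t u8Src, uint32_t *pfEFlags)
{
    uint8_t const uResult = *pu8Dst & u8Src;
    uint32_t      fEfl    = *pfEFlags
                          & ~(X86_EFL_CF | X86_EFL_PF | X86_EFL_AF | X86_EFL_ZF | X86_EFL_SF | X86_EFL_OF);
    if (!uResult)
        fEfl |= X86_EFL_ZF;
    if (uResult & 0x80)
        fEfl |= X86_EFL_SF;
    uint8_t bPar = uResult;              /* PF is set when the low byte has even parity */
    bPar ^= bPar >> 4;
    bPar ^= bPar >> 2;
    bPar ^= bPar >> 1;
    if (!(bPar & 1))
        fEfl |= X86_EFL_PF;
    *pfEFlags = fEfl;
}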
trunk/src/VBox/VMM/VMMAll/target-x86/IEMAllN8veEmit-x86.h (r103798 → r103799)

@@ -126,5 +126,6 @@
 {
     Assert(idxRegReg < 8); Assert(idxRegRm < 16);
-    if (cImmBits == 8 || uImmOp <= (uint64_t)0x7f)
+    if (   cImmBits == 8
+        || (uImmOp <= (uint64_t)0x7f && bOpcodeOtherImm8 != 0xcc))
     {
         switch (cOpBits)

@@ -136,5 +137,5 @@
                 if (idxRegRm >= 8)
                     pCodeBuf[off++] = X86_OP_REX_B;
-                pCodeBuf[off++] = bOpcodeOtherImm8;
+                pCodeBuf[off++] = bOpcodeOtherImm8; Assert(bOpcodeOtherImm8 != 0xcc);
                 break;
 
@@ -142,5 +143,5 @@
             case 64:
                 pCodeBuf[off++] = X86_OP_REX_W | (idxRegRm >= 8 ? X86_OP_REX_B : 0);
-                pCodeBuf[off++] = bOpcodeOtherImm8;
+                pCodeBuf[off++] = bOpcodeOtherImm8; Assert(bOpcodeOtherImm8 != 0xcc);
                 break;
 
@@ -150,5 +151,5 @@
                 else if (idxRegRm >= 4)
                     pCodeBuf[off++] = X86_OP_REX;
-                pCodeBuf[off++] = bOpcode8;
+                pCodeBuf[off++] = bOpcode8; Assert(bOpcode8 != 0xcc);
                 break;
         }

@@ -594,5 +595,50 @@
                                     uint8_t idxVarDst, uint64_t uImmOp, uint8_t idxVarEfl, uint8_t cOpBits, uint8_t cImmBits)
 {
-    RT_NOREF(pReNative, off, idxVarDst, uImmOp, idxVarEfl, cOpBits, cImmBits);
+    uint8_t const         idxRegDst = iemNativeVarRegisterAcquire(pReNative, idxVarDst, &off, true /*fInitialized*/);
+#ifdef RT_ARCH_AMD64
+    /* On AMD64 we just use the correctly size AND instruction harvest the EFLAGS. */
+    PIEMNATIVEINSTR const pCodeBuf  = iemNativeInstrBufEnsure(pReNative, off, 8);
+    off = iemNativeEmitAmd64OneByteModRmInstrRIEx(pCodeBuf, off, 0xf6, 0xcc, 0xf7, cOpBits, cImmBits, 0, idxRegDst, uImmOp);
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    iemNativeVarRegisterRelease(pReNative, idxVarDst);
+
+    off = iemNativeEmitEFlagsForLogical(pReNative, off, idxVarEfl);
+
+#elif defined(RT_ARCH_ARM64)
+    /* On ARM64 we use 32-bit AND for the 8-bit and 16-bit bit ones.  We also
+       need to keep the result in order to calculate the flags. */
+    uint8_t const         idxRegResult = iemNativeRegAllocTmp(pReNative, &off);
+    uint32_t              uImmSizeLen, uImmRotations;
+    if (  cOpBits > 32
+        ? Armv8A64ConvertMask64ToImmRImmS(uImmOp, &uImmSizeLen, &uImmRotations)
+        : Armv8A64ConvertMask32ToImmRImmS(uImmOp, &uImmSizeLen, &uImmRotations))
+    {
+        PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
+        if (cOpBits >= 32)
+            pCodeBuf[off++] = Armv8A64MkInstrAndsImm(idxRegResult, idxRegDst, uImmSizeLen, uImmRotations, cOpBits > 32 /*f64Bit*/);
+        else
+            pCodeBuf[off++] = Armv8A64MkInstrAndImm(idxRegResult, idxRegDst, uImmSizeLen, uImmRotations, cOpBits > 32 /*f64Bit*/);
+    }
+    else
+    {
+        uint8_t const idxRegTmpImm = iemNativeRegAllocTmpImm(pReNative, &off, uImmOp);
+        PIEMNATIVEINSTR const pCodeBuf = iemNativeInstrBufEnsure(pReNative, off, 1);
+        if (cOpBits >= 32)
+            pCodeBuf[off++] = Armv8A64MkInstrAnds(idxRegResult, idxRegDst, idxRegTmpImm, cOpBits > 32 /*f64Bit*/);
+        else
+            pCodeBuf[off++] = Armv8A64MkInstrAnd(idxRegResult, idxRegDst, idxRegTmpImm, cOpBits > 32 /*f64Bit*/);
+        iemNativeRegFreeTmpImm(pReNative, idxRegTmpImm);
+    }
+    IEMNATIVE_ASSERT_INSTR_BUF_ENSURE(pReNative, off);
+    iemNativeVarRegisterRelease(pReNative, idxVarDst);
+
+    off = iemNativeEmitEFlagsForLogical(pReNative, off, idxVarEfl, cOpBits, idxRegResult, cOpBits >= 32 /*fNativeFlags*/);
+
+    iemNativeRegFreeTmp(pReNative, idxRegResult);
+    RT_NOREF_PV(cImmBits)
+
+#else
+# error "Port me"
+#endif
     return off;
 }
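The guard on bOpcodeOtherImm8, the three new Asserts, and the 0xcc arguments in the new emitter body belong together: unlike the 0x80/0x81/0x83 ALU-with-immediate group, TEST r/m,imm has no sign-extended imm8 form (only F6 /0 ib for byte operands and F7 /0 iw/id for word/dword/qword), so the emitter call passes 0xcc (INT3) as a "no such encoding" sentinel in the imm8-opcode slot, and the emitter must never take the short-immediate shortcut for it. A rough standalone sketch of that sentinel pattern (hypothetical helper, not the VBox iemNative API), emitting a register-direct 64-bit ALU-with-immediate instruction:

#include <assert.h>
#include <stddef.h>
#include <stdint.h>

#define OPC_NONE 0xcc   /* INT3, used as an "encoding does not exist" marker */

/* Hypothetical emitter: writes "op r64, imm" for a low register (idxReg < 8)
   and picks the short sign-extended imm8 form only when the instruction has
   one.  bOpcImm8 is e.g. 0x83 for the AND/OR/XOR group and OPC_NONE for TEST. */
static size_t emitAluR64Imm(uint8_t *pbBuf, size_t off, uint8_t bOpcImm32,
                            uint8_t bOpcImm8, uint8_t bModRmDigit,
                            uint8_t idxReg, int32_t iImm)
{
    assert(idxReg < 8 && bModRmDigit < 8);
    pbBuf[off++] = 0x48;                                    /* REX.W */
    if (bOpcImm8 != OPC_NONE && iImm >= -128 && iImm <= 127)
    {
        pbBuf[off++] = bOpcImm8;                            /* short form, imm8 sign-extended */
        pbBuf[off++] = (uint8_t)(0xc0 | (bModRmDigit << 3) | idxReg);
        pbBuf[off++] = (uint8_t)iImm;
    }
    else
    {
        assert(bOpcImm32 != OPC_NONE);                      /* the sentinel must never be emitted */
        pbBuf[off++] = bOpcImm32;                           /* full imm32 form */
        pbBuf[off++] = (uint8_t)(0xc0 | (bModRmDigit << 3) | idxReg);
        for (unsigned i = 0; i < 4; i++)                    /* imm32, little endian */
            pbBuf[off++] = (uint8_t)((uint32_t)iImm >> (i * 8));
    }
    return off;
}

For AND one would pass bOpcImm32 = 0x81, bOpcImm8 = 0x83 and ModRM digit /4, so small immediates get the three-bytes-shorter encoding; for TEST the caller passes 0xf7, OPC_NONE and digit /0, which forces the full imm32 path exactly as the changed condition in the first hunk does.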