Changeset 36841 in vbox for trunk/src/VBox/VMM/VMMAll
- Timestamp: Apr 26, 2011 12:09:06 AM (14 years ago)
- Location: trunk/src/VBox/VMM/VMMAll
- Files: 3 edited
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r36840 → r36841

 #define IEM_MC_STORE_GREG_U32(a_iGReg, a_u32Value)  *(uint64_t *)iemGRegRef(pIemCpu, (a_iGReg)) = (uint32_t)(a_u32Value) /* clear high bits. */
 #define IEM_MC_STORE_GREG_U64(a_iGReg, a_u64Value)  *(uint64_t *)iemGRegRef(pIemCpu, (a_iGReg)) = (a_u64Value)
+#define IEM_MC_CLEAR_HIGH_GREG_U64(a_iGReg)         *(uint64_t *)iemGRegRef(pIemCpu, (a_iGReg)) &= UINT32_MAX

 #define IEM_MC_REF_GREG_U8(a_pu8Dst, a_iGReg)       (a_pu8Dst) = iemGRegRefU8(pIemCpu, (a_iGReg))
…
 #define IEM_MC_DEFER_TO_CIMPL_2(a_pfnCImpl, a0, a1) (a_pfnCImpl)(pIemCpu, pIemCpu->offOpcode, a0, a1)

+/**
+ * Defers the entire instruction emulation to a C implementation routine and
+ * returns, taking three arguments in addition to the standard ones.
+ *
+ * This shall be used without any IEM_MC_BEGIN or IEM_END macro surrounding it.
+ *
+ * @param   a_pfnCImpl  The pointer to the C routine.
+ * @param   a0          The first extra argument.
+ * @param   a1          The second extra argument.
+ * @param   a2          The third extra argument.
+ */
+#define IEM_MC_DEFER_TO_CIMPL_3(a_pfnCImpl, a0, a1, a2) (a_pfnCImpl)(pIemCpu, pIemCpu->offOpcode, a0, a1, a2)
+
 #define IEM_MC_IF_EFL_BIT_SET(a_fBit)               if (pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit)) {
+#define IEM_MC_IF_EFL_BIT_NOT_SET(a_fBit)           if (!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit))) {
 #define IEM_MC_IF_EFL_ANY_BITS_SET(a_fBits)         if (pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBits)) {
+#define IEM_MC_IF_EFL_NO_BITS_SET(a_fBits)          if (!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBits))) {
 #define IEM_MC_IF_EFL_BITS_NE(a_fBit1, a_fBit2) \
     if (   !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit1)) \
         != !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit2)) ) {
+#define IEM_MC_IF_EFL_BITS_EQ(a_fBit1, a_fBit2) \
+    if (   !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit1)) \
+        == !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit2)) ) {
 #define IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(a_fBit, a_fBit1, a_fBit2) \
     if (   (pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit)) \
         || !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit1)) \
         != !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit2)) ) {
+#define IEM_MC_IF_EFL_BIT_NOT_SET_OR_BITS_EQ(a_fBit, a_fBit1, a_fBit2) \
+    if (   !(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit)) \
+        || !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit1)) \
+        == !!(pIemCpu->CTX_SUFF(pCtx)->eflags.u & (a_fBit2)) ) {
 #define IEM_MC_IF_CX_IS_NZ()                        if (pIemCpu->CTX_SUFF(pCtx)->cx != 0) {
 #define IEM_MC_IF_ECX_IS_NZ()                       if (pIemCpu->CTX_SUFF(pCtx)->ecx != 0) {
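For readers not steeped in IEM's "microcode" style: in the interpreter build the IEM_MC_IF_EFL_* / IEM_MC_ELSE() / IEM_MC_ENDIF() macros expand to plain C if/else statements over the guest EFLAGS, as the definitions above show. Below is a minimal, hand-expanded sketch of a conditional-move body built on IEM_MC_IF_EFL_ANY_BITS_SET (standalone C with simplified flag plumbing; MYGUESTCTX and cmovbe16 are made up for illustration, not actual VirtualBox code):

#include <stdint.h>
#include <stdio.h>

#define X86_EFL_CF  UINT32_C(0x0001)   /* carry flag, bit 0 */
#define X86_EFL_ZF  UINT32_C(0x0040)   /* zero flag, bit 6  */

/* Stand-in for the guest context; the real macros reach the flags through
   pIemCpu->CTX_SUFF(pCtx)->eflags.u. */
typedef struct { uint32_t fEfl; } MYGUESTCTX;

/* Hand expansion of
       IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF) { ... } IEM_MC_ENDIF();
   i.e. the CMOVBE condition: move only when CF or ZF is set. */
static uint16_t cmovbe16(MYGUESTCTX const *pCtx, uint16_t uDst, uint16_t uSrc)
{
    if (pCtx->fEfl & (X86_EFL_CF | X86_EFL_ZF)) {   /* IEM_MC_IF_EFL_ANY_BITS_SET */
        uDst = uSrc;                                /* fetch + store of the GREG  */
    }                                               /* IEM_MC_ENDIF               */
    return uDst;
}

int main(void)
{
    MYGUESTCTX Ctx = { X86_EFL_ZF };
    printf("%u\n", cmovbe16(&Ctx, 1, 2));   /* ZF set   -> move taken -> 2 */
    Ctx.fEfl = 0;
    printf("%u\n", cmovbe16(&Ctx, 1, 2));   /* no flags -> keep dest  -> 1 */
    return 0;
}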
trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp.h
r36840 → r36841

 /**
  * Implements far jumps.
  *
  * @param   uSel            The selector.
  * @param   offSeg          The segment offset.
+ * @param   enmEffOpSize    The effective operand size.
  */
-IEM_CIMPL_DEF_2(iemCImpl_FarJmp, uint16_t, uSel, uint32_t, offSeg)
+IEM_CIMPL_DEF_3(iemCImpl_FarJmp, uint16_t, uSel, uint32_t, offSeg, IEMMODE, enmEffOpSize)
 {
     PCPUMCTX pCtx = pIemCpu->CTX_SUFF(pCtx);
…
         return iemRaiseGeneralProtectionFault0(pIemCpu);

-    if (pIemCpu->enmEffOpSize == IEMMODE_16BIT) /** @todo WRONG, must pass this. */
+    if (enmEffOpSize == IEMMODE_16BIT) /** @todo WRONG, must pass this. */
         pCtx->rip = offSeg;
     else
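The signature change addresses the r36840 @todo: the C worker used to read the operand size from the decoder state (pIemCpu->enmEffOpSize), and r36841 instead has the decode functions pass it in explicitly (see the Grp5 callf/jmpf paths below). A rough standalone sketch of the operand-size handling in the real-mode path is shown here, with a made-up MYCTX standing in for CPUMCTX and all selector base/limit work omitted; it is an illustration of the idea, not the actual iemCImpl_FarJmp:

#include <stdint.h>

typedef enum { IEMMODE_16BIT, IEMMODE_32BIT, IEMMODE_64BIT } IEMMODE;

/* Hypothetical, trimmed-down stand-in for the bits of CPUMCTX used here. */
typedef struct { uint64_t rip; uint16_t cs; } MYCTX;

/* Sketch only: with a 16-bit operand size the jump target is a 16-bit IP,
   otherwise the full 32-bit EIP is used -- which is why the effective
   operand size should describe the decoded instruction rather than whatever
   the decoder state happens to hold when the worker runs. */
static void farJmpRealModeSketch(MYCTX *pCtx, uint16_t uSel, uint32_t offSeg, IEMMODE enmEffOpSize)
{
    if (enmEffOpSize == IEMMODE_16BIT)
        pCtx->rip = (uint16_t)offSeg;
    else
        pCtx->rip = offSeg;
    pCtx->cs = uSel;    /* selector load; base/limit handling omitted */
}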
trunk/src/VBox/VMM/VMMAll/IEMAllInstructions.cpp.h
r36840 → r36841

 /** Opcode 0x0f 0x3c (?). */
 FNIEMOP_STUB(iemOp_movnti_Gv_Ev);
+
+/**
+ * Implements a conditional move.
+ *
+ * Wish there was an obvious way to do this where we could share and reduce
+ * code bloat.
+ *
+ * @param   a_Cnd   The conditional "microcode" operation.
+ */
+#define CMOV_X(a_Cnd) \
+    uint8_t bRm; IEM_OPCODE_GET_NEXT_BYTE(pIemCpu, &bRm); \
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT)) \
+    { \
+        switch (pIemCpu->enmEffOpSize) \
+        { \
+            case IEMMODE_16BIT: \
+                IEM_MC_BEGIN(0, 1); \
+                IEM_MC_LOCAL(uint16_t, u16Tmp); \
+                a_Cnd { \
+                    IEM_MC_FETCH_GREG_U16(u16Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
+                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            case IEMMODE_32BIT: \
+                IEM_MC_BEGIN(0, 1); \
+                IEM_MC_LOCAL(uint32_t, u32Tmp); \
+                a_Cnd { \
+                    IEM_MC_FETCH_GREG_U32(u32Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
+                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
+                } IEM_MC_ELSE() { \
+                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            case IEMMODE_64BIT: \
+                IEM_MC_BEGIN(0, 1); \
+                IEM_MC_LOCAL(uint64_t, u64Tmp); \
+                a_Cnd { \
+                    IEM_MC_FETCH_GREG_U64(u64Tmp, (bRm & X86_MODRM_RM_MASK) | pIemCpu->uRexB); \
+                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
+        } \
+    } \
+    else \
+    { \
+        switch (pIemCpu->enmEffOpSize) \
+        { \
+            case IEMMODE_16BIT: \
+                IEM_MC_BEGIN(0, 2); \
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+                IEM_MC_LOCAL(uint16_t, u16Tmp); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
+                IEM_MC_FETCH_MEM_U16(u16Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
+                a_Cnd { \
+                    IEM_MC_STORE_GREG_U16(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u16Tmp); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            case IEMMODE_32BIT: \
+                IEM_MC_BEGIN(0, 2); \
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+                IEM_MC_LOCAL(uint32_t, u32Tmp); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
+                IEM_MC_FETCH_MEM_U32(u32Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
+                a_Cnd { \
+                    IEM_MC_STORE_GREG_U32(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u32Tmp); \
+                } IEM_MC_ELSE() { \
+                    IEM_MC_CLEAR_HIGH_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            case IEMMODE_64BIT: \
+                IEM_MC_BEGIN(0, 2); \
+                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc); \
+                IEM_MC_LOCAL(uint64_t, u64Tmp); \
+                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm); \
+                IEM_MC_FETCH_MEM_U64(u64Tmp, pIemCpu->iEffSeg, GCPtrEffSrc); \
+                a_Cnd { \
+                    IEM_MC_STORE_GREG_U64(((bRm >> X86_MODRM_REG_SHIFT) & X86_MODRM_REG_SMASK) | pIemCpu->uRexReg, u64Tmp); \
+                } IEM_MC_ENDIF(); \
+                IEM_MC_ADVANCE_RIP(); \
+                IEM_MC_END(); \
+                return VINF_SUCCESS; \
+                \
+            IEM_NOT_REACHED_DEFAULT_CASE_RET(); \
+        } \
+    } do {} while (0)
+
+
 /** Opcode 0x0f 0x40. */
-FNIEMOP_STUB(iemOp_cmovo_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovo_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovo Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_OF));
+}
+
 /** Opcode 0x0f 0x41. */
-FNIEMOP_STUB(iemOp_cmovno_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovno_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovno Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_OF));
+}
+
 /** Opcode 0x0f 0x42. */
-FNIEMOP_STUB(iemOp_cmovc_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovc_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovc Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_CF));
+}
+
 /** Opcode 0x0f 0x43. */
-FNIEMOP_STUB(iemOp_cmovnc_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovnc_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovnc Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_CF));
+}
+
 /** Opcode 0x0f 0x44. */
-FNIEMOP_STUB(iemOp_cmove_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmove_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmove Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF));
+}
+
 /** Opcode 0x0f 0x45. */
-FNIEMOP_STUB(iemOp_cmovne_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovne_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovne Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_ZF));
+}
+
 /** Opcode 0x0f 0x46. */
-FNIEMOP_STUB(iemOp_cmovbe_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovbe_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovbe Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_ANY_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
+}
+
 /** Opcode 0x0f 0x47. */
-FNIEMOP_STUB(iemOp_cmovnbe_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovnbe_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovnbe Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_NO_BITS_SET(X86_EFL_CF | X86_EFL_ZF));
+}
+
 /** Opcode 0x0f 0x48. */
-FNIEMOP_STUB(iemOp_cmovs_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovs_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovs Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_SF));
+}
+
 /** Opcode 0x0f 0x49. */
-FNIEMOP_STUB(iemOp_cmovns_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovns_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovns Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_SF));
+}
+
 /** Opcode 0x0f 0x4a. */
-FNIEMOP_STUB(iemOp_cmovp_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovp_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovp Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET(X86_EFL_PF));
+}
+
 /** Opcode 0x0f 0x4b. */
-FNIEMOP_STUB(iemOp_cmovnp_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovnp_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovnp Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET(X86_EFL_PF));
+}
+
 /** Opcode 0x0f 0x4c. */
-FNIEMOP_STUB(iemOp_cmovl_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovl_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovl Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BITS_NE(X86_EFL_SF, X86_EFL_OF));
+}
+
 /** Opcode 0x0f 0x4d. */
-FNIEMOP_STUB(iemOp_cmovnl_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovnl_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovnl Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BITS_EQ(X86_EFL_SF, X86_EFL_OF));
+}
+
 /** Opcode 0x0f 0x4e. */
-FNIEMOP_STUB(iemOp_cmovle_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovle_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovle Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_SET_OR_BITS_NE(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
+}
+
 /** Opcode 0x0f 0x4f. */
-FNIEMOP_STUB(iemOp_cmovnle_Gv_Ev);
+FNIEMOP_DEF(iemOp_cmovnle_Gv_Ev)
+{
+    IEMOP_MNEMONIC("cmovnle Gv,Ev");
+    CMOV_X(IEM_MC_IF_EFL_BIT_NOT_SET_OR_BITS_EQ(X86_EFL_ZF, X86_EFL_SF, X86_EFL_OF));
+}
+
+#undef CMOV_X
+
 /** Opcode 0x0f 0x50. */
 FNIEMOP_STUB(iemOp_movmskps_Gy_Ups__movmskpd_Gy_Upd);
…
     uint16_t uSel; IEM_OPCODE_GET_NEXT_U16(pIemCpu, &uSel);
     IEMOP_HLP_NO_LOCK_PREFIX();
-    return IEM_MC_DEFER_TO_CIMPL_2(iemCImpl_FarJmp, uSel, offSeg);
+    return IEM_MC_DEFER_TO_CIMPL_3(iemCImpl_FarJmp, uSel, offSeg, pIemCpu->enmEffOpSize);
 }
…
             IEM_MC_ARG(uint16_t, u16Sel, 0);
             IEM_MC_ARG(uint16_t, offSeg, 1);
-            IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
             IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
             IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
             IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 2);
-            IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_16BIT);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, enmEffOpSize);
             IEM_MC_END();
             return VINF_SUCCESS;

         case IEMMODE_32BIT:
-            if (pIemCpu->enmCpuMode != IEMMODE_64BIT)
-            {
-                IEM_MC_BEGIN(3, 1);
-                IEM_MC_ARG(uint16_t, u16Sel, 0);
-                IEM_MC_ARG(uint32_t, offSeg, 1);
-                IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
-                IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
-                IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_32BIT);
-                IEM_MC_END();
-            }
-            else
-            {
-                IEM_MC_BEGIN(3, 1);
-                IEM_MC_ARG(uint16_t, u16Sel, 0);
-                IEM_MC_ARG(uint64_t, offSeg, 1);
-                IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
-                IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
-                IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
-                IEM_MC_FETCH_MEM_S32_SX_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
-                IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
-                IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_32BIT);
-                IEM_MC_END();
-            }
+            IEM_MC_BEGIN(3, 1);
+            IEM_MC_ARG(uint16_t, u16Sel, 0);
+            IEM_MC_ARG(uint32_t, offSeg, 1);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
+            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, enmEffOpSize);
+            IEM_MC_END();
             return VINF_SUCCESS;
…
             IEM_MC_ARG(uint16_t, u16Sel, 0);
             IEM_MC_ARG(uint64_t, offSeg, 1);
-            IEM_MC_ARG_CONST(uint16_t, enmEffOpSize, IEMMODE_16BIT, 2);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
             IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
             IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
             IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
             IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 8);
-            IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, IEMMODE_64BIT);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_callf, u16Sel, offSeg, enmEffOpSize);
             IEM_MC_END();
             return VINF_SUCCESS;
…
  * @param   bRm     The RM byte.
  */
-FNIEMOP_STUB_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm);
-//{
-//    /* decode and use a C worker. */
-//    AssertFailed(); // FNIEMOP_STUB
-//    return VERR_NOT_IMPLEMENTED;
-//}
+FNIEMOP_DEF_1(iemOp_Grp5_jmpf_Ep, uint8_t, bRm)
+{
+    IEMOP_MNEMONIC("jmp Ap");
+    IEMOP_HLP_NO_64BIT();
+    /** @todo could share all the decoding with iemOp_Grp5_callf_Ep. */
+
+    /* Decode the far pointer address and pass it on to the far call C
+       implementation. */
+    if ((bRm & X86_MODRM_MOD_MASK) == (3 << X86_MODRM_MOD_SHIFT))
+    {
+        /** @todo How the heck does a 'callf eax' work? Probably just have to
+         *        search the docs... */
+        AssertFailedReturn(VERR_NOT_IMPLEMENTED);
+    }
+
+    /* Far pointer loaded from memory. */
+    switch (pIemCpu->enmEffOpSize)
+    {
+        case IEMMODE_16BIT:
+            IEM_MC_BEGIN(3, 1);
+            IEM_MC_ARG(uint16_t, u16Sel, 0);
+            IEM_MC_ARG(uint16_t, offSeg, 1);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
+            IEM_MC_FETCH_MEM_U16(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 2);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_FarJmp, u16Sel, offSeg, enmEffOpSize);
+            IEM_MC_END();
+            return VINF_SUCCESS;
+
+        case IEMMODE_32BIT:
+            IEM_MC_BEGIN(3, 1);
+            IEM_MC_ARG(uint16_t, u16Sel, 0);
+            IEM_MC_ARG(uint32_t, offSeg, 1);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_32BIT, 2);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
+            IEM_MC_FETCH_MEM_U32(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 4);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_FarJmp, u16Sel, offSeg, enmEffOpSize);
+            IEM_MC_END();
+            return VINF_SUCCESS;
+
+        case IEMMODE_64BIT:
+            IEM_MC_BEGIN(3, 1);
+            IEM_MC_ARG(uint16_t, u16Sel, 0);
+            IEM_MC_ARG(uint64_t, offSeg, 1);
+            IEM_MC_ARG_CONST(IEMMODE, enmEffOpSize, IEMMODE_16BIT, 2);
+            IEM_MC_LOCAL(RTGCPTR, GCPtrEffSrc);
+            IEM_MC_CALC_RM_EFF_ADDR(GCPtrEffSrc, bRm);
+            IEM_MC_FETCH_MEM_U64(offSeg, pIemCpu->iEffSeg, GCPtrEffSrc);
+            IEM_MC_FETCH_MEM_U16(u16Sel, pIemCpu->iEffSeg, GCPtrEffSrc + 8);
+            IEM_MC_CALL_CIMPL_3(iemCImpl_FarJmp, u16Sel, offSeg, enmEffOpSize);
+            IEM_MC_END();
+            return VINF_SUCCESS;
+
+        IEM_NOT_REACHED_DEFAULT_CASE_RET();
+    }
+}
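A detail worth calling out in the CMOV_X macro above: only the 32-bit branches carry an IEM_MC_ELSE() that invokes the new IEM_MC_CLEAR_HIGH_GREG_U64. This matches the documented CMOVcc behaviour in 64-bit mode, where a 32-bit destination register is zero-extended to the full 64 bits even when the condition is false (the register is always written), while a 16-bit destination leaves the upper bits untouched. The following is a small reference model of that behaviour in plain C (an illustration, not VirtualBox code):

#include <stdint.h>
#include <stdio.h>
#include <stdbool.h>

/* Reference model of CMOVcc with a 32-bit operand in 64-bit mode: the upper
   half of the destination is cleared whether or not the condition holds,
   which is what IEM_MC_CLEAR_HIGH_GREG_U64 provides in the IEM_MC_ELSE()
   branch of CMOV_X. */
static uint64_t cmov32_model(uint64_t uDst64, uint32_t uSrc32, bool fCond)
{
    if (fCond)
        uDst64 = uSrc32;            /* move + implicit zero extension        */
    else
        uDst64 &= UINT32_MAX;       /* no move, but high dword still cleared */
    return uDst64;
}

/* 16-bit operand: only the low word is touched, and only when the condition
   is true; the rest of the register is preserved. */
static uint64_t cmov16_model(uint64_t uDst64, uint16_t uSrc16, bool fCond)
{
    if (fCond)
        uDst64 = (uDst64 & ~UINT64_C(0xffff)) | uSrc16;
    return uDst64;
}

int main(void)
{
    printf("%#llx\n", (unsigned long long)cmov32_model(UINT64_C(0xdeadbeefcafebabe), 0x1234, false)); /* 0xcafebabe */
    printf("%#llx\n", (unsigned long long)cmov16_model(UINT64_C(0xdeadbeefcafebabe), 0x1234, false)); /* unchanged  */
    return 0;
}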