- Timestamp: Jun 5, 2018, 2:35:15 PM (7 years ago)
- Location: trunk/src/VBox/VMM
- Files: 1 added, 5 edited
Legend: Unmodified | Added | Removed
trunk/src/VBox/VMM/Makefile.kmk
r72286 r72451 1077 1077 IEMAllInstructions3DNow.cpp.o IEMAllInstructions3DNow.cpp.obj \ 1078 1078 IEMAllCImpl.cpp.o IEMAllCImpl.cpp.obj \ 1079 IEMAllCImplStrInstr.cpp.o IEMAllCImplStrInstr.cpp.obj: IEMAll.o 1079 IEMAllCImplStrInstr.cpp.o IEMAllCImplStrInstr.cpp.obj \ 1080 IEMAllCImplSvmInstr.cpp.o IEMAllCImplSvmInstr.cpp.obj \ 1081 IEMAllCImplVmxInstr.cpp.o IEMAllCImplVmxInstr.cpp.obj: IEMAll.o 1080 1082 1081 1083 # Alias the NEM template to the objects where it is used: -
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r72448 r72451 102 102 #include <VBox/vmm/em.h> 103 103 #include <VBox/vmm/hm.h> 104 #include <VBox/vmm/gim.h> 104 105 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 105 106 # include <VBox/vmm/em.h> -
trunk/src/VBox/VMM/VMMAll/IEMAllCImpl.cpp.h
r72209 r72451 16 16 */ 17 17 18 #i fdef VBOX_WITH_NESTED_HWVIRT_SVM19 # include "IEMAllCImplSvmInstr.cpp.h"20 #endif 18 #include "IEMAllCImplSvmInstr.cpp.h" 19 #include "IEMAllCImplVmxInstr.cpp.h" 20 21 21 22 22 /** @name Misc Helpers -
trunk/src/VBox/VMM/VMMAll/IEMAllCImplSvmInstr.cpp.h
r72421 r72451 17 17 18 18 19 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 20 19 21 /** 20 22 * Converts an IEM exception event type to an SVM event type. … … 66 68 */ 67 69 int rc = PGMChangeMode(pVCpu, pCtx->cr0 | X86_CR0_PE, pCtx->cr4, pCtx->msrEFER); 68 # ifdef IN_RING370 # ifdef IN_RING3 69 71 Assert(rc != VINF_PGM_CHANGE_MODE); 70 # endif72 # endif 71 73 AssertRCReturn(rc, rc); 72 74 … … 671 673 * in iemSvmWorldSwitch() below. 672 674 */ 673 # if 0675 # if 0 674 676 /** @todo @bugref{7243}: ASID based PGM TLB flushes. */ 675 677 if ( pVmcbCtrl->TLBCtrl.n.u8TLBFlush == SVM_TLB_FLUSH_ENTIRE … … 677 679 || pVmcbCtrl->TLBCtrl.n.u8TLBFlush == SVM_TLB_FLUSH_SINGLE_CONTEXT_RETAIN_GLOBALS) 678 680 PGMFlushTLB(pVCpu, pVmcbNstGst->u64CR3, true /* fGlobal */); 679 # endif681 # endif 680 682 681 683 /* … … 891 893 { 892 894 PSVMVMCBCTRL pVmcbCtrl = &pCtx->hwvirt.svm.CTX_SUFF(pVmcb)->ctrl; 893 # ifdef IEM_WITH_CODE_TLB895 # ifdef IEM_WITH_CODE_TLB 894 896 uint8_t const *pbInstrBuf = pVCpu->iem.s.pbInstrBuf; 895 897 uint8_t const cbInstrBuf = pVCpu->iem.s.cbInstrBuf; … … 898 900 && cbInstrBuf > 0) 899 901 memcpy(&pVmcbCtrl->abInstr[0], pbInstrBuf, pVmcbCtrl->cbInstrFetched); 900 # else902 # else 901 903 uint8_t const cbOpcode = pVCpu->iem.s.cbOpcode; 902 904 pVmcbCtrl->cbInstrFetched = RT_MIN(cbOpcode, SVM_CTRL_GUEST_INSTR_BYTES_MAX); 903 905 if (cbOpcode > 0) 904 906 memcpy(&pVmcbCtrl->abInstr[0], &pVCpu->iem.s.abOpcode[0], pVmcbCtrl->cbInstrFetched); 905 # endif907 # endif 906 908 } 907 909 if (u8Vector == X86_XCPT_BR) … … 1052 1054 IEM_CIMPL_DEF_0(iemCImpl_vmrun) 1053 1055 { 1054 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)1056 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3) 1055 1057 RT_NOREF2(pVCpu, cbInstr); 1056 1058 return VINF_EM_RAW_EMULATE_INSTR; 1057 # else1059 # else 1058 1060 LogFlow(("iemCImpl_vmrun\n")); 1059 1061 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu); … … 1082 1084 } 1083 1085 return rcStrict; 1084 #endif 1085 } 1086 
1086 # endif 1087 } 1088 1089 #endif /* VBOX_WITH_NESTED_HWVIRT_SVM */ 1087 1090 1088 1091 /** … … 1110 1113 } 1111 1114 1115 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 1112 1116 1113 1117 /** … … 1116 1120 IEM_CIMPL_DEF_0(iemCImpl_vmload) 1117 1121 { 1118 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)1122 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3) 1119 1123 RT_NOREF2(pVCpu, cbInstr); 1120 1124 return VINF_EM_RAW_EMULATE_INSTR; 1121 # else1125 # else 1122 1126 LogFlow(("iemCImpl_vmload\n")); 1123 1127 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu); … … 1163 1167 } 1164 1168 return rcStrict; 1165 # endif1169 # endif 1166 1170 } 1167 1171 … … 1172 1176 IEM_CIMPL_DEF_0(iemCImpl_vmsave) 1173 1177 { 1174 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)1178 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3) 1175 1179 RT_NOREF2(pVCpu, cbInstr); 1176 1180 return VINF_EM_RAW_EMULATE_INSTR; 1177 # else1181 # else 1178 1182 LogFlow(("iemCImpl_vmsave\n")); 1179 1183 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu); … … 1222 1226 } 1223 1227 return rcStrict; 1224 # endif1228 # endif 1225 1229 } 1226 1230 … … 1231 1235 IEM_CIMPL_DEF_0(iemCImpl_clgi) 1232 1236 { 1233 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)1237 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3) 1234 1238 RT_NOREF2(pVCpu, cbInstr); 1235 1239 return VINF_EM_RAW_EMULATE_INSTR; 1236 # else1240 # else 1237 1241 LogFlow(("iemCImpl_clgi\n")); 1238 1242 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu); … … 1247 1251 iemRegAddToRipAndClearRF(pVCpu, cbInstr); 1248 1252 1249 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && defined(IN_RING3)1253 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && defined(IN_RING3) 1250 1254 return EMR3SetExecutionPolicy(pVCpu->CTX_SUFF(pVM)->pUVM, EMEXECPOLICY_IEM_ALL, true); 1251 # else1255 # else 1252 1256 return VINF_SUCCESS; 1257 # endif 1253 1258 # endif 1254 #endif1255 1259 } 1256 
1260 … … 1261 1265 IEM_CIMPL_DEF_0(iemCImpl_stgi) 1262 1266 { 1263 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3)1267 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && !defined(IN_RING3) 1264 1268 RT_NOREF2(pVCpu, cbInstr); 1265 1269 return VINF_EM_RAW_EMULATE_INSTR; 1266 # else1270 # else 1267 1271 LogFlow(("iemCImpl_stgi\n")); 1268 1272 PCPUMCTX pCtx = IEM_GET_CTX(pVCpu); … … 1277 1281 iemRegAddToRipAndClearRF(pVCpu, cbInstr); 1278 1282 1279 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && defined(IN_RING3)1283 # if defined(VBOX_WITH_NESTED_HWVIRT_ONLY_IN_IEM) && defined(IN_RING3) 1280 1284 return EMR3SetExecutionPolicy(pVCpu->CTX_SUFF(pVM)->pUVM, EMEXECPOLICY_IEM_ALL, false); 1281 # else1285 # else 1282 1286 return VINF_SUCCESS; 1287 # endif 1283 1288 # endif 1284 #endif1285 1289 } 1286 1290 … … 1295 1299 RTGCPTR const GCPtrPage = pVCpu->iem.s.enmCpuMode == IEMMODE_64BIT ? pCtx->rax : pCtx->eax; 1296 1300 /** @todo PGM needs virtual ASID support. */ 1297 # if 01301 # if 0 1298 1302 uint32_t const uAsid = pCtx->ecx; 1299 # endif1303 # endif 1300 1304 1301 1305 IEM_SVM_INSTR_COMMON_CHECKS(pVCpu, invlpga); … … 1335 1339 } 1336 1340 1341 #endif /* VBOX_WITH_NESTED_HWVIRT_SVM */ 1342 -
trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h
r72209 r72451 303 303 FNIEMOP_DEF(iemOp_Grp7_vmcall) 304 304 { 305 IEMOP_ BITCH_ABOUT_STUB();306 return IEM OP_RAISE_INVALID_OPCODE();305 IEMOP_MNEMONIC(vmcall, "vmcall"); 306 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmcall); 307 307 } 308 308 … … 432 432 433 433 434 /** Opcode 0x0f 0x01 0xd8. */ 434 435 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 435 /** Opcode 0x0f 0x01 0xd8. */436 436 FNIEMOP_DEF(iemOp_Grp7_Amd_vmrun) 437 437 { … … 439 439 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmrun); 440 440 } 441 #else 442 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun); 443 #endif 441 444 442 445 /** Opcode 0x0f 0x01 0xd9. */ … … 447 450 } 448 451 449 450 452 /** Opcode 0x0f 0x01 0xda. */ 453 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 451 454 FNIEMOP_DEF(iemOp_Grp7_Amd_vmload) 452 455 { … … 454 457 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmload); 455 458 } 459 #else 460 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload); 461 #endif 456 462 457 463 458 464 /** Opcode 0x0f 0x01 0xdb. */ 465 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 459 466 FNIEMOP_DEF(iemOp_Grp7_Amd_vmsave) 460 467 { … … 462 469 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_vmsave); 463 470 } 471 #else 472 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave); 473 #endif 464 474 465 475 466 476 /** Opcode 0x0f 0x01 0xdc. */ 477 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 467 478 FNIEMOP_DEF(iemOp_Grp7_Amd_stgi) 468 479 { … … 470 481 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_stgi); 471 482 } 483 #else 484 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi); 485 #endif 472 486 473 487 474 488 /** Opcode 0x0f 0x01 0xdd. */ 489 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 475 490 FNIEMOP_DEF(iemOp_Grp7_Amd_clgi) 476 491 { … … 478 493 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_clgi); 479 494 } 495 #else 496 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi); 497 #endif 480 498 481 499 482 500 /** Opcode 0x0f 0x01 0xdf. 
*/ 501 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 483 502 FNIEMOP_DEF(iemOp_Grp7_Amd_invlpga) 484 503 { … … 486 505 return IEM_MC_DEFER_TO_CIMPL_0(iemCImpl_invlpga); 487 506 } 507 #else 508 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga); 509 #endif 488 510 489 511 490 512 /** Opcode 0x0f 0x01 0xde. */ 513 #ifdef VBOX_WITH_NESTED_HWVIRT_SVM 491 514 FNIEMOP_DEF(iemOp_Grp7_Amd_skinit) 492 515 { … … 495 518 } 496 519 #else 497 /** Opcode 0x0f 0x01 0xd8. */498 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmrun);499 500 /** Opcode 0x0f 0x01 0xd9. */501 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmmcall);502 503 /** Opcode 0x0f 0x01 0xda. */504 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmload);505 506 /** Opcode 0x0f 0x01 0xdb. */507 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_vmsave);508 509 /** Opcode 0x0f 0x01 0xdc. */510 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_stgi);511 512 /** Opcode 0x0f 0x01 0xdd. */513 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_clgi);514 515 /** Opcode 0x0f 0x01 0xdf. */516 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_invlpga);517 518 /** Opcode 0x0f 0x01 0xde. */519 520 FNIEMOP_UD_STUB(iemOp_Grp7_Amd_skinit); 520 #endif /* VBOX_WITH_NESTED_HWVIRT_SVM */ 521 #endif 522 521 523 522 524 /** Opcode 0x0f 0x01 /4. */
Note: See TracChangeset for help on using the changeset viewer.