Changeset 100701 in vbox for trunk/src/VBox/VMM
- Timestamp: Jul 25, 2023 9:47:02 PM
- Location: trunk/src/VBox/VMM
- Files: 5 edited

trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsOneByte.cpp.h (r100623 -> r100701)

@@ 791..795 @@
     IEMOP_HLP_NO_64BIT();
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_END_TB/*?*/,
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_END_TB/*?*/,
                                 iemCImpl_pop_Sreg, X86_SREG_CS, pVCpu->iem.s.enmEffOpSize);
 }
@@ 5656..5660 @@
     uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_UNCOND | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_callf, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
 }
@@ 7177..7185 @@
     {
         case IEMMODE_16BIT:
-            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_16, u16Imm);
+            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_iw_16, u16Imm);
         case IEMMODE_32BIT:
-            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_32, u16Imm);
+            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_iw_32, u16Imm);
         case IEMMODE_64BIT:
-            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_iw_64, u16Imm);
+            IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_iw_64, u16Imm);
         IEM_NOT_REACHED_DEFAULT_CASE_RET();
     }
@@ 7198..7206 @@
     {
         case IEMMODE_16BIT:
-            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_16);
+            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_16);
         case IEMMODE_32BIT:
-            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_32);
+            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_32);
         case IEMMODE_64BIT:
-            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH, iemCImpl_retn_64);
+            IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retn_64);
         IEM_NOT_REACHED_DEFAULT_CASE_RET();
     }
@@ 7486..7489 @@
     uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, u16Imm);
 }
@@ 7497..7500 @@
     IEMOP_MNEMONIC(retf, "retf");
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_retf, pVCpu->iem.s.enmEffOpSize, 0);
 }
@@ 7508..7512 @@
     IEMOP_MNEMONIC(int3, "int3");
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                 iemCImpl_int, X86_XCPT_BP, IEMINT_INT3);
 }
@@ 7521..7525 @@
     uint8_t u8Int; IEM_OPCODE_GET_NEXT_U8(&u8Int);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                 iemCImpl_int, u8Int, IEMINT_INTN);
 }
@@ 7533..7537 @@
     IEMOP_MNEMONIC(into, "into");
     IEMOP_HLP_NO_64BIT();
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_BRANCH_COND | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                 iemCImpl_int, X86_XCPT_OF, IEMINT_INTO);
 }
@@ 7545..7549 @@
     IEMOP_MNEMONIC(iret, "iret");
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_iret, pVCpu->iem.s.enmEffOpSize);
 }
@@ 11466..11469 @@
 {
     uint16_t u16Imm; IEM_OPCODE_GET_NEXT_U16(&u16Imm);
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_16, (int16_t)u16Imm);
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_UNCOND, iemCImpl_call_rel_16, (int16_t)u16Imm);
 }
@@ 11472..11475 @@
 {
     uint32_t u32Imm; IEM_OPCODE_GET_NEXT_U32(&u32Imm);
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_32, (int32_t)u32Imm);
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_UNCOND, iemCImpl_call_rel_32, (int32_t)u32Imm);
 }
@@ 11478..11481 @@
 {
     uint64_t u64Imm; IEM_OPCODE_GET_NEXT_S32_SX_U64(&u64Imm);
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH, iemCImpl_call_rel_64, u64Imm);
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_BRANCH_UNCOND, iemCImpl_call_rel_64, u64Imm);
 }
@@ 11535..11539 @@
     uint16_t u16Sel; IEM_OPCODE_GET_NEXT_U16(&u16Sel);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_3_RET(IEM_CIMPL_F_BRANCH_UNCOND | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_FarJmp, u16Sel, off32Seg, pVCpu->iem.s.enmEffOpSize);
 }
@@ 11621..11625 @@
     IEMOP_HLP_MIN_386();
     /** @todo testcase! */
-    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
+    IEM_MC_DEFER_TO_CIMPL_2_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_RFLAGS,
                                 iemCImpl_int, X86_XCPT_DB, IEMINT_INT1);
 }
@@ 12612..12616 @@
             IEM_MC_ARG(uint16_t, u16Target, 0);
             IEM_MC_FETCH_GREG_U16(u16Target, IEM_GET_MODRM_RM(pVCpu, bRm));
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_16, u16Target);
             IEM_MC_END();
             break;
@@ 12620..12624 @@
             IEM_MC_ARG(uint32_t, u32Target, 0);
             IEM_MC_FETCH_GREG_U32(u32Target, IEM_GET_MODRM_RM(pVCpu, bRm));
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_32, u32Target);
             IEM_MC_END();
             break;
@@ 12628..12632 @@
             IEM_MC_ARG(uint64_t, u64Target, 0);
             IEM_MC_FETCH_GREG_U64(u64Target, IEM_GET_MODRM_RM(pVCpu, bRm));
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_64, u64Target);
             IEM_MC_END();
             break;
@@ 12647..12651 @@
             IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
             IEM_MC_FETCH_MEM_U16(u16Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_16, u16Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_16, u16Target);
             IEM_MC_END();
             break;
@@ 12658..12662 @@
             IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
             IEM_MC_FETCH_MEM_U32(u32Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_32, u32Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_32, u32Target);
             IEM_MC_END();
             break;
@@ 12669..12673 @@
             IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
             IEM_MC_FETCH_MEM_U64(u64Target, pVCpu->iem.s.iEffSeg, GCPtrEffSrc);
-            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH, iemCImpl_call_64, u64Target);
+            IEM_MC_CALL_CIMPL_1(IEM_CIMPL_F_BRANCH_INDIR, iemCImpl_call_64, u64Target);
             IEM_MC_END();
             break;
@@ 12705..12709 @@
         IEM_MC_FETCH_MEM_U16(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
         IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 2); \
-        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
+        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                             a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
         IEM_MC_END(); \
@@ 12720..12724 @@
         IEM_MC_FETCH_MEM_U32(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
         IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 4); \
-        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
+        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_MODE | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT, \
                             a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
         IEM_MC_END(); \
@@ 12736..12740 @@
         IEM_MC_FETCH_MEM_U64(offSeg, pVCpu->iem.s.iEffSeg, GCPtrEffSrc); \
         IEM_MC_FETCH_MEM_U16_DISP(u16Sel, pVCpu->iem.s.iEffSeg, GCPtrEffSrc, 8); \
-        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_MODE /* no gates */, \
+        IEM_MC_CALL_CIMPL_3(IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_MODE /* no gates */, \
                             a_fnCImpl, u16Sel, offSeg, enmEffOpSize); \
         IEM_MC_END(); \

trunk/src/VBox/VMM/VMMAll/IEMAllInstructionsTwoByte0f.cpp.h (r100266 -> r100701)

@@ 1414..1418 @@
     IEMOP_HLP_VMX_INSTR("vmlaunch", kVmxVDiag_Vmentry);
     IEMOP_HLP_DONE_DECODING();
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_vmlaunch);
 }
@@ 1434..1438 @@
     IEMOP_HLP_VMX_INSTR("vmresume", kVmxVDiag_Vmentry);
     IEMOP_HLP_DONE_DECODING();
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_vmresume);
 }
@@ 1572..1576 @@
     IEMOP_MNEMONIC(vmrun, "vmrun");
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX(); /** @todo check prefix effect on the SVM instructions. ASSUMING no lock for now. */
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT,
                                 iemCImpl_vmrun);
 }
@@ 1956..1960 @@
     IEMOP_MNEMONIC(syscall, "syscall"); /** @todo 286 LOADALL */
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                 iemCImpl_syscall);
 }
@@ 1975..1979 @@
     IEMOP_MNEMONIC(sysret, "sysret"); /** @todo 386 LOADALL */
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_END_TB,
                                 iemCImpl_sysret, pVCpu->iem.s.enmEffOpSize);
 }
@@ 5174..5178 @@
     IEMOP_MNEMONIC0(FIXED, SYSENTER, sysenter, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                 iemCImpl_sysenter);
 }
@@ 5183..5187 @@
     IEMOP_MNEMONIC0(FIXED, SYSEXIT, sysexit, DISOPTYPE_CONTROLFLOW | DISOPTYPE_UNCOND_CONTROLFLOW, 0);
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
+    IEM_MC_DEFER_TO_CIMPL_1_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                 iemCImpl_sysexit, pVCpu->iem.s.enmEffOpSize);
 }
@@ 9592..9596 @@
     IEMOP_HLP_MIN_386(); /* 386SL and later. */
     IEMOP_HLP_DONE_DECODING_NO_LOCK_PREFIX();
-    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
+    IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT | IEM_CIMPL_F_END_TB,
                                 iemCImpl_rsm);
 }

trunk/src/VBox/VMM/VMMAll/IEMAllThreadedPython.py (r100694 -> r100701)

@@ 199..203 -> 199..205 @@
 kdCImplFlags = {
     'IEM_CIMPL_F_MODE':             True,
-    'IEM_CIMPL_F_BRANCH':           False,
+    'IEM_CIMPL_F_BRANCH_UNCOND':    False,
+    'IEM_CIMPL_F_BRANCH_COND':      False,
+    'IEM_CIMPL_F_BRANCH_INDIR':     True,
     'IEM_CIMPL_F_RFLAGS':           False,
     'IEM_CIMPL_F_STATUS_FLAGS':     False,
@@ 511..514 -> 513..517 @@
         collecting these in self.dsCImplFlags.
         """
+        fSeenConditional = False;
         for oStmt in aoStmts:
             # Pick up hints from CIMPL calls and deferals.
@@ 523..530 -> 526..543 @@
                     self.raiseProblem('Unknown CIMPL flag value: %s' % (sFlag,));

+            # Check for conditional so we can categorize any branches correctly.
+            if (   oStmt.sName.startswith('IEM_MC_IF_')
+                or oStmt.sName == 'IEM_MC_ENDIF'):
+                fSeenConditional = True;
+
             # Set IEM_IMPL_C_F_BRANCH if we see any branching MCs.
-            if (   oStmt.sName.startswith('IEM_MC_SET_RIP')
-                or oStmt.sName.startswith('IEM_MC_REL_JMP')):
-                self.dsCImplFlags['IEM_CIMPL_F_BRANCH'] = True;
+            elif oStmt.sName.startswith('IEM_MC_SET_RIP'):
+                assert not fSeenConditional;
+                self.dsCImplFlags['IEM_CIMPL_F_BRANCH_INDIR'] = True;
+            elif oStmt.sName.startswith('IEM_MC_REL_JMP'):
+                if fSeenConditional:
+                    self.dsCImplFlags['IEM_CIMPL_F_BRANCH_COND'] = True;
+                else:
+                    self.dsCImplFlags['IEM_CIMPL_F_BRANCH_UNCOND'] = True;

             # Process branches of conditionals recursively.
@@ 884..887 -> 897..904 @@
         sCode += ');';

+        sCImplFlags = ' | '.join(self.dsCImplFlags.keys());
+        if not sCImplFlags:
+            sCImplFlags = '0'
+
         aoStmts = [
             iai.McCppGeneric('IEM_MC2_BEGIN_EMIT_CALLS();', cchIndent = cchIndent), # Scope and a hook for various stuff.
@@ 895..898 -> 912..916 @@
                                              cchIndent = cchIndent));

-        aoStmts.append(iai.McCppGeneric('IEM_MC2_END_EMIT_CALLS();', cchIndent = cchIndent)); # For closing the scope.
+        aoStmts.append(iai.McCppGeneric('IEM_MC2_END_EMIT_CALLS(' + sCImplFlags + ');',
+                                        cchIndent = cchIndent)); # For closing the scope.
         return aoStmts;
@@ 1084..1090 -> 1102..1110 @@
         for oVar in self.aoVariations:
             dsCImplFlags.update(oVar.dsCImplFlags);
-        if (   'IEM_CIMPL_F_BRANCH' in dsCImplFlags
-            or 'IEM_CIMPL_F_MODE' in dsCImplFlags
-            or 'IEM_CIMPL_F_REP' in dsCImplFlags):
+        if (   'IEM_CIMPL_F_BRANCH_UNCOND' in dsCImplFlags
+            or 'IEM_CIMPL_F_BRANCH_COND' in dsCImplFlags
+            or 'IEM_CIMPL_F_BRANCH_INDIR' in dsCImplFlags
+            or 'IEM_CIMPL_F_MODE' in dsCImplFlags
+            or 'IEM_CIMPL_F_REP' in dsCImplFlags):
             aoDecoderStmts.append(iai.McCppGeneric('pVCpu->iem.s.fEndTb = true;'));
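
For orientation (not part of the changeset): the new script logic above classifies an MC block by the statements it contains. The sketch below is patterned on the Jcc-style decoders elsewhere in IEM; the exact macro spellings and the i8Imm fetch are assumptions about the surrounding code at this revision, not lines from this diff. Because the IEM_MC_REL_JMP_* statement sits inside an IEM_MC_IF_*/IEM_MC_ENDIF pair, the script would tag the block IEM_CIMPL_F_BRANCH_COND; the same statement outside a conditional would yield IEM_CIMPL_F_BRANCH_UNCOND, and IEM_MC_SET_RIP_* statements yield IEM_CIMPL_F_BRANCH_INDIR.

    /* Illustrative sketch only; macro names assumed from the wider IEM code base. */
    int8_t i8Imm; IEM_OPCODE_GET_NEXT_S8(&i8Imm);   /* displacement fetched by the decoder */
    IEM_MC_BEGIN(0, 0);
    IEM_MC_IF_EFL_BIT_SET(X86_EFL_ZF) {
        /* Taken path: a relative jump inside IEM_MC_IF_ -> IEM_CIMPL_F_BRANCH_COND. */
        IEM_MC_REL_JMP_S8_AND_FINISH(i8Imm);
    } IEM_MC_ELSE() {
        /* Not-taken path just advances RIP. */
        IEM_MC_ADVANCE_RIP_AND_FINISH();
    } IEM_MC_ENDIF();
    IEM_MC_END();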

trunk/src/VBox/VMM/VMMAll/IEMAllThreadedRecompiler.cpp (r100697 -> r100701)

@@ 225..231 -> 225..233 @@
     } while (0)

-#define IEM_MC2_END_EMIT_CALLS() \
+#define IEM_MC2_END_EMIT_CALLS(a_fCImplFlags) \
         Assert(pTb->cInstructions <= pTb->Thrd.cCalls); \
         if (pTb->cInstructions < 255) \
             pTb->cInstructions++; \
+        uint32_t const fCImplFlagsMc2 = (a_fCImplFlags); \
+        RT_NOREF(fCImplFlagsMc2); \
     } while (0)
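
For context (again not part of the diff): with the extra macro parameter above, the C source that IEMAllThreadedPython.py generates for a decoded block now closes the call-emission scope with the OR'ed flag string it collected. Roughly, the generated shape looks like the sketch below; the flags shown are an example for a retf-style block, and the emitted call statements in the middle are elided.

    /* Rough sketch of generated output, not literal changeset code. */
    IEM_MC2_BEGIN_EMIT_CALLS();
    /* ... the emitted threaded-call statements for the instruction go here ... */
    IEM_MC2_END_EMIT_CALLS(IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR);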

trunk/src/VBox/VMM/include/IEMMc.h (r100591 -> r100701)

@@ 1230..1245 -> 1230..1274 @@
 /** @name IEM_CIMPL_F_XXX - State change clues for CIMPL calls.
  *
- * These clues are mainly for the recompiler, so that it can
+ * These clues are mainly for the recompiler, so that it can emit correct code.
+ *
+ * They are processed by the python script and which also automatically
+ * calculates flags for MC blocks based on the statements, extending the use of
+ * these flags to describe MC block behavior to the recompiler core.  The python
+ * script pass the flags to the IEM_MC2_END_EMIT_CALLS macro, but mainly for
+ * error checking purposes.  The script emits the necessary fEndTb = true and
+ * similar statements as this reduces compile time a tiny bit.
  *
  * @{ */
-#define IEM_CIMPL_F_MODE            RT_BIT_32(0) /**< Execution flags may change (IEMCPU::fExec). */
-#define IEM_CIMPL_F_BRANCH          RT_BIT_32(1) /**< Branches (changes RIP, maybe CS). */
-#define IEM_CIMPL_F_RFLAGS          RT_BIT_32(2) /**< May change significant portions of RFLAGS. */
-#define IEM_CIMPL_F_STATUS_FLAGS    RT_BIT_32(3) /**< May change the status bits (X86_EFL_STATUS_BITS) in RFLAGS. */
-#define IEM_CIMPL_F_VMEXIT          RT_BIT_32(4) /**< May trigger a VM exit. */
-#define IEM_CIMPL_F_FPU             RT_BIT_32(5) /**< May modify FPU state. */
-#define IEM_CIMPL_F_REP             RT_BIT_32(6) /**< REP prefixed instruction which may yield before updating PC. */
-#define IEM_CIMPL_F_END_TB          RT_BIT_32(7)
+/** Execution flags may change (IEMCPU::fExec). */
+#define IEM_CIMPL_F_MODE            RT_BIT_32(0)
+/** Unconditional direct branches (changes RIP, maybe CS). */
+#define IEM_CIMPL_F_BRANCH_UNCOND   RT_BIT_32(1)
+/** Conditional direct branch (may change RIP, maybe CS). */
+#define IEM_CIMPL_F_BRANCH_COND     RT_BIT_32(2)
+/** Indirect unconditional branch (changes RIP, maybe CS).
+ *
+ * This is used for all system control transfers (SYSCALL, SYSRET, INT, ++) as
+ * well as for return instructions (RET, IRET, RETF).
+ *
+ * Since the INTO instruction is currently the only indirect branch instruction
+ * that is conditional (depends on the overflow flag), that instruction will
+ * have both IEM_CIMPL_F_BRANCH_INDIR and IEM_CIMPL_F_BRANCH_COND set.  All
+ * other branch instructions will have exactly one of the branch flags set. */
+#define IEM_CIMPL_F_BRANCH_INDIR    RT_BIT_32(3)
+/** May change significant portions of RFLAGS. */
+#define IEM_CIMPL_F_RFLAGS          RT_BIT_32(4)
+/** May change the status bits (X86_EFL_STATUS_BITS) in RFLAGS. */
+#define IEM_CIMPL_F_STATUS_FLAGS    RT_BIT_32(5)
+/** May trigger a VM exit. */
+#define IEM_CIMPL_F_VMEXIT          RT_BIT_32(6)
+/** May modify FPU state. */
+#define IEM_CIMPL_F_FPU             RT_BIT_32(7)
+/** REP prefixed instruction which may yield before updating PC. */
+#define IEM_CIMPL_F_REP             RT_BIT_32(8)
+/** Force end of TB after the instruction. */
+#define IEM_CIMPL_F_END_TB          RT_BIT_32(9)
 /** Convenience: Raise exception (technically unnecessary, since it shouldn't return VINF_SUCCESS). */
-#define IEM_CIMPL_F_XCPT            (IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT)
+#define IEM_CIMPL_F_XCPT            (IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_UNCOND | IEM_CIMPL_F_RFLAGS | IEM_CIMPL_F_VMEXIT)
+/** Convenience: Testing any kind of branch. */
+#define IEM_CIMPL_F_BRANCH_ANY      (IEM_CIMPL_F_BRANCH_UNCOND | IEM_CIMPL_F_BRANCH_COND | IEM_CIMPL_F_BRANCH_INDIR)
 /** @} */
@@ 1259..1263 -> 1288..1292 @@
     if (rcStrictHlp == VINF_SUCCESS) \
     { \
-        AssertMsg(   ((a_fFlags) & IEM_CIMPL_F_BRANCH) \
+        AssertMsg(   ((a_fFlags) & IEM_CIMPL_F_BRANCH_ANY) \
                   || (   uRipBefore + cbInstr == pVCpu->cpum.GstCtx.rip \
                       && uCsBefore == pVCpu->cpum.GstCtx.cs.Sel) \
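
Usage note (not from the changeset): the new IEM_CIMPL_F_BRANCH_ANY mask is what the updated AssertMsg above uses to accept any of the three branch kinds, and a consumer on the recompiler side could test it the same way. A minimal sketch, with fCImplFlags as a hypothetical local holding the flags collected for a block:

    /* Hedged sketch only; fCImplFlags and the surrounding logic are assumptions. */
    uint32_t const fCImplFlags = IEM_CIMPL_F_MODE | IEM_CIMPL_F_BRANCH_INDIR; /* e.g. a retf block */
    if (fCImplFlags & IEM_CIMPL_F_BRANCH_ANY)
    {
        /* The block ends with some kind of branch, so stop extending the current
           translation block here (compare the fEndTb handling in the python script). */
    }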