Changeset 102077 in vbox
- Timestamp: Nov 13, 2023 11:52:34 AM
- Location: trunk
- Files: 10 edited
trunk/include/VBox/err.h
r102068 → r102077

Three more internal-processing-error status codes for the recompiler's variable management:

```diff
 /** Recompiler: Variable management internal processing error \#7. */
 #define VERR_IEM_VAR_IPE_7                  (-5370)
+/** Recompiler: Variable management internal processing error \#8. */
+#define VERR_IEM_VAR_IPE_8                  (-5371)
+/** Recompiler: Variable management internal processing error \#9. */
+#define VERR_IEM_VAR_IPE_9                  (-5372)
+/** Recompiler: Variable management internal processing error \#10. */
+#define VERR_IEM_VAR_IPE_10                 (-5373)

 /** Recompiler: Unimplemented case. */
```
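The three new codes are put to work by the AImpl call emitters added to IEMAllN8veRecompiler.cpp further down in this changeset, following the recompiler's usual pattern of guarding internal invariants with throwing assertions:

```cpp
/* From iemNativeEmitCallAImplCommon() below: a violated variable-management
   invariant longjmps out of the recompiler with one of the new codes. */
AssertStmt(pReNative->Core.aVars[idxVarRc].uArgNo == UINT8_MAX,
           IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_8));
```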
trunk/include/VBox/vmm/stam.h
r100695 → r102077

A new statistics unit for measuring bytes per translation block:

```diff
     /** Instructions per translation block. */
     STAMUNIT_INSTR_PER_TB,
+    /** Bytes per translation block. */
+    STAMUNIT_BYTES_PER_TB,
     /** The end (exclusive). */
     STAMUNIT_END
```
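The IEMR3.cpp hunk below puts the new unit straight to use when registering the native-code-size profile:

```cpp
/* From this changeset's IEMR3.cpp hunk: the new unit in use. */
STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbNativeCode, STAMTYPE_PROFILE,
                STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES_PER_TB,
                "Size of native code per TB", "/IEM/CPU%u/re/NativeCodeSizePerTb", idCpu);
```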
trunk/src/VBox/VMM/VMMAll/IEMAllInstOneByte.cpp.h
r102033 → r102077

One fix, applied in all 22 rep-SCAS dispatch cases in this file: the guest-register mask handed to IEM_MC_DEFER_TO_CIMPL_0_RET, which tells the recompiler which registers the deferred implementation may change, named xSI where SCAS in fact steps xDI. Representative hunk:

```diff
         case IEMMODE_16BIT:
             IEM_MC_DEFER_TO_CIMPL_0_RET(IEM_CIMPL_F_REP | IEM_CIMPL_F_STATUS_FLAGS,
-                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xSI)
+                                        RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xDI)
                                         | RT_BIT_64(kIemNativeGstReg_GprFirst + X86_GREG_xCX),
                                         iemCImpl_repe_scas_al_m16);
```

The identical xSI → xDI substitution is made in every remaining variant: iemCImpl_repe_scas_{al,ax,eax,rax}_{m16,m32,m64} and iemCImpl_repne_scas_{al,ax,eax,rax}_{m16,m32,m64} (the rax forms only exist for m32/m64).
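For background (not part of the changeset): SCAS compares the accumulator against ES:[rDI] and advances rDI, with rCX serving as the REP counter; rSI plays no part at all, which is what makes the corrected mask right. A rough standalone sketch of REPE SCASB semantics over flat memory:

```c
#include <stdbool.h>
#include <stdint.h>

/* Illustrative model of REPE SCASB (not VirtualBox code): only rDI and rCX
   change, never rSI, hence the xDI clobber bit in the hunk above. */
static void repe_scasb(uint8_t al, uint8_t const *pbMem,
                       uint64_t *pDi, uint64_t *pCx, bool fDf, bool *pfZf)
{
    while (*pCx != 0)
    {
        *pfZf  = al == pbMem[*pDi];      /* compare accumulator with ES:[rDI] */
        *pDi  += fDf ? (uint64_t)-1 : 1; /* step rDI by the direction flag    */
        *pCx  -= 1;                      /* rCX counts the iterations         */
        if (!*pfZf)                      /* REPE stops on the first mismatch  */
            break;
    }
}
```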
trunk/src/VBox/VMM/VMMAll/IEMAllInstPython.py
r102070 → r102077

The statement table is updated for the MC statements that gain native emitters in this changeset:

```diff
     'IEM_MC_BSWAP_LOCAL_U64':                    (McBlock.parseMcGeneric,           False, False, ),
     'IEM_MC_CALC_RM_EFF_ADDR':                   (McBlock.parseMcGeneric,           False, False, ),
-    'IEM_MC_CALL_AIMPL_3':                       (McBlock.parseMcCallAImpl,         True,  False, ),
-    'IEM_MC_CALL_AIMPL_4':                       (McBlock.parseMcCallAImpl,         True,  False, ),
+    'IEM_MC_CALL_AIMPL_3':                       (McBlock.parseMcCallAImpl,         True,  True,  ),
+    'IEM_MC_CALL_AIMPL_4':                       (McBlock.parseMcCallAImpl,         True,  True,  ),
     'IEM_MC_CALL_AVX_AIMPL_2':                   (McBlock.parseMcCallAvxAImpl,      True,  False, ),
     'IEM_MC_CALL_AVX_AIMPL_3':                   (McBlock.parseMcCallAvxAImpl,      True,  False, ),
…
     'IEM_MC_CALL_SSE_AIMPL_3':                   (McBlock.parseMcCallSseAImpl,      True,  False, ),
     'IEM_MC_CALL_VOID_AIMPL_0':                  (McBlock.parseMcCallVoidAImpl,     True,  False, ),
-    'IEM_MC_CALL_VOID_AIMPL_1':                  (McBlock.parseMcCallVoidAImpl,     True,  False, ),
-    'IEM_MC_CALL_VOID_AIMPL_2':                  (McBlock.parseMcCallVoidAImpl,     True,  False, ),
-    'IEM_MC_CALL_VOID_AIMPL_3':                  (McBlock.parseMcCallVoidAImpl,     True,  False, ),
-    'IEM_MC_CALL_VOID_AIMPL_4':                  (McBlock.parseMcCallVoidAImpl,     True,  False, ),
+    'IEM_MC_CALL_VOID_AIMPL_1':                  (McBlock.parseMcCallVoidAImpl,     True,  True,  ),
+    'IEM_MC_CALL_VOID_AIMPL_2':                  (McBlock.parseMcCallVoidAImpl,     True,  True,  ),
+    'IEM_MC_CALL_VOID_AIMPL_3':                  (McBlock.parseMcCallVoidAImpl,     True,  True,  ),
+    'IEM_MC_CALL_VOID_AIMPL_4':                  (McBlock.parseMcCallVoidAImpl,     True,  True,  ),
     'IEM_MC_CLEAR_EFL_BIT':                      (McBlock.parseMcGeneric,           True,  False, ),
     'IEM_MC_CLEAR_FSW_EX':                       (McBlock.parseMcGeneric,           True,  False, ),
…
     'IEM_MC_RAISE_GP0_IF_EFF_ADDR_UNALIGNED':    (McBlock.parseMcGeneric,           True,  False, ),
     'IEM_MC_RAISE_SSE_AVX_SIMD_FP_OR_UD_XCPT':   (McBlock.parseMcGeneric,           True,  False, ),
-    'IEM_MC_REF_EFLAGS':                         (McBlock.parseMcGeneric,           False, False, ),
+    'IEM_MC_REF_EFLAGS':                         (McBlock.parseMcGeneric,           False, True,  ),
     'IEM_MC_REF_FPUREG':                         (McBlock.parseMcGeneric,           False, False, ),
     'IEM_MC_REF_GREG_I32':                       (McBlock.parseMcGeneric,           False, False, ),
…
     'IEM_MC_REF_GREG_I64':                       (McBlock.parseMcGeneric,           False, False, ),
     'IEM_MC_REF_GREG_I64_CONST':                 (McBlock.parseMcGeneric,           False, False, ),
-    'IEM_MC_REF_GREG_U16':                       (McBlock.parseMcGeneric,           False, False, ),
+    'IEM_MC_REF_GREG_U16':                       (McBlock.parseMcGeneric,           False, True,  ),
     'IEM_MC_REF_GREG_U16_CONST':                 (McBlock.parseMcGeneric,           False, False, ),
     'IEM_MC_REF_GREG_U32':                       (McBlock.parseMcGeneric,           False, False, ),
```
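A hedged reading of the table shape (the field meanings are inferred from the pattern of this changeset, not visible in the hunk): each entry maps an MC statement name to a parser plus two booleans, and the entries flipped to True in the last position are exactly the statements that gain native emitters in IEMAllN8veRecompiler.cpp below. Assuming the table is consulted like this:

```python
# Hedged illustration; the tuple-field meaning and the table's name are
# inferred from context, not quoted from the source.
def is_natively_recompilable(dMcStmtParsers, sStmtName):
    (fnParser, fThreaded, fNativeRecompiler) = dMcStmtParsers[sStmtName]
    return fNativeRecompiler

# e.g. is_natively_recompilable(dMcStmtParsers, 'IEM_MC_CALL_AIMPL_3')
#      would return True as of r102077.
```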
trunk/src/VBox/VMM/VMMAll/IEMAllN8vePython.py
r102073 → r102077

The "only one statement missing" statistics collector, previously a commented-out variant tracking variable/argument statements, is repurposed and enabled: it now tracks variations whose only one or two unsupported statements occur alongside AIMPL calls.

```diff
 g_dUnsupportedMcStmtLastOneStats = {}

-## Statistics: List of variations (value) with vars/args that is only missing this one statement (key).
-g_dUnsupportedMcStmtLastOneVarStats = {}
+## Statistics: List of variations (value) with aimpl_[^0] calls that is only missing this one statement (key).
+g_dUnsupportedMcStmtLastOneAImplStats = {}
…
         g_dUnsupportedMcStmtLastOneStats[sStmt] = [oVariation,];

-    #if (   len(dUnsupportedStmts) == 1 #in (1,2)
-    #    and iai.McStmt.findStmtByNames(aoStmts,
-    #                                   { 'IEM_MC_LOCAL': 1, 'IEM_MC_LOCAL_CONST': 1, 'IEM_MC_ARG': 1, 'IEM_MC_ARG_CONST': 1,
-    #                                     'IEM_MC_ARG_LOCAL_REF': 1, 'IEM_MC_ARG_LOCAL_EFLAGS': 1, })):
-    #    for sStmt in dUnsupportedStmts:
-    #        if sStmt in g_dUnsupportedMcStmtLastOneVarStats:
-    #            g_dUnsupportedMcStmtLastOneVarStats[sStmt].append(oVariation);
-    #        else:
-    #            g_dUnsupportedMcStmtLastOneVarStats[sStmt] = [oVariation,];
+    if (    len(dUnsupportedStmts) in (1,2)
+        and iai.McStmt.findStmtByNames(aoStmts,
+                                       { 'IEM_MC_CALL_AIMPL_3': 1,
+                                         'IEM_MC_CALL_AIMPL_4': 1,
+                                         #'IEM_MC_CALL_VOID_AIMPL_0': 1, - can't test results... ?
+                                         'IEM_MC_CALL_VOID_AIMPL_1': 1,
+                                         'IEM_MC_CALL_VOID_AIMPL_2': 1,
+                                         'IEM_MC_CALL_VOID_AIMPL_3': 1,
+                                         'IEM_MC_CALL_VOID_AIMPL_4': 1,
+                                         #'IEM_MC_CALL_FPU_AIMPL_1': 1,
+                                         #'IEM_MC_CALL_FPU_AIMPL_2': 1,
+                                         #'IEM_MC_CALL_FPU_AIMPL_3': 1,
+                                         #'IEM_MC_CALL_MMX_AIMPL_2': 1,
+                                         #'IEM_MC_CALL_MMX_AIMPL_3': 1,
+                                         #'IEM_MC_CALL_SSE_AIMPL_2': 1,
+                                         #'IEM_MC_CALL_SSE_AIMPL_3': 1,
+                                         #'IEM_MC_CALL_AVX_AIMPL_2': 1,
+                                         #'IEM_MC_CALL_AVX_AIMPL_3': 1,
+                                         #'IEM_MC_CALL_AVX_AIMPL_4': 1,
+                                         })):
+        for sStmt in dUnsupportedStmts:
+            if sStmt in g_dUnsupportedMcStmtLastOneAImplStats:
+                g_dUnsupportedMcStmtLastOneAImplStats[sStmt].append(oVariation);
+            else:
+                g_dUnsupportedMcStmtLastOneAImplStats[sStmt] = [oVariation,];

     return None;
…
         print('todo:', file = sys.stderr);

-    #if g_dUnsupportedMcStmtLastOneVarStats:
-    #    asTopKeys = sorted(g_dUnsupportedMcStmtLastOneVarStats, reverse = True,
-    #                       key = lambda sSortKey: len(g_dUnsupportedMcStmtLastOneVarStats[sSortKey]))[:16];
-    #    print('todo:', file = sys.stderr);
-    #    print('todo: Top %s variations with variables and 1-2 unsupported statement dependency:' % (len(asTopKeys),),
-    #          file = sys.stderr);
-    #    cchMaxKey = max([len(sKey) for sKey in asTopKeys]);
-    #    for sKey in asTopKeys:
-    #        print('todo: %*s = %s (%s%s)'
-    #              % (cchMaxKey, sKey, len(g_dUnsupportedMcStmtLastOneVarStats[sKey]),
-    #                 ', '.join([oVar.getShortName() for oVar in g_dUnsupportedMcStmtLastOneVarStats[sKey][:5]]),
-    #                 ',...' if len(g_dUnsupportedMcStmtLastOneVarStats[sKey]) >= 5 else '', )
-    #              , file = sys.stderr);
+    if g_dUnsupportedMcStmtLastOneAImplStats:
+        asTopKeys = sorted(g_dUnsupportedMcStmtLastOneAImplStats, reverse = True,
+                           key = lambda sSortKey: len(g_dUnsupportedMcStmtLastOneAImplStats[sSortKey]))[:16];
+        print('todo:', file = sys.stderr);
+        print('todo: Top %s variations with AIMPL call and 1-2 unsupported statement dependencies:' % (len(asTopKeys),),
+              file = sys.stderr);
+        cchMaxKey = max([len(sKey) for sKey in asTopKeys]);
+        for sKey in asTopKeys:
+            print('todo: %*s = %s (%s%s)'
+                  % (cchMaxKey, sKey, len(g_dUnsupportedMcStmtLastOneAImplStats[sKey]),
+                     ', '.join([oVar.getShortName() for oVar in g_dUnsupportedMcStmtLastOneAImplStats[sKey][:5]]),
+                     ',...' if len(g_dUnsupportedMcStmtLastOneAImplStats[sKey]) >= 5 else '', )
+                  , file = sys.stderr);

     return True;
```
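The enabled reporting block uses a compact "rank keys by list length, keep the top 16" idiom; a minimal standalone version for readers skimming the diff (sample data is made up):

```python
import sys

# Standalone rendition of the ranking idiom above, with fabricated sample data.
dStats = {'IEM_MC_FETCH_MEM_U16': ['v1', 'v2', 'v3'], 'IEM_MC_PUSH_U32': ['v4']}
asTopKeys = sorted(dStats, reverse = True, key = lambda sKey: len(dStats[sKey]))[:16]
for sKey in asTopKeys:
    print('todo: %s = %s' % (sKey, len(dStats[sKey])), file = sys.stderr)
```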
trunk/src/VBox/VMM/VMMAll/IEMAllN8veRecompiler.cpp
r102073 → r102077

A stub for flushing the delayed write of one specific guest register is added right before the existing "flush any delayed writes" function; the register-reference emitters below already call it, so the call sites are in place once the write-back optimization materializes:

```diff
+/**
+ * Flushes delayed write of a specific guest register.
+ *
+ * This must be called prior to calling CImpl functions and any helpers that use
+ * the guest state (like raising exceptions) and such.
+ *
+ * This optimization has not yet been implemented. The first target would be
+ * RIP updates, since these are the most common ones.
+ */
+DECL_HIDDEN_THROW(uint32_t) iemNativeRegFlushPendingSpecificWrite(PIEMRECOMPILERSTATE pReNative, uint32_t off,
+                                                                  IEMNATIVEGSTREGREF enmClass, uint8_t idxReg)
+{
+    RT_NOREF(pReNative, enmClass, idxReg);
+    return off;
+}
+
+
 /**
  * Flushes any delayed guest register writes.
```

Each of the variable-kind setters gains a guard that the variable has no host register assigned yet; their doc comments change from "Changes ..." to "Sets ...", and a todo typo is corrected:

```diff
     AssertStmt(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Invalid,
                IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
+    AssertStmt(pReNative->Core.aVars[idxVar].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
     pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_Stack;
…
 /**
- * Changes it to a variable with a constant value.
+ * Sets it to a variable with a constant value.
  *
  * This does not require stack storage as we know the value and can always
…
         pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_Immediate;
     }
+    AssertStmt(pReNative->Core.aVars[idxVar].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
+
     pReNative->Core.aVars[idxVar].u.uValue = uValue;
 }
…
 /**
- * Changes the variable to a reference (pointer) to @a idxOtherVar.
+ * Sets the variable to a reference (pointer) to @a idxOtherVar.
+ *
+ * This does not require stack storage as we know the value and can always
+ * reload it. Loading is postponed till needed.
  *
  * @param   pReNative   The recompiler state.
…
         pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_Immediate;
     }
+    AssertStmt(pReNative->Core.aVars[idxVar].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
+
     pReNative->Core.aVars[idxVar].u.idxRefVar = idxOtherVar;

     /* Update the other variable, ensure it's a stack variable. */
-    /** @todo handle variables with const values... that' sgo boom now. */
+    /** @todo handle variables with const values... that'll go boom now. */
     pReNative->Core.aVars[idxOtherVar].idxReferrerVar = idxVar;
     iemNativeVarSetKindToStack(pReNative, idxOtherVar);
 }
```

A new kind setter for guest-register references follows:

```diff
+/**
+ * Sets the variable to a reference (pointer) to a guest register reference.
+ *
+ * This does not require stack storage as we know the value and can always
+ * reload it. Loading is postponed till needed.
+ *
+ * @param   pReNative   The recompiler state.
+ * @param   idxVar      The variable.
+ * @param   enmRegClass The class guest registers to reference.
+ * @param   idxReg      The register within @a enmRegClass to reference.
+ *
+ * @throws  VERR_IEM_VAR_IPE_2
+ */
+static void iemNativeVarSetKindToGstRegRef(PIEMRECOMPILERSTATE pReNative, uint8_t idxVar,
+                                           IEMNATIVEGSTREGREF enmRegClass, uint8_t idxReg)
+{
+    Assert(idxVar < RT_ELEMENTS(pReNative->Core.aVars) && (pReNative->Core.bmVars & RT_BIT_32(idxVar)));
+
+    if (pReNative->Core.aVars[idxVar].enmKind != kIemNativeVarKind_GstRegRef)
+    {
+        /* Only simple trasnsitions for now. */
+        AssertStmt(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Invalid,
+                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
+        pReNative->Core.aVars[idxVar].enmKind = kIemNativeVarKind_GstRegRef;
+    }
+    AssertStmt(pReNative->Core.aVars[idxVar].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_2));
+
+    pReNative->Core.aVars[idxVar].u.GstRegRef.enmClass = enmRegClass;
+    pReNative->Core.aVars[idxVar].u.GstRegRef.idx      = idxReg;
+}
```

In iemNativeEmitCallCommon() the #ifdef IEMNATIVE_FP_OFF_STACK_ARG0 block that materializes stack-passed arguments is moved up, ahead of the register-argument processing; the old copy further down, just before the "Free all argument variables (simplified)" step, is deleted and is identical apart from the reworded lead-in comment ("since it the caller will load it later" becoming "since we'll (or the caller) be loading it later"):

```diff
     Assert(!(pReNative->Core.bmHstRegs & g_afIemNativeCallRegs[cHiddenArgs])); /* No variables for hidden arguments. */
+
+#ifdef IEMNATIVE_FP_OFF_STACK_ARG0
+    /*
+     * If there are any stack arguments, make sure they are in their place as well.
+     *
+     * We can use IEMNATIVE_CALL_ARG0_GREG as temporary register since we'll (or
+     * the caller) be loading it later and it must be free (see first loop).
+     */
+    if (cArgs > IEMNATIVE_CALL_ARG_GREG_COUNT)
+        for (unsigned i = IEMNATIVE_CALL_ARG_GREG_COUNT; i < cArgs; i++)
+        {
+            uint8_t const idxVar    = pReNative->Core.aidxArgVars[i];
+            int32_t const offBpDisp = g_aoffIemNativeCallStackArgBpDisp[i - IEMNATIVE_CALL_ARG_GREG_COUNT];
+            if (pReNative->Core.aVars[idxVar].idxReg < RT_ELEMENTS(pReNative->Core.aHstRegs))
+            {
+                Assert(pReNative->Core.aVars[idxVar].enmKind == kIemNativeVarKind_Stack); /* Imm as well? */
+                off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, pReNative->Core.aVars[idxVar].idxReg);
+                pReNative->Core.bmHstRegs &= ~RT_BIT_32(pReNative->Core.aVars[idxVar].idxReg);
+                pReNative->Core.aVars[idxVar].idxReg = UINT8_MAX;
+            }
+            else
+            {
+                /* Use ARG0 as temp for stuff we need registers for. */
+                switch (pReNative->Core.aVars[idxVar].enmKind)
+                {
+                    case kIemNativeVarKind_Stack:
+                        AssertStmt(pReNative->Core.aVars[idxVar].idxStackSlot != UINT8_MAX,
+                                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_3));
+                        off = iemNativeEmitLoadGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG /* is free */,
+                                                       iemNativeVarCalcBpDisp(pReNative, idxVar));
+                        off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, IEMNATIVE_CALL_ARG0_GREG);
+                        continue;
+
+                    case kIemNativeVarKind_Immediate:
+                        off = iemNativeEmitStoreImm64ByBp(pReNative, off, offBpDisp, pReNative->Core.aVars[idxVar].u.uValue);
+                        continue;
+
+                    case kIemNativeVarKind_VarRef:
+                    {
+                        uint8_t const idxOtherVar = pReNative->Core.aVars[idxVar].u.idxRefVar;
+                        Assert(idxOtherVar < RT_ELEMENTS(pReNative->Core.aVars));
+                        AssertStmt(pReNative->Core.aVars[idxOtherVar].idxStackSlot != UINT8_MAX,
+                                   IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_4));
+                        off = iemNativeEmitLeaGprByBp(pReNative, off, IEMNATIVE_CALL_ARG0_GREG,
+                                                      iemNativeStackCalcBpDisp(pReNative->Core.aVars[idxOtherVar].idxStackSlot));
+                        off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, IEMNATIVE_CALL_ARG0_GREG);
+                        continue;
+                    }
+
+                    case kIemNativeVarKind_GstRegRef:
+                        off = iemNativeEmitLeaGprByGstRegRef(pReNative, off, IEMNATIVE_CALL_ARG0_GREG,
+                                                             pReNative->Core.aVars[idxVar].u.GstRegRef.enmClass,
+                                                             pReNative->Core.aVars[idxVar].u.GstRegRef.idx);
+                        off = iemNativeEmitStoreGprByBp(pReNative, off, offBpDisp, IEMNATIVE_CALL_ARG0_GREG);
+                        continue;
+
+                    case kIemNativeVarKind_Invalid:
+                    case kIemNativeVarKind_End:
+                        break;
+                }
+                AssertFailedStmt(IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_3));
+            }
+        }
+#else
+    AssertCompile(IEMNATIVE_CALL_MAX_ARG_COUNT <= IEMNATIVE_CALL_ARG_GREG_COUNT);
+#endif
```

A comment in the CImpl call path is also corrected:

```diff
     /*
-     * Load the two hidden arguments.
+     * Load the two or three hidden arguments.
      */
 #if defined(VBOXSTRICTRC_STRICT_ENABLED) && defined(RT_OS_WINDOWS) && defined(RT_ARCH_AMD64)
```

The first of two new sections provides the assembly-worker call emitters for IEM_MC_CALL_VOID_AIMPL_XXX and IEM_MC_CALL_AIMPL_XXX:

```diff
+/*********************************************************************************************************************************
+*   Emitters for IEM_MC_CALL_VOID_AIMPL_XXX and IEM_MC_CALL_AIMPL_XXX                                                            *
+*********************************************************************************************************************************/
+
+/**
+ * Common worker for IEM_MC_CALL_VOID_AIMPL_XXX and IEM_MC_CALL_AIMPL_XXX.
+ */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitCallAImplCommon(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRc,
+                             uintptr_t pfnAImpl, uint8_t cArgs)
+{
+    if (idxVarRc != UINT8_MAX)
+    {
+        IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarRc);
+        AssertStmt(pReNative->Core.aVars[idxVarRc].uArgNo == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_8));
+        AssertStmt(pReNative->Core.aVars[idxVarRc].cbVar <= sizeof(uint64_t), IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_9));
+    }
+
+    /*
+     * Do all the call setup and cleanup.
+     */
+    off = iemNativeEmitCallCommon(pReNative, off, cArgs, 0 /*cHiddenArgs*/);
+
+    /*
+     * Make the call and update the return code variable if we've got one.
+     */
+    off = iemNativeEmitCallImm(pReNative, off, pfnAImpl);
+    if (idxVarRc < RT_ELEMENTS(pReNative->Core.aVars))
+    {
+        iemNativeVarSetKindToStack(pReNative, idxVarRc);
+        AssertStmt(pReNative->Core.aVars[idxVarRc].idxReg == UINT8_MAX, IEMNATIVE_DO_LONGJMP(pReNative, VERR_IEM_VAR_IPE_10));
+        pReNative->Core.aVars[idxVarRc].idxReg = IEMNATIVE_CALL_RET_GREG;
+    }
+
+    return off;
+}
+
+
+#define IEM_MC_CALL_VOID_AIMPL_0(a_pfn) \
+    off = iemNativeEmitCallAImpl1(pReNative, off, UINT8_MAX /*idxVarRc*/, (uintptr_t)(a_pfn))
+
+#define IEM_MC_CALL_AIMPL_0(a_rc, a_pfn) \
+    off = iemNativeEmitCallAImpl1(pReNative, off, a_rc, (uintptr_t)(a_pfn))
+
+/** Emits code for IEM_MC_CALL_VOID_AIMPL_0 and IEM_MC_CALL_AIMPL_0. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitCallAImpl0(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRc, uintptr_t pfnAImpl)
+{
+    return iemNativeEmitCallAImplCommon(pReNative, off, idxVarRc, pfnAImpl, 0);
+}
+
+
+#define IEM_MC_CALL_VOID_AIMPL_1(a_pfn, a0) \
+    off = iemNativeEmitCallAImpl1(pReNative, off, UINT8_MAX /*idxVarRc*/, (uintptr_t)(a_pfn), a0)
+
+#define IEM_MC_CALL_AIMPL_1(a_rc, a_pfn, a0) \
+    off = iemNativeEmitCallAImpl1(pReNative, off, a_rc, (uintptr_t)(a_pfn), a0)
+
+/** Emits code for IEM_MC_CALL_VOID_AIMPL_1 and IEM_MC_CALL_AIMPL_1. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitCallAImpl1(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRc, uintptr_t pfnAImpl, uint8_t idxArg0)
+{
+    IEMNATIVE_ASSERT_ARG_VAR_IDX(pReNative, idxArg0, 0);
+    return iemNativeEmitCallAImplCommon(pReNative, off, idxVarRc, pfnAImpl, 1);
+}
+
+
+#define IEM_MC_CALL_VOID_AIMPL_2(a_pfn, a0, a1) \
+    off = iemNativeEmitCallAImpl2(pReNative, off, UINT8_MAX /*idxVarRc*/, (uintptr_t)(a_pfn), a0, a1)
+
+#define IEM_MC_CALL_AIMPL_2(a_rc, a_pfn, a0, a1) \
+    off = iemNativeEmitCallAImpl2(pReNative, off, a_rc, (uintptr_t)(a_pfn), a0, a1)
+
+/** Emits code for IEM_MC_CALL_VOID_AIMPL_2 and IEM_MC_CALL_AIMPL_2. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitCallAImpl2(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRc,
+                        uintptr_t pfnAImpl, uint8_t idxArg0, uint8_t idxArg1)
+{
+    IEMNATIVE_ASSERT_ARG_VAR_IDX(pReNative, idxArg0, 0);
+    IEMNATIVE_ASSERT_ARG_VAR_IDX(pReNative, idxArg1, 1);
+    return iemNativeEmitCallAImplCommon(pReNative, off, idxVarRc, pfnAImpl, 2);
+}
```

The _3 and _4 variants (the IEM_MC_CALL_[VOID_]AIMPL_3/4 macros mapping to iemNativeEmitCallAImpl3 and iemNativeEmitCallAImpl4) repeat this pattern with one and two additional IEMNATIVE_ASSERT_ARG_VAR_IDX checks.
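To see what these emitters are for (schematic and hedged, assembled from the IEM MC vocabulary rather than quoted from an instruction file): a typical ALU instruction body references a guest register and EFLAGS, then defers the arithmetic to an assembly worker via IEM_MC_CALL_VOID_AIMPL_3, and those are exactly the statement kinds this changeset teaches the native recompiler to handle:

```cpp
/* Schematic MC block (illustrative, not a verbatim quote): the register form
 * of an ADD r/m16, r16 built from the statements recompiled by this changeset. */
IEM_MC_BEGIN(3, 0);
IEM_MC_ARG(uint16_t *,  pu16Dst, 0);
IEM_MC_ARG(uint16_t,    u16Src,  1);
IEM_MC_ARG(uint32_t *,  pEFlags, 2);
IEM_MC_FETCH_GREG_U16(u16Src, IEM_GET_MODRM_REG(pVCpu, bRm));
IEM_MC_REF_GREG_U16(pu16Dst, IEM_GET_MODRM_RM(pVCpu, bRm)); /* becomes a GstRegRef variable */
IEM_MC_REF_EFLAGS(pEFlags);                                 /* becomes a GstRegRef variable */
IEM_MC_CALL_VOID_AIMPL_3(iemAImpl_add_u16, pu16Dst, u16Src, pEFlags);
IEM_MC_ADVANCE_RIP_AND_FINISH();
IEM_MC_END();
```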
The second new section supplies the register-reference emitters backing IEM_MC_REF_GREG_Uxx/Ixx and IEM_MC_REF_EFLAGS:

```diff
+/*********************************************************************************************************************************
+*   Register references.                                                                                                         *
+*********************************************************************************************************************************/
+
+#define IEM_MC_REF_GREG_U8_THREADED(a_pu8Dst, a_iGRegEx) \
+    off = iemNativeEmitRefGregU8(pReNative, off, a_pu8Dst, a_iGRegEx, false /*fConst*/)
+
+#define IEM_MC_REF_GREG_U8_CONST_THREADED(a_pu8Dst, a_iGReg) \
+    off = iemNativeEmitRefGregU8(pReNative, off, a_pu8Dst, a_iGRegEx, true /*fConst*/)
+
+#define IEM_MC_REF_GREG_U16(a_pu16Dst, a_iGReg) \
+    off = iemNativeEmitRefGregUxx(pReNative, off, a_pu16Dst, a_iGReg, false /*fConst*/)
+
+#define IEM_MC_REF_GREG_U16_CONST(a_pu16Dst, a_iGReg) \
+    off = iemNativeEmitRefGregUxx(pReNative, off, a_pu16Dst, a_iGReg, true /*fConst*/)
```

Analogous IEM_MC_REF_GREG_{U32,I32,U64,I64}[_CONST] wrappers follow, all forwarding to the same worker pair:

```diff
+/** Handles IEM_MC_REF_GREG_Uxx[_CONST] and IEM_MC_REF_GREG_Ixx[_CONST]. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitRefGregUxx(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRef, uint8_t iGReg, bool fConst)
+{
+    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarRef);
+    Assert(pReNative->Core.aVars[idxVarRef].cbVar == sizeof(void *));
+    Assert(iGReg < 16);
+
+    iemNativeVarSetKindToGstRegRef(pReNative, idxVarRef, kIemNativeGstRegRef_Gpr, iGReg);
+
+    /* If we've delayed writing back the register value, flush it now. */
+    off = iemNativeRegFlushPendingSpecificWrite(pReNative, off, kIemNativeGstRegRef_Gpr, iGReg);
+
+    /* If it's not a const reference we need to flush the shadow copy of the register now. */
+    if (!fConst)
+        iemNativeRegFlushGuestShadows(pReNative, RT_BIT_64(IEMNATIVEGSTREG_GPR(iGReg)));
+
+    return off;
+}
+
+
+#define IEM_MC_REF_EFLAGS(a_pEFlags) \
+    off = iemNativeEmitRefEFlags(pReNative, off, a_pEFlags)
+
+/** Handles IEM_MC_REF_EFLAGS. */
+DECL_INLINE_THROW(uint32_t)
+iemNativeEmitRefEFlags(PIEMRECOMPILERSTATE pReNative, uint32_t off, uint8_t idxVarRef)
+{
+    IEMNATIVE_ASSERT_VAR_IDX(pReNative, idxVarRef);
+    Assert(pReNative->Core.aVars[idxVarRef].cbVar == sizeof(void *));
+
+    iemNativeVarSetKindToGstRegRef(pReNative, idxVarRef, kIemNativeGstRegRef_EFlags, 0);
+
+    /* If we've delayed writing back the register value, flush it now. */
+    off = iemNativeRegFlushPendingSpecificWrite(pReNative, off, kIemNativeGstRegRef_EFlags, 0);
+
+    /* If there is a shadow copy of guest EFLAGS, flush it now. */
+    iemNativeRegFlushGuestShadows(pReNative, RT_BIT_64(kIemNativeGstReg_EFlags));
+
+    return off;
+}
```

Finally, iemNativeRecompile() is instrumented with the new release statistics:

```diff
 DECLHIDDEN(PIEMTB) iemNativeRecompile(PVMCPUCC pVCpu, PIEMTB pTb) RT_NOEXCEPT
 {
+    STAM_REL_PROFILE_START(&pVCpu->iem.s.StatNativeRecompilation, a);
+
     /*
      * The first time thru, we allocate the recompiler state, the other times
…
 #ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
-    int32_t              iGstInstr  = -1;
-    uint32_t             fExec      = pTb->fFlags;
-#endif
-    PCIEMTHRDEDCALLENTRY pCallEntry = pTb->Thrd.paCalls;
+    int32_t              iGstInstr        = -1;
+#endif
+#ifndef VBOX_WITHOUT_RELEASE_STATISTICS
+    uint32_t             cThreadedCalls   = 0;
+    uint32_t             cRecompiledCalls = 0;
+#endif
+    uint32_t             fExec            = pTb->fFlags;
+    PCIEMTHRDEDCALLENTRY pCallEntry       = pTb->Thrd.paCalls;
     while (cCallsLeft-- > 0)
…
-#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
         if (pCallEntry->enmFunction == kIemThreadedFunc_BltIn_CheckMode)
             fExec = pCallEntry->auParams[0];
+#ifdef IEMNATIVE_WITH_TB_DEBUG_INFO
         iemNativeDbgInfoAddNativeOffset(pReNative, off);
         if (iGstInstr < (int32_t)pCallEntry->idxInstr)
…
         if (pfnRecom) /** @todo stats on this. */
         {
-            //STAM_COUNTER_INC()
             off = pfnRecom(pReNative, off, pCallEntry);
+            STAM_REL_STATS({cRecompiledCalls++;});
         }
         else
+        {
             off = iemNativeEmitThreadedCall(pReNative, off, pCallEntry);
+            STAM_REL_STATS({cThreadedCalls++;});
+        }
         Assert(off <= pReNative->cInstrBufAlloc);
…
         pCallEntry++;
     }
+
+    STAM_REL_PROFILE_ADD_PERIOD(&pVCpu->iem.s.StatNativeCallsRecompiled, cRecompiledCalls);
+    STAM_REL_PROFILE_ADD_PERIOD(&pVCpu->iem.s.StatNativeCallsThreaded, cThreadedCalls);
+    if (!cThreadedCalls)
+        STAM_REL_COUNTER_INC(&pVCpu->iem.s.StatNativeFullyRecompiledTbs);
…
     iemExecMemAllocatorReadyForUse(pVCpu, paFinalInstrBuf, off * sizeof(IEMNATIVEINSTR));
+    STAM_REL_PROFILE_ADD_PERIOD(&pVCpu->iem.s.StatTbNativeCode, off * sizeof(IEMNATIVEINSTR));

     /*
      * Convert the translation block.
      */
-    //RT_BREAKPOINT();
     RTMemFree(pTb->Thrd.paCalls);
…
 #endif

+    STAM_REL_PROFILE_STOP(&pVCpu->iem.s.StatNativeRecompilation, a);
     return pTb;
 }
```
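The two ADD_PERIOD calls record one sample per translation block, so the exported values read as per-TB averages. A minimal model of that accounting (plain C, an assumption-level sketch of STAM profile behaviour, not its implementation):

```c
#include <stdint.h>

/* Minimal model of a STAM profile: one sample per recompiled TB. */
typedef struct { uint64_t cSamples, cTotal; } PROFILEMODEL;

static void profileAddPeriod(PROFILEMODEL *p, uint64_t cValue)
{
    p->cSamples += 1;       /* one TB */
    p->cTotal   += cValue;  /* e.g. number of recompiled calls in that TB */
}

/* Average calls/TB as the statistics viewer would report it. */
static uint64_t profileAvg(PROFILEMODEL const *p)
{
    return p->cSamples ? p->cTotal / p->cSamples : 0;
}
```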
trunk/src/VBox/VMM/VMMR3/IEMR3.cpp
r101163 → r102077

The new statistics are registered (descriptions quoted as committed):

```diff
     STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatCheckNeedCsLimChecking, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
                     "Needing CS.LIM checking TB after branch or on page crossing", "/IEM/CPU%u/re/CheckTbNeedCsLimChecking", idCpu);
-#endif
+
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCallsRecompiled, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS_PER_TB,
+                    "Number of threaded calls per TB that have been properly recompiled to native code",
+                    "/IEM/CPU%u/re/NativeCallsRecompiledPerTb", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeCallsThreaded, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_CALLS_PER_TB,
+                    "Number of threaded calls per TB that could not be recompiler to native code",
+                    "/IEM/CPU%u/re/NativeCallsThreadedPerTb", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeFullyRecompiledTbs, STAMTYPE_COUNTER, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT,
+                    "Number of threaded calls that could not be recompiler to native code",
+                    "/IEM/CPU%u/re/NativeFullyRecompiledTbs", idCpu);
+
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatTbNativeCode, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES_PER_TB,
+                    "Size of native code per TB", "/IEM/CPU%u/re/NativeCodeSizePerTb", idCpu);
+    STAMR3RegisterF(pVM, (void *)&pVCpu->iem.s.StatNativeRecompilation, STAMTYPE_PROFILE, STAMVISIBILITY_ALWAYS, STAMUNIT_TICKS_PER_CALL,
+                    "Profiling iemNativeRecompile()", "/IEM/CPU%u/re/NativeRecompilation", idCpu);
+#endif /* VBOX_WITH_IEM_RECOMPILER */

     for (uint32_t i = 0; i < RT_ELEMENTS(pVCpu->iem.s.aStatXcpts); i++)
```
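Once registered, the counters show up under /IEM/CPU%u/re/ and can be inspected on a running VM; assuming a VM named "testvm", something like the following should list them (exact option spelling may vary between VirtualBox versions):

```sh
VBoxManage debugvm "testvm" statistics --pattern "/IEM/CPU0/re/Native*"
```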
trunk/src/VBox/VMM/VMMR3/STAM.cpp
r100695 → r102077

The new unit gets entries in all three unit-name switches:

```diff
         case STAMUNIT_INSTR:                return "instr";
         case STAMUNIT_INSTR_PER_TB:         return "instr/tb";
+        case STAMUNIT_BYTES_PER_TB:         return "bytes/tb";

         default:
…
         case STAMUNIT_INSTR:                return "instr";
         case STAMUNIT_INSTR_PER_TB:         return "instr";
+        case STAMUNIT_BYTES_PER_TB:         return "bytes";

         default:
…
         case STAMUNIT_CALLS_PER_TB:         return "tbs";
         case STAMUNIT_INSTR_PER_TB:         return "tbs";
+        case STAMUNIT_BYTES_PER_TB:         return "tbs";
         default:
             AssertMsgFailed(("Wrong unit %d\n", enmUnit));
```
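The three switches appear to supply the full unit string plus its numerator and denominator halves (the enclosing function names are not visible in this hunk, so that reading is inferred); for the new ratio unit the decomposition is:

```c
/* Inferred decomposition of the new ratio unit across the three switches. */
static const struct { const char *pszUnit, *pszNum, *pszDenom; } s_BytesPerTb =
    { "bytes/tb", "bytes", "tbs" };
```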
trunk/src/VBox/VMM/include/IEMInternal.h
r102010 → r102077

The per-VCPU state gains the new statistics members; the alignment padding array shrinks by one uint64_t to offset the newly added StatNativeFullyRecompiledTbs counter, presumably keeping the members that follow on the same alignment boundary:

```diff
     R3PTRTYPE(struct IEMRECOMPILERSTATE *) pNativeRecompilerStateR3;
     /** Alignment padding. */
-    uint64_t                auAlignment10[4];
+    uint64_t                auAlignment10[3];
     /** Statistics: Times TB execution was broken off before reaching the end. */
     STAMCOUNTER             StatTbExecBreaks;
…
     /** Statistics: Times a jump or page crossing required a TB with CS.LIM checking. */
     STAMCOUNTER             StatCheckNeedCsLimChecking;
+    /** Native TB statistics: Number of fully recompiled TBs. */
+    STAMCOUNTER             StatNativeFullyRecompiledTbs;
     /** Threaded TB statistics: Number of instructions per TB. */
     STAMPROFILE             StatTbThreadedInstr;
…
     /** Native TB statistics: Profiling native recompilation. */
     STAMPROFILE             StatNativeRecompilation;
+    /** Native TB statistics: Number of calls per TB that were recompiled properly. */
+    STAMPROFILE             StatNativeCallsRecompiled;
+    /** Native TB statistics: Number of threaded calls per TB that weren't recompiled. */
+    STAMPROFILE             StatNativeCallsThreaded;
     /** @} */
```
trunk/src/VBox/VMM/include/IEMN8veRecompiler.h
r102068 → r102077

A new assertion macro for checking that an argument variable sits in the expected argument slot:

```diff
 /**
+ * Checks that a variable index is valid and that the variable is assigned the
+ * correct argument number.
+ * This also adds a RT_NOREF of a_idxVar.
+ */
+#define IEMNATIVE_ASSERT_ARG_VAR_IDX(a_pReNative, a_idxVar, a_uArgNo) do { \
+        RT_NOREF(a_idxVar); \
+        AssertMsg(   (unsigned)(a_idxVar) < RT_ELEMENTS((a_pReNative)->Core.aVars) \
+                  && ((a_pReNative)->Core.bmVars & RT_BIT_32(a_idxVar)) \
+                  && (a_pReNative)->Core.aVars[a_idxVar].uArgNo == (a_uArgNo) \
+                  , ("%s=%d; uArgNo=%d, expected %u\n", #a_idxVar, a_idxVar, \
+                     (a_pReNative)->Core.aVars[RT_MAX(a_idxVar, RT_ELEMENTS((a_pReNative)->Core.aVars)) - 1].uArgNo, a_uArgNo)); \
+    } while (0)
+
+/**
  * Calculates the stack address of a variable as a [r]BP displacement value.
  */
```
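The macro is consumed by the AImpl call emitters introduced in this changeset's IEMAllN8veRecompiler.cpp hunk:

```cpp
/* Usage as seen in iemNativeEmitCallAImpl2() (from this changeset): each
   argument variable is checked against the call slot it must occupy. */
IEMNATIVE_ASSERT_ARG_VAR_IDX(pReNative, idxArg0, 0);
IEMNATIVE_ASSERT_ARG_VAR_IDX(pReNative, idxArg1, 1);
```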