Changeset 74791 in vbox
- Timestamp: Oct 12, 2018 10:44:17 AM (6 years ago)
- Location: trunk
- Files: 16 edited
Legend:
- Unmodified
- Added
- Removed
trunk/include/VBox/vmm/vm.h
r74790 r74791 727 727 #endif 728 728 729 /** @def VM_FF_IS_ PENDING729 /** @def VM_FF_IS_ANY_SET 730 730 * Checks if one or more force action in the specified set is pending. 731 731 * … … 733 733 * @param fFlags The flags to check for. 734 734 */ 735 #define VM_FF_IS_ PENDING(pVM, fFlags) RT_BOOL((pVM)->fGlobalForcedActions & (fFlags))735 #define VM_FF_IS_ANY_SET(pVM, fFlags) RT_BOOL((pVM)->fGlobalForcedActions & (fFlags)) 736 736 737 737 /** @def VMCPU_FF_IS_ANY_SET -
trunk/src/VBox/VMM/VMMAll/IEMAll.cpp
r74751 r74791 14354 14354 || ( !(fCpu & ~(VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC)) 14355 14355 && !pVCpu->cpum.GstCtx.rflags.Bits.u1IF) ) 14356 && !VM_FF_IS_ PENDING(pVM, VM_FF_ALL_MASK) ))14356 && !VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK) )) 14357 14357 { 14358 14358 if (cInstr-- > 0) … … 14516 14516 || ( !(fCpu & ~(VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC)) 14517 14517 && !pVCpu->cpum.GstCtx.rflags.Bits.u1IF)) 14518 && !VM_FF_IS_ PENDING(pVM, VM_FF_ALL_MASK) )14518 && !VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK) ) 14519 14519 || pStats->cInstructions < cMinInstructions)) 14520 14520 { -
trunk/src/VBox/VMM/VMMAll/IEMAllCImplStrInstr.cpp.h
r74789 r74791 76 76 if (RT_LIKELY( !VMCPU_FF_IS_ANY_SET(a_pVCpu, (a_fEflags) & X86_EFL_IF ? VMCPU_FF_YIELD_REPSTR_MASK \ 77 77 : VMCPU_FF_YIELD_REPSTR_NOINT_MASK) \ 78 && !VM_FF_IS_ PENDING(a_pVM, VM_FF_YIELD_REPSTR_MASK) \78 && !VM_FF_IS_ANY_SET(a_pVM, VM_FF_YIELD_REPSTR_MASK) \ 79 79 )) \ 80 80 { \ … … 99 99 if (RT_LIKELY( !VMCPU_FF_IS_ANY_SET(a_pVCpu, (a_fEflags) & X86_EFL_IF ? VMCPU_FF_YIELD_REPSTR_MASK \ 100 100 : VMCPU_FF_YIELD_REPSTR_NOINT_MASK) \ 101 && !VM_FF_IS_ PENDING(a_pVM, VM_FF_YIELD_REPSTR_MASK) \101 && !VM_FF_IS_ANY_SET(a_pVM, VM_FF_YIELD_REPSTR_MASK) \ 102 102 )) \ 103 103 { /* probable */ } \ … … 119 119 do { \ 120 120 if (RT_LIKELY( ( !VMCPU_FF_IS_ANY_SET(a_pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_REPSTR_MASK) \ 121 && !VM_FF_IS_ PENDING(a_pVM, VM_FF_HIGH_PRIORITY_POST_REPSTR_MASK)) \121 && !VM_FF_IS_ANY_SET(a_pVM, VM_FF_HIGH_PRIORITY_POST_REPSTR_MASK)) \ 122 122 || (a_fExitExpr) )) \ 123 123 { /* very likely */ } \ -
trunk/src/VBox/VMM/VMMAll/NEMAllNativeTemplate-win.cpp.h
r74789 r74791 4188 4188 uint64_t offDeltaIgnored; 4189 4189 uint64_t const nsNextTimerEvt = TMTimerPollGIP(pVM, pVCpu, &offDeltaIgnored); NOREF(nsNextTimerEvt); 4190 if ( !VM_FF_IS_ PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC)4190 if ( !VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC) 4191 4191 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 4192 4192 { … … 4303 4303 * If no relevant FFs are pending, loop. 4304 4304 */ 4305 if ( !VM_FF_IS_ PENDING( pVM, !fSingleStepping ? VM_FF_HP_R0_PRE_HM_MASK : VM_FF_HP_R0_PRE_HM_STEP_MASK)4305 if ( !VM_FF_IS_ANY_SET( pVM, !fSingleStepping ? VM_FF_HP_R0_PRE_HM_MASK : VM_FF_HP_R0_PRE_HM_STEP_MASK) 4306 4306 && !VMCPU_FF_IS_ANY_SET(pVCpu, !fSingleStepping ? VMCPU_FF_HP_R0_PRE_HM_MASK : VMCPU_FF_HP_R0_PRE_HM_STEP_MASK) ) 4307 4307 continue; -
trunk/src/VBox/VMM/VMMAll/PGMAllBth.h
r74790 r74791 1450 1450 ) 1451 1451 # else 1452 if (VM_FF_IS_ PENDING(pVM, VM_FF_PGM_SYNC_CR3 | VM_FF_PGM_SYNC_CR3_NON_GLOBAL) )1452 if (VM_FF_IS_ANY_SET(pVM, VM_FF_PGM_SYNC_CR3 | VM_FF_PGM_SYNC_CR3_NON_GLOBAL) ) 1453 1453 # endif 1454 1454 { -
trunk/src/VBox/VMM/VMMR0/HMSVMR0.cpp
r74790 r74791 4220 4220 4221 4221 PVM pVM = pVCpu->CTX_SUFF(pVM); 4222 if ( VM_FF_IS_ PENDING(pVM, !pVCpu->hm.s.fSingleInstruction4222 if ( VM_FF_IS_ANY_SET(pVM, !pVCpu->hm.s.fSingleInstruction 4223 4223 ? VM_FF_HP_R0_PRE_HM_MASK : VM_FF_HP_R0_PRE_HM_STEP_MASK) 4224 4224 || VMCPU_FF_IS_ANY_SET(pVCpu, !pVCpu->hm.s.fSingleInstruction … … 4239 4239 /* Pending HM-to-R3 operations (critsects, timers, EMT rendezvous etc.) */ 4240 4240 /* -XXX- what was that about single stepping? */ 4241 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HM_TO_R3_MASK)4241 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HM_TO_R3_MASK) 4242 4242 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 4243 4243 { … … 4369 4369 */ 4370 4370 pSvmTransient->fEFlags = ASMIntDisableFlags(); 4371 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC)4371 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC) 4372 4372 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 4373 4373 { … … 4482 4482 */ 4483 4483 pSvmTransient->fEFlags = ASMIntDisableFlags(); 4484 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC)4484 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC) 4485 4485 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 4486 4486 { -
trunk/src/VBox/VMM/VMMR0/HMVMXR0.cpp
r74790 r74791 6900 6900 PVM pVM = pVCpu->CTX_SUFF(pVM); 6901 6901 if ( !fStepping 6902 ? !VM_FF_IS_ PENDING(pVM, VM_FF_HP_R0_PRE_HM_MASK)6902 ? !VM_FF_IS_ANY_SET(pVM, VM_FF_HP_R0_PRE_HM_MASK) 6903 6903 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HP_R0_PRE_HM_MASK) 6904 : !VM_FF_IS_ PENDING(pVM, VM_FF_HP_R0_PRE_HM_STEP_MASK)6904 : !VM_FF_IS_ANY_SET(pVM, VM_FF_HP_R0_PRE_HM_STEP_MASK) 6905 6905 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HP_R0_PRE_HM_STEP_MASK) ) 6906 6906 return VINF_SUCCESS; … … 6922 6922 6923 6923 /* Pending HM-to-R3 operations (critsects, timers, EMT rendezvous etc.) */ 6924 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HM_TO_R3_MASK)6924 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HM_TO_R3_MASK) 6925 6925 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 6926 6926 { … … 8584 8584 pVmxTransient->fEFlags = ASMIntDisableFlags(); 8585 8585 8586 if ( ( !VM_FF_IS_ PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC)8586 if ( ( !VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_TM_VIRTUAL_SYNC) 8587 8587 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HM_TO_R3_MASK)) 8588 8588 || ( fStepping /* Optimized for the non-stepping case, so a bit of unnecessary work when stepping. */ -
trunk/src/VBox/VMM/VMMR3/DBGF.cpp
r74790 r74791 834 834 { 835 835 int rc; 836 if ( !VM_FF_IS_ PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_REQUEST)836 if ( !VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_REQUEST) 837 837 && !VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_REQUEST)) 838 838 { -
trunk/src/VBox/VMM/VMMR3/EM.cpp
r74790 r74791 1187 1187 * important FFs while we were busy switching the state. So, check again. 1188 1188 */ 1189 if ( VM_FF_IS_ PENDING(pVM, VM_FF_REQUEST | VM_FF_PDM_QUEUES | VM_FF_DBGF | VM_FF_CHECK_VM_STATE | VM_FF_RESET)1189 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_REQUEST | VM_FF_PDM_QUEUES | VM_FF_DBGF | VM_FF_CHECK_VM_STATE | VM_FF_RESET) 1190 1190 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_TIMER | VMCPU_FF_REQUEST)) 1191 1191 { … … 1222 1222 * else. Sync back the state and leave the lock to be on the safe side. 1223 1223 */ 1224 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)1224 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 1225 1225 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 1226 1226 { … … 1274 1274 #endif 1275 1275 AssertCompile(VMCPU_FF_ALL_REM_MASK & VMCPU_FF_TIMER); 1276 if ( VM_FF_IS_ PENDING(pVM, VM_FF_ALL_REM_MASK)1276 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK) 1277 1277 || VMCPU_FF_IS_ANY_SET(pVCpu, 1278 1278 VMCPU_FF_ALL_REM_MASK … … 1399 1399 * Check for pending actions. 1400 1400 */ 1401 if ( VM_FF_IS_ PENDING(pVM, VM_FF_ALL_REM_MASK)1401 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK) 1402 1402 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK & ~VMCPU_FF_UNHALT)) 1403 1403 return VINF_SUCCESS; … … 1863 1863 * Post execution chunk first. 1864 1864 */ 1865 if ( VM_FF_IS_ PENDING(pVM, VM_FF_NORMAL_PRIORITY_POST_MASK)1865 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_NORMAL_PRIORITY_POST_MASK) 1866 1866 || (VMCPU_FF_NORMAL_PRIORITY_POST_MASK && VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_NORMAL_PRIORITY_POST_MASK)) ) 1867 1867 { … … 2094 2094 * (Executed in ascending priority order.) 
2095 2095 */ 2096 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_MASK)2096 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_MASK) 2097 2097 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_MASK)) 2098 2098 { … … 2416 2416 && rc != VINF_EM_TERMINATE 2417 2417 && rc != VINF_EM_OFF 2418 && ( VM_FF_IS_ PENDING(pVM, VM_FF_ALL_REM_MASK)2418 && ( VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK) 2419 2419 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK & ~VMCPU_FF_UNHALT))) 2420 2420 { -
trunk/src/VBox/VMM/VMMR3/EMHM.cpp
r74790 r74791 95 95 * Service necessary FFs before going into HM. 96 96 */ 97 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)97 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 98 98 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 99 99 { … … 119 119 */ 120 120 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK); 121 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)121 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 122 122 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 123 123 { … … 404 404 Assert(!VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_SELM_SYNC_TSS | VMCPU_FF_SELM_SYNC_GDT | VMCPU_FF_SELM_SYNC_LDT)); 405 405 #endif 406 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)406 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 407 407 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 408 408 { … … 465 465 */ 466 466 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK); 467 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)467 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 468 468 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 469 469 rc = VBOXSTRICTRC_TODO(emR3HighPriorityPostForcedActions(pVM, pVCpu, rc)); … … 485 485 TMTimerPollVoid(pVM, pVCpu); 486 486 #endif 487 if ( VM_FF_IS_ PENDING(pVM, VM_FF_ALL_MASK)487 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK) 488 488 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_MASK)) 489 489 { -
trunk/src/VBox/VMM/VMMR3/EMR3Nem.cpp
r74790 r74791 95 95 * Service necessary FFs before going into HM. 96 96 */ 97 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)97 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 98 98 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 99 99 { … … 119 119 */ 120 120 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK); 121 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)121 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 122 122 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 123 123 { … … 379 379 * Process high priority pre-execution raw-mode FFs. 380 380 */ 381 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)381 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 382 382 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 383 383 { … … 448 448 */ 449 449 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK); 450 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)450 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 451 451 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 452 452 rcStrict = emR3HighPriorityPostForcedActions(pVM, pVCpu, rcStrict); … … 468 468 TMTimerPollVoid(pVM, pVCpu); 469 469 #endif 470 if ( VM_FF_IS_ PENDING(pVM, VM_FF_ALL_MASK)470 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK) 471 471 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_MASK)) 472 472 { -
trunk/src/VBox/VMM/VMMR3/EMRaw.cpp
r74790 r74791 172 172 * Check vital forced actions, but ignore pending interrupts and timers. 173 173 */ 174 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)174 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 175 175 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 176 176 { … … 1317 1317 * Process high priority pre-execution raw-mode FFs. 1318 1318 */ 1319 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)1319 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 1320 1320 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 1321 1321 { … … 1348 1348 CSAMR3CheckCodeEx(pVM, &pVCpu->cpum.GstCtx, pVCpu->cpum.GstCtx.eip); 1349 1349 STAM_PROFILE_ADV_RESUME(&pVCpu->em.s.StatRAWEntry, b); 1350 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)1350 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK) 1351 1351 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 1352 1352 { … … 1417 1417 rc = CPUMRawLeave(pVCpu, rc); 1418 1418 VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK); 1419 if ( VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)1419 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK) 1420 1420 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK)) 1421 1421 rc = VBOXSTRICTRC_TODO(emR3HighPriorityPostForcedActions(pVM, pVCpu, rc)); … … 1484 1484 #endif 1485 1485 STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTail, d); 1486 if ( VM_FF_IS_ PENDING(pVM, ~VM_FF_HIGH_PRIORITY_PRE_RAW_MASK | VM_FF_PGM_NO_MEMORY)1486 if ( VM_FF_IS_ANY_SET(pVM, ~VM_FF_HIGH_PRIORITY_PRE_RAW_MASK | VM_FF_PGM_NO_MEMORY) 1487 1487 || VMCPU_FF_IS_ANY_SET(pVCpu, ~VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK)) 1488 1488 { -
trunk/src/VBox/VMM/VMMR3/NEMR3Native-win.cpp
r74789 r74791 1705 1705 if (rcStrict == VINF_NEM_FLUSH_TLB) 1706 1706 { 1707 if ( !VM_FF_IS_ PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK | VM_FF_HP_R0_PRE_HM_MASK)1707 if ( !VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK | VM_FF_HP_R0_PRE_HM_MASK) 1708 1708 && !VMCPU_FF_IS_ANY_SET(pVCpu, (VMCPU_FF_HIGH_PRIORITY_POST_MASK | VMCPU_FF_HP_R0_PRE_HM_MASK) 1709 1709 & ~VMCPU_FF_RESUME_GUEST_MASK)) -
trunk/src/VBox/VMM/VMMR3/VMEmt.cpp
r74790 r74791 372 372 uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers; 373 373 STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers); 374 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)374 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 375 375 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 376 376 break; 377 377 uint64_t u64NanoTS; 378 378 TMTimerPollGIP(pVM, pVCpu, &u64NanoTS); 379 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)379 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 380 380 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 381 381 break; … … 571 571 uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers; 572 572 STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers); 573 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)573 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 574 574 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 575 575 break; … … 580 580 uint64_t u64NanoTS; 581 581 TMTimerPollGIP(pVM, pVCpu, &u64NanoTS); 582 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)582 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 583 583 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 584 584 break; … … 720 720 uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers; 721 721 STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers); 722 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)722 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 723 723 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 724 724 break; … … 730 730 uint64_t u64Delta; 731 731 uint64_t u64GipTime = TMTimerPollGIP(pVM, pVCpu, &u64Delta); 732 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)732 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 733 733 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 734 734 break; … … 740 740 { 741 741 VMMR3YieldStop(pVM); 742 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)742 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 743 
743 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 744 744 break; … … 808 808 * Check Relevant FFs. 809 809 */ 810 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)810 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK) 811 811 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK)) 812 812 break; … … 898 898 899 899 if ( pUVCpu->pVM 900 && ( VM_FF_IS_ PENDING(pUVCpu->pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)900 && ( VM_FF_IS_ANY_SET(pUVCpu->pVM, VM_FF_EXTERNAL_SUSPENDED_MASK) 901 901 || VMCPU_FF_IS_ANY_SET(VMMGetCpu(pUVCpu->pVM), VMCPU_FF_EXTERNAL_SUSPENDED_MASK) 902 902 ) … … 960 960 * Check Relevant FFs. 961 961 */ 962 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)962 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK) 963 963 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK)) 964 964 break; … … 1109 1109 ? VMCPU_FF_EXTERNAL_HALTED_MASK 1110 1110 : VMCPU_FF_EXTERNAL_HALTED_MASK & ~(VMCPU_FF_UPDATE_APIC | VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC); 1111 if ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)1111 if ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK) 1112 1112 || VMCPU_FF_IS_ANY_SET(pVCpu, fMask)) 1113 1113 { … … 1190 1190 1191 1191 if ( pVM 1192 && ( VM_FF_IS_ PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)1192 && ( VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK) 1193 1193 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK) 1194 1194 ) -
trunk/src/VBox/VMM/VMMRC/TRPMRCHandlers.cpp
r74790 r74791 211 211 */ 212 212 if ( rc == VINF_SUCCESS 213 && ( VM_FF_IS_ PENDING(pVM, VM_FF_TM_VIRTUAL_SYNC | VM_FF_REQUEST | VM_FF_PGM_NO_MEMORY | VM_FF_PDM_DMA)213 && ( VM_FF_IS_ANY_SET(pVM, VM_FF_TM_VIRTUAL_SYNC | VM_FF_REQUEST | VM_FF_PGM_NO_MEMORY | VM_FF_PDM_DMA) 214 214 || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_TIMER | VMCPU_FF_TO_R3 215 215 | VMCPU_FF_UPDATE_APIC | VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC -
trunk/src/recompiler/VBoxRecompiler.c
r74789 r74791 1163 1163 */ 1164 1164 case EXCP_SINGLE_INSTR: 1165 if ( !VM_FF_IS_ PENDING(pVM, VM_FF_ALL_REM_MASK)1165 if ( !VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK) 1166 1166 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK)) 1167 1167 continue; … … 1196 1196 if (rc == VINF_EM_DBG_STEPPED) 1197 1197 { 1198 if ( !VM_FF_IS_ PENDING(pVM, VM_FF_ALL_REM_MASK)1198 if ( !VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK) 1199 1199 && !VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK)) 1200 1200 continue;
Note: See TracChangeset for help on using the changeset viewer.