Changeset 74791 in vbox for trunk/src/VBox/VMM/VMMR3
- Timestamp: Oct 12, 2018, 10:44:17 AM
- svn:sync-xref-src-repo-rev: 125735
- Location: trunk/src/VBox/VMM/VMMR3
- Files: 7 edited
trunk/src/VBox/VMM/VMMR3/DBGF.cpp
(r74790 → r74791)
@@ 834-838 @@
      {
          int rc;
-         if (   !VM_FF_IS_PENDING(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_REQUEST)
+         if (   !VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_REQUEST)
              && !VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_REQUEST))
          {
trunk/src/VBox/VMM/VMMR3/EM.cpp
(r74790 → r74791)
@@ 1187-1191 @@
       * important FFs while we were busy switching the state. So, check again.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_REQUEST | VM_FF_PDM_QUEUES | VM_FF_DBGF | VM_FF_CHECK_VM_STATE | VM_FF_RESET)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_REQUEST | VM_FF_PDM_QUEUES | VM_FF_DBGF | VM_FF_CHECK_VM_STATE | VM_FF_RESET)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_TIMER | VMCPU_FF_REQUEST))
      {
@@ 1222-1226 @@
       * else. Sync back the state and leave the lock to be on the safe side.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
      {
@@ 1274-1278 @@
  #endif
      AssertCompile(VMCPU_FF_ALL_REM_MASK & VMCPU_FF_TIMER);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_ALL_REM_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu,
                                  VMCPU_FF_ALL_REM_MASK
@@ 1399-1403 @@
       * Check for pending actions.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_ALL_REM_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK & ~VMCPU_FF_UNHALT))
          return VINF_SUCCESS;
@@ 1863-1867 @@
       * Post execution chunk first.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_NORMAL_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_NORMAL_PRIORITY_POST_MASK)
          || (VMCPU_FF_NORMAL_PRIORITY_POST_MASK && VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_NORMAL_PRIORITY_POST_MASK)) )
      {
@@ 2094-2098 @@
       * (Executed in ascending priority order.)
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_MASK))
      {
@@ 2416-2420 @@
          && rc != VINF_EM_TERMINATE
          && rc != VINF_EM_OFF
-         && (   VM_FF_IS_PENDING(pVM, VM_FF_ALL_REM_MASK)
+         && (   VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_REM_MASK)
              || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_REM_MASK & ~VMCPU_FF_UNHALT)))
      {
trunk/src/VBox/VMM/VMMR3/EMHM.cpp
(r74790 → r74791)
@@ 95-99 @@
       * Service necessary FFs before going into HM.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 119-123 @@
       */
      VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
      {
@@ 404-408 @@
      Assert(!VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_SELM_SYNC_TSS | VMCPU_FF_SELM_SYNC_GDT | VMCPU_FF_SELM_SYNC_LDT));
  #endif
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 465-469 @@
       */
      VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
          rc = VBOXSTRICTRC_TODO(emR3HighPriorityPostForcedActions(pVM, pVCpu, rc));
@@ 485-489 @@
      TMTimerPollVoid(pVM, pVCpu);
  #endif
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_ALL_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_MASK))
      {
trunk/src/VBox/VMM/VMMR3/EMR3Nem.cpp
(r74790 → r74791)
@@ 95-99 @@
       * Service necessary FFs before going into HM.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 119-123 @@
       */
      VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
      {
@@ 379-383 @@
       * Process high priority pre-execution raw-mode FFs.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 448-452 @@
       */
      VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
          rcStrict = emR3HighPriorityPostForcedActions(pVM, pVCpu, rcStrict);
@@ 468-472 @@
      TMTimerPollVoid(pVM, pVCpu);
  #endif
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_ALL_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_ALL_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_ALL_MASK))
      {
trunk/src/VBox/VMM/VMMR3/EMRaw.cpp
(r74790 → r74791)
@@ 172-176 @@
       * Check vital forced actions, but ignore pending interrupts and timers.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 1317-1321 @@
       * Process high priority pre-execution raw-mode FFs.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 1348-1352 @@
      CSAMR3CheckCodeEx(pVM, &pVCpu->cpum.GstCtx, pVCpu->cpum.GstCtx.eip);
      STAM_PROFILE_ADV_RESUME(&pVCpu->em.s.StatRAWEntry, b);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_PRE_RAW_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
@@ 1417-1421 @@
      rc = CPUMRawLeave(pVCpu, rc);
      VMCPU_FF_CLEAR(pVCpu, VMCPU_FF_RESUME_GUEST_MASK);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_HIGH_PRIORITY_POST_MASK))
          rc = VBOXSTRICTRC_TODO(emR3HighPriorityPostForcedActions(pVM, pVCpu, rc));
@@ 1484-1488 @@
  #endif
      STAM_PROFILE_ADV_STOP(&pVCpu->em.s.StatRAWTail, d);
-     if (    VM_FF_IS_PENDING(pVM, ~VM_FF_HIGH_PRIORITY_PRE_RAW_MASK | VM_FF_PGM_NO_MEMORY)
+     if (    VM_FF_IS_ANY_SET(pVM, ~VM_FF_HIGH_PRIORITY_PRE_RAW_MASK | VM_FF_PGM_NO_MEMORY)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, ~VMCPU_FF_HIGH_PRIORITY_PRE_RAW_MASK))
      {
trunk/src/VBox/VMM/VMMR3/NEMR3Native-win.cpp
(r74789 → r74791)
@@ 1705-1709 @@
      if (rcStrict == VINF_NEM_FLUSH_TLB)
      {
-         if (   !VM_FF_IS_PENDING(pVM, VM_FF_HIGH_PRIORITY_POST_MASK | VM_FF_HP_R0_PRE_HM_MASK)
+         if (   !VM_FF_IS_ANY_SET(pVM, VM_FF_HIGH_PRIORITY_POST_MASK | VM_FF_HP_R0_PRE_HM_MASK)
              && !VMCPU_FF_IS_ANY_SET(pVCpu, (VMCPU_FF_HIGH_PRIORITY_POST_MASK | VMCPU_FF_HP_R0_PRE_HM_MASK)
                                             & ~VMCPU_FF_RESUME_GUEST_MASK))
trunk/src/VBox/VMM/VMMR3/VMEmt.cpp
(r74790 → r74791)
@@ 372-381 @@
      uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers;
      STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
      uint64_t u64NanoTS;
      TMTimerPollGIP(pVM, pVCpu, &u64NanoTS);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
@@ 571-575 @@
      uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers;
      STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
@@ 580-584 @@
      uint64_t u64NanoTS;
      TMTimerPollGIP(pVM, pVCpu, &u64NanoTS);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
@@ 720-724 @@
      uint64_t const cNsElapsedTimers = RTTimeNanoTS() - u64StartTimers;
      STAM_REL_PROFILE_ADD_PERIOD(&pUVCpu->vm.s.StatHaltTimers, cNsElapsedTimers);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
@@ 730-734 @@
      uint64_t u64Delta;
      uint64_t u64GipTime = TMTimerPollGIP(pVM, pVCpu, &u64Delta);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
          break;
@@ 740-744 @@
      {
          VMMR3YieldStop(pVM);
-         if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+         if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
              ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
              break;
@@ 808-812 @@
       * Check Relevant FFs.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK))
          break;
@@ 898-902 @@

      if (    pUVCpu->pVM
-         && (   VM_FF_IS_PENDING(pUVCpu->pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
+         && (   VM_FF_IS_ANY_SET(pUVCpu->pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
              || VMCPU_FF_IS_ANY_SET(VMMGetCpu(pUVCpu->pVM), VMCPU_FF_EXTERNAL_SUSPENDED_MASK)
             )
@@ 960-964 @@
       * Check Relevant FFs.
       */
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK))
          break;
@@ 1109-1113 @@
            ? VMCPU_FF_EXTERNAL_HALTED_MASK
            : VMCPU_FF_EXTERNAL_HALTED_MASK & ~(VMCPU_FF_UPDATE_APIC | VMCPU_FF_INTERRUPT_APIC | VMCPU_FF_INTERRUPT_PIC);
-     if (    VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_HALTED_MASK)
+     if (    VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_HALTED_MASK)
          ||  VMCPU_FF_IS_ANY_SET(pVCpu, fMask))
      {
@@ 1190-1194 @@

      if (    pVM
-         && (   VM_FF_IS_PENDING(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
+         && (   VM_FF_IS_ANY_SET(pVM, VM_FF_EXTERNAL_SUSPENDED_MASK)
              || VMCPU_FF_IS_ANY_SET(pVCpu, VMCPU_FF_EXTERNAL_SUSPENDED_MASK)
             )
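Every hunk in this changeset is the same mechanical substitution: call sites that tested VM-wide force-action flags with VM_FF_IS_PENDING(pVM, fFlags) now use VM_FF_IS_ANY_SET(pVM, fFlags), matching the VMCPU_FF_IS_ANY_SET naming already used for the per-VCPU checks. As a rough sketch of the semantics involved (a simplified illustration only; the structure, field name, and flag values below are stand-ins, not the actual definitions from the VBox headers):

    #include <stdbool.h>
    #include <stdint.h>

    /* Simplified stand-in for the VM structure: one bitmask holding the
       VM-wide ("global") force-action flags. */
    typedef struct VMSKETCH
    {
        volatile uint32_t fGlobalForcedActions;
    } VMSKETCH;

    /* Hypothetical flag bits, for illustration only. */
    #define VM_FF_REQUEST           UINT32_C(0x00000001)
    #define VM_FF_EMT_RENDEZVOUS    UINT32_C(0x00000002)

    /* The renamed check: true when ANY of the requested flag bits is set.
       The behaviour is the same as under the old VM_FF_IS_PENDING name;
       the new name spells out the "any of these" semantics. */
    #define VM_FF_IS_ANY_SET(pVM, fFlags) \
        (((pVM)->fGlobalForcedActions & (fFlags)) != 0)

    /* Typical call-site pattern seen throughout this changeset. */
    static bool vmSketchNeedsServicing(VMSKETCH *pVM)
    {
        return VM_FF_IS_ANY_SET(pVM, VM_FF_EMT_RENDEZVOUS | VM_FF_REQUEST);
    }

The "any" wording also distinguishes these multi-flag tests from single-flag tests such as VMCPU_FF_IS_SET(pVCpu, VMCPU_FF_REQUEST) in the DBGF.cpp hunk, which are left untouched.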