- Timestamp: Sep 29, 2008 12:14:42 PM (17 years ago)
- svn:sync-xref-src-repo-rev: 37137
- Location: trunk/src/VBox/VMM
- Files: 4 edited
Legend (for the diffs below):
- Unmodified context lines are shown without a prefix
- Added lines are prefixed with "+"
- Removed lines are prefixed with "-"
trunk/src/VBox/VMM/MMHyper.cpp (r12579 → r12792)

     Log(("Relocating the hypervisor from %VGv to %VGv\n", GCPtrOld, GCPtrNew));
 
-    /* relocate our selves and the VM structure. */
+    /*
+     * Relocate the VM structure and ourselves.
+     */
     RTGCINTPTR offDelta = GCPtrNew - GCPtrOld;
     pVM->pVMGC += offDelta;
+    for (uint32_t i = 0; i < pVM->cCPUs; i++)
+        pVM->aCpus[i].pVMRC = pVM->pVMGC;
+
     pVM->mm.s.pvHyperAreaGC += offDelta;
     pVM->mm.s.pHyperHeapGC += offDelta;
-    pVM->mm.s.pHyperHeapHC->pbHeapGC += offDelta;
-    pVM->mm.s.pHyperHeapHC->pVMGC += pVM->pVMGC;
-    for (uint32_t i = 0; i < pVM->cCPUs; i++)
-        pVM->aCpus[i].pVMRC = pVM->pVMGC;
-
-    /* relocate the rest. */
+    pVM->mm.s.pHyperHeapHC->pbHeapRC += offDelta;
+    pVM->mm.s.pHyperHeapHC->pVMRC = pVM->pVMGC;
+
+    /*
+     * Relocate the rest.
+     */
     VMR3Relocate(pVM, offDelta);
     return true;
…
     PMMHYPERHEAP pHeap = (PMMHYPERHEAP)pv;
     pHeap->u32Magic = MMHYPERHEAP_MAGIC;
-    pHeap->pVMHC = pVM;
-    pHeap->pVMGC = pVM->pVMGC;
-    pHeap->pbHeapHC = (uint8_t *)pHeap + MMYPERHEAP_HDR_SIZE;
+    pHeap->pbHeapR3 = (uint8_t *)pHeap + MMYPERHEAP_HDR_SIZE;
+    pHeap->pbHeapR0 = (uintptr_t)pHeap->pbHeapR3; /** @todo #1865: Map heap into ring-0 on darwin. */
     //pHeap->pbHeapGC = 0; // set by mmR3HyperHeapMap()
+    pHeap->pVMR3 = pVM;
+    pHeap->pVMR0 = pVM->pVMR0;
+    pHeap->pVMRC = pVM->pVMGC;
     pHeap->cbHeap = cbAligned - MMYPERHEAP_HDR_SIZE;
     pHeap->cbFree = pHeap->cbHeap - sizeof(MMHYPERCHUNK);
…
     //pHeap->HyperHeapStatTree = 0;
 
-    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)pHeap->pbHeapHC;
+    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)pHeap->pbHeapR3;
     pFree->cb = pHeap->cbFree;
     //pFree->core.offNext = 0;
…
     if (VBOX_SUCCESS(rc))
     {
-        pHeap->pVMGC = pVM->pVMGC;
-        pHeap->pbHeapGC = *ppHeapGC + MMYPERHEAP_HDR_SIZE;
+        pHeap->pVMRC = pVM->pVMGC;
+        pHeap->pbHeapRC = *ppHeapGC + MMYPERHEAP_HDR_SIZE;
         /* Reserve a page for fencing. */
         MMR3HyperReserve(pVM, PAGE_SIZE, "fence", NULL);
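The first hunk tightens the relocation logic: pointers that point into the relocated hypervisor area are shifted by the signed delta, while the heap's pointer to the VM structure is reassigned from the already-relocated pVM->pVMGC (the old code added it with "+=", which looks like the bug being fixed here). Below is a minimal sketch of that delta-relocation pattern, using hypothetical types and field names rather than the real VirtualBox ones:

    #include <stdint.h>
    #include <stdio.h>

    /* Hypothetical stand-ins for raw-mode-context (RC/GC) pointer types. */
    typedef uint32_t RTRCPTR;       /* RC addresses are 32-bit            */
    typedef int64_t  RTRCINTPTR;    /* signed type wide enough for deltas */

    typedef struct HEAPMAPPING
    {
        RTRCPTR pbHeapRC;   /* points into the relocated area: shift by the delta */
        RTRCPTR pVMRC;      /* points at an object relocated elsewhere: reassign  */
    } HEAPMAPPING;

    static void relocateHeapMapping(HEAPMAPPING *pMapping, RTRCPTR GCPtrOld,
                                    RTRCPTR GCPtrNew, RTRCPTR pVMRCNew)
    {
        RTRCINTPTR offDelta = (RTRCINTPTR)GCPtrNew - (RTRCINTPTR)GCPtrOld;
        pMapping->pbHeapRC  = (RTRCPTR)(pMapping->pbHeapRC + offDelta);
        pMapping->pVMRC     = pVMRCNew;     /* assignment, not "+=" */
    }

    int main(void)
    {
        HEAPMAPPING Mapping = { 0xa0001000u, 0xa0000000u };
        relocateHeapMapping(&Mapping, 0xa0000000u, 0xb8000000u, 0xb8000000u);
        printf("pbHeapRC=%#x pVMRC=%#x\n", Mapping.pbHeapRC, Mapping.pVMRC);
        return 0;
    }

The distinction matters because only addresses inside the moved region change by the delta; addresses of objects that were relocated separately have to be copied from their own, already-updated fields.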
trunk/src/VBox/VMM/MMInternal.h (r9388 → r12792)

  * If defined, it indicates the number of frees that should be delayed.
  */
-#if defined(__DOXYGEN__)
-# define MMHYPER_HEAP_FREE_DELAY 64
+#if defined(DOXYGEN_RUNNING)
+# define MMHYPER_HEAP_FREE_DELAY 64
 #endif
…
  * with the value it has.
  */
-#if defined(VBOX_STRICT) || defined(__DOXYGEN__)
-# define MMHYPER_HEAP_FREE_POISON 0xCB
+#if defined(VBOX_STRICT) || defined(DOXYGEN_RUNNING)
+# define MMHYPER_HEAP_FREE_POISON 0xcb
 #endif
 
 /** @def MMHYPER_HEAP_STRICT
  * Enables a bunch of assertions in the heap code. */
-#if defined(VBOX_STRICT) || defined(__DOXYGEN__)
+#if defined(VBOX_STRICT) || defined(DOXYGEN_RUNNING)
 # define MMHYPER_HEAP_STRICT 1
-# if 0 || defined(__DOXYGEN__)
+# if 0 || defined(DOXYGEN_RUNNING)
 /** @def MMHYPER_HEAP_STRICT_FENCE
  * Enables tail fence. */
…
 /** @def MMHYPER_HEAP_STRICT_FENCE_U32
  * The fence filler. */
-#  define MMHYPER_HEAP_STRICT_FENCE_U32 0xdeadbeef
+#  define MMHYPER_HEAP_STRICT_FENCE_U32 UINT32_C(0xdeadbeef)
 # endif
 #endif
…
(Three whitespace-only hunks follow here in the original changeset: the member declarations of MMHYPERSTAT, MMHYPERCHUNK and MMHYPERCHUNKFREE are re-indented for column alignment, with no functional change.)
…
 /** The heap size. (This structure is not included!) */
 uint32_t cbHeap;
-/** The HC Ring-3 address of the VM. */
-R3PTRTYPE(PVM) pVMHC;
-/** The HC Ring-3 address of the heap. */
-R3R0PTRTYPE(uint8_t *) pbHeapHC;
-/** The GC address of the heap. */
-RCPTRTYPE(uint8_t *) pbHeapGC;
-/** The GC address of the VM. */
-RCPTRTYPE(PVM) pVMGC;
+/** The HC ring-3 address of the heap. */
+R3PTRTYPE(uint8_t *) pbHeapR3;
+/** The HC ring-3 address of the shared VM structure. */
+PVMR3 pVMR3;
+/** The HC ring-0 address of the heap. */
+R0PTRTYPE(uint8_t *) pbHeapR0;
+/** The HC ring-0 address of the shared VM structure. */
+PVMR0 pVMR0;
+/** The RC address of the heap. */
+RCPTRTYPE(uint8_t *) pbHeapRC;
+/** The RC address of the shared VM structure. */
+PVMRC pVMRC;
 /** The amount of free memory in the heap. */
 uint32_t cbFree;
…
 /** Magic value for MMHYPERHEAP. (C. S. Lewis) */
-#define MMHYPERHEAP_MAGIC 0x18981129
+#define MMHYPERHEAP_MAGIC UINT32_C(0x18981129)
…
 
 #endif
+
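The interesting change here is in MMHYPERHEAP: the combined HC (pVMHC, pbHeapHC) and GC members are replaced by separate ring-3, ring-0 and raw-mode-context (RC) addresses for both the heap and the VM structure, so each context can use its own address directly. A minimal sketch of that layout, and of the ring-3 initialisation order seen in the MMHyper.cpp hunk above, using hypothetical type and member names rather than the real VirtualBox definitions:

    #include <stdint.h>
    #include <stddef.h>

    /* Hypothetical stand-ins: ring-3 uses a host pointer, ring-0 a host
     * uintptr_t, and the raw-mode context (RC) a 32-bit guest address. */
    typedef struct DEMOVM
    {
        struct DEMOVM *pVMR3;   /* this structure, as mapped into ring-3 */
        uintptr_t      pVMR0;   /* ... as mapped into ring-0             */
        uint32_t       pVMRC;   /* ... as mapped into the RC context     */
    } DEMOVM;

    typedef struct DEMOHEAP
    {
        uint8_t  *pbHeapR3;     /* heap start, ring-3 address               */
        uintptr_t pbHeapR0;     /* heap start, ring-0 address               */
        uint32_t  pbHeapRC;     /* heap start, RC address (set when mapped) */
        DEMOVM   *pVMR3;        /* the VM structure, one field per context  */
        uintptr_t pVMR0;
        uint32_t  pVMRC;
    } DEMOHEAP;

    /* Mirrors the ring-3 initialisation order in the MMHyper.cpp hunk: the R3
     * and R0 addresses are filled in immediately, the RC address only once the
     * heap has actually been mapped into the raw-mode context. */
    void demoHeapInit(DEMOHEAP *pHeap, uint8_t *pbBase, size_t cbHdr, DEMOVM *pVM)
    {
        pHeap->pbHeapR3 = pbBase + cbHdr;
        pHeap->pbHeapR0 = (uintptr_t)pHeap->pbHeapR3;  /* same host address in this sketch */
        pHeap->pbHeapRC = 0;                           /* filled in by the mapping step    */
        pHeap->pVMR3    = pVM;
        pHeap->pVMR0    = pVM->pVMR0;
        pHeap->pVMRC    = pVM->pVMRC;
    }

The accessor side of this split, which picks the right member for the current compilation context, shows up in the MMAllHyper.cpp diff below.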
trunk/src/VBox/VMM/VMMAll/MMAllHyper.cpp (r11311 → r12792)

 #define ASSERT_OFFPREV(pHeap, pChunk) \
     do { Assert(MMHYPERCHUNK_GET_OFFPREV(pChunk) <= 0); \
-         Assert(MMHYPERCHUNK_GET_OFFPREV(pChunk) >= (intptr_t)CTXSUFF((pHeap)->pbHeap) - (intptr_t)(pChunk)); \
+         Assert(MMHYPERCHUNK_GET_OFFPREV(pChunk) >= (intptr_t)(pHeap)->CTX_SUFF(pbHeap) - (intptr_t)(pChunk)); \
          AssertMsg(   MMHYPERCHUNK_GET_OFFPREV(pChunk) != 0 \
-                   || (uint8_t *)(pChunk) == CTXSUFF((pHeap)->pbHeap), \
-                   ("pChunk=%p pvHyperHeap=%p\n", (pChunk), CTXSUFF((pHeap)->pbHeap))); \
+                   || (uint8_t *)(pChunk) == (pHeap)->CTX_SUFF(pbHeap), \
+                   ("pChunk=%p pvHyperHeap=%p\n", (pChunk), (pHeap)->CTX_SUFF(pbHeap))); \
     } while (0)
 
 #define ASSERT_OFFNEXT(pHeap, pChunk) \
     do { ASSERT_ALIGN((pChunk)->offNext); \
-         ASSERT_L((pChunk)->offNext, (uintptr_t)CTXSUFF((pHeap)->pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
+         ASSERT_L((pChunk)->offNext, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
     } while (0)
…
          AssertMsg(!((pChunk)->offStat & (MMHYPER_HEAP_ALIGN_MIN - 1)), ("offStat=%RX32\n", (pChunk)->offStat)); \
          uintptr_t uPtr = (uintptr_t)(pChunk)->offStat + (uintptr_t)pChunk; NOREF(uPtr); \
-         AssertMsg(uPtr - (uintptr_t)CTXSUFF((pHeap)->pbHeap) < (pHeap)->offPageAligned, \
-                   ("%p - %p < %RX32\n", uPtr, CTXSUFF((pHeap)->pbHeap), (pHeap)->offPageAligned)); \
+         AssertMsg(uPtr - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) < (pHeap)->offPageAligned, \
+                   ("%p - %p < %RX32\n", uPtr, (pHeap)->CTX_SUFF(pbHeap), (pHeap)->offPageAligned)); \
      } \
     } while (0)
…
 #define ASSERT_FREE_OFFPREV(pHeap, pChunk) \
     do { ASSERT_ALIGN((pChunk)->offPrev); \
-         ASSERT_GE(((pChunk)->offPrev & (MMHYPER_HEAP_ALIGN_MIN - 1)), (intptr_t)CTXSUFF((pHeap)->pbHeap) - (intptr_t)(pChunk)); \
+         ASSERT_GE(((pChunk)->offPrev & (MMHYPER_HEAP_ALIGN_MIN - 1)), (intptr_t)(pHeap)->CTX_SUFF(pbHeap) - (intptr_t)(pChunk)); \
          Assert((pChunk)->offPrev != MMHYPERCHUNK_GET_OFFPREV(&(pChunk)->core) || !(pChunk)->offPrev); \
          AssertMsg(   (pChunk)->offPrev \
-                   || (uintptr_t)(pChunk) - (uintptr_t)CTXSUFF((pHeap)->pbHeap) == (pHeap)->offFreeHead, \
-                   ("pChunk=%p offChunk=%#x offFreeHead=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)CTXSUFF((pHeap)->pbHeap),\
+                   || (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) == (pHeap)->offFreeHead, \
+                   ("pChunk=%p offChunk=%#x offFreeHead=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap),\
                    (pHeap)->offFreeHead)); \
     } while (0)
 
 #define ASSERT_FREE_OFFNEXT(pHeap, pChunk) \
     do { ASSERT_ALIGN((pChunk)->offNext); \
-         ASSERT_L((pChunk)->offNext, (uintptr_t)CTXSUFF((pHeap)->pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
+         ASSERT_L((pChunk)->offNext, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
          Assert((pChunk)->offNext != (pChunk)->core.offNext || !(pChunk)->offNext); \
          AssertMsg(   (pChunk)->offNext \
-                   || (uintptr_t)(pChunk) - (uintptr_t)CTXSUFF((pHeap)->pbHeap) == (pHeap)->offFreeTail, \
-                   ("pChunk=%p offChunk=%#x offFreeTail=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)CTXSUFF((pHeap)->pbHeap), \
+                   || (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) == (pHeap)->offFreeTail, \
+                   ("pChunk=%p offChunk=%#x offFreeTail=%#x\n", (pChunk), (uintptr_t)(pChunk) - (uintptr_t)(pHeap)->CTX_SUFF(pbHeap), \
                    (pHeap)->offFreeTail)); \
     } while (0)
…
                    ("cb=%d offNext=%d\n", (pChunk)->cb, (pChunk)->core.offNext)); \
          else \
-             ASSERT_LE((pChunk)->cb, (uintptr_t)CTXSUFF((pHeap)->pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
+             ASSERT_LE((pChunk)->cb, (uintptr_t)(pHeap)->CTX_SUFF(pbHeap) + (pHeap)->offPageAligned - (uintptr_t)(pChunk)); \
     } while (0)
…
     const uint32_t cbChunk = pChunk->offNext
                            ? pChunk->offNext
-                           : CTXSUFF(pHeap->pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
+                           : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
     pStat->cbAllocated += (uint32_t)cbChunk;
     pStat->cbCurAllocated += (uint32_t)cbChunk;
…
      */
     PMMHYPERCHUNK pRet = NULL;
-    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)((char *)CTXSUFF(pHeap->pbHeap) + pHeap->offFreeHead);
+    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)((char *)pHeap->CTX_SUFF(pbHeap) + pHeap->offFreeHead);
     while (pFree)
     {
…
     {
         /* make new head node, mark it USED for simplisity. */
-        PMMHYPERCHUNK pPrev = (PMMHYPERCHUNK)CTXSUFF(pHeap->pbHeap);
+        PMMHYPERCHUNK pPrev = (PMMHYPERCHUNK)pHeap->CTX_SUFF(pbHeap);
         Assert(pPrev == &pFree->core);
         pPrev->offPrev = 0;
…
      * boundrary.
      */
-    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)((char *)CTXSUFF(pHeap->pbHeap) + pHeap->offFreeTail);
+    PMMHYPERCHUNKFREE pFree = (PMMHYPERCHUNKFREE)((char *)pHeap->CTX_SUFF(pbHeap) + pHeap->offFreeTail);
     ASSERT_CHUNK_FREE(pHeap, pFree);
     if (   (((uintptr_t)(&pFree->core + 1) + pFree->cb) & (PAGE_OFFSET_MASK - 1))
…
         Log3(("mmHyperAllocPages: Unlinked pFree=%d\n", pFree));
     }
-    pHeap->offPageAligned = (uintptr_t)pvRet - (uintptr_t)CTXSUFF(pHeap->pbHeap);
+    pHeap->offPageAligned = (uintptr_t)pvRet - (uintptr_t)pHeap->CTX_SUFF(pbHeap);
     Log3(("mmHyperAllocPages: Returning %p (page aligned)\n", pvRet));
…
 }
 
-
 #ifdef VBOX_WITH_STATISTICS
+
 /**
  * Get the statistic record for a tag.
…
     if (!pStat->fRegistered)
     {
-#ifdef IN_RING3
-        mmR3HyperStatRegisterOne(pHeap->pVMHC, pStat);
-#else
-        /** @todo schedule a HC action. */
-#endif
+# ifdef IN_RING3
+        mmR3HyperStatRegisterOne(pHeap->pVMR3, pStat);
+# else
+        /** @todo schedule a R3 action. */
+# endif
     }
     return pStat;
 }
 
-#ifdef IN_RING3
+
+# ifdef IN_RING3
 /**
  * Registers statistics with STAM.
…
         return;
     const char *pszTag = mmR3GetTagName((MMTAG)pStat->Core.Key);
-
-    char szName[128];
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cAllocations", pszTag);
-    STAMR3Register(pVM, &pStat->cAllocations, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_COUNT, "Number of alloc calls.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cFrees", pszTag);
-    STAMR3Register(pVM, &pStat->cFrees, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_COUNT, "Number of free calls.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cFailures", pszTag);
-    STAMR3Register(pVM, &pStat->cFailures, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_COUNT, "Number of failures.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cbAllocated", pszTag);
-    STAMR3Register(pVM, &pStat->cbAllocated, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_BYTES, "Total number of allocated bytes.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cbFreed", pszTag);
-    STAMR3Register(pVM, &pStat->cbFreed, STAMTYPE_U64, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_BYTES, "Total number of freed bytes.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cbCurAllocated", pszTag);
-    STAMR3Register(pVM, &pStat->cbCurAllocated, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_BYTES, "Number of bytes currently allocated.");
-
-    RTStrPrintf(szName, sizeof(szName), "/MM/HyperHeap/%s/cbMaxAllocated", pszTag);
-    STAMR3Register(pVM, &pStat->cbMaxAllocated, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, szName, STAMUNIT_BYTES, "Max number of bytes allocated at the same time.");
-
+    STAMR3RegisterF(pVM, &pStat->cAllocations,   STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Number of alloc calls.",                          "/MM/HyperHeap/%s/cAllocations", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cFrees,         STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Number of free calls.",                           "/MM/HyperHeap/%s/cFrees", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cFailures,      STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_COUNT, "Number of failures.",                             "/MM/HyperHeap/%s/cFailures", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cbAllocated,    STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Total number of allocated bytes.",                "/MM/HyperHeap/%s/cbAllocated", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cbFreed,        STAMTYPE_U64, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Total number of freed bytes.",                    "/MM/HyperHeap/%s/cbFreed", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cbCurAllocated, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Number of bytes currently allocated.",            "/MM/HyperHeap/%s/cbCurAllocated", pszTag);
+    STAMR3RegisterF(pVM, &pStat->cbMaxAllocated, STAMTYPE_U32, STAMVISIBILITY_ALWAYS, STAMUNIT_BYTES, "Max number of bytes allocated at the same time.", "/MM/HyperHeap/%s/cbMaxAllocated", pszTag);
     pStat->fRegistered = true;
 }
-#endif
-
-#endif
+# endif /* IN_RING3 */
+
+#endif /* VBOX_WITH_STATISTICS */
…
 
     /* Some more verifications using additional info from pHeap. */
-    AssertMsgReturn((uintptr_t)pChunk + offPrev >= (uintptr_t)CTXSUFF(pHeap->pbHeap),
+    AssertMsgReturn((uintptr_t)pChunk + offPrev >= (uintptr_t)pHeap->CTX_SUFF(pbHeap),
                     ("%p: offPrev=%#RX32!\n", pv, offPrev),
                     VERR_INVALID_POINTER);
…
                     VERR_INVALID_POINTER);
 
-    AssertMsgReturn(   (uintptr_t)pv - (uintptr_t)CTXSUFF(pHeap->pbHeap) <= pHeap->offPageAligned,
-                    ("Invalid pointer %p! (heap: %p-%p)\n", pv, CTXSUFF(pHeap->pbHeap),
-                     (char *)CTXSUFF(pHeap->pbHeap) + pHeap->offPageAligned),
+    AssertMsgReturn(   (uintptr_t)pv - (uintptr_t)pHeap->CTX_SUFF(pbHeap) <= pHeap->offPageAligned,
+                    ("Invalid pointer %p! (heap: %p-%p)\n", pv, pHeap->CTX_SUFF(pbHeap),
+                     (char *)pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned),
                     VERR_INVALID_POINTER);
…
     const uint32_t cbChunk = pChunk->offNext
                            ? pChunk->offNext
-                           : CTXSUFF(pHeap->pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
+                           : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pChunk;
 #endif
 #ifdef MMHYPER_HEAP_FREE_POISON
…
         const size_t cb = pCur->offNext
                         ? pCur->offNext - sizeof(*pCur)
-                        : CTXSUFF(pHeap->pbHeap) + pHeap->offPageAligned - (uint8_t *)pCur - sizeof(*pCur);
+                        : pHeap->CTX_SUFF(pbHeap) + pHeap->offPageAligned - (uint8_t *)pCur - sizeof(*pCur);
         uint8_t *pab = (uint8_t *)(pCur + 1);
         for (unsigned off = 0; off < cb; off++)
…
     }
     if (!pRight)
-        pRight = (PMMHYPERCHUNKFREE)((char *)CTXSUFF(pHeap->pbHeap) + pHeap->offFreeTail); /** @todo this can't be correct! 'pLeft = .. ; else' I think */
+        pRight = (PMMHYPERCHUNKFREE)((char *)pHeap->CTX_SUFF(pbHeap) + pHeap->offFreeTail); /** @todo this can't be correct! 'pLeft = .. ; else' I think */
     if (pRight)
     {
…
         MMHYPERCHUNK_SET_TYPE(&pFree->core, MMHYPERCHUNK_FLAGS_FREE);
         pFree->offPrev = 0;
-        pHeap->offFreeHead = (uintptr_t)pFree - (uintptr_t)CTXSUFF(pHeap->pbHeap);
+        pHeap->offFreeHead = (uintptr_t)pFree - (uintptr_t)pHeap->CTX_SUFF(pbHeap);
         if (pRight)
         {
…
         {
             pFree->offNext = 0;
-            pHeap->offFreeTail = (uintptr_t)pFree - (uintptr_t)CTXSUFF(pHeap->pbHeap);
+            pHeap->offFreeTail = (uintptr_t)pFree - (uintptr_t)pHeap->CTX_SUFF(pbHeap);
         }
         Log3(("mmHyperFree: Inserted %p after %p in free list.\n", pFree, pLeft));
…
         {
             pFree->offNext = 0;
-            pHeap->offFreeTail = (uintptr_t)pFree - (uintptr_t)CTXSUFF(pHeap->pbHeap);
+            pHeap->offFreeTail = (uintptr_t)pFree - (uintptr_t)pHeap->CTX_SUFF(pbHeap);
         }
         Log3(("mmHyperFree: cbFree %d -> %d (%d)\n", pHeap->cbFree, pHeap->cbFree - pRight->cb, -(int32_t)pRight->cb));
…
         pFree->cb = pFree->core.offNext - sizeof(MMHYPERCHUNK);
     else
-        pFree->cb = pHeap->offPageAligned - ((uintptr_t)pFree - (uintptr_t)CTXSUFF(pHeap->pbHeap)) - sizeof(MMHYPERCHUNK);
+        pFree->cb = pHeap->offPageAligned - ((uintptr_t)pFree - (uintptr_t)pHeap->CTX_SUFF(pbHeap)) - sizeof(MMHYPERCHUNK);
     Log3(("mmHyperFree: cbFree %d -> %d (%d)\n", pHeap->cbFree, pHeap->cbFree + pFree->cb, pFree->cb));
     pHeap->cbFree += pFree->cb;
…
 #ifdef IN_RING3
             Log(("%p %06x USED offNext=%06x offPrev=-%06x %s%s\n",
-                 pCur, (uintptr_t)pCur - (uintptr_t)CTXSUFF(pHeap->pbHeap),
+                 pCur, (uintptr_t)pCur - (uintptr_t)pHeap->CTX_SUFF(pbHeap),
                  pCur->core.offNext, -MMHYPERCHUNK_GET_OFFPREV(&pCur->core),
                  mmR3GetTagName((MMTAG)pStat->Core.Key), pszSelf));
 #else
             Log(("%p %06x USED offNext=%06x offPrev=-%06x %d%s\n",
-                 pCur, (uintptr_t)pCur - (uintptr_t)CTXSUFF(pHeap->pbHeap),
+                 pCur, (uintptr_t)pCur - (uintptr_t)pHeap->CTX_SUFF(pbHeap),
                  pCur->core.offNext, -MMHYPERCHUNK_GET_OFFPREV(&pCur->core),
                  (MMTAG)pStat->Core.Key, pszSelf));
…
         else
             Log(("%p %06x USED offNext=%06x offPrev=-%06x\n",
-                 pCur, (uintptr_t)pCur - (uintptr_t)CTXSUFF(pHeap->pbHeap),
+                 pCur, (uintptr_t)pCur - (uintptr_t)pHeap->CTX_SUFF(pbHeap),
                  pCur->core.offNext, -MMHYPERCHUNK_GET_OFFPREV(&pCur->core)));
     }
     else
         Log(("%p %06x FREE offNext=%06x offPrev=-%06x : cb=%06x offNext=%06x offPrev=-%06x\n",
-             pCur, (uintptr_t)pCur - (uintptr_t)CTXSUFF(pHeap->pbHeap),
+             pCur, (uintptr_t)pCur - (uintptr_t)pHeap->CTX_SUFF(pbHeap),
              pCur->core.offNext, -MMHYPERCHUNK_GET_OFFPREV(&pCur->core), pCur->cb, pCur->offNext, pCur->offPrev));
 }
…
 {
     PMMHYPERCHUNKFREE pPrev = NULL;
-    PMMHYPERCHUNKFREE pCur = (PMMHYPERCHUNKFREE)CTXSUFF(pHeap->pbHeap);
+    PMMHYPERCHUNKFREE pCur = (PMMHYPERCHUNKFREE)pHeap->CTX_SUFF(pbHeap);
     for (;;)
     {
…
 
 # ifdef MMHYPER_HEAP_STRICT_FENCE
-        uint32_t off = (uint8_t *)pCur - CTXSUFF(pHeap->pbHeap);
+        uint32_t off = (uint8_t *)pCur - pHeap->CTX_SUFF(pbHeap);
         if (    MMHYPERCHUNK_ISUSED(&pCur->core)
             &&  off < pHeap->offPageAligned)
…
     Log(("MMHyperHeapDump: *** heap dump - start ***\n"));
     PMMHYPERHEAP pHeap = CTXSUFF(pVM->mm.s.pHyperHeap);
-    PMMHYPERCHUNKFREE pCur = (PMMHYPERCHUNKFREE)CTXSUFF(pHeap->pbHeap);
+    PMMHYPERCHUNKFREE pCur = (PMMHYPERCHUNKFREE)pHeap->CTX_SUFF(pbHeap);
     for (;;)
     {
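Most of the hunks above are the same mechanical substitution: the old CTXSUFF(pHeap->pbHeap) form, which wrapped the whole expression and picked between the HC/GC members, becomes pHeap->CTX_SUFF(pbHeap), which pastes the suffix of the current compilation context (R3, R0 or RC) onto the member name. The following standalone sketch shows how such a suffix-pasting macro can work; it is an illustrative approximation with hypothetical names, not the actual VirtualBox definition:

    #include <stdio.h>
    #include <stdint.h>

    /* Illustrative context-suffix macro: paste the suffix of the current
     * compilation context onto a member name.  (Assumed definition, not
     * copied from the VirtualBox headers.) */
    #if defined(IN_RING0)
    # define MY_CTX_SUFF(a_Name)   a_Name##R0
    #elif defined(IN_RC)
    # define MY_CTX_SUFF(a_Name)   a_Name##RC
    #else
    # define MY_CTX_SUFF(a_Name)   a_Name##R3
    #endif

    typedef struct EXHEAP
    {
        uint8_t  *pbHeapR3;   /* ring-3 mapping of the heap            */
        uintptr_t pbHeapR0;   /* ring-0 mapping of the heap            */
        uint32_t  pbHeapRC;   /* raw-mode-context mapping of the heap  */
    } EXHEAP;

    int main(void)
    {
        uint8_t abBacking[64];
        EXHEAP Heap = { abBacking, (uintptr_t)abBacking, 0xa0000000u };

        /* Built without IN_RING0 or IN_RC defined this is "ring-3" code, so
         * the expression below expands to Heap.pbHeapR3. */
        printf("current context heap address: %p\n", (void *)Heap.MY_CTX_SUFF(pbHeap));
        return 0;
    }

The same source line selects the R0 or RC member when compiled with IN_RING0 or IN_RC defined, which is what lets one heap implementation be built for all three contexts.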
trunk/src/VBox/VMM/testcase/tstVMStructGC.cpp (r12772 → r12792)

     GEN_CHECK_OFF(MMHYPERHEAP, u32Magic);
     GEN_CHECK_OFF(MMHYPERHEAP, cbHeap);
-    GEN_CHECK_OFF(MMHYPERHEAP, pbHeapHC);
-    GEN_CHECK_OFF(MMHYPERHEAP, pbHeapGC);
-    GEN_CHECK_OFF(MMHYPERHEAP, pVMHC);
-    GEN_CHECK_OFF(MMHYPERHEAP, pVMGC);
+    GEN_CHECK_OFF(MMHYPERHEAP, pbHeapR3);
+    GEN_CHECK_OFF(MMHYPERHEAP, pVMR3);
+    GEN_CHECK_OFF(MMHYPERHEAP, pbHeapR0);
+    GEN_CHECK_OFF(MMHYPERHEAP, pVMR0);
+    GEN_CHECK_OFF(MMHYPERHEAP, pbHeapRC);
+    GEN_CHECK_OFF(MMHYPERHEAP, pVMRC);
     GEN_CHECK_OFF(MMHYPERHEAP, cbFree);
     GEN_CHECK_OFF(MMHYPERHEAP, offFreeHead);
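The testcase change simply keeps the GEN_CHECK_OFF list in step with the renamed MMHYPERHEAP members; that macro family is used to compare structure member offsets across the different build contexts. A hypothetical, self-contained sketch of the underlying offsetof-based idea follows (the real GEN_CHECK_OFF machinery is more involved than this):

    #include <stddef.h>
    #include <stdint.h>
    #include <stdio.h>

    /* A much-reduced stand-in for the kind of structure being checked. */
    typedef struct DEMOLAYOUT
    {
        uint32_t  u32Magic;
        uint32_t  cbHeap;
        void     *pbHeapR3;
        uint32_t  pbHeapRC;
    } DEMOLAYOUT;

    /* Assumed shape of a GEN_CHECK_OFF-style macro: emit the member offset so
     * it can be compared against the offset seen by another compiler/context. */
    #define DEMO_CHECK_OFF(a_Type, a_Member) \
        printf("%-12s %-12s %u\n", #a_Type, #a_Member, (unsigned)offsetof(a_Type, a_Member))

    int main(void)
    {
        DEMO_CHECK_OFF(DEMOLAYOUT, u32Magic);
        DEMO_CHECK_OFF(DEMOLAYOUT, cbHeap);
        DEMO_CHECK_OFF(DEMOLAYOUT, pbHeapR3);
        DEMO_CHECK_OFF(DEMOLAYOUT, pbHeapRC);
        return 0;
    }

If the offsets printed by two builds disagree, the structure cannot safely be shared between those contexts, which is exactly why every newly added or renamed member has to appear in the check list.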