VirtualBox

source: vbox/trunk/src/VBox/Runtime/common/alloc/heapoffset.cpp@ 25066

Last change on this file since 25066 was 25066, checked in by vboxsync, 15 years ago

rtHeapOffsetAllocBlock: Rewrote the alignment handling so it doesn't blow up nor leaks memory.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Id
File size: 32.9 KB
Line 
1/* $Id: heapoffset.cpp 25066 2009-11-28 02:30:08Z vboxsync $ */
2/** @file
3 * IPRT - An Offset Based Heap.
4 */
5
6/*
7 * Copyright (C) 2006-2009 Sun Microsystems, Inc.
8 *
9 * This file is part of VirtualBox Open Source Edition (OSE), as
10 * available from http://www.virtualbox.org. This file is free software;
11 * you can redistribute it and/or modify it under the terms of the GNU
12 * General Public License (GPL) as published by the Free Software
13 * Foundation, in version 2 as it comes in the "COPYING" file of the
14 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
15 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
16 *
17 * The contents of this file may alternatively be used under the terms
18 * of the Common Development and Distribution License Version 1.0
19 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
20 * VirtualBox OSE distribution, in which case the provisions of the
21 * CDDL are applicable instead of those of the GPL.
22 *
23 * You may elect to license modified versions of this file under the
24 * terms and conditions of either the GPL or the CDDL or both.
25 *
26 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa
27 * Clara, CA 95054 USA or visit http://www.sun.com if you need
28 * additional information or have any questions.
29 */
30
31
32/*******************************************************************************
33* Header Files *
34*******************************************************************************/
35#define LOG_GROUP RTLOGGROUP_DEFAULT
36#include <iprt/heap.h>
37#include "internal/iprt.h"
38
39#include <iprt/assert.h>
40#include <iprt/asm.h>
41#include <iprt/string.h>
42#include <iprt/err.h>
43#include <iprt/log.h>
44#include <iprt/param.h>
45
46#include "internal/magics.h"
47
48
49/*******************************************************************************
50* Structures and Typedefs *
51*******************************************************************************/
/** Pointer to the heap anchor block. */
typedef struct RTHEAPOFFSETINTERNAL *PRTHEAPOFFSETINTERNAL;
/** Pointer to a heap block. */
typedef struct RTHEAPOFFSETBLOCK *PRTHEAPOFFSETBLOCK;
/** Pointer to a free heap block. */
typedef struct RTHEAPOFFSETFREE *PRTHEAPOFFSETFREE;

/**
 * Structure describing a block in an offset based heap.
 *
 * All links are stored as 32-bit byte offsets relative to the start of the
 * heap (the anchor block); an offset of 0 acts as the NULL link, which works
 * because offset 0 is the anchor block and never a heap block.
 *
 * If this block is allocated, it is followed immediately by the user data.
 * If this block is free, see RTHEAPOFFSETFREE.
 */
typedef struct RTHEAPOFFSETBLOCK
{
    /** The next block in the global block list (0 if this is the last block). */
    uint32_t /*PRTHEAPOFFSETBLOCK*/ offNext;
    /** The previous block in the global block list (0 if this is the first block). */
    uint32_t /*PRTHEAPOFFSETBLOCK*/ offPrev;
    /** Offset into the heap of this block. Used to locate the anchor block. */
    uint32_t /*PRTHEAPOFFSETINTERNAL*/ offSelf;
    /** Flags + magic. */
    uint32_t fFlags;
} RTHEAPOFFSETBLOCK;
AssertCompileSize(RTHEAPOFFSETBLOCK, 16);
77
/** The block is free if this flag is set. When cleared it's allocated. */
#define RTHEAPOFFSETBLOCK_FLAGS_FREE        (RT_BIT_32(0))
/** The magic value. Occupies the upper 31 bits of RTHEAPOFFSETBLOCK::fFlags. */
#define RTHEAPOFFSETBLOCK_FLAGS_MAGIC       (UINT32_C(0xabcdef00))
/** The mask that needs to be applied to RTHEAPOFFSETBLOCK::fFlags to obtain the magic value. */
#define RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK  (~RT_BIT_32(0))

/**
 * Checks if the specified block is valid or not.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID(pBlock)  \
    ( ((pBlock)->fFlags & RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK) == RTHEAPOFFSETBLOCK_FLAGS_MAGIC )

/**
 * Checks if the specified block is valid and in use.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID_USED(pBlock)  \
    ( ((pBlock)->fFlags & (RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK | RTHEAPOFFSETBLOCK_FLAGS_FREE)) \
       == RTHEAPOFFSETBLOCK_FLAGS_MAGIC )

/**
 * Checks if the specified block is valid and free.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_VALID_FREE(pBlock)  \
    ( ((pBlock)->fFlags & (RTHEAPOFFSETBLOCK_FLAGS_MAGIC_MASK | RTHEAPOFFSETBLOCK_FLAGS_FREE)) \
       == (RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE) )

/**
 * Checks if the specified block is free or not.
 * Does not validate the magic; use only on blocks already known to be valid.
 * @returns boolean answer.
 * @param   pBlock      Pointer to a valid RTHEAPOFFSETBLOCK structure.
 */
#define RTHEAPOFFSETBLOCK_IS_FREE(pBlock)   (!!((pBlock)->fFlags & RTHEAPOFFSETBLOCK_FLAGS_FREE))
117
/**
 * A free heap block.
 * This is an extended version of RTHEAPOFFSETBLOCK that takes the unused
 * user data to store free list pointers and a cached size value.
 *
 * The free list is a second doubly linked list (also offset based and also
 * sorted by address) threaded through only the free blocks.
 */
typedef struct RTHEAPOFFSETFREE
{
    /** Core stuff. */
    RTHEAPOFFSETBLOCK Core;
    /** Pointer to the next free block (0 if this is the free tail). */
    uint32_t /*PRTHEAPOFFSETFREE*/ offNext;
    /** Pointer to the previous free block (0 if this is the free head). */
    uint32_t /*PRTHEAPOFFSETFREE*/ offPrev;
    /** The size of the block (excluding the RTHEAPOFFSETBLOCK part). */
    uint32_t cb;
    /** An alignment filler to make it a multiple of 16 bytes. */
    uint32_t Alignment;
} RTHEAPOFFSETFREE;
AssertCompileSize(RTHEAPOFFSETFREE, 16+16);
137
138
/**
 * The heap anchor block.
 * This structure is placed at the head of the memory block specified to RTHeapOffsetInit(),
 * which means that the first RTHEAPOFFSETBLOCK appears immediately after this structure.
 *
 * Since all block links are offsets relative to this structure, the whole heap
 * is position independent and can be mapped at different addresses.
 */
typedef struct RTHEAPOFFSETINTERNAL
{
    /** The typical magic (RTHEAPOFFSET_MAGIC). */
    uint32_t u32Magic;
    /** The heap size. (This structure is included!) */
    uint32_t cbHeap;
    /** The amount of free memory in the heap. */
    uint32_t cbFree;
    /** Free head pointer (0 if the heap is completely exhausted). */
    uint32_t /*PRTHEAPOFFSETFREE*/ offFreeHead;
    /** Free tail pointer (0 if the heap is completely exhausted). */
    uint32_t /*PRTHEAPOFFSETFREE*/ offFreeTail;
    /** Make the size of this structure 32 bytes. */
    uint32_t au32Alignment[3];
} RTHEAPOFFSETINTERNAL;
AssertCompileSize(RTHEAPOFFSETINTERNAL, 32);
160
161
/** The minimum allocation size (must be able to hold the free-list fields
 * of RTHEAPOFFSETFREE when the block is later freed). */
#define RTHEAPOFFSET_MIN_BLOCK  (sizeof(RTHEAPOFFSETBLOCK))
AssertCompile(RTHEAPOFFSET_MIN_BLOCK >= sizeof(RTHEAPOFFSETBLOCK));
AssertCompile(RTHEAPOFFSET_MIN_BLOCK >= sizeof(RTHEAPOFFSETFREE) - sizeof(RTHEAPOFFSETBLOCK));

/** The minimum and default alignment of blocks and allocation sizes. */
#define RTHEAPOFFSET_ALIGNMENT  (sizeof(RTHEAPOFFSETBLOCK))
169
170
/*******************************************************************************
*   Defined Constants And Macros                                               *
*******************************************************************************/
#ifdef RT_STRICT
# define RTHEAPOFFSET_STRICT 1
#endif

/**
 * Converts RTHEAPOFFSETBLOCK::offSelf into a heap anchor block pointer.
 *
 * @returns Pointer of given type.
 * @param   pBlock      The block to find the heap anchor block for.
 */
#define RTHEAPOFF_GET_ANCHOR(pBlock)    ( (PRTHEAPOFFSETINTERNAL)((uint8_t *)(pBlock) - (pBlock)->offSelf ) )


/**
 * Converts an offset to a pointer, NULL-preserving variant.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Pointer of given type, NULL when @a off is 0.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   off         The offset to convert.
 * @param   type        The desired type.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_PTR_N(pHeapInt, off, type)   ( (type)rtHeapOffCheckedOffToPtr(pHeapInt, off, true /*fNull*/) )
#else
# define RTHEAPOFF_TO_PTR_N(pHeapInt, off, type)   ( (type)((off) ? (uint8_t *)(pHeapInt) + (off) : NULL) )
#endif

/**
 * Converts an offset to a pointer; @a off must not be 0.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Pointer of given type.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   off         The offset to convert.
 * @param   type        The desired type.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_PTR(pHeapInt, off, type)     ( (type)rtHeapOffCheckedOffToPtr(pHeapInt, off, false /*fNull*/) )
#else
# define RTHEAPOFF_TO_PTR(pHeapInt, off, type)     ( (type)((uint8_t *)(pHeapInt) + (off)) )
#endif

/**
 * Converts a pointer to an offset; NULL maps to offset 0.
 *
 * All offsets are relative to the heap to make life simple.
 *
 * @returns Offset into the heap.
 * @param   pHeapInt    Pointer to the heap anchor block.
 * @param   ptr         The pointer to convert.
 */
#ifdef RTHEAPOFFSET_STRICT
# define RTHEAPOFF_TO_OFF(pHeapInt, ptr)           rtHeapOffCheckedPtrToOff(pHeapInt, ptr)
#else
# define RTHEAPOFF_TO_OFF(pHeapInt, ptr)           ( (uint32_t)((ptr) ? (uintptr_t)(ptr) - (uintptr_t)(pHeapInt) : UINT32_C(0)) )
#endif

/* Relational assertion helpers with value dumps in the message. */
#define ASSERT_L(a, b)    AssertMsg((a) <  (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_LE(a, b)   AssertMsg((a) <= (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_G(a, b)    AssertMsg((a) >  (b), ("a=%08x b=%08x\n", (a), (b)))
#define ASSERT_GE(a, b)   AssertMsg((a) >= (b), ("a=%08x b=%08x\n", (a), (b)))
/** Asserts that a value/pointer is aligned on RTHEAPOFFSET_ALIGNMENT. */
#define ASSERT_ALIGN(a)   AssertMsg(!((uintptr_t)(a) & (RTHEAPOFFSET_ALIGNMENT - 1)), ("a=%p\n", (uintptr_t)(a)))
239
/** Asserts that a block's global-list offPrev link is sane: aligned, below the
 * block itself and after the anchor — or zero for the very first block. */
#define ASSERT_PREV(pHeapInt, pBlock)  \
    do { ASSERT_ALIGN((pBlock)->offPrev); \
        if ((pBlock)->offPrev) \
        { \
            ASSERT_L((pBlock)->offPrev, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
            ASSERT_GE((pBlock)->offPrev, sizeof(RTHEAPOFFSETINTERNAL)); \
        } \
        else \
            Assert((pBlock) == (PRTHEAPOFFSETBLOCK)((pHeapInt) + 1)); \
    } while (0)

/** Asserts that a block's global-list offNext link is sane: aligned, above the
 * block itself and inside the heap — or zero for the last block.
 * Note: the parameter was previously declared as pHeap while the body
 * referenced pHeapInt, silently capturing the caller's variable; renamed
 * for macro hygiene (all call sites pass pHeapInt, so behavior is identical). */
#define ASSERT_NEXT(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offNext); \
        if ((pBlock)->offNext) \
        { \
            ASSERT_L((pBlock)->offNext, (pHeapInt)->cbHeap); \
            ASSERT_G((pBlock)->offNext, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
        } \
    } while (0)

/** Asserts that a block is valid (magic, anchor back-link, position and both
 * global-list links). Does not care whether it is free or used. */
#define ASSERT_BLOCK(pHeapInt, pBlock) \
    do { AssertMsg(RTHEAPOFFSETBLOCK_IS_VALID(pBlock), ("%#x\n", (pBlock)->fFlags)); \
        AssertMsg(RTHEAPOFF_GET_ANCHOR(pBlock) == (pHeapInt), ("%p != %p\n", RTHEAPOFF_GET_ANCHOR(pBlock), (pHeapInt))); \
        ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), sizeof(RTHEAPOFFSETINTERNAL)); \
        ASSERT_L( RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->cbHeap); \
        ASSERT_NEXT(pHeapInt, pBlock); \
        ASSERT_PREV(pHeapInt, pBlock); \
    } while (0)

/** Asserts that a block is valid and in use (i.e. not marked free). */
#define ASSERT_BLOCK_USED(pHeapInt, pBlock) \
    do { AssertMsg(RTHEAPOFFSETBLOCK_IS_VALID_USED((pBlock)), ("%#x\n", (pBlock)->fFlags)); \
        AssertMsg(RTHEAPOFF_GET_ANCHOR(pBlock) == (pHeapInt), ("%p != %p\n", RTHEAPOFF_GET_ANCHOR(pBlock), (pHeapInt))); \
        ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), sizeof(RTHEAPOFFSETINTERNAL)); \
        ASSERT_L( RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->cbHeap); \
        ASSERT_NEXT(pHeapInt, pBlock); \
        ASSERT_PREV(pHeapInt, pBlock); \
    } while (0)

/** Asserts that a free block's free-list offPrev link is consistent with the
 * free head and the global-list ordering. */
#define ASSERT_FREE_PREV(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offPrev); \
        if ((pBlock)->offPrev) \
        { \
            ASSERT_GE((pBlock)->offPrev, (pHeapInt)->offFreeHead); \
            ASSERT_L((pBlock)->offPrev, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
            ASSERT_LE((pBlock)->offPrev, (pBlock)->Core.offPrev); \
        } \
        else \
            Assert((pBlock) == RTHEAPOFF_TO_PTR(pHeapInt, (pHeapInt)->offFreeHead, PRTHEAPOFFSETFREE) ); \
    } while (0)

/** Asserts that a free block's free-list offNext link is consistent with the
 * free tail and the global-list ordering. */
#define ASSERT_FREE_NEXT(pHeapInt, pBlock) \
    do { ASSERT_ALIGN((pBlock)->offNext); \
        if ((pBlock)->offNext) \
        { \
            ASSERT_LE((pBlock)->offNext, (pHeapInt)->offFreeTail); \
            ASSERT_G((pBlock)->offNext, RTHEAPOFF_TO_OFF(pHeapInt, pBlock)); \
            ASSERT_GE((pBlock)->offNext, (pBlock)->Core.offNext); \
        } \
        else \
            Assert((pBlock) == RTHEAPOFF_TO_PTR(pHeapInt, (pHeapInt)->offFreeTail, PRTHEAPOFFSETFREE)); \
    } while (0)

#ifdef RTHEAPOFFSET_STRICT
/** Asserts that the cached size of a free block matches the distance to the
 * next global block (or the heap end for the last block). */
# define ASSERT_FREE_CB(pHeapInt, pBlock) \
    do { size_t cbCalc = ((pBlock)->Core.offNext ? (pBlock)->Core.offNext : (pHeapInt)->cbHeap) \
                       - RTHEAPOFF_TO_OFF((pHeapInt), (pBlock)) - sizeof(RTHEAPOFFSETBLOCK); \
        AssertMsg((pBlock)->cb == cbCalc, ("cb=%#zx cbCalc=%#zx\n", (pBlock)->cb, cbCalc)); \
    } while (0)
#else
# define ASSERT_FREE_CB(pHeapInt, pBlock) do {} while (0)
#endif

/** Asserts that a free block is valid. */
#define ASSERT_BLOCK_FREE(pHeapInt, pBlock) \
    do { ASSERT_BLOCK(pHeapInt, &(pBlock)->Core); \
        Assert(RTHEAPOFFSETBLOCK_IS_VALID_FREE(&(pBlock)->Core)); \
        ASSERT_GE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->offFreeHead); \
        ASSERT_LE(RTHEAPOFF_TO_OFF(pHeapInt, pBlock), (pHeapInt)->offFreeTail); \
        ASSERT_FREE_NEXT(pHeapInt, pBlock); \
        ASSERT_FREE_PREV(pHeapInt, pBlock); \
        ASSERT_FREE_CB(pHeapInt, pBlock); \
    } while (0)

/** Asserts that the heap anchor block is ok. */
#define ASSERT_ANCHOR(pHeapInt) \
    do { AssertPtr(pHeapInt); \
        Assert((pHeapInt)->u32Magic == RTHEAPOFFSET_MAGIC); \
    } while (0)
328
329
/*******************************************************************************
*   Internal Functions                                                         *
*******************************************************************************/
#ifdef RTHEAPOFFSET_STRICT
static void rtHeapOffsetAssertAll(PRTHEAPOFFSETINTERNAL pHeapInt);
#endif
/* Worker for RTHeapOffsetAlloc / RTHeapOffsetAllocZ. */
static PRTHEAPOFFSETBLOCK rtHeapOffsetAllocBlock(PRTHEAPOFFSETINTERNAL pHeapInt, size_t cb, size_t uAlignment);
/* Worker for RTHeapOffsetFree. */
static void rtHeapOffsetFreeBlock(PRTHEAPOFFSETINTERNAL pHeapInt, PRTHEAPOFFSETBLOCK pBlock);
338
339#ifdef RTHEAPOFFSET_STRICT
340
/** Checked version of RTHEAPOFF_TO_PTR and RTHEAPOFF_TO_PTR_N.
 * Validates that a non-zero offset falls inside the heap and past the anchor
 * block before converting it; returns NULL for off == 0 (only legal when
 * fNull is true, i.e. the RTHEAPOFF_TO_PTR_N flavor). */
DECLINLINE(void *) rtHeapOffCheckedOffToPtr(PRTHEAPOFFSETINTERNAL pHeapInt, uint32_t off, bool fNull)
{
    Assert(off || fNull);
    if (!off)
        return NULL;
    AssertMsg(off < pHeapInt->cbHeap, ("%#x %#x\n", off, pHeapInt->cbHeap));
    AssertMsg(off >= sizeof(*pHeapInt), ("%#x %#x\n", off, sizeof(*pHeapInt)));
    return (uint8_t *)pHeapInt + off;
}
351
/** Checked version of RTHEAPOFF_TO_OFF.
 * Validates that a non-NULL pointer lands inside the heap and past the anchor
 * block before converting it to an offset; NULL maps to offset 0. */
DECLINLINE(uint32_t) rtHeapOffCheckedPtrToOff(PRTHEAPOFFSETINTERNAL pHeapInt, void *pv)
{
    if (!pv)
        return 0;
    uintptr_t off = (uintptr_t)pv - (uintptr_t)pHeapInt;
    AssertMsg(off < pHeapInt->cbHeap, ("%#x %#x\n", off, pHeapInt->cbHeap));
    AssertMsg(off >= sizeof(*pHeapInt), ("%#x %#x\n", off, sizeof(*pHeapInt)));
    return (uint32_t)off;
}
362
363#endif /* RTHEAPOFFSET_STRICT */
364
365
366
367RTDECL(int) RTHeapOffsetInit(PRTHEAPOFFSET phHeap, void *pvMemory, size_t cbMemory)
368{
369 PRTHEAPOFFSETINTERNAL pHeapInt;
370 PRTHEAPOFFSETFREE pFree;
371 unsigned i;
372
373 /*
374 * Validate input. The imposed minimum heap size is just a convenient value.
375 */
376 AssertReturn(cbMemory >= PAGE_SIZE, VERR_INVALID_PARAMETER);
377 AssertPtrReturn(pvMemory, VERR_INVALID_POINTER);
378 AssertReturn((uintptr_t)pvMemory + (cbMemory - 1) > (uintptr_t)cbMemory, VERR_INVALID_PARAMETER);
379
380 /*
381 * Place the heap anchor block at the start of the heap memory,
382 * enforce 32 byte alignment of it. Also align the heap size correctly.
383 */
384 pHeapInt = (PRTHEAPOFFSETINTERNAL)pvMemory;
385 if ((uintptr_t)pvMemory & 31)
386 {
387 const uintptr_t off = 32 - ((uintptr_t)pvMemory & 31);
388 cbMemory -= off;
389 pHeapInt = (PRTHEAPOFFSETINTERNAL)((uintptr_t)pvMemory + off);
390 }
391 cbMemory &= ~(RTHEAPOFFSET_ALIGNMENT - 1);
392
393
394 /* Init the heap anchor block. */
395 pHeapInt->u32Magic = RTHEAPOFFSET_MAGIC;
396 pHeapInt->cbHeap = cbMemory;
397 pHeapInt->cbFree = cbMemory
398 - sizeof(RTHEAPOFFSETBLOCK)
399 - sizeof(RTHEAPOFFSETINTERNAL);
400 pHeapInt->offFreeTail = pHeapInt->offFreeHead = sizeof(*pHeapInt);
401 for (i = 0; i < RT_ELEMENTS(pHeapInt->au32Alignment); i++)
402 pHeapInt->au32Alignment[i] = UINT32_MAX;
403
404 /* Init the single free block. */
405 pFree = RTHEAPOFF_TO_PTR(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE);
406 pFree->Core.offNext = 0;
407 pFree->Core.offPrev = 0;
408 pFree->Core.offSelf = pHeapInt->offFreeHead;
409 pFree->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;
410 pFree->offNext = 0;
411 pFree->offPrev = 0;
412 pFree->cb = pHeapInt->cbFree;
413
414 *phHeap = pHeapInt;
415
416#ifdef RTHEAPOFFSET_STRICT
417 rtHeapOffsetAssertAll(pHeapInt);
418#endif
419 return VINF_SUCCESS;
420}
421RT_EXPORT_SYMBOL(RTHeapOffsetInit);
422
423
424RTDECL(void *) RTHeapOffsetAlloc(RTHEAPOFFSET hHeap, size_t cb, size_t cbAlignment)
425{
426 PRTHEAPOFFSETINTERNAL pHeapInt = hHeap;
427 PRTHEAPOFFSETBLOCK pBlock;
428
429 /*
430 * Validate and adjust the input.
431 */
432 AssertPtrReturn(pHeapInt, NULL);
433 if (cb < RTHEAPOFFSET_MIN_BLOCK)
434 cb = RTHEAPOFFSET_MIN_BLOCK;
435 else
436 cb = RT_ALIGN_Z(cb, RTHEAPOFFSET_ALIGNMENT);
437 if (!cbAlignment)
438 cbAlignment = RTHEAPOFFSET_ALIGNMENT;
439 else
440 {
441 Assert(!(cbAlignment & (cbAlignment - 1)));
442 Assert((cbAlignment & ~(cbAlignment - 1)) == cbAlignment);
443 if (cbAlignment < RTHEAPOFFSET_ALIGNMENT)
444 cbAlignment = RTHEAPOFFSET_ALIGNMENT;
445 }
446
447 /*
448 * Do the allocation.
449 */
450 pBlock = rtHeapOffsetAllocBlock(pHeapInt, cb, cbAlignment);
451 if (RT_LIKELY(pBlock))
452 {
453 void *pv = pBlock + 1;
454 return pv;
455 }
456 return NULL;
457}
458RT_EXPORT_SYMBOL(RTHeapOffsetAlloc);
459
460
461RTDECL(void *) RTHeapOffsetAllocZ(RTHEAPOFFSET hHeap, size_t cb, size_t cbAlignment)
462{
463 PRTHEAPOFFSETINTERNAL pHeapInt = hHeap;
464 PRTHEAPOFFSETBLOCK pBlock;
465
466 /*
467 * Validate and adjust the input.
468 */
469 AssertPtrReturn(pHeapInt, NULL);
470 if (cb < RTHEAPOFFSET_MIN_BLOCK)
471 cb = RTHEAPOFFSET_MIN_BLOCK;
472 else
473 cb = RT_ALIGN_Z(cb, RTHEAPOFFSET_ALIGNMENT);
474 if (!cbAlignment)
475 cbAlignment = RTHEAPOFFSET_ALIGNMENT;
476 else
477 {
478 Assert(!(cbAlignment & (cbAlignment - 1)));
479 Assert((cbAlignment & ~(cbAlignment - 1)) == cbAlignment);
480 if (cbAlignment < RTHEAPOFFSET_ALIGNMENT)
481 cbAlignment = RTHEAPOFFSET_ALIGNMENT;
482 }
483
484 /*
485 * Do the allocation.
486 */
487 pBlock = rtHeapOffsetAllocBlock(pHeapInt, cb, cbAlignment);
488 if (RT_LIKELY(pBlock))
489 {
490 void *pv = pBlock + 1;
491 memset(pv, 0, cb);
492 return pv;
493 }
494 return NULL;
495}
496RT_EXPORT_SYMBOL(RTHeapOffsetAllocZ);
497
498
499/**
500 * Allocates a block of memory from the specified heap.
501 *
502 * No parameter validation or adjustment is performed.
503 *
504 * @returns Pointer to the allocated block.
505 * @returns NULL on failure.
506 *
507 * @param pHeapInt The heap.
508 * @param cb Size of the memory block to allocate.
509 * @param uAlignment The alignment specifications for the allocated block.
510 */
511static PRTHEAPOFFSETBLOCK rtHeapOffsetAllocBlock(PRTHEAPOFFSETINTERNAL pHeapInt, size_t cb, size_t uAlignment)
512{
513 PRTHEAPOFFSETBLOCK pRet = NULL;
514 PRTHEAPOFFSETFREE pFree;
515
516 AssertReturn((pHeapInt)->u32Magic == RTHEAPOFFSET_MAGIC, NULL);
517#ifdef RTHEAPOFFSET_STRICT
518 rtHeapOffsetAssertAll(pHeapInt);
519#endif
520
521 /*
522 * Search for a fitting block from the lower end of the heap.
523 */
524 for (pFree = RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE);
525 pFree;
526 pFree = RTHEAPOFF_TO_PTR_N(pHeapInt, pFree->offNext, PRTHEAPOFFSETFREE))
527 {
528 uintptr_t offAlign;
529 ASSERT_BLOCK_FREE(pHeapInt, pFree);
530
531 /*
532 * Match for size and alignment.
533 */
534 if (pFree->cb < cb)
535 continue;
536 offAlign = (uintptr_t)(&pFree->Core + 1) & (uAlignment - 1);
537 if (offAlign)
538 {
539 PRTHEAPOFFSETFREE pPrev;
540
541 offAlign = (uintptr_t)(&pFree[1].Core + 1) & (uAlignment - 1);
542 offAlign = uAlignment - offAlign;
543 if (pFree->cb < cb + offAlign + sizeof(RTHEAPOFFSETFREE))
544 continue;
545
546 /*
547 * Split up the free block into two, so that the 2nd is aligned as
548 * per specification.
549 */
550 pPrev = pFree;
551 pFree = (PRTHEAPOFFSETFREE)((uintptr_t)(pFree + 1) + offAlign);
552 pFree->Core.offPrev = pPrev->Core.offSelf;
553 pFree->Core.offNext = pPrev->Core.offNext;
554 pFree->Core.offSelf = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
555 pFree->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;
556 pFree->offPrev = pPrev->Core.offSelf;
557 pFree->offNext = pPrev->offNext;
558 pFree->cb = (pFree->Core.offNext ? pFree->Core.offNext : pHeapInt->cbHeap)
559 - pFree->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);
560
561 pPrev->Core.offNext = pFree->Core.offSelf;
562 pPrev->offNext = pFree->Core.offSelf;
563 pPrev->cb = pFree->Core.offSelf - pPrev->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);
564
565 if (pFree->Core.offNext)
566 RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = pFree->Core.offSelf;
567 if (pFree->offNext)
568 RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETFREE)->offPrev = pFree->Core.offSelf;
569 else
570 pHeapInt->offFreeTail = pFree->Core.offSelf;
571
572 pHeapInt->cbFree -= sizeof(RTHEAPOFFSETBLOCK);
573 ASSERT_BLOCK_FREE(pHeapInt, pPrev);
574 ASSERT_BLOCK_FREE(pHeapInt, pFree);
575 }
576
577 /*
578 * Split off a new FREE block?
579 */
580 if (pFree->cb >= cb + RT_ALIGN_Z(sizeof(RTHEAPOFFSETFREE), RTHEAPOFFSET_ALIGNMENT))
581 {
582 /*
583 * Create a new FREE block at then end of this one.
584 */
585 PRTHEAPOFFSETFREE pNew = (PRTHEAPOFFSETFREE)((uintptr_t)&pFree->Core + cb + sizeof(RTHEAPOFFSETBLOCK));
586
587 pNew->Core.offSelf = RTHEAPOFF_TO_OFF(pHeapInt, pNew);
588 pNew->Core.offNext = pFree->Core.offNext;
589 if (pFree->Core.offNext)
590 RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = pNew->Core.offSelf;
591 pNew->Core.offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
592 pNew->Core.fFlags = RTHEAPOFFSETBLOCK_FLAGS_MAGIC | RTHEAPOFFSETBLOCK_FLAGS_FREE;
593
594 pNew->offNext = pFree->offNext;
595 if (pNew->offNext)
596 RTHEAPOFF_TO_PTR(pHeapInt, pNew->offNext, PRTHEAPOFFSETFREE)->offPrev = pNew->Core.offSelf;
597 else
598 pHeapInt->offFreeTail = pNew->Core.offSelf;
599 pNew->offPrev = pFree->offPrev;
600 if (pNew->offPrev)
601 RTHEAPOFF_TO_PTR(pHeapInt, pNew->offPrev, PRTHEAPOFFSETFREE)->offNext = pNew->Core.offSelf;
602 else
603 pHeapInt->offFreeHead = pNew->Core.offSelf;
604 pNew->cb = (pNew->Core.offNext ? pNew->Core.offNext : pHeapInt->cbHeap) \
605 - pNew->Core.offSelf - sizeof(RTHEAPOFFSETBLOCK);
606 ASSERT_BLOCK_FREE(pHeapInt, pNew);
607
608 /*
609 * Adjust and convert the old FREE node into a USED node.
610 */
611 pFree->Core.fFlags &= ~RTHEAPOFFSETBLOCK_FLAGS_FREE;
612 pFree->Core.offNext = pNew->Core.offSelf;
613 pHeapInt->cbFree -= pFree->cb;
614 pHeapInt->cbFree += pNew->cb;
615 pRet = &pFree->Core;
616 ASSERT_BLOCK_USED(pHeapInt, pRet);
617 }
618 else
619 {
620 /*
621 * Link it out of the free list.
622 */
623 if (pFree->offNext)
624 RTHEAPOFF_TO_PTR(pHeapInt, pFree->offNext, PRTHEAPOFFSETFREE)->offPrev = pFree->offPrev;
625 else
626 pHeapInt->offFreeTail = pFree->offPrev;
627 if (pFree->offPrev)
628 RTHEAPOFF_TO_PTR(pHeapInt, pFree->offPrev, PRTHEAPOFFSETFREE)->offNext = pFree->offNext;
629 else
630 pHeapInt->offFreeHead = pFree->offNext;
631
632 /*
633 * Convert it to a used block.
634 */
635 pHeapInt->cbFree -= pFree->cb;
636 pFree->Core.fFlags &= ~RTHEAPOFFSETBLOCK_FLAGS_FREE;
637 pRet = &pFree->Core;
638 ASSERT_BLOCK_USED(pHeapInt, pRet);
639 }
640 break;
641 }
642
643#ifdef RTHEAPOFFSET_STRICT
644 rtHeapOffsetAssertAll(pHeapInt);
645#endif
646 return pRet;
647}
648
649
650RTDECL(void) RTHeapOffsetFree(RTHEAPOFFSET hHeap, void *pv)
651{
652 PRTHEAPOFFSETINTERNAL pHeapInt;
653 PRTHEAPOFFSETBLOCK pBlock;
654
655 /*
656 * Validate input.
657 */
658 if (!pv)
659 return;
660 AssertPtr(pv);
661 Assert(RT_ALIGN_P(pv, RTHEAPOFFSET_ALIGNMENT) == pv);
662
663 /*
664 * Get the block and heap. If in strict mode, validate these.
665 */
666 pBlock = (PRTHEAPOFFSETBLOCK)pv - 1;
667 pHeapInt = RTHEAPOFF_GET_ANCHOR(pBlock);
668 ASSERT_BLOCK_USED(pHeapInt, pBlock);
669 ASSERT_ANCHOR(pHeapInt);
670 Assert(pHeapInt == (PRTHEAPOFFSETINTERNAL)hHeap || !hHeap);
671
672#ifdef RTHEAPOFFSET_FREE_POISON
673 /*
674 * Poison the block.
675 */
676 const size_t cbBlock = (pBlock->pNext ? (uintptr_t)pBlock->pNext : (uintptr_t)pHeapInt->pvEnd)
677 - (uintptr_t)pBlock - sizeof(RTHEAPOFFSETBLOCK);
678 memset(pBlock + 1, RTHEAPOFFSET_FREE_POISON, cbBlock);
679#endif
680
681 /*
682 * Call worker which does the actual job.
683 */
684 rtHeapOffsetFreeBlock(pHeapInt, pBlock);
685}
686RT_EXPORT_SYMBOL(RTHeapOffsetFree);
687
688
/**
 * Free a memory block.
 *
 * Inserts the block into the (address sorted) free list, merging it with the
 * left and/or right neighbouring free block when they are adjacent in the
 * global list, and updates the heap free-byte accounting.
 *
 * @param pHeapInt The heap.
 * @param pBlock The memory block to free.
 */
static void rtHeapOffsetFreeBlock(PRTHEAPOFFSETINTERNAL pHeapInt, PRTHEAPOFFSETBLOCK pBlock)
{
    PRTHEAPOFFSETFREE pFree = (PRTHEAPOFFSETFREE)pBlock;
    PRTHEAPOFFSETFREE pLeft;
    PRTHEAPOFFSETFREE pRight;

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif

    /*
     * Look for the closest free list blocks by walking the blocks right
     * of us (both lists are sorted by address).
     */
    pLeft = NULL;
    pRight = NULL;
    if (pHeapInt->offFreeTail)
    {
        /* Scan the global list rightwards for the first free block. */
        pRight = RTHEAPOFF_TO_PTR_N(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETFREE);
        while (pRight && !RTHEAPOFFSETBLOCK_IS_FREE(&pRight->Core))
        {
            ASSERT_BLOCK(pHeapInt, &pRight->Core);
            pRight = RTHEAPOFF_TO_PTR_N(pHeapInt, pRight->Core.offNext, PRTHEAPOFFSETFREE);
        }
        /* No free block to our right: the free tail is our left neighbour.
           Otherwise the left neighbour is pRight's free-list predecessor. */
        if (!pRight)
            pLeft = RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeTail, PRTHEAPOFFSETFREE);
        else
        {
            ASSERT_BLOCK_FREE(pHeapInt, pRight);
            pLeft = RTHEAPOFF_TO_PTR_N(pHeapInt, pRight->offPrev, PRTHEAPOFFSETFREE);
        }
        if (pLeft)
            ASSERT_BLOCK_FREE(pHeapInt, pLeft);
    }
    /* If this block were already free, it would have been found as pLeft. */
    AssertMsgReturnVoid(pLeft != pFree, ("Freed twice! pv=%p (pBlock=%p)\n", pBlock + 1, pBlock));
    ASSERT_L(RTHEAPOFF_TO_OFF(pHeapInt, pLeft), RTHEAPOFF_TO_OFF(pHeapInt, pFree));
    Assert(!pRight || (uintptr_t)pRight > (uintptr_t)pFree);
    Assert(!pLeft || RTHEAPOFF_TO_PTR_N(pHeapInt, pLeft->offNext, PRTHEAPOFFSETFREE) == pRight);

    /*
     * Insert at the head of the free block list?
     */
    if (!pLeft)
    {
        Assert(pRight == RTHEAPOFF_TO_PTR_N(pHeapInt, pHeapInt->offFreeHead, PRTHEAPOFFSETFREE));
        pFree->Core.fFlags |= RTHEAPOFFSETBLOCK_FLAGS_FREE;
        pFree->offPrev = 0;
        pFree->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pRight);
        if (pRight)
            pRight->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        else
            pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        pHeapInt->offFreeHead = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
    }
    else
    {
        /*
         * Can we merge with left hand free block?
         */
        if (pLeft->Core.offNext == RTHEAPOFF_TO_OFF(pHeapInt, pFree))
        {
            /* Absorb this block into pLeft; pLeft's free-list links already
               fit, only the global list and accounting need updating.
               (pLeft->cb is subtracted here and re-added below after the
               merged size is recomputed.) */
            pLeft->Core.offNext = pFree->Core.offNext;
            if (pFree->Core.offNext)
                RTHEAPOFF_TO_PTR(pHeapInt, pFree->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pLeft);
            pHeapInt->cbFree -= pLeft->cb;
            pFree = pLeft;
        }
        /*
         * No, just link it into the free list then.
         */
        else
        {
            pFree->Core.fFlags |= RTHEAPOFFSETBLOCK_FLAGS_FREE;
            pFree->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pRight);
            pFree->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pLeft);
            pLeft->offNext = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            if (pRight)
                pRight->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
            else
                pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        }
    }

    /*
     * Can we merge with right hand free block?
     */
    if (    pRight
        &&  pRight->Core.offPrev == RTHEAPOFF_TO_OFF(pHeapInt, pFree))
    {
        /* core */
        pFree->Core.offNext = pRight->Core.offNext;
        if (pRight->Core.offNext)
            RTHEAPOFF_TO_PTR(pHeapInt, pRight->Core.offNext, PRTHEAPOFFSETBLOCK)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);

        /* free */
        pFree->offNext = pRight->offNext;
        if (pRight->offNext)
            RTHEAPOFF_TO_PTR(pHeapInt, pRight->offNext, PRTHEAPOFFSETFREE)->offPrev = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        else
            pHeapInt->offFreeTail = RTHEAPOFF_TO_OFF(pHeapInt, pFree);
        pHeapInt->cbFree -= pRight->cb;
    }

    /*
     * Calculate the size and update free stats.
     */
    pFree->cb = (pFree->Core.offNext ? pFree->Core.offNext : pHeapInt->cbHeap)
              - RTHEAPOFF_TO_OFF(pHeapInt, pFree) - sizeof(RTHEAPOFFSETBLOCK);
    pHeapInt->cbFree += pFree->cb;
    ASSERT_BLOCK_FREE(pHeapInt, pFree);

#ifdef RTHEAPOFFSET_STRICT
    rtHeapOffsetAssertAll(pHeapInt);
#endif
}
810
811
#ifdef RTHEAPOFFSET_STRICT
/**
 * Internal consistency check (relying on assertions).
 *
 * Walks every block in the global list, validating used and free blocks and
 * cross-checking the free-list links against the blocks encountered.
 *
 * @param   pHeapInt    The heap anchor block.
 */
static void rtHeapOffsetAssertAll(PRTHEAPOFFSETINTERNAL pHeapInt)
{
    PRTHEAPOFFSETFREE pLastSeen = NULL;
    PRTHEAPOFFSETFREE pLastFree = NULL;
    PRTHEAPOFFSETFREE pCur;

    /* The first block sits immediately after the anchor. */
    for (pCur = (PRTHEAPOFFSETFREE)(pHeapInt + 1);
         pCur;
         pCur = RTHEAPOFF_TO_PTR_N(pHeapInt, pCur->Core.offNext, PRTHEAPOFFSETFREE))
    {
        if (RTHEAPOFFSETBLOCK_IS_FREE(&pCur->Core))
        {
            ASSERT_BLOCK_FREE(pHeapInt, pCur);
            /* The free-list back link must point at the previous free block,
               and the first free block must be the list head. */
            Assert(pCur->offPrev == RTHEAPOFF_TO_OFF(pHeapInt, pLastFree));
            Assert(pLastFree || pHeapInt->offFreeHead == RTHEAPOFF_TO_OFF(pHeapInt, pCur));
            pLastFree = pCur;
        }
        else
            ASSERT_BLOCK_USED(pHeapInt, &pCur->Core);
        /* Global list back link must point at the previously visited block. */
        Assert(!pLastSeen || RTHEAPOFF_TO_OFF(pHeapInt, pLastSeen) == pCur->Core.offPrev);
        pLastSeen = pCur;
    }
    /* The last free block seen must be the free tail. */
    Assert(pHeapInt->offFreeTail == RTHEAPOFF_TO_OFF(pHeapInt, pLastFree));
}
#endif
841
842
843RTDECL(size_t) RTHeapOffsetSize(RTHEAPOFFSET hHeap, void *pv)
844{
845 PRTHEAPOFFSETINTERNAL pHeapInt;
846 PRTHEAPOFFSETBLOCK pBlock;
847 size_t cbBlock;
848
849 /*
850 * Validate input.
851 */
852 if (!pv)
853 return 0;
854 AssertPtrReturn(pv, 0);
855 AssertReturn(RT_ALIGN_P(pv, RTHEAPOFFSET_ALIGNMENT) == pv, 0);
856
857 /*
858 * Get the block and heap. If in strict mode, validate these.
859 */
860 pBlock = (PRTHEAPOFFSETBLOCK)pv - 1;
861 pHeapInt = RTHEAPOFF_GET_ANCHOR(pBlock);
862 ASSERT_BLOCK_USED(pHeapInt, pBlock);
863 ASSERT_ANCHOR(pHeapInt);
864 Assert(pHeapInt == (PRTHEAPOFFSETINTERNAL)hHeap || !hHeap);
865
866 /*
867 * Calculate the block size.
868 */
869 cbBlock = (pBlock->offNext ? pBlock->offNext : pHeapInt->cbHeap)
870 - RTHEAPOFF_TO_OFF(pHeapInt, pBlock) - sizeof(RTHEAPOFFSETBLOCK);
871 return cbBlock;
872}
873RT_EXPORT_SYMBOL(RTHeapOffsetSize);
874
875
876RTDECL(size_t) RTHeapOffsetGetHeapSize(RTHEAPOFFSET hHeap)
877{
878 PRTHEAPOFFSETINTERNAL pHeapInt;
879
880 if (hHeap == NIL_RTHEAPOFFSET)
881 return 0;
882
883 pHeapInt = hHeap;
884 AssertPtrReturn(pHeapInt, 0);
885 ASSERT_ANCHOR(pHeapInt);
886 return pHeapInt->cbHeap;
887}
888RT_EXPORT_SYMBOL(RTHeapOffsetGetHeapSize);
889
890
891RTDECL(size_t) RTHeapOffsetGetFreeSize(RTHEAPOFFSET hHeap)
892{
893 PRTHEAPOFFSETINTERNAL pHeapInt;
894
895 if (hHeap == NIL_RTHEAPOFFSET)
896 return 0;
897
898 pHeapInt = hHeap;
899 AssertPtrReturn(pHeapInt, 0);
900 ASSERT_ANCHOR(pHeapInt);
901 return pHeapInt->cbFree;
902}
903RT_EXPORT_SYMBOL(RTHeapOffsetGetFreeSize);
904
905
906RTDECL(void) RTHeapOffsetDump(RTHEAPOFFSET hHeap, PFNRTHEAPOFFSETPRINTF pfnPrintf)
907{
908 PRTHEAPOFFSETINTERNAL pHeapInt = (PRTHEAPOFFSETINTERNAL)hHeap;
909 PRTHEAPOFFSETFREE pBlock;
910
911 pfnPrintf("**** Dumping Heap %p - cbHeap=%x cbFree=%x ****\n",
912 hHeap, pHeapInt->cbHeap, pHeapInt->cbFree);
913
914 for (pBlock = (PRTHEAPOFFSETFREE)(pHeapInt + 1);
915 pBlock;
916 pBlock = RTHEAPOFF_TO_PTR_N(pHeapInt, pBlock->Core.offNext, PRTHEAPOFFSETFREE))
917 {
918 size_t cb = (pBlock->offNext ? pBlock->Core.offNext : pHeapInt->cbHeap)
919 - RTHEAPOFF_TO_OFF(pHeapInt, pBlock) - sizeof(RTHEAPOFFSETBLOCK);
920 if (RTHEAPOFFSETBLOCK_IS_FREE(&pBlock->Core))
921 pfnPrintf("%p %06x FREE offNext=%06x offPrev=%06x fFlags=%#x cb=%#06x : cb=%#06x offNext=%06x offPrev=%06x\n",
922 pBlock, pBlock->Core.offSelf, pBlock->Core.offNext, pBlock->Core.offPrev, pBlock->Core.fFlags, cb,
923 pBlock->cb, pBlock->offNext, pBlock->offPrev);
924 else
925 pfnPrintf("%p %06x USED offNext=%06x offPrev=%06x fFlags=%#x cb=%#06x\n",
926 pBlock, pBlock->Core.offSelf, pBlock->Core.offNext, pBlock->Core.offPrev, pBlock->Core.fFlags, cb);
927 }
928 pfnPrintf("**** Done dumping Heap %p ****\n", hHeap);
929}
930RT_EXPORT_SYMBOL(RTHeapOffsetDump);
931
Note: See TracBrowser for help on using the repository browser.

© 2024 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette