VirtualBox

source: vbox/trunk/include/iprt/mem.h@78303

Last change on this file since 78303 was 76687, checked in by vboxsync, 6 years ago

GCC/Address sanitiser: do not break pre-compiled headers.
bugref:8019: GCC sanitisers
This change removes -include sanitizer/lsan_interface.h from the build command
line if the GCC sanitisers are enabled and includes the file from iprt/mem.h
instead. This prevents breaking builds with pre-compiled headers, which
require that no C tokens occur before the pre-compiled header is
included, and is generally nicer.

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 36.3 KB
1/** @file
2 * IPRT - Memory Management and Manipulation.
3 */
4
5/*
6 * Copyright (C) 2006-2019 Oracle Corporation
7 *
8 * This file is part of VirtualBox Open Source Edition (OSE), as
9 * available from http://www.virtualbox.org. This file is free software;
10 * you can redistribute it and/or modify it under the terms of the GNU
11 * General Public License (GPL) as published by the Free Software
12 * Foundation, in version 2 as it comes in the "COPYING" file of the
13 * VirtualBox OSE distribution. VirtualBox OSE is distributed in the
14 * hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
15 *
16 * The contents of this file may alternatively be used under the terms
17 * of the Common Development and Distribution License Version 1.0
18 * (CDDL) only, as it comes in the "COPYING.CDDL" file of the
19 * VirtualBox OSE distribution, in which case the provisions of the
20 * CDDL are applicable instead of those of the GPL.
21 *
22 * You may elect to license modified versions of this file under the
23 * terms and conditions of either the GPL or the CDDL or both.
24 */
25
26#ifndef IPRT_INCLUDED_mem_h
27#define IPRT_INCLUDED_mem_h
28#ifndef RT_WITHOUT_PRAGMA_ONCE
29# pragma once
30#endif
31
32
33#include <iprt/cdefs.h>
34#include <iprt/types.h>
35
36#ifdef IPRT_WITH_GCC_SANITIZER
37# include <sanitizer/lsan_interface.h>
38#endif
39
40#ifdef IN_RC
41# error "There are no RTMem APIs available in Guest Context!"
42#endif
43
44
45/** @defgroup grp_rt_mem RTMem - Memory Management and Manipulation
46 * @ingroup grp_rt
47 * @{
48 */
49
50RT_C_DECLS_BEGIN
51
52/** @def RTMEM_ALIGNMENT
53 * The alignment of the memory blocks returned by RTMemAlloc(), RTMemAllocZ(),
54 * RTMemRealloc(), RTMemTmpAlloc() and RTMemTmpAllocZ() for allocations greater
55 * than RTMEM_ALIGNMENT.
56 *
57 * @note This alignment is not forced if the electric fence is active!
58 */
59#if defined(RT_OS_OS2)
60# define RTMEM_ALIGNMENT 4
61#else
62# define RTMEM_ALIGNMENT 8
63#endif
64
65/** @def RTMEM_TAG
66 * The default allocation tag used by the RTMem allocation APIs.
67 *
68 * When not defined before the inclusion of iprt/mem.h or iprt/memobj.h, this
69 * will default to the pointer to the current file name. The memory API will
70 * make use of this as a pointer to a volatile but read-only string.
71 * The alternative tag includes the line number for a more-detailed analysis.
72 */
73#ifndef RTMEM_TAG
74# if 0
75# define RTMEM_TAG (__FILE__ ":" RT_XSTR(__LINE__))
76# else
77# define RTMEM_TAG (__FILE__)
78# endif
79#endif
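/* Usage sketch (illustrative only; the tag string "mymodule" is hypothetical):
 * a compilation unit can pick its own default tag by defining RTMEM_TAG before
 * the first inclusion of this header.
 *
 *      #define RTMEM_TAG   "mymodule"
 *      #include <iprt/mem.h>
 *
 *      void *pv = RTMemAlloc(64);      // tagged "mymodule" instead of __FILE__
 *      RTMemFree(pv);
 */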
80
81
82/** @name Allocate temporary memory.
83 * @{ */
84/**
85 * Allocates temporary memory with default tag.
86 *
87 * Temporary memory blocks are used for modestly sized memory blocks which
88 * are not expected to stick around for long. Using this API instead of
89 * RTMemAlloc() not only gives the heap manager room for optimization but
90 * also makes the code easier to read.
91 *
92 * @returns Pointer to the allocated memory.
93 * @returns NULL on failure, assertion raised in strict builds.
94 * @param cb Size in bytes of the memory block to allocate.
95 */
96#define RTMemTmpAlloc(cb) RTMemTmpAllocTag((cb), RTMEM_TAG)
97
98/**
99 * Allocates temporary memory with custom tag.
100 *
101 * Temporary memory blocks are used for modestly sized memory blocks which
102 * are not expected to stick around for long. Using this API instead of
103 * RTMemAlloc() not only gives the heap manager room for optimization but
104 * also makes the code easier to read.
105 *
106 * @returns Pointer to the allocated memory.
107 * @returns NULL on failure, assertion raised in strict builds.
108 * @param cb Size in bytes of the memory block to allocate.
109 * @param pszTag Allocation tag used for statistics and such.
110 */
111RTDECL(void *) RTMemTmpAllocTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
112
113/**
114 * Allocates zero'd temporary memory with default tag.
115 *
116 * Same as RTMemTmpAlloc() but the memory will be zero'd.
117 *
118 * @returns Pointer to the allocated memory.
119 * @returns NULL on failure, assertion raised in strict builds.
120 * @param cb Size in bytes of the memory block to allocate.
121 */
122#define RTMemTmpAllocZ(cb) RTMemTmpAllocZTag((cb), RTMEM_TAG)
123
124/**
125 * Allocates zero'd temporary memory with custom tag.
126 *
127 * Same as RTMemTmpAlloc() but the memory will be zero'd.
128 *
129 * @returns Pointer to the allocated memory.
130 * @returns NULL on failure, assertion raised in strict builds.
131 * @param cb Size in bytes of the memory block to allocate.
132 * @param pszTag Allocation tag used for statistics and such.
133 */
134RTDECL(void *) RTMemTmpAllocZTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
135
136/**
137 * Free temporary memory.
138 *
139 * @param pv Pointer to memory block.
140 */
141RTDECL(void) RTMemTmpFree(void *pv) RT_NO_THROW_PROTO;
142
143/** @} */
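/* Usage sketch (illustrative only; cbScratch and the surrounding function are
 * hypothetical, VERR_NO_MEMORY comes from the IPRT status code headers):
 *
 *      void *pvScratch = RTMemTmpAlloc(cbScratch);
 *      if (!pvScratch)
 *          return VERR_NO_MEMORY;
 *      // ... use pvScratch for the duration of this call only ...
 *      RTMemTmpFree(pvScratch);
 */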
144
145
146/**
147 * Allocates memory with default tag.
148 *
149 * @returns Pointer to the allocated memory.
150 * @returns NULL on failure, assertion raised in strict builds.
151 * @param cb Size in bytes of the memory block to allocate.
152 */
153#define RTMemAlloc(cb) RTMemAllocTag((cb), RTMEM_TAG)
154
155/**
156 * Allocates memory with custom tag.
157 *
158 * @returns Pointer to the allocated memory.
159 * @returns NULL on failure, assertion raised in strict builds.
160 * @param cb Size in bytes of the memory block to allocate.
161 * @param pszTag Allocation tag used for statistics and such.
162 */
163RTDECL(void *) RTMemAllocTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
164
165/**
166 * Allocates zero'd memory with default tag.
167 *
168 * Instead of RTMemAlloc() followed by memset(pv, 0, cb), use this when you
169 * want zeroed memory. This keeps the code smaller and lets the heap skip the
170 * memset in about 0.42% of calls :-).
171 *
172 * @returns Pointer to the allocated memory.
173 * @returns NULL on failure.
174 * @param cb Size in bytes of the memory block to allocate.
175 */
176#define RTMemAllocZ(cb) RTMemAllocZTag((cb), RTMEM_TAG)
177
178/**
179 * Allocates zero'd memory with custom tag.
180 *
181 * Instead of RTMemAlloc() followed by memset(pv, 0, cb), use this when you
182 * want zeroed memory. This keeps the code smaller and lets the heap skip the
183 * memset in about 0.42% of calls :-).
184 *
185 * @returns Pointer to the allocated memory.
186 * @returns NULL on failure.
187 * @param cb Size in bytes of the memory block to allocate.
188 * @param pszTag Allocation tag used for statistics and such.
189 */
190RTDECL(void *) RTMemAllocZTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
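/* Usage sketch (illustrative only; MYSTRUCT is a hypothetical caller-defined type):
 *
 *      MYSTRUCT *pThis = (MYSTRUCT *)RTMemAllocZ(sizeof(*pThis));
 *      if (!pThis)
 *          return VERR_NO_MEMORY;
 *      // every field of *pThis starts out zero, no explicit memset needed
 *      RTMemFree(pThis);
 */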
191
192/**
193 * Wrapper around RTMemAlloc for automatically aligning variable sized
194 * allocations so that the various electric fence heaps work correctly.
195 *
196 * @returns See RTMemAlloc.
197 * @param cbUnaligned The unaligned size.
198 */
199#define RTMemAllocVar(cbUnaligned) RTMemAllocVarTag((cbUnaligned), RTMEM_TAG)
200
201/**
202 * Wrapper around RTMemAllocTag for automatically aligning variable sized
203 * allocations so that the various electric fence heaps work correctly.
204 *
205 * @returns See RTMemAlloc.
206 * @param cbUnaligned The unaligned size.
207 * @param pszTag Allocation tag used for statistics and such.
208 */
209RTDECL(void *) RTMemAllocVarTag(size_t cbUnaligned, const char *pszTag) RT_NO_THROW_PROTO;
210
211/**
212 * Wrapper around RTMemAllocZ for automatically aligning variable sized
213 * allocations so that the various electric fence heaps work correctly.
214 *
215 * @returns See RTMemAllocZ.
216 * @param cbUnaligned The unaligned size.
217 */
218#define RTMemAllocZVar(cbUnaligned) RTMemAllocZVarTag((cbUnaligned), RTMEM_TAG)
219
220/**
221 * Wrapper around RTMemAllocZTag for automatically aligning variable sized
222 * allocations so that the various electric fence heaps work correctly.
223 *
224 * @returns See RTMemAllocZ.
225 * @param cbUnaligned The unaligned size.
226 * @param pszTag Allocation tag used for statistics and such.
227 */
228RTDECL(void *) RTMemAllocZVarTag(size_t cbUnaligned, const char *pszTag) RT_NO_THROW_PROTO;
229
230/**
231 * Duplicates a chunk of memory into a new heap block (default tag).
232 *
233 * @returns New heap block with the duplicate data.
234 * @returns NULL if we're out of memory.
235 * @param pvSrc The memory to duplicate.
236 * @param cb The amount of memory to duplicate.
237 */
238#define RTMemDup(pvSrc, cb) RTMemDupTag((pvSrc), (cb), RTMEM_TAG)
239
240/**
241 * Duplicates a chunk of memory into a new heap block (custom tag).
242 *
243 * @returns New heap block with the duplicate data.
244 * @returns NULL if we're out of memory.
245 * @param pvSrc The memory to duplicate.
246 * @param cb The amount of memory to duplicate.
247 * @param pszTag Allocation tag used for statistics and such.
248 */
249RTDECL(void *) RTMemDupTag(const void *pvSrc, size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
250
251/**
252 * Duplicates a chunk of memory into a new heap block with some additional
253 * zeroed memory (default tag).
254 *
255 * @returns New heap block with the duplicate data.
256 * @returns NULL if we're out of memory.
257 * @param pvSrc The memory to duplicate.
258 * @param cbSrc The amount of memory to duplicate.
259 * @param cbExtra The amount of extra memory to allocate and zero.
260 */
261#define RTMemDupEx(pvSrc, cbSrc, cbExtra) RTMemDupExTag((pvSrc), (cbSrc), (cbExtra), RTMEM_TAG)
262
263/**
264 * Duplicates a chunk of memory into a new heap block with some additional
264 * zeroed memory (custom tag).
266 *
267 * @returns New heap block with the duplicate data.
268 * @returns NULL if we're out of memory.
269 * @param pvSrc The memory to duplicate.
270 * @param cbSrc The amount of memory to duplicate.
271 * @param cbExtra The amount of extra memory to allocate and zero.
272 * @param pszTag Allocation tag used for statistics and such.
273 */
274RTDECL(void *) RTMemDupExTag(const void *pvSrc, size_t cbSrc, size_t cbExtra, const char *pszTag) RT_NO_THROW_PROTO;
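/* Usage sketch (illustrative only; pchSrc and cchSrc are hypothetical caller
 * variables holding a text fragment that is not zero terminated):
 *
 *      char *pszCopy = (char *)RTMemDupEx(pchSrc, cchSrc, 1);
 *      if (!pszCopy)
 *          return VERR_NO_MEMORY;
 *      // pszCopy[cchSrc] is '\0' because the one extra byte is zeroed
 *      RTMemFree(pszCopy);
 */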
275
276/**
277 * Reallocates memory with default tag.
278 *
279 * @returns Pointer to the allocated memory.
280 * @returns NULL on failure.
281 * @param pvOld The memory block to reallocate.
282 * @param cbNew The new block size (in bytes).
283 */
284#define RTMemRealloc(pvOld, cbNew) RTMemReallocTag((pvOld), (cbNew), RTMEM_TAG)
285
286/**
287 * Reallocates memory with custom tag.
288 *
289 * @returns Pointer to the allocated memory.
290 * @returns NULL on failure.
291 * @param pvOld The memory block to reallocate.
292 * @param cbNew The new block size (in bytes).
293 * @param pszTag Allocation tag used for statistics and such.
294 */
295RTDECL(void *) RTMemReallocTag(void *pvOld, size_t cbNew, const char *pszTag) RT_NO_THROW_PROTO;
296
297/**
298 * Frees memory.
299 *
300 * @param pv Pointer to memory block.
301 */
302RTDECL(void) RTMemFree(void *pv) RT_NO_THROW_PROTO;
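/* Usage sketch (illustrative only; papItems, cAllocated and MYITEM are
 * hypothetical): grow a buffer without losing the old block on failure.
 *
 *      void *pvNew = RTMemRealloc(papItems, cAllocated * 2 * sizeof(papItems[0]));
 *      if (!pvNew)
 *          return VERR_NO_MEMORY;      // papItems is still valid and unchanged here
 *      papItems    = (MYITEM **)pvNew;
 *      cAllocated *= 2;
 */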
303
304
305
306/** @name RTMemAllocEx and RTMemAllocExTag flags.
307 * @{ */
308/** The returned memory should be zeroed. */
309#define RTMEMALLOCEX_FLAGS_ZEROED RT_BIT(0)
310/** It must be possible to load code into the returned memory block and execute it. */
311#define RTMEMALLOCEX_FLAGS_EXEC RT_BIT(1)
312/** Allocation from any context.
313 * Will return VERR_NOT_SUPPORTED if not supported. */
314#define RTMEMALLOCEX_FLAGS_ANY_CTX_ALLOC RT_BIT(2)
315/** Allocate the memory such that it can be freed from any context.
316 * Will return VERR_NOT_SUPPORTED if not supported. */
317#define RTMEMALLOCEX_FLAGS_ANY_CTX_FREE RT_BIT(3)
318/** Allocate and free from any context.
319 * Will return VERR_NOT_SUPPORTED if not supported. */
320#define RTMEMALLOCEX_FLAGS_ANY_CTX (RTMEMALLOCEX_FLAGS_ANY_CTX_ALLOC | RTMEMALLOCEX_FLAGS_ANY_CTX_FREE)
321/** Reachable by 16-bit address.
322 * Will return VERR_NOT_SUPPORTED if not supported. */
323#define RTMEMALLOCEX_FLAGS_16BIT_REACH RT_BIT(4)
324/** Reachable by 32-bit address.
325 * Will return VERR_NOT_SUPPORTED if not supported. */
326#define RTMEMALLOCEX_FLAGS_32BIT_REACH RT_BIT(5)
327/** Mask of valid flags. */
328#define RTMEMALLOCEX_FLAGS_VALID_MASK UINT32_C(0x0000003f)
329/** Mask of valid flags for ring-0. */
330#define RTMEMALLOCEX_FLAGS_VALID_MASK_R0 UINT32_C(0x0000000f)
331/** @} */
332
333/**
334 * Extended heap allocation API, default tag.
335 *
336 * @returns IPRT status code.
337 * @retval VERR_NO_MEMORY if we're out of memory.
338 * @retval VERR_NO_EXEC_MEMORY if we're out of executable memory.
339 * @retval VERR_NOT_SUPPORTED if any of the specified flags are unsupported.
340 *
341 * @param cb The amount of memory to allocate.
342 * @param cbAlignment The alignment requirements. Use 0 to indicate
343 * default alignment.
344 * @param fFlags A combination of the RTMEMALLOCEX_FLAGS_XXX
345 * defines.
346 * @param ppv Where to return the memory.
347 */
348#define RTMemAllocEx(cb, cbAlignment, fFlags, ppv) RTMemAllocExTag((cb), (cbAlignment), (fFlags), RTMEM_TAG, (ppv))
349
350/**
351 * Extended heap allocation API, custom tag.
352 *
353 * Depending on the implementation, using this function may add extra overhead,
354 * so use the simpler APIs wherever possible.
355 *
356 * @returns IPRT status code.
357 * @retval VERR_NO_MEMORY if we're out of memory.
358 * @retval VERR_NO_EXEC_MEMORY if we're out of executable memory.
359 * @retval VERR_NOT_SUPPORTED if any of the specified flags are unsupported.
360 *
361 * @param cb The amount of memory to allocate.
362 * @param cbAlignment The alignment requirements. Use 0 to indicate
363 * default alignment.
364 * @param fFlags A combination of the RTMEMALLOCEX_FLAGS_XXX
365 * defines.
366 * @param pszTag The tag.
367 * @param ppv Where to return the memory.
368 */
369RTDECL(int) RTMemAllocExTag(size_t cb, size_t cbAlignment, uint32_t fFlags, const char *pszTag, void **ppv) RT_NO_THROW_PROTO;
370
371/**
372 * For freeing memory allocated by RTMemAllocEx or RTMemAllocExTag.
373 *
374 * @param pv What to free, NULL is fine.
375 * @param cb The amount of allocated memory.
376 */
377RTDECL(void) RTMemFreeEx(void *pv, size_t cb) RT_NO_THROW_PROTO;
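/* Usage sketch (illustrative only): request zeroed memory reachable by a 32-bit
 * address and fall back to a plain zeroed allocation if the host cannot satisfy
 * the reach flag (RT_SUCCESS and VERR_* come from the IPRT status code headers).
 *
 *      void *pv;
 *      int rc = RTMemAllocEx(256, 0, RTMEMALLOCEX_FLAGS_ZEROED | RTMEMALLOCEX_FLAGS_32BIT_REACH, &pv);
 *      if (rc == VERR_NOT_SUPPORTED)
 *          rc = RTMemAllocEx(256, 0, RTMEMALLOCEX_FLAGS_ZEROED, &pv);
 *      if (RT_SUCCESS(rc))
 *      {
 *          // ... use pv ...
 *          RTMemFreeEx(pv, 256);       // pass the same size back to the API
 *      }
 */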
378
379
380
381/**
382 * Allocates memory which may contain code (default tag).
383 *
384 * @returns Pointer to the allocated memory.
385 * @returns NULL on failure.
386 * @param cb Size in bytes of the memory block to allocate.
387 */
388#define RTMemExecAlloc(cb) RTMemExecAllocTag((cb), RTMEM_TAG)
389
390/**
391 * Allocates memory which may contain code (custom tag).
392 *
393 * @returns Pointer to the allocated memory.
394 * @returns NULL on failure.
395 * @param cb Size in bytes of the memory block to allocate.
396 * @param pszTag Allocation tag used for statistics and such.
397 */
398RTDECL(void *) RTMemExecAllocTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
399
400/**
401 * Free executable/read/write memory allocated by RTMemExecAlloc().
402 *
403 * @param pv Pointer to memory block.
404 * @param cb The allocation size.
405 */
406RTDECL(void) RTMemExecFree(void *pv, size_t cb) RT_NO_THROW_PROTO;
407
408#if defined(IN_RING0) && defined(RT_ARCH_AMD64) && defined(RT_OS_LINUX)
409/**
410 * Donate read+write+execute memory to the exec heap.
411 *
412 * This API is specific to AMD64 and Linux/GNU. A kernel module that desires to
413 * use RTMemExecAlloc on AMD64 Linux/GNU will have to donate some statically
414 * allocated memory in the module if it wishes for GCC generated code to work.
415 * GCC can only generate modules that work in the address range ~2GB to ~0
416 * currently.
417 *
418 * The API accepts only a single donation.
419 *
420 * @returns IPRT status code.
421 * @param pvMemory Pointer to the memory block.
422 * @param cb The size of the memory block.
423 */
424RTR0DECL(int) RTR0MemExecDonate(void *pvMemory, size_t cb) RT_NO_THROW_PROTO;
425#endif /* R0+AMD64+LINUX */
426
427/**
428 * Allocate page aligned memory with default tag.
429 *
430 * @returns Pointer to the allocated memory.
431 * @returns NULL if we're out of memory.
432 * @param cb Size of the memory block. Will be rounded up to page size.
433 */
434#define RTMemPageAlloc(cb) RTMemPageAllocTag((cb), RTMEM_TAG)
435
436/**
437 * Allocate page aligned memory with custom tag.
438 *
439 * @returns Pointer to the allocated memory.
440 * @returns NULL if we're out of memory.
441 * @param cb Size of the memory block. Will be rounded up to page size.
442 * @param pszTag Allocation tag used for statistics and such.
443 */
444RTDECL(void *) RTMemPageAllocTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
445
446/**
447 * Allocate zero'd page aligned memory with default tag.
448 *
449 * @returns Pointer to the allocated memory.
450 * @returns NULL if we're out of memory.
451 * @param cb Size of the memory block. Will be rounded up to page size.
452 */
453#define RTMemPageAllocZ(cb) RTMemPageAllocZTag((cb), RTMEM_TAG)
454
455/**
456 * Allocate zero'd page aligned memory with custom tag.
457 *
458 * @returns Pointer to the allocated memory.
459 * @returns NULL if we're out of memory.
460 * @param cb Size of the memory block. Will be rounded up to page size.
461 * @param pszTag Allocation tag used for statistics and such.
462 */
463RTDECL(void *) RTMemPageAllocZTag(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
464
465/**
466 * Free a memory block allocated with RTMemPageAlloc() or RTMemPageAllocZ().
467 *
468 * @param pv Pointer to the block as it was returned by the allocation function.
469 * NULL will be ignored.
470 * @param cb The allocation size. Will be rounded up to page size.
471 * Ignored if @a pv is NULL.
472 */
473RTDECL(void) RTMemPageFree(void *pv, size_t cb) RT_NO_THROW_PROTO;
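/* Usage sketch (illustrative only; cbBuf is a hypothetical size, rounded up to a
 * page multiple by the API):
 *
 *      void *pvBuf = RTMemPageAllocZ(cbBuf);
 *      if (!pvBuf)
 *          return VERR_NO_MEMORY;
 *      // ... page aligned, zero initialized buffer ...
 *      RTMemPageFree(pvBuf, cbBuf);    // same size as passed to the allocator
 */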
474
475/** Page level protection flags for RTMemProtect().
476 * @{
477 */
478/** No access at all. */
479#define RTMEM_PROT_NONE 0
480/** Read access. */
481#define RTMEM_PROT_READ 1
482/** Write access. */
483#define RTMEM_PROT_WRITE 2
484/** Execute access. */
485#define RTMEM_PROT_EXEC 4
486/** @} */
487
488/**
489 * Change the page level protection of a memory region.
490 *
491 * @returns IPRT status code.
492 * @param pv Start of the region. Will be rounded down to nearest page boundary.
493 * @param cb Size of the region. Will be rounded up to the nearest page boundary.
494 * @param fProtect The new protection, a combination of the RTMEM_PROT_* defines.
495 */
496RTDECL(int) RTMemProtect(void *pv, size_t cb, unsigned fProtect) RT_NO_THROW_PROTO;
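/* Usage sketch (illustrative only; pvPage is assumed to be a page aligned block
 * and PAGE_SIZE the IPRT page size constant): temporarily write protect a page.
 *
 *      int rc = RTMemProtect(pvPage, PAGE_SIZE, RTMEM_PROT_READ);
 *      if (RT_SUCCESS(rc))
 *      {
 *          // ... the page is now read-only; writes will fault ...
 *          rc = RTMemProtect(pvPage, PAGE_SIZE, RTMEM_PROT_READ | RTMEM_PROT_WRITE);
 *      }
 */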
497
498/**
499 * Goes through some pains to make sure the specified memory block is thoroughly
500 * scrambled.
501 *
502 * @param pv The start of the memory block.
503 * @param cb The size of the memory block.
504 * @param cMinPasses The minimum number of passes to make.
505 */
506RTDECL(void) RTMemWipeThoroughly(void *pv, size_t cb, size_t cMinPasses) RT_NO_THROW_PROTO;
507
508
509/** @def RTMEM_WILL_LEAK
510 * Macro for hinting that a memory allocation @a a_pv will leak.
511 *
512 * @note This shall only be used in code that doesn't allocate the object.
513 * Code allocating memory knowing it will leak shall start the allocation
514 * tag string with 'will-leak:'.
515 */
516/** @def RTMEM_MAY_LEAK
517 * Macro for hinting that a memory allocation @a a_pv may leak.
518 *
519 * @note This shall only be used in code that doesn't allocate the object.
520 * Code allocating memory knowing it may leak shall start the allocation
521 * tag string with 'may-leak:'.
522 */
523#ifdef IPRT_WITH_GCC_SANITIZER
524# define RTMEM_WILL_LEAK(a_pv) __lsan_ignore_object(a_pv)
525# define RTMEM_MAY_LEAK(a_pv) __lsan_ignore_object(a_pv)
526#else
527# define RTMEM_WILL_LEAK(a_pv) do { } while (0)
528# define RTMEM_MAY_LEAK(a_pv) do { } while (0)
529#endif
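/* Usage sketch (illustrative only; g_pSingleton is a hypothetical global that is
 * created lazily and intentionally never freed):
 *
 *      g_pSingleton = RTMemAllocZ(sizeof(*g_pSingleton));
 *      if (g_pSingleton)
 *          RTMEM_WILL_LEAK(g_pSingleton);  // keep the leak sanitizer quiet about it
 */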
530
531
532#ifdef IN_RING0
533
534/**
535 * Allocates physical contiguous memory (below 4GB).
536 * The allocation is page aligned and the content is undefined.
537 *
538 * @returns Pointer to the memory block. This is page aligned.
539 * @param pPhys Where to store the physical address.
540 * @param cb The allocation size in bytes. This is always
541 * rounded up to PAGE_SIZE.
542 */
543RTR0DECL(void *) RTMemContAlloc(PRTCCPHYS pPhys, size_t cb) RT_NO_THROW_PROTO;
544
545/**
546 * Frees memory allocated using RTMemContAlloc().
547 *
548 * @param pv Pointer returned by RTMemContAlloc().
549 * @param cb The cb parameter passed to RTMemContAlloc().
550 */
551RTR0DECL(void) RTMemContFree(void *pv, size_t cb) RT_NO_THROW_PROTO;
552
553/**
554 * Copy memory from a user mode buffer into a kernel buffer.
555 *
556 * @retval VINF_SUCCESS on success.
557 * @retval VERR_ACCESS_DENIED on error.
558 *
559 * @param pvDst The kernel mode destination address.
560 * @param R3PtrSrc The user mode source address.
561 * @param cb The number of bytes to copy.
562 */
563RTR0DECL(int) RTR0MemUserCopyFrom(void *pvDst, RTR3PTR R3PtrSrc, size_t cb);
564
565/**
566 * Copy memory from a kernel buffer into a user mode one.
567 *
568 * @retval VINF_SUCCESS on success.
569 * @retval VERR_ACCESS_DENIED on error.
570 *
571 * @param R3PtrDst The user mode destination address.
572 * @param pvSrc The kernel mode source address.
573 * @param cb The number of bytes to copy.
574 */
575RTR0DECL(int) RTR0MemUserCopyTo(RTR3PTR R3PtrDst, void const *pvSrc, size_t cb);
576
577/**
578 * Tests if the specified address is in the user addressable range.
579 *
580 * This function does not check whether the memory at that address is accessible
581 * or anything of that sort, only whether the address itself is in the user mode
582 * range.
583 *
584 * @returns true if it's in the user addressable range. false if not.
585 * @param R3Ptr The user mode pointer to test.
586 *
587 * @remarks Some systems may have overlapping kernel and user address ranges.
588 * One prominent example of this is the x86 version of Mac OS X. Use
589 * RTR0MemAreKrnlAndUsrDifferent() to check.
590 */
591RTR0DECL(bool) RTR0MemUserIsValidAddr(RTR3PTR R3Ptr);
592
593/**
594 * Tests if the specified address is in the kernel mode range.
595 *
596 * This function does not check whether the memory at that address is accessible
597 * or anything of that sort, only whether the address itself is in the kernel mode
598 * range.
599 *
600 * @returns true if it's in the kernel range. false if not.
601 * @param pv The alleged kernel mode pointer.
602 *
603 * @remarks Some systems may have overlapping kernel and user address ranges.
604 * One prominent example of this is the x86 version of Mac OS X. Use
605 * RTR0MemAreKrnlAndUsrDifferent() to check.
606 */
607RTR0DECL(bool) RTR0MemKernelIsValidAddr(void *pv);
608
609/**
610 * Tests whether the user mode and kernel mode address ranges are distinctly different.
611 *
612 * This determines whether RTR0MemKernelIsValidAddr and RTR0MemUserIsValidAddr
613 * can be used for deciding whether some arbitrary address is a user mode or a
614 * kernel mode one.
615 *
616 * @returns true if they are, false if not.
617 */
618RTR0DECL(bool) RTR0MemAreKrnlAndUsrDifferent(void);
619
620/**
621 * Copy memory from a potentially unsafe kernel mode location into a safe
622 * (kernel) buffer.
623 *
624 * @retval VINF_SUCCESS on success.
625 * @retval VERR_ACCESS_DENIED on error.
626 * @retval VERR_NOT_SUPPORTED if not (yet) supported.
627 *
628 * @param pvDst The destination address (safe).
629 * @param pvSrc The source address (potentially unsafe).
630 * @param cb The number of bytes to copy.
631 */
632RTR0DECL(int) RTR0MemKernelCopyFrom(void *pvDst, void const *pvSrc, size_t cb);
633
634/**
635 * Copy from a safe (kernel) buffer to a potentially unsafe kernel mode
636 * location.
637 *
638 * @retval VINF_SUCCESS on success.
639 * @retval VERR_ACCESS_DENIED on error.
640 * @retval VERR_NOT_SUPPORTED if not (yet) supported.
641 *
642 * @param pvDst The destination address (potentially unsafe).
643 * @param pvSrc The source address (safe).
644 * @param cb The number of bytes to copy.
645 */
646RTR0DECL(int) RTR0MemKernelCopyTo(void *pvDst, void const *pvSrc, size_t cb);
647
648#endif /* IN_RING0 */
649
650
651/** @name Electrical Fence Version of some APIs.
652 * @{
653 */
654
655/**
656 * Same as RTMemTmpAllocTag() except that it's fenced.
657 *
658 * @returns Pointer to the allocated memory.
659 * @returns NULL on failure.
660 * @param cb Size in bytes of the memory block to allocate.
661 * @param pszTag Allocation tag used for statistics and such.
662 * @param SRC_POS The source position where the call is being made from.
663 * Use RT_SRC_POS when possible. Optional.
664 */
665RTDECL(void *) RTMemEfTmpAlloc(size_t cb, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
666
667/**
668 * Same as RTMemTmpAllocZTag() except that it's fenced.
669 *
670 * @returns Pointer to the allocated memory.
671 * @returns NULL on failure.
672 * @param cb Size in bytes of the memory block to allocate.
673 * @param pszTag Allocation tag used for statistics and such.
674 * @param SRC_POS The source position where the call is being made from. Use
675 * RT_SRC_POS when possible. Optional.
676 */
677RTDECL(void *) RTMemEfTmpAllocZ(size_t cb, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
678
679/**
680 * Same as RTMemTmpFree() except that it's for fenced memory.
681 *
682 * @param pv Pointer to memory block.
683 * @param SRC_POS The source position where the call is being made from. Use
684 * RT_SRC_POS when possible. Optional.
685 */
686RTDECL(void) RTMemEfTmpFree(void *pv, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
687
688/**
689 * Same as RTMemAllocTag() except that it's fenced.
690 *
691 * @returns Pointer to the allocated memory. Free with RTMemEfFree().
692 * @returns NULL on failure.
693 * @param cb Size in bytes of the memory block to allocate.
694 * @param pszTag Allocation tag used for statistics and such.
695 * @param SRC_POS The source position where the call is being made from. Use
696 * RT_SRC_POS when possible. Optional.
697 */
698RTDECL(void *) RTMemEfAlloc(size_t cb, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
699
700/**
701 * Same as RTMemAllocZTag() except that it's fenced.
702 *
703 * @returns Pointer to the allocated memory.
704 * @returns NULL on failure.
705 * @param cb Size in bytes of the memory block to allocate.
706 * @param pszTag Allocation tag used for statistics and such.
707 * @param SRC_POS The source position where the call is being made from. Use
708 * RT_SRC_POS when possible. Optional.
709 */
710RTDECL(void *) RTMemEfAllocZ(size_t cb, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
711
712/**
713 * Same as RTMemAllocVarTag() except that it's fenced.
714 *
715 * @returns Pointer to the allocated memory. Free with RTMemEfFree().
716 * @returns NULL on failure.
717 * @param cbUnaligned Size in bytes of the memory block to allocate.
718 * @param pszTag Allocation tag used for statistics and such.
719 * @param SRC_POS The source position where the call is being made from. Use
720 * RT_SRC_POS when possible. Optional.
721 */
722RTDECL(void *) RTMemEfAllocVar(size_t cbUnaligned, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
723
724/**
725 * Same as RTMemAllocZVarTag() except that it's fenced.
726 *
727 * @returns Pointer to the allocated memory.
728 * @returns NULL on failure.
729 * @param cbUnaligned Size in bytes of the memory block to allocate.
730 * @param pszTag Allocation tag used for statistics and such.
731 * @param SRC_POS The source position where the call is being made from. Use
732 * RT_SRC_POS when possible. Optional.
733 */
734RTDECL(void *) RTMemEfAllocZVar(size_t cbUnaligned, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
735
736/**
737 * Same as RTMemReallocTag() except that it's fenced.
738 *
739 * @returns Pointer to the allocated memory.
740 * @returns NULL on failure.
741 * @param pvOld The memory block to reallocate.
742 * @param cbNew The new block size (in bytes).
743 * @param pszTag Allocation tag used for statistics and such.
744 * @param SRC_POS The source position where the call is being made from. Use
745 * RT_SRC_POS when possible. Optional.
746 */
747RTDECL(void *) RTMemEfRealloc(void *pvOld, size_t cbNew, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
748
749/**
750 * Free memory allocated by any of the RTMemEf* allocators.
751 *
752 * @param pv Pointer to memory block.
753 * @param SRC_POS The source position where the call is being made from. Use
754 * RT_SRC_POS when possible. Optional.
755 */
756RTDECL(void) RTMemEfFree(void *pv, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
757
758/**
759 * Same as RTMemDupTag() except that it's fenced.
760 *
761 * @returns New heap block with the duplicate data.
762 * @returns NULL if we're out of memory.
763 * @param pvSrc The memory to duplicate.
764 * @param cb The amount of memory to duplicate.
765 * @param pszTag Allocation tag used for statistics and such.
766 * @param SRC_POS The source position where the call is being made from. Use
767 * RT_SRC_POS when possible. Optional.
768 */
769RTDECL(void *) RTMemEfDup(const void *pvSrc, size_t cb, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
770
771/**
772 * Same as RTMemDupExTag() except that it's fenced.
773 *
774 * @returns New heap block with the duplicate data.
775 * @returns NULL if we're out of memory.
776 * @param pvSrc The memory to duplicate.
777 * @param cbSrc The amount of memory to duplicate.
778 * @param cbExtra The amount of extra memory to allocate and zero.
779 * @param pszTag Allocation tag used for statistics and such.
780 * @param SRC_POS The source position where the call is being made from. Use
781 * RT_SRC_POS when possible. Optional.
782 */
783RTDECL(void *) RTMemEfDupEx(const void *pvSrc, size_t cbSrc, size_t cbExtra, const char *pszTag, RT_SRC_POS_DECL) RT_NO_THROW_PROTO;
784
785/** @def RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF
786 * Define RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF to enable electric fence new and
787 * delete operators for classes which uses the RTMEMEF_NEW_AND_DELETE_OPERATORS
788 * macro.
789 */
790/** @def RTMEMEF_NEW_AND_DELETE_OPERATORS
791 * Defines the electric fence new and delete operators for a class when
792 * RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF is define.
793 */
794/** @def RTR0MEMEF_NEW_AND_DELETE_OPERATORS_IOKIT
795 * Defines the electric fence new and delete operators for an IOKit class when
796 * RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF is defined.
797 *
798 * This differs from RTMEMEF_NEW_AND_DELETE_OPERATORS in that the memory we
799 * allocate is initialized to zero. It also assumes we don't have nothrow
800 * variants or exceptions, so there are fewer variations.
801 */
802#if defined(RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF) && !defined(RTMEM_NO_WRAP_SOME_NEW_AND_DELETE_TO_EF)
803# if defined(RT_EXCEPTIONS_ENABLED)
804# define RTMEMEF_NEW_AND_DELETE_OPERATORS() \
805 void *operator new(size_t cb) RT_THROW(std::bad_alloc) \
806 { \
807 void *pv = RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
808 if (RT_LIKELY(pv)) \
809 return pv; \
810 throw std::bad_alloc(); \
811 } \
812 void *operator new(size_t cb, const std::nothrow_t &nothrow_constant) RT_NO_THROW_DEF \
813 { \
814 NOREF(nothrow_constant); \
815 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
816 } \
817 void *operator new[](size_t cb) RT_THROW(std::bad_alloc) \
818 { \
819 void *pv = RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
820 if (RT_LIKELY(pv)) \
821 return pv; \
822 throw std::bad_alloc(); \
823 } \
824 void *operator new[](size_t cb, const std::nothrow_t &nothrow_constant) RT_NO_THROW_DEF \
825 { \
826 NOREF(nothrow_constant); \
827 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
828 } \
829 \
830 void operator delete(void *pv) RT_NO_THROW_DEF \
831 { \
832 RTMemEfFree(pv, RT_SRC_POS); \
833 } \
834 void operator delete(void *pv, const std::nothrow_t &nothrow_constant) RT_NO_THROW_DEF \
835 { \
836 NOREF(nothrow_constant); \
837 RTMemEfFree(pv, RT_SRC_POS); \
838 } \
839 void operator delete[](void *pv) RT_NO_THROW_DEF \
840 { \
841 RTMemEfFree(pv, RT_SRC_POS); \
842 } \
843 void operator delete[](void *pv, const std::nothrow_t &nothrow_constant) RT_NO_THROW_DEF \
844 { \
845 NOREF(nothrow_constant); \
846 RTMemEfFree(pv, RT_SRC_POS); \
847 } \
848 \
849 typedef int UsingElectricNewAndDeleteOperators
850# else
851# define RTMEMEF_NEW_AND_DELETE_OPERATORS() \
852 void *operator new(size_t cb) \
853 { \
854 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
855 } \
856 void *operator new(size_t cb, const std::nothrow_t &nothrow_constant) \
857 { \
858 NOREF(nothrow_constant); \
859 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
860 } \
861 void *operator new[](size_t cb) \
862 { \
863 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
864 } \
865 void *operator new[](size_t cb, const std::nothrow_t &nothrow_constant) \
866 { \
867 NOREF(nothrow_constant); \
868 return RTMemEfAlloc(cb, RTMEM_TAG, RT_SRC_POS); \
869 } \
870 \
871 void operator delete(void *pv) \
872 { \
873 RTMemEfFree(pv, RT_SRC_POS); \
874 } \
875 void operator delete(void *pv, const std::nothrow_t &nothrow_constant) \
876 { \
877 NOREF(nothrow_constant); \
878 RTMemEfFree(pv, RT_SRC_POS); \
879 } \
880 void operator delete[](void *pv) \
881 { \
882 RTMemEfFree(pv, RT_SRC_POS); \
883 } \
884 void operator delete[](void *pv, const std::nothrow_t &nothrow_constant) \
885 { \
886 NOREF(nothrow_constant); \
887 RTMemEfFree(pv, RT_SRC_POS); \
888 } \
889 \
890 typedef int UsingElectricNewAndDeleteOperators
891# endif
892# define RTR0MEMEF_NEW_AND_DELETE_OPERATORS_IOKIT() \
893 void *operator new(size_t cb) \
894 { \
895 return RTMemEfAllocZ(cb, RTMEM_TAG, RT_SRC_POS); \
896 } \
897 void *operator new[](size_t cb) \
898 { \
899 return RTMemEfAllocZ(cb, RTMEM_TAG, RT_SRC_POS); \
900 } \
901 \
902 void operator delete(void *pv) \
903 { \
904 RTMemEfFree(pv, RT_SRC_POS); \
905 } \
906 void operator delete[](void *pv) \
907 { \
908 RTMemEfFree(pv, RT_SRC_POS); \
909 } \
910 \
911 typedef int UsingElectricNewAndDeleteOperators
912#else
913# define RTMEMEF_NEW_AND_DELETE_OPERATORS() \
914 typedef int UsingDefaultNewAndDeleteOperators
915# define RTR0MEMEF_NEW_AND_DELETE_OPERATORS_IOKIT() \
916 typedef int UsingDefaultNewAndDeleteOperators
917#endif
918#ifdef DOXYGEN_RUNNING
919# define RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF
920#endif
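/* Usage sketch (illustrative only; MyClass is a hypothetical class): placing the
 * macro in the class body routes its new/delete through the electric fence heap
 * whenever RTMEM_WRAP_SOME_NEW_AND_DELETE_TO_EF is defined, and falls back to the
 * default operators otherwise.
 *
 *      class MyClass
 *      {
 *      public:
 *          RTMEMEF_NEW_AND_DELETE_OPERATORS();
 *          // ... members ...
 *      };
 */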
921
922/** @def RTMEM_WRAP_TO_EF_APIS
923 * Define RTMEM_WRAP_TO_EF_APIS to wrap RTMem APIs to RTMemEf APIs.
924 */
925#if defined(RTMEM_WRAP_TO_EF_APIS) && !defined(RTMEM_NO_WRAP_TO_EF_APIS) \
926 && ( defined(IN_RING3) || ( defined(IN_RING0) && !defined(IN_RING0_AGNOSTIC) && (defined(RT_OS_DARWIN) || 0) ) )
927# define RTMemTmpAllocTag(cb, pszTag) RTMemEfTmpAlloc((cb), (pszTag), RT_SRC_POS)
928# define RTMemTmpAllocZTag(cb, pszTag) RTMemEfTmpAllocZ((cb), (pszTag), RT_SRC_POS)
929# define RTMemTmpFree(pv) RTMemEfTmpFree((pv), RT_SRC_POS)
930# define RTMemAllocTag(cb, pszTag) RTMemEfAlloc((cb), (pszTag), RT_SRC_POS)
931# define RTMemAllocZTag(cb, pszTag) RTMemEfAllocZ((cb), (pszTag), RT_SRC_POS)
932# define RTMemAllocVarTag(cbUnaligned, pszTag) RTMemEfAllocVar((cbUnaligned), (pszTag), RT_SRC_POS)
933# define RTMemAllocZVarTag(cbUnaligned, pszTag) RTMemEfAllocZVar((cbUnaligned), (pszTag), RT_SRC_POS)
934# define RTMemReallocTag(pvOld, cbNew, pszTag) RTMemEfRealloc((pvOld), (cbNew), (pszTag), RT_SRC_POS)
935# define RTMemFree(pv) RTMemEfFree((pv), RT_SRC_POS)
936# define RTMemDupTag(pvSrc, cb, pszTag) RTMemEfDup((pvSrc), (cb), (pszTag), RT_SRC_POS)
937# define RTMemDupExTag(pvSrc, cbSrc, cbExtra, pszTag) RTMemEfDupEx((pvSrc), (cbSrc), (cbExtra), (pszTag), RT_SRC_POS)
938#endif
939#ifdef DOXYGEN_RUNNING
940# define RTMEM_WRAP_TO_EF_APIS
941#endif
942
943/**
944 * Fenced drop-in replacement for RTMemTmpAllocTag.
945 * @copydoc RTMemTmpAllocTag
946 */
947RTDECL(void *) RTMemEfTmpAllocNP(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
948
949/**
950 * Fenced drop-in replacement for RTMemTmpAllocZTag.
951 * @copydoc RTMemTmpAllocZTag
952 */
953RTDECL(void *) RTMemEfTmpAllocZNP(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
954
955/**
956 * Fenced drop-in replacement for RTMemTmpFree.
957 * @copydoc RTMemTmpFree
958 */
959RTDECL(void) RTMemEfTmpFreeNP(void *pv) RT_NO_THROW_PROTO;
960
961/**
962 * Fenced drop-in replacement for RTMemAllocTag.
963 * @copydoc RTMemAllocTag
964 */
965RTDECL(void *) RTMemEfAllocNP(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
966
967/**
968 * Fenced drop-in replacement for RTMemAllocZTag.
969 * @copydoc RTMemAllocZTag
970 */
971RTDECL(void *) RTMemEfAllocZNP(size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
972
973/**
974 * Fenced drop-in replacement for RTMemAllocVarTag
975 * @copydoc RTMemAllocVarTag
976 */
977RTDECL(void *) RTMemEfAllocVarNP(size_t cbUnaligned, const char *pszTag) RT_NO_THROW_PROTO;
978
979/**
980 * Fenced drop-in replacement for RTMemAllocZVarTag.
981 * @copydoc RTMemAllocZVarTag
982 */
983RTDECL(void *) RTMemEfAllocZVarNP(size_t cbUnaligned, const char *pszTag) RT_NO_THROW_PROTO;
984
985/**
986 * Fenced drop-in replacement for RTMemReallocTag.
987 * @copydoc RTMemReallocTag
988 */
989RTDECL(void *) RTMemEfReallocNP(void *pvOld, size_t cbNew, const char *pszTag) RT_NO_THROW_PROTO;
990
991/**
992 * Fenced drop-in replacement for RTMemFree.
993 * @copydoc RTMemFree
994 */
995RTDECL(void) RTMemEfFreeNP(void *pv) RT_NO_THROW_PROTO;
996
997/**
998 * Fenced drop-in replacement for RTMemDupTag.
999 * @copydoc RTMemDupTag
1000 */
1001RTDECL(void *) RTMemEfDupNP(const void *pvSrc, size_t cb, const char *pszTag) RT_NO_THROW_PROTO;
1002
1003/**
1004 * Fenced drop-in replacement for RTMemDupExTag.
1005 * @copydoc RTMemDupExTag
1006 */
1007RTDECL(void *) RTMemEfDupExNP(const void *pvSrc, size_t cbSrc, size_t cbExtra, const char *pszTag) RT_NO_THROW_PROTO;
1008
1009/** @} */
1010
1011RT_C_DECLS_END
1012
1013/** @} */
1014
1015
1016#endif /* !IPRT_INCLUDED_mem_h */
1017