VirtualBox

source: vbox/trunk/include/iprt/asm-mem.h@ 106559

Last change on this file since 106559 was 106559, checked in by vboxsync, 6 weeks ago

IPRT: Sort out export/import trouble with 'inline-assembly' functions implemented in C++ on Windows by introducing an RT_DECL_ASM() macro that combines RTDECL with DECLASM. jiraref:VBP-1171

  • Property svn:eol-style set to native
  • Property svn:keywords set to Author Date Id Revision
File size: 10.6 KB
/** @file
 * IPRT - Assembly Memory Functions.
 */

/*
 * Copyright (C) 2006-2024 Oracle and/or its affiliates.
 *
 * This file is part of VirtualBox base platform packages, as
 * available from https://www.virtualbox.org.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation, in version 3 of the
 * License.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see <https://www.gnu.org/licenses>.
 *
 * The contents of this file may alternatively be used under the terms
 * of the Common Development and Distribution License Version 1.0
 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
 * in the VirtualBox distribution, in which case the provisions of the
 * CDDL are applicable instead of those of the GPL.
 *
 * You may elect to license modified versions of this file under the
 * terms and conditions of either the GPL or the CDDL or both.
 *
 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
 */

#ifndef IPRT_INCLUDED_asm_mem_h
#define IPRT_INCLUDED_asm_mem_h
#ifndef RT_WITHOUT_PRAGMA_ONCE
# pragma once
#endif

#include <iprt/cdefs.h>
#include <iprt/types.h>
#include <iprt/assert.h>

#if defined(_MSC_VER) && RT_INLINE_ASM_USES_INTRIN
/* Emit the intrinsics at all optimization levels. */
# include <iprt/sanitized/intrin.h>
# if defined(RT_ARCH_X86) || defined(RT_ARCH_AMD64)
#  pragma intrinsic(__cpuid)
#  pragma intrinsic(__stosd)
#  pragma intrinsic(__stosw)
#  pragma intrinsic(__stosb)
#  ifdef RT_ARCH_AMD64
#   pragma intrinsic(__stosq)
#  endif
# elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
#  pragma intrinsic(__iso_volatile_load8)
# endif
#endif


/*
 * Undefine all symbols we have Watcom C/C++ #pragma aux'es for.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16 && defined(RT_ARCH_X86)
# include "asm-mem-watcom-x86-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32 && defined(RT_ARCH_X86)
# include "asm-mem-watcom-x86-32.h"
#endif



/** @defgroup grp_rt_asm_mem   ASM - Memory Assembly Routines
 * @ingroup grp_rt_asm
 * @{
 */

/**
 * Zeros a memory block with a 32-bit aligned size.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block. This MUST be aligned on 32-bit!
 */
#if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) || (!defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86))
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMMemZero32(volatile void RT_FAR *pv, size_t cb) RT_NOTHROW_PROTO;
#else
DECLINLINE(void) ASMMemZero32(volatile void RT_FAR *pv, size_t cb) RT_NOTHROW_DEF
{
# if RT_INLINE_ASM_USES_INTRIN
#  ifdef RT_ARCH_AMD64
    if (!(cb & 7))
        __stosq((unsigned __int64 RT_FAR *)pv, 0, cb / 8);
    else
#  endif
        __stosd((unsigned long RT_FAR *)pv, 0, cb / 4);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep stosl"
                         : "=D" (pv),
                           "=c" (cb)
                         : "0" (pv),
                           "1" (cb >> 2),
                           "a" (0)
                         : "memory");
# else
    __asm
    {
        xor     eax, eax
#  ifdef RT_ARCH_AMD64
        mov     rcx, [cb]
        shr     rcx, 2
        mov     rdi, [pv]
#  else
        mov     ecx, [cb]
        shr     ecx, 2
        mov     edi, [pv]
#  endif
        rep stosd
    }
# endif
}
#endif
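/*
 * A minimal usage sketch for ASMMemZero32 (the buffer name below is
 * illustrative, not part of IPRT); cb must be a multiple of 4:
 *
 *     uint32_t au32Tbl[64];
 *     ASMMemZero32(au32Tbl, sizeof(au32Tbl));  // sizeof(au32Tbl) is 32-bit aligned
 */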


/**
 * Fills a memory block with a 32-bit aligned size.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block. This MUST be aligned on 32-bit!
 * @param   u32     The value to fill with.
 */
#if (RT_INLINE_ASM_EXTERNAL && !RT_INLINE_ASM_USES_INTRIN) || (!defined(RT_ARCH_AMD64) && !defined(RT_ARCH_X86))
RT_ASM_DECL_PRAGMA_WATCOM(void) ASMMemFill32(volatile void RT_FAR *pv, size_t cb, uint32_t u32) RT_NOTHROW_PROTO;
#else
DECLINLINE(void) ASMMemFill32(volatile void RT_FAR *pv, size_t cb, uint32_t u32) RT_NOTHROW_DEF
{
    Assert(!(cb & 3));
    Assert(cb > 0);
# if RT_INLINE_ASM_USES_INTRIN
#  ifdef RT_ARCH_AMD64
    if (!(cb & 7))
        __stosq((unsigned __int64 RT_FAR *)pv, RT_MAKE_U64(u32, u32), cb / 8);
    else
#  endif
        __stosd((unsigned long RT_FAR *)pv, u32, cb / 4);

# elif RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("rep stosl"
                         : "=D" (pv),
                           "=c" (cb)
                         : "0" (pv),
                           "1" (cb >> 2),
                           "a" (u32)
                         : "memory");
# else
    __asm
    {
#  ifdef RT_ARCH_AMD64
        mov     rcx, [cb]
        shr     rcx, 2
        mov     rdi, [pv]
#  else
        mov     ecx, [cb]
        shr     ecx, 2
        mov     edi, [pv]
#  endif
        mov     eax, [u32]
        rep stosd
    }
# endif
}
#endif
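/*
 * A minimal usage sketch for ASMMemFill32, reusing the illustrative buffer
 * from above; every 32-bit word in the block is set to the given pattern:
 *
 *     uint32_t au32Tbl[64];
 *     ASMMemFill32(au32Tbl, sizeof(au32Tbl), UINT32_C(0xdeadbeef));
 */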


/**
 * Scans a memory block for the first non-zero byte.
 *
 * @returns Pointer to the first non-zero byte.
 * @returns NULL if all zero.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block.
 */
#if !defined(RDESKTOP) && (!defined(RT_OS_LINUX) || !defined(__KERNEL__))
RT_DECL_ASM(void RT_FAR *) ASMMemFirstNonZero(void const RT_FAR *pv, size_t cb) RT_NOTHROW_PROTO;
#else
DECLINLINE(void RT_FAR *) ASMMemFirstNonZero(void const RT_FAR *pv, size_t cb) RT_NOTHROW_DEF
{
/** @todo replace with ASMMemFirstNonZero-generic.cpp in kernel modules. */
    uint8_t const *pb = (uint8_t const RT_FAR *)pv;
    for (; cb; cb--, pb++)
        if (RT_LIKELY(*pb == 0))
        { /* likely */ }
        else
            return (void RT_FAR *)pb;
    return NULL;
}
#endif
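/*
 * A minimal usage sketch for ASMMemFirstNonZero; abBuf and pvDirty are
 * illustrative names:
 *
 *     uint8_t abBuf[256];
 *     RT_ZERO(abBuf);
 *     void RT_FAR *pvDirty = ASMMemFirstNonZero(abBuf, sizeof(abBuf));
 *     Assert(!pvDirty);  // NULL here means every byte is zero
 */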


/**
 * Checks if a memory block is all zeros.
 *
 * @returns true if zero, false if not.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block.
 *
 * @sa      ASMMemFirstNonZero
 */
DECLINLINE(bool) ASMMemIsZero(void const RT_FAR *pv, size_t cb) RT_NOTHROW_DEF
{
    return ASMMemFirstNonZero(pv, cb) == NULL;
}
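/*
 * A minimal usage sketch for ASMMemIsZero, the boolean convenience wrapper
 * over ASMMemFirstNonZero (abBuf is illustrative):
 *
 *     bool const fAllZero = ASMMemIsZero(abBuf, sizeof(abBuf));
 */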


/**
 * Checks if a memory block is filled with the specified byte, returning the
 * first mismatch.
 *
 * This is sort of an inverted memchr.
 *
 * @returns Pointer to the byte which doesn't equal u8.
 * @returns NULL if all equal to u8.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block.
 * @param   u8      The value it's supposed to be filled with.
 *
 * @remarks No alignment requirements.
 */
#if    (!defined(RT_OS_LINUX) || !defined(__KERNEL__)) \
    && (!defined(RT_OS_FREEBSD) || !defined(_KERNEL))
RT_DECL_ASM(void *) ASMMemFirstMismatchingU8(void const RT_FAR *pv, size_t cb, uint8_t u8) RT_NOTHROW_PROTO;
#else
DECLINLINE(void *) ASMMemFirstMismatchingU8(void const RT_FAR *pv, size_t cb, uint8_t u8) RT_NOTHROW_DEF
{
/** @todo replace with ASMMemFirstMismatchingU8-generic.cpp in kernel modules. */
    uint8_t const *pb = (uint8_t const RT_FAR *)pv;
    for (; cb; cb--, pb++)
        if (RT_LIKELY(*pb == u8))
        { /* likely */ }
        else
            return (void *)pb;
    return NULL;
}
#endif
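/*
 * A minimal usage sketch for ASMMemFirstMismatchingU8, e.g. checking that a
 * buffer still carries a 0xff poison fill (abPage is illustrative):
 *
 *     void *pvBad = ASMMemFirstMismatchingU8(abPage, sizeof(abPage), 0xff);
 *     AssertMsg(!pvBad, ("first mismatching byte at %p\n", pvBad));
 */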


/**
 * Checks if a memory block is filled with the specified byte.
 *
 * @returns true if all matching, false if not.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block.
 * @param   u8      The value it's supposed to be filled with.
 *
 * @remarks No alignment requirements.
 */
DECLINLINE(bool) ASMMemIsAllU8(void const RT_FAR *pv, size_t cb, uint8_t u8) RT_NOTHROW_DEF
{
    return ASMMemFirstMismatchingU8(pv, cb, u8) == NULL;
}
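/*
 * A minimal usage sketch for ASMMemIsAllU8, the boolean wrapper over
 * ASMMemFirstMismatchingU8 (abPage is illustrative):
 *
 *     bool const fStillPoisoned = ASMMemIsAllU8(abPage, sizeof(abPage), 0xff);
 */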


/**
 * Checks if a memory block is filled with the specified 32-bit value.
 *
 * This is a sort of inverted memchr.
 *
 * @returns Pointer to the first value which doesn't equal u32.
 * @returns NULL if all equal to u32.
 *
 * @param   pv      Pointer to the memory block.
 * @param   cb      Number of bytes in the block. This MUST be aligned on 32-bit!
 * @param   u32     The value it's supposed to be filled with.
 */
DECLINLINE(uint32_t RT_FAR *) ASMMemFirstMismatchingU32(void const RT_FAR *pv, size_t cb, uint32_t u32) RT_NOTHROW_DEF
{
/** @todo rewrite this in inline assembly? */
    uint32_t const RT_FAR *pu32 = (uint32_t const RT_FAR *)pv;
    for (; cb; cb -= 4, pu32++)
        if (RT_LIKELY(*pu32 == u32))
        { /* likely */ }
        else
            return (uint32_t RT_FAR *)pu32;
    return NULL;
}
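/*
 * A minimal usage sketch for ASMMemFirstMismatchingU32; cb must again be a
 * multiple of 4 (au32Tbl and pu32Bad are illustrative):
 *
 *     uint32_t RT_FAR *pu32Bad = ASMMemFirstMismatchingU32(au32Tbl, sizeof(au32Tbl), UINT32_C(0xdeadbeef));
 *     Assert(!pu32Bad);  // NULL here means every dword equals the pattern
 */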


/**
 * Probes a byte pointer for read access.
 *
 * The function will fault if the byte is not read accessible, so the idea is
 * to do the probing in a safe place, e.g. before acquiring locks and the
 * like.
 *
 * Also, this function guarantees that an eager compiler is not going
 * to optimize the probing away.
 *
 * @param   pvByte      Pointer to the byte.
 */
#if RT_INLINE_ASM_EXTERNAL_TMP_ARM
RT_ASM_DECL_PRAGMA_WATCOM(uint8_t) ASMProbeReadByte(const void RT_FAR *pvByte) RT_NOTHROW_PROTO;
#else
DECLINLINE(uint8_t) ASMProbeReadByte(const void RT_FAR *pvByte) RT_NOTHROW_DEF
{
# if defined(RT_ARCH_AMD64) || defined(RT_ARCH_X86)
    uint8_t u8;
#  if RT_INLINE_ASM_GNU_STYLE
    __asm__ __volatile__("movb %1, %0\n\t"
                         : "=q" (u8)
                         : "m" (*(const uint8_t *)pvByte));
#  else
    __asm
    {
#   ifdef RT_ARCH_AMD64
        mov     rax, [pvByte]
        mov     al, [rax]
#   else
        mov     eax, [pvByte]
        mov     al, [eax]
#   endif
        mov     [u8], al
    }
#  endif
    return u8;

# elif defined(RT_ARCH_ARM64) || defined(RT_ARCH_ARM32)
#  if RT_INLINE_ASM_USES_INTRIN
    return (uint8_t)__iso_volatile_load8((volatile const char *)pvByte);
#  else
    uint32_t u32;
    __asm__ __volatile__("Lstart_ASMProbeReadByte_%=:\n\t"
#   if defined(RT_ARCH_ARM64)
                         "ldxrb     %w[uDst], %[pMem]\n\t"
#   else
                         "ldrexb    %[uDst], %[pMem]\n\t"
#   endif
                         : [uDst] "=&r" (u32)
                         : [pMem] "Q" (*(uint8_t const *)pvByte));
    return (uint8_t)u32;
#  endif

# else
#  error "Port me"
# endif
}
#endif
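/*
 * A minimal usage sketch for ASMProbeReadByte: touch a user-supplied buffer
 * before entering a region where a page fault would be inconvenient, e.g.
 * while holding a spinlock (pvUser and hSpinlock are illustrative):
 *
 *     ASMProbeReadByte(pvUser);        // any fault happens here...
 *     RTSpinlockAcquire(hSpinlock);    // ...not inside the locked section
 *     // critical section that must not fault
 *     RTSpinlockRelease(hSpinlock);
 */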

/** @} */

/*
 * Include #pragma aux definitions for Watcom C/C++.
 */
#if defined(__WATCOMC__) && ARCH_BITS == 16 && defined(RT_ARCH_X86)
# define IPRT_ASM_WATCOM_X86_16_WITH_PRAGMAS
# undef IPRT_INCLUDED_asm_mem_watcom_x86_16_h
# include "asm-mem-watcom-x86-16.h"
#elif defined(__WATCOMC__) && ARCH_BITS == 32 && defined(RT_ARCH_X86)
# define IPRT_ASM_WATCOM_X86_32_WITH_PRAGMAS
# undef IPRT_INCLUDED_asm_mem_watcom_x86_32_h
# include "asm-mem-watcom-x86-32.h"
#endif

#endif /* !IPRT_INCLUDED_asm_mem_h */
