VirtualBox

source: vbox/trunk/include/iprt/armv8.h@ 107807

Last change on this file since 107807 was 107650, checked in by vboxsync, 5 weeks ago

VMM/CPUM,++: Made the HostFeatures match the host when targeting x86 guests on arm64 hosts. Merged and deduplicated code targeting x86 & amd64. jiraref:VBP-1470

1/** @file
2 * IPRT - ARMv8 (AArch64 and AArch32) Structures and Definitions.
3 */
4
5/*
6 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
7 *
8 * This file is part of VirtualBox base platform packages, as
9 * available from https://www.virtualbox.org.
10 *
11 * This program is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU General Public License
13 * as published by the Free Software Foundation, in version 3 of the
14 * License.
15 *
16 * This program is distributed in the hope that it will be useful, but
17 * WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * General Public License for more details.
20 *
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, see <https://www.gnu.org/licenses>.
23 *
24 * The contents of this file may alternatively be used under the terms
25 * of the Common Development and Distribution License Version 1.0
26 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
27 * in the VirtualBox distribution, in which case the provisions of the
28 * CDDL are applicable instead of those of the GPL.
29 *
30 * You may elect to license modified versions of this file under the
31 * terms and conditions of either the GPL or the CDDL or both.
32 *
33 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
34 */
35
36#ifndef IPRT_INCLUDED_armv8_h
37#define IPRT_INCLUDED_armv8_h
38#ifndef RT_WITHOUT_PRAGMA_ONCE
39# pragma once
40#endif
41
42#ifndef VBOX_FOR_DTRACE_LIB
43# include <iprt/cdefs.h>
44# ifndef RT_IN_ASSEMBLER
45# include <iprt/types.h>
46# include <iprt/assert.h>
47# endif
48# include <iprt/assertcompile.h>
49#else
50# pragma D depends_on library vbox-types.d
51#endif
52
53/** @defgroup grp_rt_armv8 ARMv8 Types and Definitions
54 * @ingroup grp_rt
55 * @{
56 */
57
58/** @name The AArch64 register encoding - deprecated.
59 * @deprecated Use ARMV8_A64_REG_XXX instead.
60 * @todo correct code and drop these remaining ones.
61 * @{ */
62#define ARMV8_AARCH64_REG_X0 0
63#define ARMV8_AARCH64_REG_X1 1
64#define ARMV8_AARCH64_REG_X2 2
65#define ARMV8_AARCH64_REG_X3 3
66#define ARMV8_AARCH64_REG_ZR 31
67/** @} */
68
69/** @name The AArch64 general purpose register encoding.
70 * @{ */
71#define ARMV8_A64_REG_X0 0
72#define ARMV8_A64_REG_X1 1
73#define ARMV8_A64_REG_X2 2
74#define ARMV8_A64_REG_X3 3
75#define ARMV8_A64_REG_X4 4
76#define ARMV8_A64_REG_X5 5
77#define ARMV8_A64_REG_X6 6
78#define ARMV8_A64_REG_X7 7
79#define ARMV8_A64_REG_X8 8
80#define ARMV8_A64_REG_X9 9
81#define ARMV8_A64_REG_X10 10
82#define ARMV8_A64_REG_X11 11
83#define ARMV8_A64_REG_X12 12
84#define ARMV8_A64_REG_X13 13
85#define ARMV8_A64_REG_X14 14
86#define ARMV8_A64_REG_X15 15
87#define ARMV8_A64_REG_X16 16
88#define ARMV8_A64_REG_X17 17
89#define ARMV8_A64_REG_X18 18
90#define ARMV8_A64_REG_X19 19
91#define ARMV8_A64_REG_X20 20
92#define ARMV8_A64_REG_X21 21
93#define ARMV8_A64_REG_X22 22
94#define ARMV8_A64_REG_X23 23
95#define ARMV8_A64_REG_X24 24
96#define ARMV8_A64_REG_X25 25
97#define ARMV8_A64_REG_X26 26
98#define ARMV8_A64_REG_X27 27
99#define ARMV8_A64_REG_X28 28
100#define ARMV8_A64_REG_X29 29
101#define ARMV8_A64_REG_X30 30
102/** @} */
103
104/** @name The AArch64 32-bit general purpose register names.
105 * @{ */
106#define ARMV8_A64_REG_W0 ARMV8_A64_REG_X0
107#define ARMV8_A64_REG_W1 ARMV8_A64_REG_X1
108#define ARMV8_A64_REG_W2 ARMV8_A64_REG_X2
109#define ARMV8_A64_REG_W3 ARMV8_A64_REG_X3
110#define ARMV8_A64_REG_W4 ARMV8_A64_REG_X4
111#define ARMV8_A64_REG_W5 ARMV8_A64_REG_X5
112#define ARMV8_A64_REG_W6 ARMV8_A64_REG_X6
113#define ARMV8_A64_REG_W7 ARMV8_A64_REG_X7
114#define ARMV8_A64_REG_W8 ARMV8_A64_REG_X8
115#define ARMV8_A64_REG_W9 ARMV8_A64_REG_X9
116#define ARMV8_A64_REG_W10 ARMV8_A64_REG_X10
117#define ARMV8_A64_REG_W11 ARMV8_A64_REG_X11
118#define ARMV8_A64_REG_W12 ARMV8_A64_REG_X12
119#define ARMV8_A64_REG_W13 ARMV8_A64_REG_X13
120#define ARMV8_A64_REG_W14 ARMV8_A64_REG_X14
121#define ARMV8_A64_REG_W15 ARMV8_A64_REG_X15
122#define ARMV8_A64_REG_W16 ARMV8_A64_REG_X16
123#define ARMV8_A64_REG_W17 ARMV8_A64_REG_X17
124#define ARMV8_A64_REG_W18 ARMV8_A64_REG_X18
125#define ARMV8_A64_REG_W19 ARMV8_A64_REG_X19
126#define ARMV8_A64_REG_W20 ARMV8_A64_REG_X20
127#define ARMV8_A64_REG_W21 ARMV8_A64_REG_X21
128#define ARMV8_A64_REG_W22 ARMV8_A64_REG_X22
129#define ARMV8_A64_REG_W23 ARMV8_A64_REG_X23
130#define ARMV8_A64_REG_W24 ARMV8_A64_REG_X24
131#define ARMV8_A64_REG_W25 ARMV8_A64_REG_X25
132#define ARMV8_A64_REG_W26 ARMV8_A64_REG_X26
133#define ARMV8_A64_REG_W27 ARMV8_A64_REG_X27
134#define ARMV8_A64_REG_W28 ARMV8_A64_REG_X28
135#define ARMV8_A64_REG_W29 ARMV8_A64_REG_X29
136#define ARMV8_A64_REG_W30 ARMV8_A64_REG_X30
137/** @} */
138
139/** @name The AArch64 NEON scalar register encoding.
140 * @{ */
141#define ARMV8_A64_REG_Q0 0
142#define ARMV8_A64_REG_Q1 1
143#define ARMV8_A64_REG_Q2 2
144#define ARMV8_A64_REG_Q3 3
145#define ARMV8_A64_REG_Q4 4
146#define ARMV8_A64_REG_Q5 5
147#define ARMV8_A64_REG_Q6 6
148#define ARMV8_A64_REG_Q7 7
149#define ARMV8_A64_REG_Q8 8
150#define ARMV8_A64_REG_Q9 9
151#define ARMV8_A64_REG_Q10 10
152#define ARMV8_A64_REG_Q11 11
153#define ARMV8_A64_REG_Q12 12
154#define ARMV8_A64_REG_Q13 13
155#define ARMV8_A64_REG_Q14 14
156#define ARMV8_A64_REG_Q15 15
157#define ARMV8_A64_REG_Q16 16
158#define ARMV8_A64_REG_Q17 17
159#define ARMV8_A64_REG_Q18 18
160#define ARMV8_A64_REG_Q19 19
161#define ARMV8_A64_REG_Q20 20
162#define ARMV8_A64_REG_Q21 21
163#define ARMV8_A64_REG_Q22 22
164#define ARMV8_A64_REG_Q23 23
165#define ARMV8_A64_REG_Q24 24
166#define ARMV8_A64_REG_Q25 25
167#define ARMV8_A64_REG_Q26 26
168#define ARMV8_A64_REG_Q27 27
169#define ARMV8_A64_REG_Q28 28
170#define ARMV8_A64_REG_Q29 29
171#define ARMV8_A64_REG_Q30 30
172#define ARMV8_A64_REG_Q31 31
173/** @} */
174
175/** @name The AArch64 NEON vector register encoding.
176 * @{ */
177#define ARMV8_A64_REG_V0 ARMV8_A64_REG_Q0
178#define ARMV8_A64_REG_V1 ARMV8_A64_REG_Q1
179#define ARMV8_A64_REG_V2 ARMV8_A64_REG_Q2
180#define ARMV8_A64_REG_V3 ARMV8_A64_REG_Q3
181#define ARMV8_A64_REG_V4 ARMV8_A64_REG_Q4
182#define ARMV8_A64_REG_V5 ARMV8_A64_REG_Q5
183#define ARMV8_A64_REG_V6 ARMV8_A64_REG_Q6
184#define ARMV8_A64_REG_V7 ARMV8_A64_REG_Q7
185#define ARMV8_A64_REG_V8 ARMV8_A64_REG_Q8
186#define ARMV8_A64_REG_V9 ARMV8_A64_REG_Q9
187#define ARMV8_A64_REG_V10 ARMV8_A64_REG_Q10
188#define ARMV8_A64_REG_V11 ARMV8_A64_REG_Q11
189#define ARMV8_A64_REG_V12 ARMV8_A64_REG_Q12
190#define ARMV8_A64_REG_V13 ARMV8_A64_REG_Q13
191#define ARMV8_A64_REG_V14 ARMV8_A64_REG_Q14
192#define ARMV8_A64_REG_V15 ARMV8_A64_REG_Q15
193#define ARMV8_A64_REG_V16 ARMV8_A64_REG_Q16
194#define ARMV8_A64_REG_V17 ARMV8_A64_REG_Q17
195#define ARMV8_A64_REG_V18 ARMV8_A64_REG_Q18
196#define ARMV8_A64_REG_V19 ARMV8_A64_REG_Q19
197#define ARMV8_A64_REG_V20 ARMV8_A64_REG_Q20
198#define ARMV8_A64_REG_V21 ARMV8_A64_REG_Q21
199#define ARMV8_A64_REG_V22 ARMV8_A64_REG_Q22
200#define ARMV8_A64_REG_V23 ARMV8_A64_REG_Q23
201#define ARMV8_A64_REG_V24 ARMV8_A64_REG_Q24
202#define ARMV8_A64_REG_V25 ARMV8_A64_REG_Q25
203#define ARMV8_A64_REG_V26 ARMV8_A64_REG_Q26
204#define ARMV8_A64_REG_V27 ARMV8_A64_REG_Q27
205#define ARMV8_A64_REG_V28 ARMV8_A64_REG_Q28
206#define ARMV8_A64_REG_V29 ARMV8_A64_REG_Q29
207#define ARMV8_A64_REG_V30 ARMV8_A64_REG_Q30
208#define ARMV8_A64_REG_V31 ARMV8_A64_REG_Q31
209/** @} */
210
211/** @name The AArch64 register 31.
212 * @note Register 31 typically refers to the zero register, but can also in
213 * select cases (depending on the instruction and opcode field) refer to the
214 * stack pointer of the current exception level. ARM typically uses \<Xn|SP\>
215 * to indicate that register 31 is taken as SP; if just \<Xn\> is used,
216 * 31 will be the zero register.
217 * @{ */
218/** The stack pointer. */
219#define ARMV8_A64_REG_SP 31
220/** The zero register. Reads as zero, writes ignored. */
221#define ARMV8_A64_REG_XZR 31
222/** The zero register, the 32-bit register name. */
223#define ARMV8_A64_REG_WZR ARMV8_A64_REG_XZR
224/** @} */
225
226/** @name AArch64 register aliases
227 * @{ */
228/** The link register is typically mapped to x30 as that's the default pick of
229 * the RET instruction. */
230#define ARMV8_A64_REG_LR ARMV8_A64_REG_X30
231/** Frame base pointer is typically mapped to x29. */
232#define ARMV8_A64_REG_BP ARMV8_A64_REG_X29
233/** @} */
234
235
236/** @name System register encoding.
237 * @{
238 */
239/** Mask for the op0 part of an MSR/MRS instruction */
240#define ARMV8_AARCH64_SYSREG_OP0_MASK (RT_BIT_32(19) | RT_BIT_32(20))
241/** Shift for the op0 part of an MSR/MRS instruction */
242#define ARMV8_AARCH64_SYSREG_OP0_SHIFT 19
243/** Returns the op0 part of the given MRS/MSR instruction. */
244#define ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP0_MASK) >> ARMV8_AARCH64_SYSREG_OP0_SHIFT)
245/** Mask for the op1 part of an MSR/MRS instruction */
246#define ARMV8_AARCH64_SYSREG_OP1_MASK (RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18))
247/** Shift for the op1 part of an MSR/MRS instruction */
248#define ARMV8_AARCH64_SYSREG_OP1_SHIFT 16
249/** Returns the op1 part of the given MRS/MSR instruction. */
250#define ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP1_MASK) >> ARMV8_AARCH64_SYSREG_OP1_SHIFT)
251/** Mask for the CRn part of an MSR/MRS instruction */
252#define ARMV8_AARCH64_SYSREG_CRN_MASK ( RT_BIT_32(12) | RT_BIT_32(13) | RT_BIT_32(14) \
253 | RT_BIT_32(15) )
254/** Shift for the CRn part of an MSR/MRS instruction */
255#define ARMV8_AARCH64_SYSREG_CRN_SHIFT 12
256/** Returns the CRn part of the given MRS/MSR instruction. */
257#define ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRN_MASK) >> ARMV8_AARCH64_SYSREG_CRN_SHIFT)
258/** Mask for the CRm part of an MSR/MRS instruction */
259#define ARMV8_AARCH64_SYSREG_CRM_MASK ( RT_BIT_32(8) | RT_BIT_32(9) | RT_BIT_32(10) \
260 | RT_BIT_32(11) )
261/** Shift for the CRm part of an MSR/MRS instruction */
262#define ARMV8_AARCH64_SYSREG_CRM_SHIFT 8
263/** Returns the CRm part of the given MRS/MSR instruction. */
264#define ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRM_MASK) >> ARMV8_AARCH64_SYSREG_CRM_SHIFT)
265/** Mask for the op2 part of an MSR/MRS instruction */
266#define ARMV8_AARCH64_SYSREG_OP2_MASK (RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7))
267/** Shift for the op2 part of an MSR/MRS instruction */
268#define ARMV8_AARCH64_SYSREG_OP2_SHIFT 5
269/** Returns the op2 part of the given MRS/MSR instruction. */
270#define ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP2_MASK) >> ARMV8_AARCH64_SYSREG_OP2_SHIFT)
271/** Mask for all system register encoding relevant fields in an MRS/MSR instruction. */
272#define ARMV8_AARCH64_SYSREG_MASK ( ARMV8_AARCH64_SYSREG_OP0_MASK | ARMV8_AARCH64_SYSREG_OP1_MASK \
273 | ARMV8_AARCH64_SYSREG_CRN_MASK | ARMV8_AARCH64_SYSREG_CRM_MASK \
274 | ARMV8_AARCH64_SYSREG_OP2_MASK)
275/** @} */
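
/*
 * Usage sketch: the getters above pull the individual encoding fields out of a
 * raw 32-bit MRS/MSR instruction word.  The function name and the dumping via
 * RTPrintf (iprt/stream.h) are illustrative only and not part of this header.
 *
 * @code
 *     static void dumpSysRegFields(uint32_t uMsrMrsInsn)
 *     {
 *         uint32_t const uOp0 = ARMV8_AARCH64_SYSREG_OP0_GET(uMsrMrsInsn);
 *         uint32_t const uOp1 = ARMV8_AARCH64_SYSREG_OP1_GET(uMsrMrsInsn);
 *         uint32_t const uCRn = ARMV8_AARCH64_SYSREG_CRN_GET(uMsrMrsInsn);
 *         uint32_t const uCRm = ARMV8_AARCH64_SYSREG_CRM_GET(uMsrMrsInsn);
 *         uint32_t const uOp2 = ARMV8_AARCH64_SYSREG_OP2_GET(uMsrMrsInsn);
 *         RTPrintf("op0=%u op1=%u CRn=%u CRm=%u op2=%u\n", uOp0, uOp1, uCRn, uCRm, uOp2);
 *     }
 * @endcode
 */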
276
277/** @name Mapping of op0:op1:CRn:CRm:op2 to a system register ID. This is
278 * IPRT specific and not part of the ARMv8 specification.
279 * @{ */
280#define ARMV8_AARCH64_SYSREG_ID_CREATE(a_Op0, a_Op1, a_CRn, a_CRm, a_Op2) \
281 (uint16_t)( (((a_Op0) & 0x3) << 14) \
282 | (((a_Op1) & 0x7) << 11) \
283 | (((a_CRn) & 0xf) << 7) \
284 | (((a_CRm) & 0xf) << 3) \
285 | ((a_Op2) & 0x7))
286/** Returns the internal system register ID from the given MRS/MSR instruction. */
287#define ARMV8_AARCH64_SYSREG_ID_FROM_MRS_MSR(a_MsrMrsInsn) \
288 ARMV8_AARCH64_SYSREG_ID_CREATE(ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn), \
289 ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn), \
290 ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn), \
291 ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn), \
292 ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn))
293/** Encodes the given system register ID in the given MSR/MRS instruction. */
294#define ARMV8_AARCH64_SYSREG_ID_ENCODE_IN_MRS_MSR(a_MsrMrsInsn, a_SysregId) \
295 ((a_MsrMrsInsn) = ((a_MsrMrsInsn) & ~ARMV8_AARCH64_SYSREG_MASK) | (a_SysregId << ARMV8_AARCH64_SYSREG_OP2_SHIFT))
296/** @} */
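
/*
 * Usage sketch: because ARMV8_AARCH64_SYSREG_ID_CREATE packs op0:op1:CRn:CRm:op2
 * into a single uint16_t, an ID derived from a trapped MRS/MSR instruction can
 * be compared directly against the ARMV8_AARCH64_SYSREG_XXX constants defined
 * below.  The function name is illustrative only.
 *
 * @code
 *     static bool isMidrEl1Access(uint32_t uMsrMrsInsn)
 *     {
 *         uint16_t const idSysReg = ARMV8_AARCH64_SYSREG_ID_FROM_MRS_MSR(uMsrMrsInsn);
 *         return idSysReg == ARMV8_AARCH64_SYSREG_MIDR_EL1;
 *     }
 * @endcode
 */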
297
298
299/** @name System register IDs.
300 * @{ */
301/** OSDTRRX_EL1 register - RW. */
302#define ARMV8_AARCH64_SYSREG_OSDTRRX_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 0, 2)
303/** MDSCR_EL1 - RW. */
304#define ARMV8_AARCH64_SYSREG_MDSCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 2, 2)
305/** DBGBVR<0..15>_EL1 register - RW. */
306#define ARMV8_AARCH64_SYSREG_DBGBVRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 4)
307/** DBGBCR<0..15>_EL1 register - RW. */
308#define ARMV8_AARCH64_SYSREG_DBGBCRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 5)
309/** DBGWVR<0..15>_EL1 register - RW. */
310#define ARMV8_AARCH64_SYSREG_DBGWVRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 6)
311/** DBGWCR<0..15>_EL1 register - RW. */
312#define ARMV8_AARCH64_SYSREG_DBGWCRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 7)
313/** MDCCINT_EL1 register - RW. */
314#define ARMV8_AARCH64_SYSREG_MDCCINT_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 2, 0)
315/** OSDTRTX_EL1 register - RW. */
316#define ARMV8_AARCH64_SYSREG_OSDTRTX_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 3, 2)
317/** OSECCR_EL1 register - RW. */
318#define ARMV8_AARCH64_SYSREG_OSECCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 6, 2)
319/** MDRAR_EL1 register - RO. */
320#define ARMV8_AARCH64_SYSREG_MDRAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 0, 0)
321/** OSLAR_EL1 register - WO. */
322#define ARMV8_AARCH64_SYSREG_OSLAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 0, 4)
323/** OSLSR_EL1 register - RO. */
324#define ARMV8_AARCH64_SYSREG_OSLSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 1, 4)
325/** OSDLR_EL1 register - RW. */
326#define ARMV8_AARCH64_SYSREG_OSDLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 3, 4)
327
328/** MIDR_EL1 register - RO. */
329#define ARMV8_AARCH64_SYSREG_MIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 0)
330/** MPIDR_EL1 register - RO. */
331#define ARMV8_AARCH64_SYSREG_MPIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 5)
332/** REVIDR_EL1 register - RO. */
333#define ARMV8_AARCH64_SYSREG_REVIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 6)
334/** ID_PFR0_EL1 register - RO. */
335#define ARMV8_AARCH64_SYSREG_ID_PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 0)
336/** ID_PFR1_EL1 register - RO. */
337#define ARMV8_AARCH64_SYSREG_ID_PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 1)
338/** ID_DFR0_EL1 register - RO. */
339#define ARMV8_AARCH64_SYSREG_ID_DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 2)
340/** ID_AFR0_EL1 register - RO. */
341#define ARMV8_AARCH64_SYSREG_ID_AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 3)
342/** ID_MMFR0_EL1 register - RO. */
343#define ARMV8_AARCH64_SYSREG_ID_MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 4)
344/** ID_MMFR1_EL1 register - RO. */
345#define ARMV8_AARCH64_SYSREG_ID_MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 5)
346/** ID_MMFR2_EL1 register - RO. */
347#define ARMV8_AARCH64_SYSREG_ID_MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 6)
348/** ID_MMFR3_EL1 register - RO. */
349#define ARMV8_AARCH64_SYSREG_ID_MMFR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 7)
350
351/** ID_ISAR0_EL1 register - RO. */
352#define ARMV8_AARCH64_SYSREG_ID_ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 0)
353/** ID_ISAR1_EL1 register - RO. */
354#define ARMV8_AARCH64_SYSREG_ID_ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 1)
355/** ID_ISAR2_EL1 register - RO. */
356#define ARMV8_AARCH64_SYSREG_ID_ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 2)
357/** ID_ISAR3_EL1 register - RO. */
358#define ARMV8_AARCH64_SYSREG_ID_ISAR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 3)
359/** ID_ISAR4_EL1 register - RO. */
360#define ARMV8_AARCH64_SYSREG_ID_ISAR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 4)
361/** ID_ISAR5_EL1 register - RO. */
362#define ARMV8_AARCH64_SYSREG_ID_ISAR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 5)
363/** ID_MMFR4_EL1 register - RO. */
364#define ARMV8_AARCH64_SYSREG_ID_MMFR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 6)
365/** ID_ISAR6_EL1 register - RO. */
366#define ARMV8_AARCH64_SYSREG_ID_ISAR6_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 7)
367
368/** MVFR0_EL1 register - RO. */
369#define ARMV8_AARCH64_SYSREG_MVFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 0)
370/** MVFR1_EL1 register - RO. */
371#define ARMV8_AARCH64_SYSREG_MVFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 1)
372/** MVFR2_EL1 register - RO. */
373#define ARMV8_AARCH64_SYSREG_MVFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 2)
374/** ID_PFR2_EL1 register - RO. */
375#define ARMV8_AARCH64_SYSREG_ID_PFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 4)
376/** ID_DFR1_EL1 register - RO. */
377#define ARMV8_AARCH64_SYSREG_ID_DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 5)
378/** ID_MMFR5_EL1 register - RO. */
379#define ARMV8_AARCH64_SYSREG_ID_MMFR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 6)
380
381/** ID_AA64PFR0_EL1 register - RO. */
382#define ARMV8_AARCH64_SYSREG_ID_AA64PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 0)
383/** ID_AA64PFR1_EL1 register - RO. */
384#define ARMV8_AARCH64_SYSREG_ID_AA64PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 1)
385/** ID_AA64ZFR0_EL1 register - RO. */
386#define ARMV8_AARCH64_SYSREG_ID_AA64ZFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 4)
387/** ID_AA64SMFR0_EL1 register - RO. */
388#define ARMV8_AARCH64_SYSREG_ID_AA64SMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 5)
389
390/** ID_AA64DFR0_EL1 register - RO. */
391#define ARMV8_AARCH64_SYSREG_ID_AA64DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 0)
392/** ID_AA64DFR1_EL1 register - RO. */
393#define ARMV8_AARCH64_SYSREG_ID_AA64DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 1)
394/** ID_AA64AFR0_EL1 register - RO. */
395#define ARMV8_AARCH64_SYSREG_ID_AA64AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 4)
396/** ID_AA64AFR1_EL1 register - RO. */
397#define ARMV8_AARCH64_SYSREG_ID_AA64AFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 5)
398
399/** ID_AA64ISAR0_EL1 register - RO. */
400#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 0)
401/** ID_AA64ISAR1_EL1 register - RO. */
402#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 1)
403/** ID_AA64ISAR2_EL1 register - RO. */
404#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 2)
405
406/** ID_AA64MMFR0_EL1 register - RO. */
407#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 0)
408/** ID_AA64MMFR1_EL1 register - RO. */
409#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 1)
410/** ID_AA64MMFR2_EL1 register - RO. */
411#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 2)
412
413/** SCTRL_EL1 register - RW. */
414#define ARMV8_AARCH64_SYSREG_SCTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 0)
415/** ACTRL_EL1 register - RW. */
416#define ARMV8_AARCH64_SYSREG_ACTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 1)
417/** CPACR_EL1 register - RW. */
418#define ARMV8_AARCH64_SYSREG_CPACR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 2)
419/** RGSR_EL1 register - RW. */
420#define ARMV8_AARCH64_SYSREG_RGSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 5)
421/** GCR_EL1 register - RW. */
422#define ARMV8_AARCH64_SYSREG_GCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 6)
423
424/** ZCR_EL1 register - RW. */
425#define ARMV8_AARCH64_SYSREG_ZCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 0)
426/** TRFCR_EL1 register - RW. */
427#define ARMV8_AARCH64_SYSREG_TRFCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 1)
428/** SMPRI_EL1 register - RW. */
429#define ARMV8_AARCH64_SYSREG_SMPRI_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 4)
430/** SMCR_EL1 register - RW. */
431#define ARMV8_AARCH64_SYSREG_SMCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 6)
432
433/** TTBR0_EL1 register - RW. */
434#define ARMV8_AARCH64_SYSREG_TTBR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 0)
435/** TTBR1_EL1 register - RW. */
436#define ARMV8_AARCH64_SYSREG_TTBR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 1)
437/** TCR_EL1 register - RW. */
438#define ARMV8_AARCH64_SYSREG_TCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 2)
439
440/** APIAKeyLo_EL1 register - RW. */
441#define ARMV8_AARCH64_SYSREG_APIAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 0)
442/** APIAKeyHi_EL1 register - RW. */
443#define ARMV8_AARCH64_SYSREG_APIAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 1)
444/** APIBKeyLo_EL1 register - RW. */
445#define ARMV8_AARCH64_SYSREG_APIBKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 2)
446/** APIBKeyHi_EL1 register - RW. */
447#define ARMV8_AARCH64_SYSREG_APIBKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 3)
448
449/** APDAKeyLo_EL1 register - RW. */
450#define ARMV8_AARCH64_SYSREG_APDAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 0)
451/** APDAKeyHi_EL1 register - RW. */
452#define ARMV8_AARCH64_SYSREG_APDAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 1)
453/** APDBKeyLo_EL1 register - RW. */
454#define ARMV8_AARCH64_SYSREG_APDBKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 2)
455/** APDBKeyHi_EL1 register - RW. */
456#define ARMV8_AARCH64_SYSREG_APDBKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 3)
457
458/** APGAKeyLo_EL1 register - RW. */
459#define ARMV8_AARCH64_SYSREG_APGAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 3, 0)
460/** APGAKeyHi_EL1 register - RW. */
461#define ARMV8_AARCH64_SYSREG_APGAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 3, 1)
462
463/** SPSR_EL1 register - RW. */
464#define ARMV8_AARCH64_SYSREG_SPSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 0)
465/** ELR_EL1 register - RW. */
466#define ARMV8_AARCH64_SYSREG_ELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 1)
467
468/** SP_EL0 register - RW. */
469#define ARMV8_AARCH64_SYSREG_SP_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 1, 0)
470
471/** PSTATE.SPSel value. */
472#define ARMV8_AARCH64_SYSREG_SPSEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 0)
473/** PSTATE.CurrentEL value. */
474#define ARMV8_AARCH64_SYSREG_CURRENTEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 2)
475/** PSTATE.PAN value. */
476#define ARMV8_AARCH64_SYSREG_PAN ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 3)
477/** PSTATE.UAO value. */
478#define ARMV8_AARCH64_SYSREG_UAO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 4)
479
480/** PSTATE.ALLINT value. */
481#define ARMV8_AARCH64_SYSREG_ALLINT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 3, 0)
482
483/** ICC_PMR_EL1 register - RW. */
484#define ARMV8_AARCH64_SYSREG_ICC_PMR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 6, 0)
485
486/** AFSR0_EL1 register - RW. */
487#define ARMV8_AARCH64_SYSREG_AFSR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 0)
488/** AFSR1_EL1 register - RW. */
489#define ARMV8_AARCH64_SYSREG_AFSR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 1)
490
491/** ESR_EL1 register - RW. */
492#define ARMV8_AARCH64_SYSREG_ESR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 2, 0)
493
494/** ERRIDR_EL1 register - RO. */
495#define ARMV8_AARCH64_SYSREG_ERRIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 0)
496/** ERRSELR_EL1 register - RW. */
497#define ARMV8_AARCH64_SYSREG_ERRSELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 1)
498
499/** FAR_EL1 register - RW. */
500#define ARMV8_AARCH64_SYSREG_FAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 6, 0, 0)
501
502/** PAR_EL1 register - RW. */
503#define ARMV8_AARCH64_SYSREG_PAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 7, 4, 0)
504
505/** PMINTENCLR_EL1 register - RW. */
506#define ARMV8_AARCH64_SYSREG_PMINTENCLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 9, 14, 2)
507
508/** MAIR_EL1 register - RW. */
509#define ARMV8_AARCH64_SYSREG_MAIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 10, 2, 0)
510
511/** AMAIR_EL1 register - RW. */
512#define ARMV8_AARCH64_SYSREG_AMAIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 10, 3, 0)
513
514/** VBAR_EL1 register - RW. */
515#define ARMV8_AARCH64_SYSREG_VBAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 0, 0)
516
517/** ICC_IAR0_EL1 register - RO. */
518#define ARMV8_AARCH64_SYSREG_ICC_IAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 0)
519/** ICC_EOIR0_EL1 register - WO. */
520#define ARMV8_AARCH64_SYSREG_ICC_EOIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 1)
521/** ICC_HPPIR0_EL1 register - RO. */
522#define ARMV8_AARCH64_SYSREG_ICC_HPPIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 2)
523/** ICC_BPR0_EL1 register - RW. */
524#define ARMV8_AARCH64_SYSREG_ICC_BPR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 3)
525/** ICC_AP0R0_EL1 register - RW. */
526#define ARMV8_AARCH64_SYSREG_ICC_AP0R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 4)
527/** ICC_AP0R1_EL1 register - RW. */
528#define ARMV8_AARCH64_SYSREG_ICC_AP0R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 5)
529/** ICC_AP0R2_EL1 register - RW. */
530#define ARMV8_AARCH64_SYSREG_ICC_AP0R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 6)
531/** ICC_AP0R3_EL1 register - RW. */
532#define ARMV8_AARCH64_SYSREG_ICC_AP0R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 7)
533
534/** ICC_AP1R0_EL1 register - RW. */
535#define ARMV8_AARCH64_SYSREG_ICC_AP1R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 0)
536/** ICC_AP1R1_EL1 register - RW. */
537#define ARMV8_AARCH64_SYSREG_ICC_AP1R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 1)
538/** ICC_AP1R2_EL1 register - RW. */
539#define ARMV8_AARCH64_SYSREG_ICC_AP1R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 2)
540/** ICC_AP1R3_EL1 register - RW. */
541#define ARMV8_AARCH64_SYSREG_ICC_AP1R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 3)
542/** ICC_NMIAR1_EL1 register - RO. */
543#define ARMV8_AARCH64_SYSREG_ICC_NMIAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 5)
544
545/** ICC_DIR_EL1 register - WO. */
546#define ARMV8_AARCH64_SYSREG_ICC_DIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 1)
547/** ICC_RPR_EL1 register - RO. */
548#define ARMV8_AARCH64_SYSREG_ICC_RPR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 3)
549/** ICC_SGI1R_EL1 register - WO. */
550#define ARMV8_AARCH64_SYSREG_ICC_SGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 5)
551/** ICC_ASGI1R_EL1 register - WO. */
552#define ARMV8_AARCH64_SYSREG_ICC_ASGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 6)
553/** ICC_SGI0R_EL1 register - WO. */
554#define ARMV8_AARCH64_SYSREG_ICC_SGI0R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 7)
555
556/** ICC_IAR1_EL1 register - RO. */
557#define ARMV8_AARCH64_SYSREG_ICC_IAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 0)
558/** ICC_EOIR1_EL1 register - WO. */
559#define ARMV8_AARCH64_SYSREG_ICC_EOIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 1)
560/** ICC_HPPIR1_EL1 register - RO. */
561#define ARMV8_AARCH64_SYSREG_ICC_HPPIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 2)
562/** ICC_BPR1_EL1 register - RW. */
563#define ARMV8_AARCH64_SYSREG_ICC_BPR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 3)
564/** ICC_CTLR_EL1 register - RW. */
565#define ARMV8_AARCH64_SYSREG_ICC_CTLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 4)
566/** ICC_SRE_EL1 register - RW. */
567#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 5)
568/** ICC_IGRPEN0_EL1 register - RW. */
569#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 6)
570/** ICC_IGRPEN1_EL1 register - RW. */
571#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 7)
572
573/** CONTEXTIDR_EL1 register - RW. */
574#define ARMV8_AARCH64_SYSREG_CONTEXTIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 13, 0, 1)
575/** TPIDR_EL1 register - RW. */
576#define ARMV8_AARCH64_SYSREG_TPIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 13, 0, 4)
577
578/** CNTKCTL_EL1 register - RW. */
579#define ARMV8_AARCH64_SYSREG_CNTKCTL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 14, 1, 0)
580
581/** CSSELR_EL1 register - RW. */
582#define ARMV8_AARCH64_SYSREG_CSSELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 2, 0, 0, 0)
583
584/** CTR_EL0 - Cache Type Register - RO. */
585#define ARMV8_AARCH64_SYSREG_CTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 0, 0, 1)
586/** DCZID_EL0 - Data Cache Zero ID Register - RO. */
587#define ARMV8_AARCH64_SYSREG_DCZID_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 0, 0, 7)
588
589
590/** NZCV - Status Flags - ??. */
591#define ARMV8_AARCH64_SYSREG_NZCV ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 0)
592/** DAIF - Interrupt Mask Bits - ??. */
593#define ARMV8_AARCH64_SYSREG_DAIF ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 1)
594/** SVCR - Streaming Vector Control Register - ??. */
595#define ARMV8_AARCH64_SYSREG_SVCR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 2)
596/** DIT - Data Independent Timing - ??. */
597#define ARMV8_AARCH64_SYSREG_DIT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 5)
598/** SSBS - Speculative Store Bypass Safe - ??. */
599#define ARMV8_AARCH64_SYSREG_SSBS ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 6)
600/** TCO - Tag Check Override - ??. */
601#define ARMV8_AARCH64_SYSREG_TCO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 7)
602
603/** FPCR register - RW. */
604#define ARMV8_AARCH64_SYSREG_FPCR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 4, 0)
605/** FPSR register - RW. */
606#define ARMV8_AARCH64_SYSREG_FPSR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 4, 1)
607
608/** PMCR_EL0 register - RW. */
609#define ARMV8_AARCH64_SYSREG_PMCR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 0)
610/** PMCNTENSET_EL0 register - RW. */
611#define ARMV8_AARCH64_SYSREG_PMCNTENSET_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 1)
612/** PMCNTENCLR_EL0 register - RW. */
613#define ARMV8_AARCH64_SYSREG_PMCNTENCLR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 2)
614/** PMOVSCLR_EL0 register - RW. */
615#define ARMV8_AARCH64_SYSREG_PMOVSCLR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 3)
616
617/** PMCCNTR_EL0 register - RW. */
618#define ARMV8_AARCH64_SYSREG_PMCCNTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 13, 0)
619
620/** PMUSERENR_EL0 register - RW. */
621#define ARMV8_AARCH64_SYSREG_PMUSERENR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 14, 0)
622
623/** PMCCFILTR_EL0 register - RW. */
624#define ARMV8_AARCH64_SYSREG_PMCCFILTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 15, 7)
625
626/** ICC_SRE_EL2 register - RW. */
627#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 9, 5)
628
629/** TPIDR_EL0 register - RW. */
630#define ARMV8_AARCH64_SYSREG_TPIDR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 13, 0, 2)
631/** TPIDRRO_EL0 register - RO. */
632#define ARMV8_AARCH64_SYSREG_TPIDRRO_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 13, 0, 3)
633
634/** CNTFRQ_EL0 register - RW. */
635#define ARMV8_AARCH64_SYSREG_CNTFRQ_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 0, 0)
636/** CNTVCT_EL0 register - RO. */
637#define ARMV8_AARCH64_SYSREG_CNTVCT_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 0, 2)
638
639/** CNTP_TVAL_EL0 register - RW. */
640#define ARMV8_AARCH64_SYSREG_CNTP_TVAL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 0)
641/** CNTP_CTL_EL0 register - RW. */
642#define ARMV8_AARCH64_SYSREG_CNTP_CTL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 1)
643/** CNTP_CVAL_EL0 register - RW. */
644#define ARMV8_AARCH64_SYSREG_CNTP_CVAL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 2)
645
646/** CNTV_CTL_EL0 register - RW. */
647#define ARMV8_AARCH64_SYSREG_CNTV_CTL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 3, 1)
648
649/** VPIDR_EL2 register - RW. */
650#define ARMV8_AARCH64_SYSREG_VPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 0, 0, 0)
651/** VMPIDR_EL2 register - RW. */
652#define ARMV8_AARCH64_SYSREG_VMPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 0, 0, 5)
653
654/** SCTLR_EL2 register - RW. */
655#define ARMV8_AARCH64_SYSREG_SCTLR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 0, 0)
656/** ACTLR_EL2 register - RW. */
657#define ARMV8_AARCH64_SYSREG_ACTLR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 0, 1)
658
659/** HCR_EL2 register - RW. */
660#define ARMV8_AARCH64_SYSREG_HCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 0)
661/** MDCR_EL2 register - RW. */
662#define ARMV8_AARCH64_SYSREG_MDCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 1)
663/** CPTR_EL2 register - RW. */
664#define ARMV8_AARCH64_SYSREG_CPTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 2)
665/** HSTR_EL2 register - RW. */
666#define ARMV8_AARCH64_SYSREG_HSTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 3)
667/** HFGRTR_EL2 register - RW. */
668#define ARMV8_AARCH64_SYSREG_HFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 4)
669/** HFGWTR_EL2 register - RW. */
670#define ARMV8_AARCH64_SYSREG_HFGWTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 5)
671/** HFGITR_EL2 register - RW. */
672#define ARMV8_AARCH64_SYSREG_HFGITR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 6)
673/** HACR_EL2 register - RW. */
674#define ARMV8_AARCH64_SYSREG_HACR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 7)
675
676/** ZCR_EL2 register - RW. */
677#define ARMV8_AARCH64_SYSREG_ZCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 0)
678/** TRFCR_EL2 register - RW. */
679#define ARMV8_AARCH64_SYSREG_TRFCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 1)
680/** HCRX_EL2 register - RW. */
681#define ARMV8_AARCH64_SYSREG_HCRX_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 2)
682
683/** SDER32_EL2 register - RW. */
684#define ARMV8_AARCH64_SYSREG_SDER32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 3, 0)
685
686/** TTBR0_EL2 register - RW. */
687#define ARMV8_AARCH64_SYSREG_TTBR0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 0)
688/** TTBR1_EL2 register - RW. */
689#define ARMV8_AARCH64_SYSREG_TTBR1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 1)
690/** TCR_EL2 register - RW. */
691#define ARMV8_AARCH64_SYSREG_TCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 2)
692
693/** VTTBR_EL2 register - RW. */
694#define ARMV8_AARCH64_SYSREG_VTTBR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 1, 0)
695/** VTCR_EL2 register - RW. */
696#define ARMV8_AARCH64_SYSREG_VTCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 1, 2)
697
698/** VNCR_EL2 register - RW. */
699#define ARMV8_AARCH64_SYSREG_VNCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 2, 0)
700
701/** VSTTBR_EL2 register - RW. */
702#define ARMV8_AARCH64_SYSREG_VSTTBR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 6, 0)
703/** VSTCR_EL2 register - RW. */
704#define ARMV8_AARCH64_SYSREG_VSTCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 6, 2)
705
706/** DACR32_EL2 register - RW. */
707#define ARMV8_AARCH64_SYSREG_DACR32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 0, 0)
708
709/** HDFGRTR_EL2 register - RW. */
710#define ARMV8_AARCH64_SYSREG_HDFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 4)
711/** HDFGWTR_EL2 register - RW. */
712#define ARMV8_AARCH64_SYSREG_HDFGWTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 5)
713/** HAFGRTR_EL2 register - RW. */
714#define ARMV8_AARCH64_SYSREG_HAFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 6)
715
716/** SPSR_EL2 register - RW. */
717#define ARMV8_AARCH64_SYSREG_SPSR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 0, 0)
718/** ELR_EL2 register - RW. */
719#define ARMV8_AARCH64_SYSREG_ELR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 0, 1)
720
721/** SP_EL1 register - RW. */
722#define ARMV8_AARCH64_SYSREG_SP_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 1, 0)
723
724/** IFSR32_EL2 register - RW. */
725#define ARMV8_AARCH64_SYSREG_IFSR32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 0, 1)
726
727/** AFSR0_EL2 register - RW. */
728#define ARMV8_AARCH64_SYSREG_AFSR0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 1, 0)
729/** AFSR1_EL2 register - RW. */
730#define ARMV8_AARCH64_SYSREG_AFSR1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 1, 1)
731
732/** ESR_EL2 register - RW. */
733#define ARMV8_AARCH64_SYSREG_ESR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 2, 0)
734/** VSESR_EL2 register - RW. */
735#define ARMV8_AARCH64_SYSREG_VSESR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 2, 3)
736
737/** FPEXC32_EL2 register - RW. */
738#define ARMV8_AARCH64_SYSREG_FPEXC32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 3, 0)
739
740/** TFSR_EL2 register - RW. */
741#define ARMV8_AARCH64_SYSREG_TFSR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 6, 0)
742
743/** FAR_EL2 register - RW. */
744#define ARMV8_AARCH64_SYSREG_FAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 6, 0, 0)
745/** HPFAR_EL2 register - RW. */
746#define ARMV8_AARCH64_SYSREG_HPFAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 6, 0, 4)
747
748/** PMSCR_EL2 register - RW. */
749#define ARMV8_AARCH64_SYSREG_PMSCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 9, 9, 0)
750
751/** MAIR_EL2 register - RW. */
752#define ARMV8_AARCH64_SYSREG_MAIR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 2, 0)
753
754/** AMAIR_EL2 register - RW. */
755#define ARMV8_AARCH64_SYSREG_AMAIR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 3, 0)
756
757/** MPAMHCR_EL2 register - RW. */
758#define ARMV8_AARCH64_SYSREG_MPAMHCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 4, 0)
759/** MPAMVPMV_EL2 register - RW. */
760#define ARMV8_AARCH64_SYSREG_MPAMVPMV_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 4, 1)
761
762/** MPAM2_EL2 register - RW. */
763#define ARMV8_AARCH64_SYSREG_MPAM2_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 5, 0)
764
765/** MPAMVPM0_EL2 register - RW. */
766#define ARMV8_AARCH64_SYSREG_MPAMVPM0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 0)
767/** MPAMVPM1_EL2 register - RW. */
768#define ARMV8_AARCH64_SYSREG_MPAMVPM1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 1)
769/** MPAMVPM2_EL2 register - RW. */
770#define ARMV8_AARCH64_SYSREG_MPAMVPM2_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 2)
771/** MPAMVPM3_EL2 register - RW. */
772#define ARMV8_AARCH64_SYSREG_MPAMVPM3_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 3)
773/** MPAMVPM4_EL2 register - RW. */
774#define ARMV8_AARCH64_SYSREG_MPAMVPM4_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 4)
775/** MPAMVPM5_EL2 register - RW. */
776#define ARMV8_AARCH64_SYSREG_MPAMVPM5_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 5)
777/** MPAMVPM6_EL2 register - RW. */
778#define ARMV8_AARCH64_SYSREG_MPAMVPM6_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 6)
779/** MPAMVPM7_EL2 register - RW. */
780#define ARMV8_AARCH64_SYSREG_MPAMVPM7_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 7)
781
782/** VBAR_EL2 register - RW. */
783#define ARMV8_AARCH64_SYSREG_VBAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 0)
784/** RVBAR_EL2 register - RW. */
785#define ARMV8_AARCH64_SYSREG_RVBAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 1)
786/** RMR_EL2 register - RW. */
787#define ARMV8_AARCH64_SYSREG_RMR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 2)
788
789/** VDISR_EL2 register - RW. */
790#define ARMV8_AARCH64_SYSREG_VDISR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 1, 1)
791
792/** CONTEXTIDR_EL2 register - RW. */
793#define ARMV8_AARCH64_SYSREG_CONTEXTIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 1)
794/** TPIDR_EL2 register - RW. */
795#define ARMV8_AARCH64_SYSREG_TPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 2)
796/** SCXTNUM_EL2 register - RW. */
797#define ARMV8_AARCH64_SYSREG_SCXTNUM_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 7)
798
799/** CNTVOFF_EL2 register - RW. */
800#define ARMV8_AARCH64_SYSREG_CNTVOFF_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 0, 3)
801/** CNTPOFF_EL2 register - RW. */
802#define ARMV8_AARCH64_SYSREG_CNTPOFF_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 0, 6)
803
804/** CNTHCTL_EL2 register - RW. */
805#define ARMV8_AARCH64_SYSREG_CNTHCTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 1, 0)
806
807/** CNTHP_TVAL_EL2 register - RW. */
808#define ARMV8_AARCH64_SYSREG_CNTHP_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 0)
809/** CNTHP_CTL_EL2 register - RW. */
810#define ARMV8_AARCH64_SYSREG_CNTHP_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 1)
811/** CNTHP_CVAL_EL2 register - RW. */
812#define ARMV8_AARCH64_SYSREG_CNTHP_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 2)
813
814/** CNTHV_TVAL_EL2 register - RW. */
815#define ARMV8_AARCH64_SYSREG_CNTHV_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 0)
816/** CNTHV_CTL_EL2 register - RW. */
817#define ARMV8_AARCH64_SYSREG_CNTHV_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 1)
818/** CNTHV_CVAL_EL2 register - RW. */
819#define ARMV8_AARCH64_SYSREG_CNTHV_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 2)
820
821/** CNTHVS_TVAL_EL2 register - RW. */
822#define ARMV8_AARCH64_SYSREG_CNTHVS_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 0)
823/** CNTHVS_CTL_EL2 register - RW. */
824#define ARMV8_AARCH64_SYSREG_CNTHVS_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 1)
825/** CNTHVS_CVAL_EL2 register - RW. */
826#define ARMV8_AARCH64_SYSREG_CNTHVS_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 2)
827
828/** CNTHPS_TVAL_EL2 register - RW. */
829#define ARMV8_AARCH64_SYSREG_CNTHPS_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 0)
830/** CNTHPS_CTL_EL2 register - RW. */
831#define ARMV8_AARCH64_SYSREG_CNTHPS_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 1)
832/** CNTHPS_CVAL_EL2 register - RW. */
833#define ARMV8_AARCH64_SYSREG_CNTHPS_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 2)
834
835/** SP_EL2 register - RW. */
836#define ARMV8_AARCH64_SYSREG_SP_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 6, 4, 1, 0)
837/** @} */
838
839
840#ifndef RT_IN_ASSEMBLER
841/**
842 * SPSR_EL2 (according to chapter C5.2.19)
843 */
844typedef union ARMV8SPSREL2
845{
846 /** The plain unsigned view. */
847 uint64_t u;
848 /** The 8-bit view. */
849 uint8_t au8[8];
850 /** The 16-bit view. */
851 uint16_t au16[4];
852 /** The 32-bit view. */
853 uint32_t au32[2];
854 /** The 64-bit view. */
855 uint64_t u64;
856} ARMV8SPSREL2;
857/** Pointer to SPSR_EL2. */
858typedef ARMV8SPSREL2 *PARMV8SPSREL2;
859/** Pointer to const SPSR_EL2. */
860typedef const ARMV8SPSREL2 *PCXARMV8SPSREL2;
861#endif /* !RT_IN_ASSEMBLER */
862
863
864/** @name SPSR_EL2 (When exception is taken from AArch64 state)
865 * @{
866 */
867/** Bit 0 - 3 - M - AArch64 Exception level and selected stack pointer. */
868#define ARMV8_SPSR_EL2_AARCH64_M (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
869#define ARMV8_SPSR_EL2_AARCH64_GET_M(a_Spsr) ((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M)
870/** Bit 0 - SP - Selected stack pointer. */
871#define ARMV8_SPSR_EL2_AARCH64_SP RT_BIT_64(0)
872#define ARMV8_SPSR_EL2_AARCH64_SP_BIT 0
873/** Bit 1 - Reserved (read as zero). */
874#define ARMV8_SPSR_EL2_AARCH64_RSVD_1 RT_BIT_64(1)
875/** Bit 2 - 3 - EL - Exception level. */
876#define ARMV8_SPSR_EL2_AARCH64_EL (RT_BIT_64(2) | RT_BIT_64(3))
877#define ARMV8_SPSR_EL2_AARCH64_EL_SHIFT 2
878#define ARMV8_SPSR_EL2_AARCH64_GET_EL(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_EL_SHIFT) & 3)
879#define ARMV8_SPSR_EL2_AARCH64_SET_EL(a_El) ((a_El) << ARMV8_SPSR_EL2_AARCH64_EL_SHIFT)
880/** Bit 4 - M[4] - Execution state (0 means AArch64, 1 means AArch32). */
881#define ARMV8_SPSR_EL2_AARCH64_M4 RT_BIT_64(4)
882#define ARMV8_SPSR_EL2_AARCH64_M4_BIT 4
883/** Bit 5 - T - T32 instruction set state (only valid when ARMV8_SPSR_EL2_AARCH64_M4 is set). */
884#define ARMV8_SPSR_EL2_AARCH64_T RT_BIT_64(5)
885#define ARMV8_SPSR_EL2_AARCH64_T_BIT 5
886/** Bit 6 - F - FIQ interrupt mask. */
887#define ARMV8_SPSR_EL2_AARCH64_F RT_BIT_64(6)
888#define ARMV8_SPSR_EL2_AARCH64_F_BIT 6
889/** Bit 7 - I - IRQ interrupt mask. */
890#define ARMV8_SPSR_EL2_AARCH64_I RT_BIT_64(7)
891#define ARMV8_SPSR_EL2_AARCH64_I_BIT 7
892/** Bit 8 - A - SError interrupt mask. */
893#define ARMV8_SPSR_EL2_AARCH64_A RT_BIT_64(8)
894#define ARMV8_SPSR_EL2_AARCH64_A_BIT 8
895/** Bit 9 - D - Debug Exception mask. */
896#define ARMV8_SPSR_EL2_AARCH64_D RT_BIT_64(9)
897#define ARMV8_SPSR_EL2_AARCH64_D_BIT 9
898/** Bit 10 - 11 - BTYPE - Branch Type indicator. */
899#define ARMV8_SPSR_EL2_AARCH64_BYTPE (RT_BIT_64(10) | RT_BIT_64(11))
900#define ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT 10
901#define ARMV8_SPSR_EL2_AARCH64_GET_BYTPE(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT) & 3)
902/** Bit 12 - SSBS - Speculative Store Bypass. */
903#define ARMV8_SPSR_EL2_AARCH64_SSBS RT_BIT_64(12)
904#define ARMV8_SPSR_EL2_AARCH64_SSBS_BIT 12
905/** Bit 13 - ALLINT - All IRQ or FIQ interrupts mask. */
906#define ARMV8_SPSR_EL2_AARCH64_ALLINT RT_BIT_64(13)
907#define ARMV8_SPSR_EL2_AARCH64_ALLINT_BIT 13
908/** Bit 14 - 19 - Reserved (read as zero). */
909#define ARMV8_SPSR_EL2_AARCH64_RSVD_14_19 ( RT_BIT_64(14) | RT_BIT_64(15) | RT_BIT_64(16) \
910 | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
911/** Bit 20 - IL - Illegal Execution State flag. */
912#define ARMV8_SPSR_EL2_AARCH64_IL RT_BIT_64(20)
913#define ARMV8_SPSR_EL2_AARCH64_IL_BIT 20
914/** Bit 21 - SS - Software Step flag. */
915#define ARMV8_SPSR_EL2_AARCH64_SS RT_BIT_64(21)
916#define ARMV8_SPSR_EL2_AARCH64_SS_BIT 21
917/** Bit 22 - PAN - Privileged Access Never flag. */
918#define ARMV8_SPSR_EL2_AARCH64_PAN RT_BIT_64(22)
919#define ARMV8_SPSR_EL2_AARCH64_PAN_BIT 22
920/** Bit 23 - UAO - User Access Override flag. */
921#define ARMV8_SPSR_EL2_AARCH64_UAO RT_BIT_64(23)
922#define ARMV8_SPSR_EL2_AARCH64_UAO_BIT 23
923/** Bit 24 - DIT - Data Independent Timing flag. */
924#define ARMV8_SPSR_EL2_AARCH64_DIT RT_BIT_64(24)
925#define ARMV8_SPSR_EL2_AARCH64_DIT_BIT 24
926/** Bit 25 - TCO - Tag Check Override flag. */
927#define ARMV8_SPSR_EL2_AARCH64_TCO RT_BIT_64(25)
928#define ARMV8_SPSR_EL2_AARCH64_TCO_BIT 25
929/** Bit 26 - 27 - Reserved (read as zero). */
930#define ARMV8_SPSR_EL2_AARCH64_RSVD_26_27 (RT_BIT_64(26) | RT_BIT_64(27))
931/** Bit 28 - V - Overflow condition flag. */
932#define ARMV8_SPSR_EL2_AARCH64_V RT_BIT_64(28)
933#define ARMV8_SPSR_EL2_AARCH64_V_BIT 28
934/** Bit 29 - C - Carry condition flag. */
935#define ARMV8_SPSR_EL2_AARCH64_C RT_BIT_64(29)
936#define ARMV8_SPSR_EL2_AARCH64_C_BIT 29
937/** Bit 30 - Z - Zero condition flag. */
938#define ARMV8_SPSR_EL2_AARCH64_Z RT_BIT_64(30)
939#define ARMV8_SPSR_EL2_AARCH64_Z_BIT 30
940/** Bit 31 - N - Negative condition flag. */
941#define ARMV8_SPSR_EL2_AARCH64_N RT_BIT_64(31)
942#define ARMV8_SPSR_EL2_AARCH64_N_BIT 31
943/** Bit 32 - 63 - Reserved (read as zero). */
944#define ARMV8_SPSR_EL2_AARCH64_RSVD_32_63 (UINT64_C(0xffffffff00000000))
945/** Checks whether the given SPSR value indicates an AArch64 execution state. */
946#define ARMV8_SPSR_EL2_IS_AARCH64_STATE(a_Spsr) (!((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M4))
947/** @} */
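
/*
 * Usage sketch: decoding the saved program state after an exception has been
 * taken to EL2.  When M[4] is clear the source was AArch64 and the exception
 * level can be read with the getters above; otherwise M[3:0] holds an AArch32
 * mode.  The function name is illustrative only.
 *
 * @code
 *     static uint8_t spsrEl2GetSourceEl(ARMV8SPSREL2 Spsr)
 *     {
 *         if (ARMV8_SPSR_EL2_IS_AARCH64_STATE(Spsr.u64))
 *             return (uint8_t)ARMV8_SPSR_EL2_AARCH64_GET_EL(Spsr.u64);
 *         return UINT8_MAX; // source was AArch32, no AArch64 EL to report
 *     }
 * @endcode
 */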
948
949/** @name AArch64 Exception levels
950 * @{ */
951/** Exception Level 0 - User mode. */
952#define ARMV8_AARCH64_EL_0 0
953/** Exception Level 1 - Supervisor mode. */
954#define ARMV8_AARCH64_EL_1 1
955/** Exception Level 2 - Hypervisor mode. */
956#define ARMV8_AARCH64_EL_2 2
957/** @} */
958
959
960/** @name ESR_EL2 (Exception Syndrome Register, EL2)
961 * @{
962 */
963/** Bit 0 - 24 - ISS - Instruction Specific Syndrome, encoding depends on the exception class. */
964#define ARMV8_ESR_EL2_ISS UINT64_C(0x1ffffff)
965#define ARMV8_ESR_EL2_ISS_GET(a_Esr) ((a_Esr) & ARMV8_ESR_EL2_ISS)
966/** Bit 25 - IL - Instruction length for synchronous exceptions (0 means 16-bit instruction, 1 means 32-bit instruction). */
967#define ARMV8_ESR_EL2_IL RT_BIT_64(25)
968#define ARMV8_ESR_EL2_IL_BIT 25
969#define ARMV8_ESR_EL2_IL_IS_32BIT(a_Esr) RT_BOOL((a_Esr) & ARMV8_ESR_EL2_IL)
970#define ARMV8_ESR_EL2_IL_IS_16BIT(a_Esr) (!((a_Esr) & ARMV8_ESR_EL2_IL))
971/** Bit 26 - 31 - EC - Exception class, indicates reason for the exception that this register holds information about. */
972#define ARMV8_ESR_EL2_EC ( RT_BIT_64(26) | RT_BIT_64(27) | RT_BIT_64(28) \
973 | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
974#define ARMV8_ESR_EL2_EC_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_EC) >> 26)
975/** Bit 32 - 36 - ISS2 - Only valid when FEAT_LS64_V and/or FEAT_LS64_ACCDATA is present. */
976#define ARMV8_ESR_EL2_ISS2 ( RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) \
977 | RT_BIT_64(35) | RT_BIT_64(36))
978#define ARMV8_ESR_EL2_ISS2_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_ISS2) >> 32)
979/** @} */
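
/*
 * Usage sketch: splitting a raw ESR_EL2 value into its exception class,
 * instruction length and instruction specific syndrome with the getters above.
 * The function name and RTPrintf dumping are illustrative only.
 *
 * @code
 *     static void decodeEsrEl2(uint64_t uEsrEl2)
 *     {
 *         uint32_t const uEc  = (uint32_t)ARMV8_ESR_EL2_EC_GET(uEsrEl2);
 *         uint32_t const cBit = ARMV8_ESR_EL2_IL_IS_32BIT(uEsrEl2) ? 32 : 16;
 *         uint64_t const uIss = ARMV8_ESR_EL2_ISS_GET(uEsrEl2);
 *         RTPrintf("EC=%#x IL=%u-bit ISS=%#RX64\n", uEc, cBit, uIss);
 *     }
 * @endcode
 */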
980
981
982/** @name ESR_EL2 Exception Classes (EC)
983 * @{ */
984/** Unknown exception reason. */
985#define ARMV8_ESR_EL2_EC_UNKNOWN UINT32_C(0)
986/** Trapped WF* instruction. */
987#define ARMV8_ESR_EL2_EC_TRAPPED_WFX UINT32_C(1)
988/** AArch32 - Trapped MCR or MRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
989#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_15 UINT32_C(3)
990/** AArch32 - Trapped MCRR or MRRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
991#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCRR_MRRC_COPROC15 UINT32_C(4)
992/** AArch32 - Trapped MCR or MRC access (coproc == 0b1110). */
993#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_14 UINT32_C(5)
994/** AArch32 - Trapped LDC or STC access. */
995#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_LDC_STC UINT32_C(6)
996/** AArch32 - Trapped access to SME, SVE, Advanced SIMD or floating point functionality. */
997#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON UINT32_C(7)
998/** AArch32 - Trapped VMRS access not reported using ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON. */
999#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_VMRS UINT32_C(8)
1000/** AArch32 - Trapped pointer authentication instruction. */
1001#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_PA_INSN UINT32_C(9)
1002/** FEAT_LS64 - Exception from LD64B or ST64B instruction. */
1003#define ARMV8_ESR_EL2_EC_LS64_EXCEPTION UINT32_C(10)
1004/** AArch32 - Trapped MRRC access (coproc == 0b1110). */
1005#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MRRC_COPROC14 UINT32_C(12)
1006/** FEAT_BTI - Branch Target Exception. */
1007#define ARMV8_ESR_EL2_EC_BTI_BRANCH_TARGET_EXCEPTION UINT32_C(13)
1008/** Illegal Execution State. */
1009#define ARMV8_ESR_EL2_ILLEGAL_EXECUTION_STATE UINT32_C(14)
1010/** AArch32 - SVC instruction execution. */
1011#define ARMV8_ESR_EL2_EC_AARCH32_SVC_INSN UINT32_C(17)
1012/** AArch32 - HVC instruction execution. */
1013#define ARMV8_ESR_EL2_EC_AARCH32_HVC_INSN UINT32_C(18)
1014/** AArch32 - SMC instruction execution. */
1015#define ARMV8_ESR_EL2_EC_AARCH32_SMC_INSN UINT32_C(19)
1016/** AArch64 - SVC instruction execution. */
1017#define ARMV8_ESR_EL2_EC_AARCH64_SVC_INSN UINT32_C(21)
1018/** AArch64 - HVC instruction execution. */
1019#define ARMV8_ESR_EL2_EC_AARCH64_HVC_INSN UINT32_C(22)
1020/** AArch64 - SMC instruction execution. */
1021#define ARMV8_ESR_EL2_EC_AARCH64_SMC_INSN UINT32_C(23)
1022/** AArch64 - Trapped MSR, MRS or System instruction execution in AArch64 state. */
1023#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_SYS_INSN UINT32_C(24)
1024/** FEAT_SVE - Access to SVE functionality not reported using ARMV8_ESR_EL2_EC_UNKNOWN. */
1025#define ARMV8_ESR_EL2_EC_SVE_TRAPPED UINT32_C(25)
1026/** FEAT_PAuth and FEAT_NV - Trapped ERET, ERETAA or ERETAB instruction. */
1027#define ARMV8_ESR_EL2_EC_PAUTH_NV_TRAPPED_ERET_ERETAA_ERETAB UINT32_C(26)
1028/** FEAT_TME - Exception from TSTART instruction. */
1029#define ARMV8_ESR_EL2_EC_TME_TSTART_INSN_EXCEPTION UINT32_C(27)
1030/** FEAT_FPAC - Exception from a Pointer Authentication instruction failure. */
1031#define ARMV8_ESR_EL2_EC_FPAC_PA_INSN_FAILURE_EXCEPTION UINT32_C(28)
1032/** FEAT_SME - Access to SME functionality trapped. */
1033#define ARMV8_ESR_EL2_EC_SME_TRAPPED_SME_ACCESS UINT32_C(29)
1034/** FEAT_RME - Exception from Granule Protection Check. */
1035#define ARMV8_ESR_EL2_EC_RME_GRANULE_PROT_CHECK_EXCEPTION UINT32_C(30)
1036/** Instruction Abort from a lower Exception level. */
1037#define ARMV8_ESR_EL2_INSN_ABORT_FROM_LOWER_EL UINT32_C(32)
1038/** Instruction Abort from the same Exception level. */
1039#define ARMV8_ESR_EL2_INSN_ABORT_FROM_EL2 UINT32_C(33)
1040/** PC alignment fault exception. */
1041#define ARMV8_ESR_EL2_PC_ALIGNMENT_EXCEPTION UINT32_C(34)
1042/** Data Abort from a lower Exception level. */
1043#define ARMV8_ESR_EL2_DATA_ABORT_FROM_LOWER_EL UINT32_C(36)
1044/** Data Abort from the same Exception level (or access associated with VNCR_EL2). */
1045#define ARMV8_ESR_EL2_DATA_ABORT_FROM_EL2 UINT32_C(37)
1046/** SP alignment fault exception. */
1047#define ARMV8_ESR_EL2_SP_ALIGNMENT_EXCEPTION UINT32_C(38)
1048/** FEAT_MOPS - Memory Operation Exception. */
1049#define ARMV8_ESR_EL2_EC_MOPS_EXCEPTION UINT32_C(39)
1050/** AArch32 - Trapped floating point exception. */
1051#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_FP_EXCEPTION UINT32_C(40)
1052/** AArch64 - Trapped floating point exception. */
1053#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_FP_EXCEPTION UINT32_C(44)
1054/** SError interrupt. */
1055#define ARMV8_ESR_EL2_SERROR_INTERRUPT UINT32_C(47)
1056/** Breakpoint Exception from a lower Exception level. */
1057#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_LOWER_EL UINT32_C(48)
1058/** Breakpoint Exception from the same Exception level. */
1059#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_EL2 UINT32_C(49)
1060/** Software Step Exception from a lower Exception level. */
1061#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_LOWER_EL UINT32_C(50)
1062/** Software Step Exception from the same Exception level. */
1063#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_EL2 UINT32_C(51)
1064/** Watchpoint Exception from a lower Exception level. */
1065#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_LOWER_EL UINT32_C(52)
1066/** Watchpoint Exception from the same Exception level. */
1067#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_EL2 UINT32_C(53)
1068/** AArch32 - BKPT instruction execution. */
1069#define ARMV8_ESR_EL2_EC_AARCH32_BKPT_INSN UINT32_C(56)
1070/** AArch32 - Vector Catch exception. */
1071#define ARMV8_ESR_EL2_EC_AARCH32_VEC_CATCH_EXCEPTION UINT32_C(58)
1072/** AArch64 - BRK instruction execution. */
1073#define ARMV8_ESR_EL2_EC_AARCH64_BRK_INSN UINT32_C(60)
1074/** @} */
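
/*
 * Usage sketch: an EL2 exception handler typically switches on the exception
 * class to pick the right decoder for the ISS bits.  Only two classes are
 * shown and the called handlers are hypothetical, not part of this header.
 *
 * @code
 *     static int handleExitOnEc(uint64_t uEsrEl2)
 *     {
 *         uint64_t const uIss = ARMV8_ESR_EL2_ISS_GET(uEsrEl2);
 *         switch (ARMV8_ESR_EL2_EC_GET(uEsrEl2))
 *         {
 *             case ARMV8_ESR_EL2_DATA_ABORT_FROM_LOWER_EL:
 *                 return handleDataAbort(uIss);      // hypothetical handler
 *             case ARMV8_ESR_EL2_EC_AARCH64_HVC_INSN:
 *                 return handleHypercall(uIss);      // hypothetical handler
 *             default:
 *                 return VERR_NOT_SUPPORTED;
 *         }
 *     }
 * @endcode
 */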
1075
1076
1077/** @name ISS encoding for Data Abort exceptions.
1078 * @{ */
1079/** Bit 0 - 5 - DFSC - Data Fault Status Code. */
1080#define ARMV8_EC_ISS_DATA_ABRT_DFSC ( RT_BIT_32(0) | RT_BIT_32(1) | RT_BIT_32(2) \
1081 | RT_BIT_32(3) | RT_BIT_32(4) | RT_BIT_32(5))
1082#define ARMV8_EC_ISS_DATA_ABRT_DFSC_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_DFSC)
1083/** Bit 6 - WnR - Write not Read. */
1084#define ARMV8_EC_ISS_DATA_ABRT_WNR RT_BIT_32(6)
1085#define ARMV8_EC_ISS_DATA_ABRT_WNR_BIT 6
1086/** Bit 7 - S1PTW - Stage 2 translation fault for an access made for a stage 1 translation table walk. */
1087#define ARMV8_EC_ISS_DATA_ABRT_S1PTW RT_BIT_32(7)
1088#define ARMV8_EC_ISS_DATA_ABRT_S1PTW_BIT 7
1089/** Bit 8 - CM - Cache maintenance instruction. */
1090#define ARMV8_EC_ISS_DATA_ABRT_CM RT_BIT_32(8)
1091#define ARMV8_EC_ISS_DATA_ABRT_CM_BIT 8
1092/** Bit 9 - EA - External abort type. */
1093#define ARMV8_EC_ISS_DATA_ABRT_EA RT_BIT_32(9)
1094#define ARMV8_EC_ISS_DATA_ABRT_EA_BIT 9
1095/** Bit 10 - FnV - FAR not Valid. */
1096#define ARMV8_EC_ISS_DATA_ABRT_FNV RT_BIT_32(10)
1097#define ARMV8_EC_ISS_DATA_ABRT_FNV_BIT 10
1098/** Bit 11 - 12 - LST - Load/Store Type. */
1099#define ARMV8_EC_ISS_DATA_ABRT_LST (RT_BIT_32(11) | RT_BIT_32(12))
1100#define ARMV8_EC_ISS_DATA_ABRT_LST_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_LST) >> 11)
1101/** Bit 13 - VNCR - Fault came from use of VNCR_EL2 register by EL1 code. */
1102#define ARMV8_EC_ISS_DATA_ABRT_VNCR RT_BIT_32(13)
1103#define ARMV8_EC_ISS_DATA_ABRT_VNCR_BIT 13
1104/** Bit 14 - AR - Acquire/Release semantics. */
1105#define ARMV8_EC_ISS_DATA_ABRT_AR RT_BIT_32(14)
1106#define ARMV8_EC_ISS_DATA_ABRT_AR_BIT 14
1107/** Bit 15 - SF - Sixty Four bit general-purpose register transfer (only when ISV is 1). */
1108#define ARMV8_EC_ISS_DATA_ABRT_SF RT_BIT_32(15)
1109#define ARMV8_EC_ISS_DATA_ABRT_SF_BIT 15
1110/** Bit 16 - 20 - SRT - Syndrome Register Transfer. */
1111#define ARMV8_EC_ISS_DATA_ABRT_SRT ( RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18) \
1112 | RT_BIT_32(19) | RT_BIT_32(20))
1113#define ARMV8_EC_ISS_DATA_ABRT_SRT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SRT) >> 16)
1114/** Bit 21 - SSE - Syndrome Sign Extend. */
1115#define ARMV8_EC_ISS_DATA_ABRT_SSE RT_BIT_32(21)
1116#define ARMV8_EC_ISS_DATA_ABRT_SSE_BIT 21
1117/** Bit 22 - 23 - SAS - Syndrome Access Size. */
1118#define ARMV8_EC_ISS_DATA_ABRT_SAS (RT_BIT_32(22) | RT_BIT_32(23))
1119#define ARMV8_EC_ISS_DATA_ABRT_SAS_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SAS) >> 22)
1120/** Bit 24 - ISV - Instruction Syndrome Valid. */
1121#define ARMV8_EC_ISS_DATA_ABRT_ISV RT_BIT_32(24)
1122#define ARMV8_EC_ISS_DATA_ABRT_ISV_BIT 24
1123/** @} */
1124
1125
1126/** @name Data Fault Status Code (DFSC).
1127 * @{ */
1128/** Address size fault, level 0 of translation or translation table base register. */
1129#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL0 0
1130/** Address size fault, level 1. */
1131#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL1 1
1132/** Address size fault, level 2. */
1133#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL2 2
1134/** Address size fault, level 3. */
1135#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL3 3
1136/** Translation fault, level 0. */
1137#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL0 4
1138/** Translation fault, level 1. */
1139#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL1 5
1140/** Translation fault, level 2. */
1141#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL2 6
1142/** Translation fault, level 3. */
1143#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL3 7
1144/** FEAT_LPA2 - Access flag fault, level 0. */
1145#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL0 8
1146/** Access flag fault, level 1. */
1147#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL1 9
1148/** Access flag fault, level 2. */
1149#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL2 10
1150/** Access flag fault, level 3. */
1151#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL3 11
1152/** FEAT_LPA2 - Permission fault, level 0. */
1153#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL0 12
1154/** Permission fault, level 1. */
1155#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL1 13
1156/** Permission fault, level 2. */
1157#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL2 14
1158/** Permission fault, level 3. */
1159#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL3 15
1160/** Synchronous External abort, not a translation table walk or hardware update of translation table. */
1161#define ARMV8_EC_ISS_DATA_ABRT_DFSC_SYNC_EXTERNAL 16
1162/** FEAT_MTE2 - Synchronous Tag Check Fault. */
1163#define ARMV8_EC_ISS_DATA_ABRT_DFSC_MTE2_SYNC_TAG_CHK_FAULT 17
1164/** @todo Do the rest (lazy developer). */
1165/** @} */
1166
1167
1168/** @name SAS encoding.
1169 * @{ */
1170/** Byte access. */
1171#define ARMV8_EC_ISS_DATA_ABRT_SAS_BYTE 0
1172/** Halfword access (uint16_t). */
1173#define ARMV8_EC_ISS_DATA_ABRT_SAS_HALFWORD 1
1174/** Word access (uint32_t). */
1175#define ARMV8_EC_ISS_DATA_ABRT_SAS_WORD 2
1176/** Doubleword access (uint64_t). */
1177#define ARMV8_EC_ISS_DATA_ABRT_SAS_DWORD 3
1178/** @} */
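/*
 * Usage sketch (added illustration, not part of the original header; the helper name is made
 * up): deriving the access size of a Data Abort from its ISS using the ISV/SAS accessors
 * above.  SAS, SRT, SSE and SF are only meaningful when ISV is set.
 *
 *     static uint32_t armv8ExampleDataAbortAccessSize(uint32_t uIss)
 *     {
 *         if (!(uIss & ARMV8_EC_ISS_DATA_ABRT_ISV))                    // No valid instruction syndrome.
 *             return 0;
 *         return UINT32_C(1) << ARMV8_EC_ISS_DATA_ABRT_SAS_GET(uIss); // 1, 2, 4 or 8 bytes.
 *     }
 */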
1179
1180
1181/** @name ISS encoding for trapped MSR, MRS or System instruction exceptions.
1182 * @{ */
1183/** Bit 0 - Direction flag. */
1184#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION RT_BIT_32(0)
1185#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION_IS_READ(a_Iss) RT_BOOL((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION)
1186/** Bit 1 - 4 - CRm value from the instruction. */
1187#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM ( RT_BIT_32(1) | RT_BIT_32(2) | RT_BIT_32(3) \
1188 | RT_BIT_32(4))
1189#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM) >> 1)
1190/** Bit 5 - 9 - Rt value from the instruction. */
1191#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT ( RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7) \
1192 | RT_BIT_32(8) | RT_BIT_32(9))
1193#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT) >> 5)
1194/** Bit 10 - 13 - CRn value from the instruction. */
1195#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN ( RT_BIT_32(10) | RT_BIT_32(11) | RT_BIT_32(12) \
1196 | RT_BIT_32(13))
1197#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN) >> 10)
1198/** Bit 14 - 16 - Op1 value from the instruction. */
1199#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1 (RT_BIT_32(14) | RT_BIT_32(15) | RT_BIT_32(16))
1200#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1) >> 14)
1201/** Bit 17 - 19 - Op2 value from the instruction. */
1202#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2 (RT_BIT_32(17) | RT_BIT_32(18) | RT_BIT_32(19))
1203#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2) >> 17)
1204/** Bit 20 - 21 - Op0 value from the instruction. */
1205#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0 (RT_BIT_32(20) | RT_BIT_32(21))
1206#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0) >> 20)
1207/** Bit 22 - 24 - Reserved. */
1208#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RSVD (RT_BIT_32(22) | RT_BIT_32(23) | RT_BIT_32(24))
1209/** @} */
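/*
 * Usage sketch (added illustration, not part of the original header; the helper name and the
 * Op0:Op1:CRn:CRm:Op2 packing are made up for the example): reassembling the trapped system
 * register encoding from the ISS.  _DIRECTION_IS_READ() distinguishes MRS from MSR and
 * _RT_GET() gives the general purpose register involved.
 *
 *     static uint32_t armv8ExampleTrappedSysRegId(uint32_t uIss)
 *     {
 *         return   (ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0_GET(uIss) << 14)
 *                | (ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1_GET(uIss) << 11)
 *                | (ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN_GET(uIss) <<  7)
 *                | (ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM_GET(uIss) <<  3)
 *                |  ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2_GET(uIss);
 *     }
 */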
1210
1211
1212/** @name ISS encoding for trapped HVC instruction exceptions.
1213 * @{ */
1214/** Bit 0 - 15 - imm16 value of the instruction. */
1215#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM (UINT16_C(0xffff))
1216#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM)
1217/** @} */
1218
1219
1220/** @name TCR_EL1 - Translation Control Register (EL1)
1221 * @{
1222 */
1223/** Bit 0 - 5 - Size offset of the memory region addressed by TTBR0_EL1 (2^(64-T0SZ)). */
1224#define ARMV8_TCR_EL1_AARCH64_T0SZ ( RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) \
1225 | RT_BIT_64(3) | RT_BIT_64(4) | RT_BIT_64(5))
1226#define ARMV8_TCR_EL1_AARCH64_T0SZ_GET(a_Tcr)                   ((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T0SZ)
1227/** Bit 7 - Translation table walk disable for translations using TTBR0_EL1. */
1228#define ARMV8_TCR_EL1_AARCH64_EPD0 RT_BIT_64(7)
1229#define ARMV8_TCR_EL1_AARCH64_EPD0_BIT 7
1230/** Bit 8 - 9 - Inner cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
1231#define ARMV8_TCR_EL1_AARCH64_IRGN0 (RT_BIT_64(8) | RT_BIT_64(9))
1232#define ARMV8_TCR_EL1_AARCH64_IRGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN0) >> 8)
1233/** Non cacheable. */
1234# define ARMV8_TCR_EL1_AARCH64_IRGN0_NON_CACHEABLE 0
1235/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1236# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_WA 1
1237/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1238# define ARMV8_TCR_EL1_AARCH64_IRGN0_WT_RA_NWA 2
1239/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1240# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_NWA 3
1241/** Bit 10 - 11 - Outer cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
1242#define ARMV8_TCR_EL1_AARCH64_ORGN0 (RT_BIT_64(10) | RT_BIT_64(11))
1243#define ARMV8_TCR_EL1_AARCH64_ORGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN0) >> 10)
1244/** Non cacheable. */
1245# define ARMV8_TCR_EL1_AARCH64_ORGN0_NON_CACHEABLE 0
1246/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1247# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_WA 1
1248/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1249# define ARMV8_TCR_EL1_AARCH64_ORGN0_WT_RA_NWA 2
1250/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1251# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_NWA 3
1252/** Bit 12 - 13 - Shareability attribute memory associated with translation table walks using TTBR0_EL1. */
1253#define ARMV8_TCR_EL1_AARCH64_SH0 (RT_BIT_64(12) | RT_BIT_64(13))
1254#define ARMV8_TCR_EL1_AARCH64_SH0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH0) >> 12)
1255/** Non shareable. */
1256# define ARMV8_TCR_EL1_AARCH64_SH0_NON_SHAREABLE 0
1257/** Invalid value. */
1258# define ARMV8_TCR_EL1_AARCH64_SH0_INVALID 1
1259/** Outer Shareable. */
1260# define ARMV8_TCR_EL1_AARCH64_SH0_OUTER_SHAREABLE 2
1261/** Inner Shareable. */
1262# define ARMV8_TCR_EL1_AARCH64_SH0_INNER_SHAREABLE 3
1263/** Bit 14 - 15 - Translation Granule Size for TTBR0_EL1. */
1264#define ARMV8_TCR_EL1_AARCH64_TG0 (RT_BIT_64(14) | RT_BIT_64(15))
1265#define ARMV8_TCR_EL1_AARCH64_TG0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG0) >> 14)
1266/** Invalid granule size. */
1267# define ARMV8_TCR_EL1_AARCH64_TG0_INVALID 0
1268/** 16KiB granule size. */
1269# define ARMV8_TCR_EL1_AARCH64_TG0_16KB 1
1270/** 4KiB granule size. */
1271# define ARMV8_TCR_EL1_AARCH64_TG0_4KB 2
1272/** 64KiB granule size. */
1273# define ARMV8_TCR_EL1_AARCH64_TG0_64KB 3
1274/** Bit 16 - 21 - Size offset of the memory region addressed by TTBR1_EL1 (2^(64-T1SZ)). */
1275#define ARMV8_TCR_EL1_AARCH64_T1SZ ( RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) \
1276 | RT_BIT_64(19) | RT_BIT_64(20) | RT_BIT_64(21))
1277#define ARMV8_TCR_EL1_AARCH64_T1SZ_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T1SZ) >> 16)
1278/** Bit 22 - Selects whether TTBR0_EL1 (0) or TTBR1_EL1 (1) defines the ASID. */
1279#define ARMV8_TCR_EL1_AARCH64_A1 RT_BIT_64(22)
1280#define ARMV8_TCR_EL1_AARCH64_A1_BIT 22
1281/** Bit 23 - Translation table walk disable for translations using TTBR1_EL1. */
1282#define ARMV8_TCR_EL1_AARCH64_EPD1 RT_BIT_64(23)
1283#define ARMV8_TCR_EL1_AARCH64_EPD1_BIT 23
1284/** Bit 24 - 25 - Inner cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
1285#define ARMV8_TCR_EL1_AARCH64_IRGN1 (RT_BIT_64(24) | RT_BIT_64(25))
1286#define ARMV8_TCR_EL1_AARCH64_IRGN1_GET(a_Tcr)                  (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN1) >> 24)
1287/** Non cacheable. */
1288# define ARMV8_TCR_EL1_AARCH64_IRGN1_NON_CACHEABLE 0
1289/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1290# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_WA 1
1291/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1292# define ARMV8_TCR_EL1_AARCH64_IRGN1_WT_RA_NWA 2
1293/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1294# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_NWA 3
1295/** Bit 26 - 27 - Outer cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
1296#define ARMV8_TCR_EL1_AARCH64_ORGN1 (RT_BIT_64(26) | RT_BIT_64(27))
1297#define ARMV8_TCR_EL1_AARCH64_ORGN1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN1) >> 26)
1298/** Non cacheable. */
1299# define ARMV8_TCR_EL1_AARCH64_ORGN1_NON_CACHEABLE 0
1300/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1301# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_WA 1
1302/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1303# define ARMV8_TCR_EL1_AARCH64_ORGN1_WT_RA_NWA 2
1304/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1305# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_NWA 3
1306/** Bit 28 - 29 - Shareability attribute memory associated with translation table walks using TTBR1_EL1. */
1307#define ARMV8_TCR_EL1_AARCH64_SH1 (RT_BIT_64(28) | RT_BIT_64(29))
1308#define ARMV8_TCR_EL1_AARCH64_SH1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH1) >> 28)
1309/** Non shareable. */
1310# define ARMV8_TCR_EL1_AARCH64_SH1_NON_SHAREABLE 0
1311/** Invalid value. */
1312# define ARMV8_TCR_EL1_AARCH64_SH1_INVALID 1
1313/** Outer Shareable. */
1314# define ARMV8_TCR_EL1_AARCH64_SH1_OUTER_SHAREABLE 2
1315/** Inner Shareable. */
1316# define ARMV8_TCR_EL1_AARCH64_SH1_INNER_SHAREABLE 3
1317/** Bit 30 - 31 - Translation Granule Size for TTBR1_EL1. */
1318#define ARMV8_TCR_EL1_AARCH64_TG1 (RT_BIT_64(30) | RT_BIT_64(31))
1319#define ARMV8_TCR_EL1_AARCH64_TG1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG1) >> 30)
1320/** Invalid granule size. */
1321# define ARMV8_TCR_EL1_AARCH64_TG1_INVALID 0
1322/** 16KiB granule size. */
1323# define ARMV8_TCR_EL1_AARCH64_TG1_16KB 1
1324/** 4KiB granule size. */
1325# define ARMV8_TCR_EL1_AARCH64_TG1_4KB 2
1326/** 64KiB granule size. */
1327# define ARMV8_TCR_EL1_AARCH64_TG1_64KB 3
1328/** Bit 32 - 34 - Intermediate Physical Address Size. */
1329#define ARMV8_TCR_EL1_AARCH64_IPS (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34))
1330#define ARMV8_TCR_EL1_AARCH64_IPS_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IPS) >> 32)
1331/** IPA - 32 bits, 4GiB. */
1332# define ARMV8_TCR_EL1_AARCH64_IPS_32BITS 0
1333/** IPA - 36 bits, 64GiB. */
1334# define ARMV8_TCR_EL1_AARCH64_IPS_36BITS 1
1335/** IPA - 40 bits, 1TiB. */
1336# define ARMV8_TCR_EL1_AARCH64_IPS_40BITS 2
1337/** IPA - 42 bits, 4TiB. */
1338# define ARMV8_TCR_EL1_AARCH64_IPS_42BITS 3
1339/** IPA - 44 bits, 16TiB. */
1340# define ARMV8_TCR_EL1_AARCH64_IPS_44BITS 4
1341/** IPA - 48 bits, 256TiB. */
1342# define ARMV8_TCR_EL1_AARCH64_IPS_48BITS 5
1343/** IPA - 52 bits, 4PiB. */
1344# define ARMV8_TCR_EL1_AARCH64_IPS_52BITS 6
1345/** Bit 36 - ASID Size (0 - 8 bit, 1 - 16 bit). */
1346#define ARMV8_TCR_EL1_AARCH64_AS RT_BIT_64(36)
1347#define ARMV8_TCR_EL1_AARCH64_AS_BIT 36
1348/** Bit 37 - Top Byte Ignore for translations from TTBR0_EL1. */
1349#define ARMV8_TCR_EL1_AARCH64_TBI0 RT_BIT_64(37)
1350#define ARMV8_TCR_EL1_AARCH64_TBI0_BIT 37
1351/** Bit 38 - Top Byte Ignore for translations from TTBR1_EL1. */
1352#define ARMV8_TCR_EL1_AARCH64_TBI1 RT_BIT_64(38)
1353#define ARMV8_TCR_EL1_AARCH64_TBI1_BIT 38
1354/** Bit 39 - Hardware Access flag update in stage 1 translations from EL0 and EL1. */
1355#define ARMV8_TCR_EL1_AARCH64_HA RT_BIT_64(39)
1356#define ARMV8_TCR_EL1_AARCH64_HA_BIT 39
1357/** Bit 40 - Hardware management of dirty state in stage 1 translations from EL0 and EL1. */
1358#define ARMV8_TCR_EL1_AARCH64_HD RT_BIT_64(40)
1359#define ARMV8_TCR_EL1_AARCH64_HD_BIT 40
1360/** Bit 41 - Hierarchical Permission Disables for TTBR0_EL1. */
1361#define ARMV8_TCR_EL1_AARCH64_HPD0 RT_BIT_64(41)
1362#define ARMV8_TCR_EL1_AARCH64_HPD0_BIT 41
1363/** Bit 42 - Hierarchical Permission Disables for TTBR1_EL1. */
1364#define ARMV8_TCR_EL1_AARCH64_HPD1 RT_BIT_64(42)
1365#define ARMV8_TCR_EL1_AARCH64_HPD1_BIT 42
1366/** Bit 43 - Bit[59] Hardware Use for translations using TTBR0_EL1. */
1367#define ARMV8_TCR_EL1_AARCH64_HWU059 RT_BIT_64(43)
1368#define ARMV8_TCR_EL1_AARCH64_HWU059_BIT 43
1369/** Bit 44 - Bit[60] Hardware Use for translations using TTBR0_EL1. */
1370#define ARMV8_TCR_EL1_AARCH64_HWU060 RT_BIT_64(44)
1371#define ARMV8_TCR_EL1_AARCH64_HWU060_BIT 44
1372/** Bit 45 - Bit[61] Hardware Use for translations using TTBR0_EL1. */
1373#define ARMV8_TCR_EL1_AARCH64_HWU061 RT_BIT_64(45)
1374#define ARMV8_TCR_EL1_AARCH64_HWU061_BIT 45
1375/** Bit 46 - Bit[62] Hardware Use for translations using TTBR0_EL1. */
1376#define ARMV8_TCR_EL1_AARCH64_HWU062 RT_BIT_64(46)
1377#define ARMV8_TCR_EL1_AARCH64_HWU062_BIT 46
1378/** Bit 47 - Bit[59] Hardware Use for translations using TTBR1_EL1. */
1379#define ARMV8_TCR_EL1_AARCH64_HWU159 RT_BIT_64(47)
1380#define ARMV8_TCR_EL1_AARCH64_HWU159_BIT 47
1381/** Bit 48 - Bit[60] Hardware Use for translations using TTBR1_EL1. */
1382#define ARMV8_TCR_EL1_AARCH64_HWU160 RT_BIT_64(48)
1383#define ARMV8_TCR_EL1_AARCH64_HWU160_BIT 48
1384/** Bit 49 - Bit[61] Hardware Use for translations using TTBR1_EL1. */
1385#define ARMV8_TCR_EL1_AARCH64_HWU161 RT_BIT_64(49)
1386#define ARMV8_TCR_EL1_AARCH64_HWU161_BIT 49
1387/** Bit 50 - Bit[62] Hardware Use for translations using TTBR1_EL1. */
1388#define ARMV8_TCR_EL1_AARCH64_HWU162 RT_BIT_64(50)
1389#define ARMV8_TCR_EL1_AARCH64_HWU162_BIT 50
1390/** Bit 51 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR0_EL1. */
1391#define ARMV8_TCR_EL1_AARCH64_TBID0 RT_BIT_64(51)
1392#define ARMV8_TCR_EL1_AARCH64_TBID0_BIT 51
1393/** Bit 52 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR1_EL1. */
1394#define ARMV8_TCR_EL1_AARCH64_TBID1 RT_BIT_64(52)
1395#define ARMV8_TCR_EL1_AARCH64_TBID1_BIT 52
1396/** Bit 53 - Non fault translation table walk disable for stage 1 translations using TTBR0_EL1. */
1397#define ARMV8_TCR_EL1_AARCH64_NFD0 RT_BIT_64(53)
1398#define ARMV8_TCR_EL1_AARCH64_NFD0_BIT 53
1399/** Bit 54 - Non fault translation table walk disable for stage 1 translations using TTBR1_EL1. */
1400#define ARMV8_TCR_EL1_AARCH64_NFD1 RT_BIT_64(54)
1401#define ARMV8_TCR_EL1_AARCH64_NFD1_BIT 54
1402/** Bit 55 - Faulting Control for Unprivileged access to any address translated by TTBR0_EL1. */
1403#define ARMV8_TCR_EL1_AARCH64_E0PD0 RT_BIT_64(55)
1404#define ARMV8_TCR_EL1_AARCH64_E0PD0_BIT 55
1405/** Bit 56 - Faulting Control for Unprivileged access to any address translated by TTBR1_EL1. */
1406#define ARMV8_TCR_EL1_AARCH64_E0PD1 RT_BIT_64(56)
1407#define ARMV8_TCR_EL1_AARCH64_E0PD1_BIT 56
1408/** Bit 57 - TCMA0 */
1409#define ARMV8_TCR_EL1_AARCH64_TCMA0 RT_BIT_64(57)
1410#define ARMV8_TCR_EL1_AARCH64_TCMA0_BIT 57
1411/** Bit 58 - TCMA1 */
1412#define ARMV8_TCR_EL1_AARCH64_TCMA1 RT_BIT_64(58)
1413#define ARMV8_TCR_EL1_AARCH64_TCMA1_BIT 58
1414/** Bit 59 - DS - Enables 52-bit output addresses with the 4KiB and 16KiB translation granules (FEAT_LPA2). */
1415#define ARMV8_TCR_EL1_AARCH64_DS RT_BIT_64(59)
1416#define ARMV8_TCR_EL1_AARCH64_DS_BIT 59
1417/** @} */
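/*
 * Usage sketch (added illustration, not part of the original header; the helper name is made
 * up): deriving the TTBR0 address space properties from a TCR_EL1 value.  The region covered
 * by TTBR0_EL1 spans 2^(64 - T0SZ) bytes, and TG0 selects the translation granule.
 *
 *     static void armv8ExampleDecodeTcrTtbr0(uint64_t uTcr, uint32_t *pcAddrBits, uint32_t *pcbGranule)
 *     {
 *         *pcAddrBits = 64 - (uint32_t)ARMV8_TCR_EL1_AARCH64_T0SZ_GET(uTcr);
 *         switch (ARMV8_TCR_EL1_AARCH64_TG0_GET(uTcr))
 *         {
 *             case ARMV8_TCR_EL1_AARCH64_TG0_4KB:  *pcbGranule = _4K;  break;
 *             case ARMV8_TCR_EL1_AARCH64_TG0_16KB: *pcbGranule = _16K; break;
 *             case ARMV8_TCR_EL1_AARCH64_TG0_64KB: *pcbGranule = _64K; break;
 *             default:                             *pcbGranule = 0;    break; // Reserved encoding.
 *         }
 *     }
 */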
1418
1419
1420/** @name TTBR<0,1>_EL1 - Translation Table Base Register <0,1> (EL1)
1421 * @{
1422 */
1423/** Bit 0 - Common not Private (FEAT_TTCNP). */
1424#define ARMV8_TTBR_EL1_AARCH64_CNP RT_BIT_64(0)
1425#define ARMV8_TTBR_EL1_AARCH64_CNP_BIT 0
1426/** Bit 1 - 47 - Translation table base address. */
1427#define ARMV8_TTBR_EL1_AARCH64_BADDR UINT64_C(0x0000fffffffffffe)
1428#define ARMV8_TTBR_EL1_AARCH64_BADDR_GET(a_Ttbr) ((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_BADDR)
1429/** Bit 48 - 63 - ASID. */
1430#define ARMV8_TTBR_EL1_AARCH64_ASID UINT64_C(0xffff000000000000)
1431#define ARMV8_TTBR_EL1_AARCH64_ASID_GET(a_Ttbr) (((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_ASID) >> 48)
1432/** @} */
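/*
 * Usage sketch (added illustration, not part of the original header; the helper name is made
 * up): splitting a TTBR0_EL1/TTBR1_EL1 value into the translation table base and the ASID.
 *
 *     static void armv8ExampleSplitTtbr(uint64_t uTtbr, uint64_t *pPhysTable, uint16_t *pAsid)
 *     {
 *         *pPhysTable = ARMV8_TTBR_EL1_AARCH64_BADDR_GET(uTtbr);  // Bit 0 (CnP) masked off.
 *         *pAsid      = (uint16_t)ARMV8_TTBR_EL1_AARCH64_ASID_GET(uTtbr);
 *     }
 */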
1433
1434
1435/** @name MDSCR_EL1 - Monitor Debug System Control Register (EL1).
1436 * @{ */
1437/** Bit 0 - SS - Software step control bit. */
1438#define ARMV8_MDSCR_EL1_AARCH64_SS RT_BIT_64(0)
1439#define ARMV8_MDSCR_EL1_AARCH64_SS_BIT 0
1440/** @} */
1441
1442
1443/** @name ICC_PMR_EL1 - Interrupt Controller Interrupt Priority Mask Register
1444 * @{ */
1445/** Bit 0 - 7 - Priority - The priority mask level for the CPU interface. */
1446#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY UINT64_C(0xff)
1447#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_GET(a_Pmr) ((a_Pmr) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1448#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_SET(a_Prio) ((a_Prio) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1449/** @} */
1450
1451
1452/** @name ICC_BPR0_EL1 - The group priority for Group 0 interrupts.
1453 * @{ */
1454/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1455#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1456#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_GET(a_Bpr0) ((a_Bpr0) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1457#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1458/** @} */
1459
1460
1461/** @name ICC_BPR1_EL1 - The group priority for Group 1 interrupts.
1462 * @{ */
1463/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1464#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1465#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_GET(a_Bpr1) ((a_Bpr1) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1466#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1467/** @} */
1468
1469
1470/** @name ICC_CTLR_EL1 - Interrupt Controller Control Register (EL1)
1471 * @{ */
1472/** Bit 0 - Common Binary Pointer Register - RW. */
1473#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR RT_BIT_64(0)
1474#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR_BIT 0
1475/** Bit 1 - EOI mode for current security state, when set ICC_DIR_EL1 provides interrupt deactivation functionality - RW. */
1476#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE RT_BIT_64(1)
1477#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE_BIT 1
1478/** Bit 7 - Priority Mask Hint Enable - RW (under circumstances). */
1479#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE RT_BIT_64(7)
1480#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE_BIT 7
1481/** Bit 8 - 10 - Priority bits - RO. */
1482#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10))
1483#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS_SET(a_PriBits) (((a_PriBits) << 8) & ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS)
1484/** Bit 11 - 13 - Interrupt identifier bits - RO. */
1485#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS (RT_BIT_64(11) | RT_BIT_64(12) | RT_BIT_64(13))
1486#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_SET(a_IdBits) (((a_IdBits) << 11) & ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS)
1487/** INTIDS are 16-bit wide. */
1488# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_16BITS 0
1489/** INTIDS are 24-bit wide. */
1490# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_24BITS 1
1491/** Bit 14 - SEI Supported - RO. */
1492#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS RT_BIT_64(14)
1493#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS_BIT 14
1494/** Bit 15 - Affinity 3 Valid - RO. */
1495#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V RT_BIT_64(15)
1496#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V_BIT 15
1497/** Bit 18 - Range Selector Support - RO. */
1498#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS RT_BIT_64(18)
1499#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS_BIT 18
1500/** Bit 19 - Extended INTID range supported - RO. */
1501#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE RT_BIT_64(19)
1502#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE_BIT 19
1503/** All RW bits. */
1504#define ARMV8_ICC_CTLR_EL1_RW (ARMV8_ICC_CTLR_EL1_AARCH64_CBPR | ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE | ARMV8_ICC_CTLR_EL1_AARCH64_PMHE)
1505/** All RO bits (including Res0). */
1506#define ARMV8_ICC_CTLR_EL1_RO                           (~ARMV8_ICC_CTLR_EL1_RW)
1507/** @} */
1508
1509
1510/** @name ICC_IGRPEN0_EL1 - Interrupt Controller Interrupt Group 0 Enable Register (EL1)
1511 * @{ */
1512/** Bit 0 - Enables Group 0 interrupts for the current Security state. */
1513#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE RT_BIT_64(0)
1514#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE_BIT 0
1515/** @} */
1516
1517
1518/** @name ICC_IGRPEN1_EL1 - Interrupt Controller Interrupt Group 1 Enable Register (EL1)
1519 * @{ */
1520/** Bit 0 - Enables Group 1 interrupts for the current Security state. */
1521#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE RT_BIT_64(0)
1522#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE_BIT 0
1523/** @} */
1524
1525
1526/** @name ICC_SGI1R_EL1 - Interrupt Controller Software Generated Interrupt Group 1 Register (EL1) - WO
1527 * @{ */
1528/** Bit 0 - 15 - Target List, the set of PEs for which SGI interrupts will be generated. */
1529#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST (UINT64_C(0x000000000000ffff))
1530#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST_GET(a_Sgi1R) ((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST)
1531/** Bit 16 - 23 - The affinity 1 of the affinity path of the cluster for which SGI interrupts will be generated. */
1532#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1                        (UINT64_C(0x0000000000ff0000))
1533#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1) >> 16)
1534/** Bit 24 - 27 - The INTID of the SGI. */
1535#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1536#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_INTID) >> 24)
1537/* Bit 28 - 31 - Reserved. */
1538/** Bit 32 - 39 - The affinity 2 of the affinity path of the cluster for which SGI interrupts will be generated. */
1539#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2 (UINT64_C(0x000000ff00000000))
1540#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2) >> 32)
1541/** Bit 40 - Interrupt Routing Mode - 1 means interrupts to all PEs in the system excluding the generating PE. */
1542#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM RT_BIT_64(40)
1543#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM_BIT 40
1544/* Bit 41 - 43 - Reserved. */
1545/** Bit 44 - 47 - Range selector. */
1546#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1547#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_RS) >> 44)
1548/** Bit 48 - 55 - The affinity 3 of the affinity path of the cluster for which SGI interrupts will be generated. */
1549#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3 (UINT64_C(0x00ff000000000000))
1550#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3) >> 48)
1551/* Bit 56 - 63 - Reserved. */
1552/** @} */
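/*
 * Usage sketch (added illustration, not part of the original header; the helper name is made
 * up and Aff2/Aff3 and the range selector are ignored for brevity): checking whether a value
 * written to ICC_SGI1R_EL1 targets a given PE when emulating a GICv3 distributor.
 *
 *     static bool armv8ExampleSgi1RTargetsPe(uint64_t uSgi1R, uint8_t uPeAff1, uint8_t idPeInCluster)
 *     {
 *         if (uSgi1R & ARMV8_ICC_SGI1R_EL1_AARCH64_IRM)                 // Broadcast: all PEs except the generating one.
 *             return true;
 *         if (ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1_GET(uSgi1R) != uPeAff1)  // Different cluster.
 *             return false;
 *         return RT_BOOL(ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST_GET(uSgi1R) & RT_BIT_64(idPeInCluster));
 *     }
 */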
1553
1554
1555/** @name CNTV_CTL_EL0 - Counter-timer Virtual Timer Control register.
1556 * @{ */
1557/** Bit 0 - Enables the timer. */
1558#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE RT_BIT_64(0)
1559#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE_BIT 0
1560/** Bit 1 - Timer interrupt mask bit. */
1561#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK RT_BIT_64(1)
1562#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK_BIT 1
1563/** Bit 2 - Timer status bit. */
1564#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS RT_BIT_64(2)
1565#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS_BIT 2
1566/** @} */
1567
1568
1569/** @name OSLAR_EL1 - OS Lock Access Register.
1570 * @{ */
1571/** Bit 0 - The OS Lock status bit. */
1572#define ARMV8_OSLAR_EL1_AARCH64_OSLK RT_BIT_64(0)
1573#define ARMV8_OSLAR_EL1_AARCH64_OSLK_BIT 0
1574/** @} */
1575
1576
1577/** @name OSLSR_EL1 - OS Lock Status Register.
1578 * @{ */
1579/** Bit 0 - OSLM[0] Bit 0 of OS Lock model implemented. */
1580#define ARMV8_OSLSR_EL1_AARCH64_OSLM0 RT_BIT_64(0)
1581#define ARMV8_OSLSR_EL1_AARCH64_OSLM0_BIT 0
1582/** Bit 1 - The OS Lock status bit. */
1583#define ARMV8_OSLSR_EL1_AARCH64_OSLK RT_BIT_64(1)
1584#define ARMV8_OSLSR_EL1_AARCH64_OSLK_BIT 1
1585/** Bit 2 - Not 32-bit access. */
1586#define ARMV8_OSLSR_EL1_AARCH64_NTT RT_BIT_64(2)
1587#define ARMV8_OSLSR_EL1_AARCH64_NTT_BIT 2
1588/** Bit 3 - OSLM[1] Bit 1 of OS Lock model implemented. */
1589#define ARMV8_OSLSR_EL1_AARCH64_OSLM1 RT_BIT_64(3)
1590#define ARMV8_OSLSR_EL1_AARCH64_OSLM1_BIT 3
1591/** @} */
1592
1593
1594/** @name ID_AA64ISAR0_EL1 - AArch64 Instruction Set Attribute Register 0.
1595 * @{ */
1596/* Bit 0 - 3 - Reserved. */
1597/** Bit 4 - 7 - Indicates support for AES instructions in AArch64 state. */
1598#define ARMV8_ID_AA64ISAR0_EL1_AES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1599#define ARMV8_ID_AA64ISAR0_EL1_AES_SHIFT 4
1600/** No AES instructions implemented. */
1601# define ARMV8_ID_AA64ISAR0_EL1_AES_NOT_IMPL 0
1602/** AES, AESD, AESMC and AESIMC instructions implemented (FEAT_AES). */
1603# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED 1
1604/** AES, AESD, AESMC and AESIMC instructions implemented and PMULL and PMULL2 instructions operating on 64bit source elements (FEAT_PMULL). */
1605# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED_PMULL 2
1606/** Bit 8 - 11 - Indicates support for SHA1 instructions in AArch64 state. */
1607#define ARMV8_ID_AA64ISAR0_EL1_SHA1_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1608#define ARMV8_ID_AA64ISAR0_EL1_SHA1_SHIFT 8
1609/** No SHA1 instructions implemented. */
1610# define ARMV8_ID_AA64ISAR0_EL1_SHA1_NOT_IMPL 0
1611/** SHA1C, SHA1P, SHA1M, SHA1H, SHA1SU0 and SHA1SU1 instructions implemented (FEAT_SHA1). */
1612# define ARMV8_ID_AA64ISAR0_EL1_SHA1_SUPPORTED 1
1613/** Bit 12 - 15 - Indicates support for SHA2 instructions in AArch64 state. */
1614#define ARMV8_ID_AA64ISAR0_EL1_SHA2_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1615#define ARMV8_ID_AA64ISAR0_EL1_SHA2_SHIFT 12
1616/** No SHA2 instructions implemented. */
1617# define ARMV8_ID_AA64ISAR0_EL1_SHA2_NOT_IMPL 0
1618/** SHA256 instructions implemented (FEAT_SHA256). */
1619# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256 1
1620/** SHA256 and SHA512 instructions implemented (FEAT_SHA512). */
1621# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256_SHA512 2
1622/** Bit 16 - 19 - Indicates support for CRC32 instructions in AArch64 state. */
1623#define ARMV8_ID_AA64ISAR0_EL1_CRC32_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1624#define ARMV8_ID_AA64ISAR0_EL1_CRC32_SHIFT 16
1625/** No CRC32 instructions implemented. */
1626# define ARMV8_ID_AA64ISAR0_EL1_CRC32_NOT_IMPL 0
1627/** CRC32 instructions implemented (FEAT_CRC32). */
1628# define ARMV8_ID_AA64ISAR0_EL1_CRC32_SUPPORTED 1
1629/** Bit 20 - 23 - Indicates support for Atomic instructions in AArch64 state. */
1630#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1631#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SHIFT 20
1632/** No Atomic instructions implemented. */
1633# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_NOT_IMPL 0
1634/** Atomic instructions implemented (FEAT_LSE). */
1635# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SUPPORTED 2
1636/** Bit 24 - 27 - Indicates support for TME instructions. */
1637#define ARMV8_ID_AA64ISAR0_EL1_TME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1638#define ARMV8_ID_AA64ISAR0_EL1_TME_SHIFT 24
1639/** TME instructions are not implemented. */
1640# define ARMV8_ID_AA64ISAR0_EL1_TME_NOT_IMPL 0
1641/** TME instructions are implemented. */
1642# define ARMV8_ID_AA64ISAR0_EL1_TME_SUPPORTED 1
1643/** Bit 28 - 31 - Indicates support for SQRDMLAH and SQRDMLSH instructions in AArch64 state. */
1644#define ARMV8_ID_AA64ISAR0_EL1_RDM_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1645#define ARMV8_ID_AA64ISAR0_EL1_RDM_SHIFT 28
1646/** No RDMA instructions implemented. */
1647# define ARMV8_ID_AA64ISAR0_EL1_RDM_NOT_IMPL 0
1648/** SQRDMLAH and SQRDMLSH instructions implemented (FEAT_RDM). */
1649# define ARMV8_ID_AA64ISAR0_EL1_RDM_SUPPORTED 1
1650/** Bit 32 - 35 - Indicates support for SHA3 instructions in AArch64 state. */
1651#define ARMV8_ID_AA64ISAR0_EL1_SHA3_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1652#define ARMV8_ID_AA64ISAR0_EL1_SHA3_SHIFT 32
1653/** No SHA3 instructions implemented. */
1654# define ARMV8_ID_AA64ISAR0_EL1_SHA3_NOT_IMPL 0
1655/** EOR3, RAX1, XAR and BCAX instructions implemented (FEAT_SHA3). */
1656# define ARMV8_ID_AA64ISAR0_EL1_SHA3_SUPPORTED 1
1657/** Bit 36 - 39 - Indicates support for SM3 instructions in AArch64 state. */
1658#define ARMV8_ID_AA64ISAR0_EL1_SM3_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1659#define ARMV8_ID_AA64ISAR0_EL1_SM3_SHIFT 36
1660/** No SM3 instructions implemented. */
1661# define ARMV8_ID_AA64ISAR0_EL1_SM3_NOT_IMPL 0
1662/** SM3 instructions implemented (FEAT_SM3). */
1663# define ARMV8_ID_AA64ISAR0_EL1_SM3_SUPPORTED 1
1664/** Bit 40 - 43 - Indicates support for SM4 instructions in AArch64 state. */
1665#define ARMV8_ID_AA64ISAR0_EL1_SM4_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1666#define ARMV8_ID_AA64ISAR0_EL1_SM4_SHIFT 40
1667/** No SM4 instructions implemented. */
1668# define ARMV8_ID_AA64ISAR0_EL1_SM4_NOT_IMPL 0
1669/** SM4 instructions implemented (FEAT_SM4). */
1670# define ARMV8_ID_AA64ISAR0_EL1_SM4_SUPPORTED 1
1671/** Bit 44 - 47 - Indicates support for Dot Product instructions in AArch64 state. */
1672#define ARMV8_ID_AA64ISAR0_EL1_DP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1673#define ARMV8_ID_AA64ISAR0_EL1_DP_SHIFT 44
1674/** No Dot Product instructions implemented. */
1675# define ARMV8_ID_AA64ISAR0_EL1_DP_NOT_IMPL 0
1676/** UDOT and SDOT instructions implemented (FEAT_DotProd). */
1677# define ARMV8_ID_AA64ISAR0_EL1_DP_SUPPORTED 1
1678/** Bit 48 - 51 - Indicates support for FMLAL and FMLSL instructions. */
1679#define ARMV8_ID_AA64ISAR0_EL1_FHM_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1680#define ARMV8_ID_AA64ISAR0_EL1_FHM_SHIFT 48
1681/** FMLAL and FMLSL instructions are not implemented. */
1682# define ARMV8_ID_AA64ISAR0_EL1_FHM_NOT_IMPL 0
1683/** FMLAL and FMLSL instructions are implemented (FEAT_FHM). */
1684# define ARMV8_ID_AA64ISAR0_EL1_FHM_SUPPORTED 1
1685/** Bit 52 - 55 - Indicates support for flag manipulation instructions. */
1686#define ARMV8_ID_AA64ISAR0_EL1_TS_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1687#define ARMV8_ID_AA64ISAR0_EL1_TS_SHIFT 52
1688/** No flag manipulation instructions implemented. */
1689# define ARMV8_ID_AA64ISAR0_EL1_TS_NOT_IMPL 0
1690/** CFINV, RMIF, SETF16 and SETF8 instructions are implemented (FEAT_FlagM). */
1691# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED 1
1692/** CFINV, RMIF, SETF16, SETF8, AXFLAG and XAFLAG instructions are implemented (FEAT_FlagM2). */
1693# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED_2 2
1694/** Bit 56 - 59 - Indicates support for Outer Shareable and TLB range maintenance instructions. */
1695#define ARMV8_ID_AA64ISAR0_EL1_TLB_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1696#define ARMV8_ID_AA64ISAR0_EL1_TLB_SHIFT 56
1697/** Outer Shareable and TLB range maintenance instructions are not implemented. */
1698# define ARMV8_ID_AA64ISAR0_EL1_TLB_NOT_IMPL 0
1699/** Outer Shareable TLB maintenance instructions are implemented (FEAT_TLBIOS). */
1700# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED 1
1701/** Outer Shareable and TLB range maintenance instructions are implemented (FEAT_TLBIRANGE). */
1702# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED_RANGE 2
1703/** Bit 60 - 63 - Indicates support for Random Number instructions in AArch64 state. */
1704#define ARMV8_ID_AA64ISAR0_EL1_RNDR_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1705#define ARMV8_ID_AA64ISAR0_EL1_RNDR_SHIFT 60
1706/** No Random Number instructions implemented. */
1707# define ARMV8_ID_AA64ISAR0_EL1_RNDR_NOT_IMPL 0
1708/** RNDR and RNDRRS registers are implemented (FEAT_RNG). */
1709# define ARMV8_ID_AA64ISAR0_EL1_RNDR_SUPPORTED 1
1710/** @} */
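/*
 * Usage sketch (added illustration, not part of the original header; the helper name is made
 * up): the ID_AA64ISAR0_EL1 fields above all follow the same 4-bit MASK/SHIFT pattern, so a
 * feature check reads like this (for AES the higher value implies the lower one):
 *
 *     static bool armv8ExampleHasAes(uint64_t uIdAa64Isar0)
 *     {
 *         uint64_t const uAes = (uIdAa64Isar0 & ARMV8_ID_AA64ISAR0_EL1_AES_MASK) >> ARMV8_ID_AA64ISAR0_EL1_AES_SHIFT;
 *         return uAes >= ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED; // FEAT_PMULL (2) implies FEAT_AES (1).
 *     }
 */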
1711
1712
1713/** @name ID_AA64ISAR1_EL1 - AArch64 Instruction Set Attribute Register 1.
1714 * @{ */
1715/** Bit 0 - 3 - Indicates support for Data Persistence writeback instructions in AArch64 state. */
1716#define ARMV8_ID_AA64ISAR1_EL1_DPB_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1717#define ARMV8_ID_AA64ISAR1_EL1_DPB_SHIFT 0
1718/** DC CVAP not supported. */
1719# define ARMV8_ID_AA64ISAR1_EL1_DPB_NOT_IMPL 0
1720/** DC CVAP supported (FEAT_DPB). */
1721# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED 1
1722/** DC CVAP and DC CVADP supported (FEAT_DPB2). */
1723# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED_2 2
1724/** Bit 4 - 7 - Indicates whether the QARMA5 algorithm is implemented in the PE for address authentication. */
1725#define ARMV8_ID_AA64ISAR1_EL1_APA_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1726#define ARMV8_ID_AA64ISAR1_EL1_APA_SHIFT 4
1727/** Address Authentication using the QARMA5 algorithm is not implemented. */
1728# define ARMV8_ID_AA64ISAR1_EL1_APA_NOT_IMPL 0
1729/** Address Authentication using the QARMA5 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA5). */
1730# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH 1
1731/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA5). */
1732# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_EPAC 2
1733/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA5). */
1734# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH2 3
1735/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA5). */
1736# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPAC 4
1737/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA5). */
1738# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPACCOMBINE 5
1739/** Bit 8 - 11 - Indicates whether an implementation defined algorithm is implemented in the PE for address authentication. */
1740#define ARMV8_ID_AA64ISAR1_EL1_API_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1741#define ARMV8_ID_AA64ISAR1_EL1_API_SHIFT 8
1742/** Address Authentication using an implementation defined algorithm is not implemented. */
1743# define ARMV8_ID_AA64ISAR1_EL1_API_NOT_IMPL 0
1744/** Address Authentication using an implementation defined algorithm is implemented (FEAT_PAuth, FEAT_PACIMP). */
1745# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH 1
1746/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACIMP). */
1747# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_EPAC 2
1748/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACIMP). */
1749# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH2 3
1750/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACIMP). */
1751# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPAC 4
1752/** Address Authentication using an implementation defined algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACIMP). */
1753# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPACCOMBINE 5
1754/** Bit 12 - 15 - Indicates support for JavaScript conversion from double precision floating-point values to integers in AArch64 state. */
1755#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1756#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SHIFT 12
1757/** No FJCVTZS instruction implemented. */
1758# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_NOT_IMPL 0
1759/** FJCVTZS instruction implemented (FEAT_JSCVT). */
1760# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SUPPORTED 1
1761/** Bit 16 - 19 - Indicates support for the FCMLA and FCADD complex number instructions in AArch64 state. */
1762#define ARMV8_ID_AA64ISAR1_EL1_FCMA_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1763#define ARMV8_ID_AA64ISAR1_EL1_FCMA_SHIFT 16
1764/** No FCMLA and FCADD instructions implemented. */
1765# define ARMV8_ID_AA64ISAR1_EL1_FCMA_NOT_IMPL 0
1766/** FCMLA and FCADD instructions implemented (FEAT_FCMA). */
1767# define ARMV8_ID_AA64ISAR1_EL1_FCMA_SUPPORTED 1
1768/** Bit 20 - 23 - Indicates support for weaker release consistency, RCpc, based model. */
1769#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1770#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SHIFT 20
1771/** No RCpc instructions implemented. */
1772# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_NOT_IMPL 0
1773/** The no offset LDAPR, LDAPRB and LDAPRH instructions are implemented (FEAT_LRCPC). */
1774# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED 1
1775/** The no offset LDAPR, LDAPRB and LDAPRH instructions and the unscaled offset LDAPUR and STLUR instructions are implemented (FEAT_LRCPC2). */
1776# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED_2 2
1777/** Bit 24 - 27 - Indicates whether the QARMA5 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1778#define ARMV8_ID_AA64ISAR1_EL1_GPA_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1779#define ARMV8_ID_AA64ISAR1_EL1_GPA_SHIFT 24
1780/** Generic Authentication using the QARMA5 algorithm is not implemented. */
1781# define ARMV8_ID_AA64ISAR1_EL1_GPA_NOT_IMPL 0
1782/** Generic Authentication using the QARMA5 algorithm is implemented (FEAT_PACQARMA5). */
1783# define ARMV8_ID_AA64ISAR1_EL1_GPA_SUPPORTED 1
1784/** Bit 28 - 31 - Indicates whether an implementation defined algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1785#define ARMV8_ID_AA64ISAR1_EL1_GPI_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1786#define ARMV8_ID_AA64ISAR1_EL1_GPI_SHIFT 28
1787/** Generic Authentication using an implementation defined algorithm is not implemented. */
1788# define ARMV8_ID_AA64ISAR1_EL1_GPI_NOT_IMPL 0
1789/** Generic Authentication using an implementation defined algorithm is implemented (FEAT_PACIMP). */
1790# define ARMV8_ID_AA64ISAR1_EL1_GPI_SUPPORTED 1
1791/** Bit 32 - 35 - Indicates support for the FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions in AArch64 state. */
1792#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1793#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SHIFT 32
1794/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are not implemented. */
1795# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_NOT_IMPL 0
1796/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are implemented (FEAT_FRINTTS). */
1797# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SUPPORTED 1
1798/** Bit 36 - 39 - Indicates support for SB instructions in AArch64 state. */
1799#define ARMV8_ID_AA64ISAR1_EL1_SB_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1800#define ARMV8_ID_AA64ISAR1_EL1_SB_SHIFT 36
1801/** No SB instructions implemented. */
1802# define ARMV8_ID_AA64ISAR1_EL1_SB_NOT_IMPL 0
1803/** SB instructions implemented (FEAT_SB). */
1804# define ARMV8_ID_AA64ISAR1_EL1_SB_SUPPORTED 1
1805/** Bit 40 - 43 - Indicates support for prediction invalidation instructions in AArch64 state. */
1806#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1807#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SHIFT 40
1808/** Prediction invalidation instructions are not implemented. */
1809# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_NOT_IMPL 0
1810/** Prediction invalidation instructions are implemented (FEAT_SPECRES). */
1811# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SUPPORTED 1
1812/** Bit 44 - 47 - Indicates support for Advanced SIMD and Floating-point BFloat16 instructions in AArch64 state. */
1813#define ARMV8_ID_AA64ISAR1_EL1_BF16_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1814#define ARMV8_ID_AA64ISAR1_EL1_BF16_SHIFT 44
1815/** BFloat16 instructions are not implemented. */
1816# define ARMV8_ID_AA64ISAR1_EL1_BF16_NOT_IMPL 0
1817/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented (FEAT_BF16). */
1818# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_BF16 1
1819/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented and FPCR.EBF is supported (FEAT_EBF16). */
1820# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_EBF16 2
1821/** Bit 48 - 51 - Indicates support for Data Gathering Hint instructions. */
1822#define ARMV8_ID_AA64ISAR1_EL1_DGH_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1823#define ARMV8_ID_AA64ISAR1_EL1_DGH_SHIFT 48
1824/** Data Gathering Hint instructions are not implemented. */
1825# define ARMV8_ID_AA64ISAR1_EL1_DGH_NOT_IMPL 0
1826/** Data Gathering Hint instructions are implemented (FEAT_DGH). */
1827# define ARMV8_ID_AA64ISAR1_EL1_DGH_SUPPORTED 1
1828/** Bit 52 - 55 - Indicates support for Advanced SIMD and Floating-point Int8 matrix multiplication instructions. */
1829#define ARMV8_ID_AA64ISAR1_EL1_I8MM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1830#define ARMV8_ID_AA64ISAR1_EL1_I8MM_SHIFT 52
1831/** No Int8 matrix multiplication instructions implemented. */
1832# define ARMV8_ID_AA64ISAR1_EL1_I8MM_NOT_IMPL 0
1833/** SMMLA, SUDOT, UMMLA, USMMLA and USDOT instructions are implemented (FEAT_I8MM). */
1834# define ARMV8_ID_AA64ISAR1_EL1_I8MM_SUPPORTED 1
1835/** Bit 56 - 59 - Indicates support for the XS attribute, the TLBI and DSB instructions with the nXS qualifier in AArch64 state. */
1836#define ARMV8_ID_AA64ISAR1_EL1_XS_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1837#define ARMV8_ID_AA64ISAR1_EL1_XS_SHIFT 56
1838/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are not supported. */
1839# define ARMV8_ID_AA64ISAR1_EL1_XS_NOT_IMPL 0
1840/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are supported (FEAT_XS). */
1841# define ARMV8_ID_AA64ISAR1_EL1_XS_SUPPORTED 1
1842/** Bit 60 - 63 - Indicates support for the LD64B and ST64B* instructions and the ACCDATA_EL1 register. */
1843#define ARMV8_ID_AA64ISAR1_EL1_LS64_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1844#define ARMV8_ID_AA64ISAR1_EL1_LS64_SHIFT 60
1845/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are not supported. */
1846# define ARMV8_ID_AA64ISAR1_EL1_LS64_NOT_IMPL 0
1847/** The LD64B and ST64B instructions are supported (FEAT_LS64). */
1848# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED 1
1849/** The LD64B, ST64B and ST64BV instructions and associated traps are supported (FEAT_LS64_V). */
1850# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_V 2
1851/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are supported (FEAT_LS64_ACCDATA). */
1852# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_ACCDATA 3
1853/** @} */
1854
1855
1856/** @name ID_AA64ISAR2_EL1 - AArch64 Instruction Set Attribute Register 2.
1857 * @{ */
1858/** Bit 0 - 3 - Indicates support for WFET and WFIT instructions in AArch64 state. */
1859#define ARMV8_ID_AA64ISAR2_EL1_WFXT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1860#define ARMV8_ID_AA64ISAR2_EL1_WFXT_SHIFT 0
1861/** WFET and WFIT are not supported. */
1862# define ARMV8_ID_AA64ISAR2_EL1_WFXT_NOT_IMPL 0
1863/** WFET and WFIT are supported (FEAT_WFxT). */
1864# define ARMV8_ID_AA64ISAR2_EL1_WFXT_SUPPORTED 2
1865/** Bit 4 - 7 - Indicates support for 12 bits of mantissa in reciprocal and reciprocal square root instructions in AArch64 state, when FPCR.AH is 1. */
1866#define ARMV8_ID_AA64ISAR2_EL1_RPRES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1867#define ARMV8_ID_AA64ISAR2_EL1_RPRES_SHIFT 4
1868/** Reciprocal and reciprocal square root estimates give 8 bits of mantissa when FPCR.AH is 1. */
1869# define ARMV8_ID_AA64ISAR2_EL1_RPRES_NOT_IMPL 0
1870/** Reciprocal and reciprocal square root estimates give 12 bits of mantissa when FPCR.AH is 1 (FEAT_RPRES). */
1871# define ARMV8_ID_AA64ISAR2_EL1_RPRES_SUPPORTED 1
1872/** Bit 8 - 11 - Indicates whether the QARMA3 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1873#define ARMV8_ID_AA64ISAR2_EL1_GPA3_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1874#define ARMV8_ID_AA64ISAR2_EL1_GPA3_SHIFT 8
1875/** Generic Authentication using the QARMA3 algorithm is not implemented. */
1876# define ARMV8_ID_AA64ISAR2_EL1_GPA3_NOT_IMPL 0
1877/** Generic Authentication using the QARMA3 algorithm is implemented (FEAT_PACQARMA3). */
1878# define ARMV8_ID_AA64ISAR2_EL1_GPA3_SUPPORTED 1
1879/** Bit 12 - 15 - Indicates whether the QARMA3 algorithm is implemented in the PE for address authentication. */
1880#define ARMV8_ID_AA64ISAR2_EL1_APA3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1881#define ARMV8_ID_AA64ISAR2_EL1_APA3_SHIFT 12
1882/** Address Authentication using the QARMA3 algorithm is not implemented. */
1883# define ARMV8_ID_AA64ISAR2_EL1_APA3_NOT_IMPL 0
1884/** Address Authentication using the QARMA3 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA3). */
1885# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH 1
1886/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA3). */
1887# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_EPAC 2
1888/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA3). */
1889# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH2 3
1890/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA3). */
1891# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPAC 4
1892/** Address Authentication using the QARMA3 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA3). */
1893# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPACCOMBINE 5
1894/** Bit 16 - 19 - Indicates support for Memory Copy and Memory Set instructions in AArch64 state. */
1895#define ARMV8_ID_AA64ISAR2_EL1_MOPS_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1896#define ARMV8_ID_AA64ISAR2_EL1_MOPS_SHIFT 16
1897/** No Memory Copy and Memory Set instructions implemented. */
1898# define ARMV8_ID_AA64ISAR2_EL1_MOPS_NOT_IMPL 0
1899/** Memory Copy and Memory Set instructions implemented (FEAT_MOPS). */
1900# define ARMV8_ID_AA64ISAR2_EL1_MOPS_SUPPORTED 1
1901/** Bit 20 - 23 - Indicates support for the BC.cond (hinted conditional branch) instruction. */
1902#define ARMV8_ID_AA64ISAR2_EL1_BC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1903#define ARMV8_ID_AA64ISAR2_EL1_BC_SHIFT 20
1904/** BC instruction is not implemented. */
1905# define ARMV8_ID_AA64ISAR2_EL1_BC_NOT_IMPL 0
1906/** BC instruction is implemented (FEAT_HBC). */
1907# define ARMV8_ID_AA64ISAR2_EL1_BC_SUPPORTED 1
1908/** Bit 24 - 27 - Indicates whether the ConstPACField() function used as part of PAC generation returns FALSE or TRUE. */
1909#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1910#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_SHIFT 24
1911/** ConstPACField() returns FALSE. */
1912# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_FALSE 0
1913/** ConstPACField() returns TRUE (FEAT_CONSTPACFIELD). */
1914# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_TRUE 1
1915/* Bit 28 - 63 - Reserved. */
1916/** @} */
1917
1918
1919/** @name ID_AA64PFR0_EL1 - AArch64 Processor Feature Register 0.
1920 * @{ */
1921/** Bit 0 - 3 - EL0 Exception level handling. */
1922#define ARMV8_ID_AA64PFR0_EL1_EL0_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1923#define ARMV8_ID_AA64PFR0_EL1_EL0_SHIFT 0
1924/** EL0 can be executed in AArch64 state only. */
1925# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_ONLY 1
1926/** EL0 can be executed in AArch64 and AArch32 state. */
1927# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_AARCH32 2
1928/** Bit 4 - 7 - EL1 Exception level handling. */
1929#define ARMV8_ID_AA64PFR0_EL1_EL1_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1930#define ARMV8_ID_AA64PFR0_EL1_EL1_SHIFT 4
1931/** EL1 can be executed in AArch64 state only. */
1932# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_ONLY 1
1933/** EL1 can be executed in AArch64 and AArch32 state. */
1934# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_AARCH32 2
1935/** Bit 8 - 11 - EL2 Exception level handling. */
1936#define ARMV8_ID_AA64PFR0_EL1_EL2_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1937#define ARMV8_ID_AA64PFR0_EL1_EL2_SHIFT 8
1938/** EL2 is not implemented. */
1939# define ARMV8_ID_AA64PFR0_EL1_EL2_NOT_IMPL 0
1940/** EL2 can be executed in AArch64 state only. */
1941# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_ONLY 1
1942/** EL2 can be executed in AArch64 and AArch32 state. */
1943# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_AARCH32 2
1944/** Bit 12 - 15 - EL3 Exception level handling. */
1945#define ARMV8_ID_AA64PFR0_EL1_EL3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1946#define ARMV8_ID_AA64PFR0_EL1_EL3_SHIFT 12
1947/** EL3 is not implemented. */
1948# define ARMV8_ID_AA64PFR0_EL1_EL3_NOT_IMPL 0
1949/** EL3 can be executed in AArch64 state only. */
1950# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_ONLY 1
1951/** EL3 can be executed in AArch64 and AArch32 state. */
1952# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_AARCH32 2
1953/** Bit 16 - 19 - Floating-point support. */
1954#define ARMV8_ID_AA64PFR0_EL1_FP_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1955#define ARMV8_ID_AA64PFR0_EL1_FP_SHIFT 16
1956/** Floating-point is implemented and supports single and double precision. */
1957# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP 0
1958/** Floating-point is implemented and supports single, double and half precision. */
1959# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP_HP 1
1960/** Floating-point is not implemented. */
1961# define ARMV8_ID_AA64PFR0_EL1_FP_NOT_IMPL 0xf
1962/** Bit 20 - 23 - Advanced SIMD support. */
1963#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1964#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_SHIFT 20
1965/** Advanced SIMD is implemented and supports single and double precision. */
1966# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP 0
1967/** Advanced SIMD is implemented and supports single, double and half precision. */
1968# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP_HP 1
1969/** Advanced SIMD is not implemented. */
1970# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_NOT_IMPL 0xf
1971/** Bit 24 - 27 - System register GIC CPU interface support. */
1972#define ARMV8_ID_AA64PFR0_EL1_GIC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1973#define ARMV8_ID_AA64PFR0_EL1_GIC_SHIFT 24
1974/** GIC CPU interface system registers are not implemented. */
1975# define ARMV8_ID_AA64PFR0_EL1_GIC_NOT_IMPL 0
1976/** System register interface to versions 3.0 and 4.0 of the GIC CPU interface is supported. */
1977# define ARMV8_ID_AA64PFR0_EL1_GIC_V3_V4 1
1978/** System register interface to version 4.1 of the GIC CPU interface is supported. */
1979# define ARMV8_ID_AA64PFR0_EL1_GIC_V4_1 3
1980/** Bit 28 - 31 - RAS Extension version. */
1981#define ARMV8_ID_AA64PFR0_EL1_RAS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1982#define ARMV8_ID_AA64PFR0_EL1_RAS_SHIFT 28
1983/** No RAS extension. */
1984# define ARMV8_ID_AA64PFR0_EL1_RAS_NOT_IMPL 0
1985/** RAS Extension implemented. */
1986# define ARMV8_ID_AA64PFR0_EL1_RAS_SUPPORTED 1
1987/** FEAT_RASv1p1 implemented. */
1988# define ARMV8_ID_AA64PFR0_EL1_RAS_V1P1 2
1989/** Bit 32 - 35 - Scalable Vector Extension (SVE) support. */
1990#define ARMV8_ID_AA64PFR0_EL1_SVE_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1991#define ARMV8_ID_AA64PFR0_EL1_SVE_SHIFT 32
1992/** SVE is not supported. */
1993# define ARMV8_ID_AA64PFR0_EL1_SVE_NOT_IMPL 0
1994/** SVE is supported. */
1995# define ARMV8_ID_AA64PFR0_EL1_SVE_SUPPORTED 1
1996/** Bit 36 - 39 - Secure EL2 support. */
1997#define ARMV8_ID_AA64PFR0_EL1_SEL2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1998#define ARMV8_ID_AA64PFR0_EL1_SEL2_SHIFT 36
1999/** Secure EL2 is not supported. */
2000# define ARMV8_ID_AA64PFR0_EL1_SEL2_NOT_IMPL 0
2001/** Secure EL2 is implemented. */
2002# define ARMV8_ID_AA64PFR0_EL1_SEL2_SUPPORTED 1
2003/** Bit 40 - 43 - MPAM support. */
2004#define ARMV8_ID_AA64PFR0_EL1_MPAM_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2005#define ARMV8_ID_AA64PFR0_EL1_MPAM_SHIFT 40
2006/** MPAM extension major version number is 0. */
2007# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V0 0
2008/** MPAM extension major version number is 1. */
2009# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V1 1
2010/** Bit 44 - 47 - Activity Monitor Extension support. */
2011#define ARMV8_ID_AA64PFR0_EL1_AMU_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2012#define ARMV8_ID_AA64PFR0_EL1_AMU_SHIFT 44
2013/** Activity Monitor extension is not implemented. */
2014# define ARMV8_ID_AA64PFR0_EL1_AMU_NOT_IMPL 0
2015/** Activity Monitor extension is implemented as of FEAT_AMUv1. */
2016# define ARMV8_ID_AA64PFR0_EL1_AMU_V1 1
2017/** Activity Monitor extension is implemented as of FEAT_AMUv1p1 including virtualization support. */
2018# define ARMV8_ID_AA64PFR0_EL1_AMU_V1P1 2
2019/** Bit 48 - 51 - Data Independent Timing support. */
2020#define ARMV8_ID_AA64PFR0_EL1_DIT_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2021#define ARMV8_ID_AA64PFR0_EL1_DIT_SHIFT 48
2022/** AArch64 does not guarantee constant execution time of any instructions. */
2023# define ARMV8_ID_AA64PFR0_EL1_DIT_NOT_IMPL 0
2024/** AArch64 provides the PSTATE.DIT mechanism to guarantee constant execution time of certain instructions (FEAT_DIT). */
2025# define ARMV8_ID_AA64PFR0_EL1_DIT_SUPPORTED 1
2026/** Bit 52 - 55 - Realm Management Extension support. */
2027#define ARMV8_ID_AA64PFR0_EL1_RME_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2028#define ARMV8_ID_AA64PFR0_EL1_RME_SHIFT 52
2029/** Realm Management Extension not implemented. */
2030# define ARMV8_ID_AA64PFR0_EL1_RME_NOT_IMPL 0
2031/** RMEv1 is implemented (FEAT_RME). */
2032# define ARMV8_ID_AA64PFR0_EL1_RME_SUPPORTED 1
2033/** Bit 56 - 59 - Speculative use out of context branch targets support. */
2034#define ARMV8_ID_AA64PFR0_EL1_CSV2_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2035#define ARMV8_ID_AA64PFR0_EL1_CSV2_SHIFT 56
2036/** Implementation does not disclose whether FEAT_CSV2 is implemented. */
2037# define ARMV8_ID_AA64PFR0_EL1_CSV2_NOT_EXPOSED 0
2038/** FEAT_CSV2 is implemented. */
2039# define ARMV8_ID_AA64PFR0_EL1_CSV2_SUPPORTED 1
2040/** FEAT_CSV2_2 is implemented. */
2041# define ARMV8_ID_AA64PFR0_EL1_CSV2_2_SUPPORTED 2
2042/** FEAT_CSV2_3 is implemented. */
2043# define ARMV8_ID_AA64PFR0_EL1_CSV2_3_SUPPORTED 3
2044/** Bit 60 - 63 - Speculative use of faulting data support. */
2045#define ARMV8_ID_AA64PFR0_EL1_CSV3_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2046#define ARMV8_ID_AA64PFR0_EL1_CSV3_SHIFT 60
2047/** Implementation does not disclose whether data loaded under speculation with a permission or domain fault can be used. */
2048# define ARMV8_ID_AA64PFR0_EL1_CSV3_NOT_EXPOSED 0
2049/** FEAT_CSV3 is supported. */
2050# define ARMV8_ID_AA64PFR0_EL1_CSV3_SUPPORTED 1
2051/** @} */
2052
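/*
 * Usage sketch (illustrative only, not part of the API): all the ID register
 * field groups in this header follow the same MASK/SHIFT convention, so a
 * field is extracted by masking first and shifting down afterwards.  The
 * function name below is made up for the example; the ID_AA64PFR0_EL1 value
 * would typically come from an MRS read or the VM configuration.
 *
 *      static bool isSveSupported(uint64_t uIdAa64Pfr0)
 *      {
 *          uint64_t const uSve = (uIdAa64Pfr0 & ARMV8_ID_AA64PFR0_EL1_SVE_MASK)
 *                              >> ARMV8_ID_AA64PFR0_EL1_SVE_SHIFT;
 *          return uSve >= ARMV8_ID_AA64PFR0_EL1_SVE_SUPPORTED;
 *      }
 */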
2053
2054/** @name ID_AA64PFR1_EL1 - AArch64 Processor Feature Register 1.
2055 * @{ */
2056/** Bit 0 - 3 - Branch Target Identification support. */
2057#define ARMV8_ID_AA64PFR1_EL1_BT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2058#define ARMV8_ID_AA64PFR1_EL1_BT_SHIFT 0
2059/** The Branch Target Identification mechanism is not implemented. */
2060# define ARMV8_ID_AA64PFR1_EL1_BT_NOT_IMPL 0
2061/** The Branch Target Identification mechanism is implemented. */
2062# define ARMV8_ID_AA64PFR1_EL1_BT_SUPPORTED 1
2063/** Bit 4 - 7 - Speculative Store Bypassing control support. */
2064#define ARMV8_ID_AA64PFR1_EL1_SSBS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2065#define ARMV8_ID_AA64PFR1_EL1_SSBS_SHIFT 4
2066/** AArch64 provides no mechanism to control the use of Speculative Store Bypassing. */
2067# define ARMV8_ID_AA64PFR1_EL1_SSBS_NOT_IMPL 0
2068/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe. */
2069# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED 1
2070/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe and adds MSR and MRS instructions
2071 * to directly read and write the PSTATE.SSBS field. */
2072# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED_MSR_MRS 2
2073/** Bit 8 - 11 - Memory Tagging Extension support. */
2074#define ARMV8_ID_AA64PFR1_EL1_MTE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2075#define ARMV8_ID_AA64PFR1_EL1_MTE_SHIFT 8
2076/** MTE is not implemented. */
2077# define ARMV8_ID_AA64PFR1_EL1_MTE_NOT_IMPL 0
2078/** Instruction only Memory Tagging Extensions implemented. */
2079# define ARMV8_ID_AA64PFR1_EL1_MTE_INSN_ONLY 1
2080/** Full Memory Tagging Extension implemented. */
2081# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL 2
2082/** Full Memory Tagging Extension with asymmetric Tag Check Fault handling implemented. */
2083# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL_ASYM_TAG_FAULT_CHK 3
2084/** Bit 12 - 15 - RAS Extension fractional field. */
2085#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2086#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_SHIFT 12
2087/** RAS Extension is implemented. */
2088# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_IMPL 0
2089/** FEAT_RASv1p1 is implemented. */
2090# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_RASV1P1 1
2091/** Bit 16 - 19 - MPAM minor version number. */
2092#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2093#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_SHIFT 16
2094/** The minor version number of the MPAM extension is 0. */
2095# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_0 0
2096/** The minor version number of the MPAM extension is 1. */
2097# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_1 1
2098/* Bit 20 - 23 - Reserved. */
2099/** Bit 24 - 27 - Scalable Matrix Extension support. */
2100#define ARMV8_ID_AA64PFR1_EL1_SME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2101#define ARMV8_ID_AA64PFR1_EL1_SME_SHIFT 24
2102/** Scalable Matrix Extensions are not implemented. */
2103# define ARMV8_ID_AA64PFR1_EL1_SME_NOT_IMPL 0
2104/** Scalable Matrix Extensions are implemented (FEAT_SME). */
2105# define ARMV8_ID_AA64PFR1_EL1_SME_SUPPORTED 1
2106/** Scalable Matrix Extensions are implemented + SME2 ZT0 register(FEAT_SME2). */
2107# define ARMV8_ID_AA64PFR1_EL1_SME_SME2 2
2108/** Bit 28 - 31 - Random Number trap to EL3 support. */
2109#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2110#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SHIFT 28
2111/** Trapping of RNDR and RNDRRS to EL3 is not supported. */
2112# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_NOT_IMPL 0
2113/** Trapping of RNDR and RNDRRS to EL3 is supported. */
2114# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SUPPORTED 1
2115/** Bit 32 - 35 - CSV2 fractional field. */
2116#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2117#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_SHIFT 32
2118/** Either CSV2 not exposed or implementation does not expose whether FEAT_CSV2_1p1 is implemented. */
2119# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_NOT_EXPOSED 0
2120/** FEAT_CSV2_1p1 is implemented. */
2121# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P1 1
2122/** FEAT_CSV2_1p2 is implemented. */
2123# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P2 2
2124/** Bit 36 - 39 - Non-maskable Interrupt support. */
2125#define ARMV8_ID_AA64PFR1_EL1_NMI_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2126#define ARMV8_ID_AA64PFR1_EL1_NMI_SHIFT 36
2127/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are not supported. */
2128# define ARMV8_ID_AA64PFR1_EL1_NMI_NOT_IMPL 0
2129/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are supported (FEAT_NMI). */
2130# define ARMV8_ID_AA64PFR1_EL1_NMI_SUPPORTED 1
2131/** @} */
2132
2133
2134/** @name ID_AA64MMFR0_EL1 - AArch64 Memory Model Feature Register 0.
2135 * @{ */
2136/** Bit 0 - 3 - Physical Address range supported. */
2137#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2138#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_SHIFT 0
2139/** Physical Address range is 32 bits, 4GiB. */
2140# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_32BITS 0
2141/** Physical Address range is 36 bits, 64GiB. */
2142# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_36BITS 1
2143/** Physical Address range is 40 bits, 1TiB. */
2144# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_40BITS 2
2145/** Physical Address range is 42 bits, 4TiB. */
2146# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_42BITS 3
2147/** Physical Address range is 44 bits, 16TiB. */
2148# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_44BITS 4
2149/** Physical Address range is 48 bits, 256TiB. */
2150# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_48BITS 5
2151/** Physical Address range is 52 bits, 4PiB. */
2152# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_52BITS 6
2153/** Bit 4 - 7 - Number of ASID bits. */
2154#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2155#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_SHIFT 4
2156/** ASID bits is 8. */
2157# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_8 0
2158/** ASID bits is 16. */
2159# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_16 2
2160/** Bit 8 - 11 - Indicates support for mixed-endian configuration. */
2161#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2162#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SHIFT 8
2163/** No mixed-endian support. */
2164# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_NOT_IMPL 0
2165/** Mixed-endian supported. */
2166# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SUPPORTED 1
2167/** Bit 12 - 15 - Indicates support for a distinction between Secure and Non-secure Memory. */
2168#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2169#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SHIFT 12
2170/** No distinction between Secure and Non-secure Memory supported. */
2171# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_NOT_IMPL 0
2172/** Distinction between Secure and Non-secure Memory supported. */
2173# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SUPPORTED 1
2174/** Bit 16 - 19 - Indicates support for mixed-endian at EL0 only. */
2175#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2176#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT 16
2177/** No mixed-endian support at EL0. */
2178# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_NOT_IMPL 0
2179/** Mixed-endian support at EL0. */
2180# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SUPPORTED 1
2181/** Bit 20 - 23 - Indicates support for 16KiB memory translation granule size. */
2182#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2183#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SHIFT 20
2184/** 16KiB granule size not supported. */
2185# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_NOT_IMPL 0
2186/** 16KiB granule size is supported. */
2187# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED 1
2188/** 16KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2189# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED_52BIT 2
2190/** Bit 24 - 27 - Indicates support for 64KiB memory translation granule size. */
2191#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2192#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SHIFT 24
2193/** 64KiB granule supported. */
2194# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SUPPORTED 0
2195/** 64KiB granule not supported. */
2196# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_NOT_IMPL 0xf
2197/** Bit 28 - 31 - Indicates support for 4KiB memory translation granule size. */
2198#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2199#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SHIFT 28
2200/** 4KiB granule supported. */
2201# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED 0
2202/** 4KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2203# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED_52BIT 1
2204/** 4KiB granule not supported. */
2205# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_NOT_IMPL 0xf
2206/** Bit 32 - 35 - Indicates support for 16KiB granule size at stage 2. */
2207#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2208#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SHIFT 32
2209/** Support for 16KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran16 field. */
2210# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORT_BY_TGRAN16 0
2211/** 16KiB granule not supported at stage 2. */
2212# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_NOT_IMPL 1
2213/** 16KiB granule supported at stage 2. */
2214# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED 2
2215/** 16KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2216# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED_52BIT 3
2217/** Bit 36 - 39 - Indicates support for 64KiB granule size at stage 2. */
2218#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2219#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SHIFT 36
2220/** Support for 64KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran64 field. */
2221# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORT_BY_TGRAN64 0
2222/** 64KiB granule not supported at stage 2. */
2223# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_NOT_IMPL 1
2224/** 64KiB granule supported at stage 2. */
2225# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORTED 2
2226/** Bit 40 - 43 - Indicates support for 4KiB granule size at stage 2. */
2227#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2228#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SHIFT 40
2229/** Support for 4KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran4 field. */
2230# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORT_BY_TGRAN16 0
2231/** 4KiB granule not supported at stage 2. */
2232# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_NOT_IMPL 1
2233/** 4KiB granule supported at stage 2. */
2234# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED 2
2235/** 4KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2236# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED_52BIT 3
2237/** Bit 44 - 47 - Indicates support for disabling context synchronizing exception entry and exit. */
2238#define ARMV8_ID_AA64MMFR0_EL1_EXS_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2239#define ARMV8_ID_AA64MMFR0_EL1_EXS_SHIFT 44
2240/** All exception entries and exits are context synchronization events. */
2241# define ARMV8_ID_AA64MMFR0_EL1_EXS_NOT_IMPL 0
2242/** Non-context synchronizing exception entry and exit are supported (FEAT_ExS). */
2243# define ARMV8_ID_AA64MMFR0_EL1_EXS_SUPPORTED 1
2244/* Bit 48 - 55 - Reserved. */
2245/** Bit 56 - 59 - Indicates the presence of the Fine-Grained Trap controls. */
2246#define ARMV8_ID_AA64MMFR0_EL1_FGT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2247#define ARMV8_ID_AA64MMFR0_EL1_FGT_SHIFT 56
2248/** Fine-grained trap controls are not implemented. */
2249# define ARMV8_ID_AA64MMFR0_EL1_FGT_NOT_IMPL 0
2250/** Fine-grained trap controls are implemented (FEAT_FGT). */
2251# define ARMV8_ID_AA64MMFR0_EL1_FGT_SUPPORTED 1
2252/** Bit 60 - 63 - Indicates the presence of Enhanced Counter Virtualization. */
2253#define ARMV8_ID_AA64MMFR0_EL1_ECV_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2254#define ARMV8_ID_AA64MMFR0_EL1_ECV_SHIFT 60
2255/** Enhanced Counter Virtualization is not implemented. */
2256# define ARMV8_ID_AA64MMFR0_EL1_ECV_NOT_IMPL 0
2257/** Enhanced Counter Virtualization is implemented (FEAT_ECV). */
2258# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED 1
2259/** Enhanced Counter Virtualization is implemented and includes support for CNTHCTL_EL2.ECV and CNTPOFF_EL2 (FEAT_ECV). */
2260# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED_2 2
2261/** @} */
2262
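/*
 * Usage sketch (illustrative only, not part of the API): translating the
 * PARange field above into the number of physical address bits.  The lookup
 * table mirrors the value comments above and the helper name is made up for
 * the example.
 *
 *      static uint8_t physAddrBitsFromMmfr0(uint64_t uIdAa64Mmfr0)
 *      {
 *          static uint8_t const s_acBits[] = { 32, 36, 40, 42, 44, 48, 52 };
 *          uint64_t const uRange = (uIdAa64Mmfr0 & ARMV8_ID_AA64MMFR0_EL1_PARANGE_MASK)
 *                                >> ARMV8_ID_AA64MMFR0_EL1_PARANGE_SHIFT;
 *          return uRange < RT_ELEMENTS(s_acBits) ? s_acBits[uRange] : 0;
 *      }
 */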
2263
2264/** @name ID_AA64MMFR1_EL1 - AArch64 Memory Model Feature Register 1.
2265 * @{ */
2266/** Bit 0 - 3 - Hardware updates to Access flag and Dirty state in translation tables. */
2267#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2268#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SHIFT 0
2269/** Hardware update of the Access flag and dirty state are not supported. */
2270# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_NOT_IMPL 0
2271/** Support for hardware update of the Access flag for Block and Page descriptors. */
2272# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SUPPORTED 1
2273/** Support for hardware update of the Access flag for Block and Page descriptors, hardware update of dirty state supported. */
2274# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_DIRTY_SUPPORTED 2
2275/** Bit 4 - 7 - Number of VMID bits. */
2276#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2277#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_SHIFT 4
2278/** VMID bits is 8. */
2279# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_8 0
2280/** VMID bits is 16 (FEAT_VMID16). */
2281# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_16 2
2282/** Bit 8 - 11 - Virtualization Host Extensions support. */
2283#define ARMV8_ID_AA64MMFR1_EL1_VHE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2284#define ARMV8_ID_AA64MMFR1_EL1_VHE_SHIFT 8
2285/** Virtualization Host Extensions are not supported. */
2286# define ARMV8_ID_AA64MMFR1_EL1_VHE_NOT_IMPL 0
2287/** Virtualization Host Extensions are supported. */
2288# define ARMV8_ID_AA64MMFR1_EL1_VHE_SUPPORTED 1
2289/** Bit 12 - 15 - Hierarchical Permission Disables. */
2290#define ARMV8_ID_AA64MMFR1_EL1_HPDS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2291#define ARMV8_ID_AA64MMFR1_EL1_HPDS_SHIFT 12
2292/** Disabling of hierarchical controls not supported. */
2293# define ARMV8_ID_AA64MMFR1_EL1_HPDS_NOT_IMPL 0
2294/** Disabling of hierarchical controls supported (FEAT_HPDS). */
2295# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED 1
2296/** FEAT_HPDS + possible hardware allocation of bits[62:59] of the translation table descriptors from the final lookup level (FEAT_HPDS2). */
2297# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED_2 2
2298/** Bit 16 - 19 - LORegions support. */
2299#define ARMV8_ID_AA64MMFR1_EL1_LO_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2300#define ARMV8_ID_AA64MMFR1_EL1_LO_SHIFT 16
2301/** LORegions not supported. */
2302# define ARMV8_ID_AA64MMFR1_EL1_LO_NOT_IMPL 0
2303/** LORegions supported. */
2304# define ARMV8_ID_AA64MMFR1_EL1_LO_SUPPORTED 1
2305/** Bit 20 - 23 - Privileged Access Never support. */
2306#define ARMV8_ID_AA64MMFR1_EL1_PAN_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2307#define ARMV8_ID_AA64MMFR1_EL1_PAN_SHIFT 20
2308/** PAN not supported. */
2309# define ARMV8_ID_AA64MMFR1_EL1_PAN_NOT_IMPL 0
2310/** PAN supported (FEAT_PAN). */
2311# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED 1
2312/** PAN supported and AT S1E1RP and AT S1E1WP instructions supported (FEAT_PAN2). */
2313# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_2 2
2314/** PAN supported and AT S1E1RP and AT S1E1WP instructions and SCTRL_EL1.EPAN and SCTRL_EL2.EPAN supported (FEAT_PAN3). */
2315# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_3 3
2316/** Bit 24 - 27 - Describes whether the PE can generate SError interrupt exceptions. */
2317#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2318#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SHIFT 24
2319/** The PE never generates an SError interrupt due to an External abort on a speculative read. */
2320# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_NOT_IMPL 0
2321/** The PE might generate an SError interrupt due to an External abort on a speculative read. */
2322# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SUPPORTED 1
2323/** Bit 28 - 31 - Indicates support for execute-never control distinction by Exception level at stage 2. */
2324#define ARMV8_ID_AA64MMFR1_EL1_XNX_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2325#define ARMV8_ID_AA64MMFR1_EL1_XNX_SHIFT 28
2326/** Distinction between EL0 and EL1 execute-never control at stage 2 not supported. */
2327# define ARMV8_ID_AA64MMFR1_EL1_XNX_NOT_IMPL 0
2328/** Distinction between EL0 and EL1 execute-never control at stage 2 supported (FEAT_XNX). */
2329# define ARMV8_ID_AA64MMFR1_EL1_XNX_SUPPORTED 1
2330/** Bit 32 - 35 - Indicates support for the configurable delayed trapping of WFE. */
2331#define ARMV8_ID_AA64MMFR1_EL1_TWED_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2332#define ARMV8_ID_AA64MMFR1_EL1_TWED_SHIFT 32
2333/** Configurable delayed trapping of WFE is not supported. */
2334# define ARMV8_ID_AA64MMFR1_EL1_TWED_NOT_IMPL 0
2335/** Configurable delayed trapping of WFE is supported (FEAT_TWED). */
2336# define ARMV8_ID_AA64MMFR1_EL1_TWED_SUPPORTED 1
2337/** Bit 36 - 39 - Indicates support for Enhanced Translation Synchronization. */
2338#define ARMV8_ID_AA64MMFR1_EL1_ETS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2339#define ARMV8_ID_AA64MMFR1_EL1_ETS_SHIFT 36
2340/** Enhanced Translation Synchronization is not supported. */
2341# define ARMV8_ID_AA64MMFR1_EL1_ETS_NOT_IMPL 0
2342/** Enhanced Translation Synchronization is implemented. */
2343# define ARMV8_ID_AA64MMFR1_EL1_ETS_SUPPORTED 1
2344/** Bit 40 - 43 - Indicates HCRX_EL2 and its associated EL3 trap support. */
2345#define ARMV8_ID_AA64MMFR1_EL1_HCX_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2346#define ARMV8_ID_AA64MMFR1_EL1_HCX_SHIFT 40
2347/** HCRX_EL2 and its associated EL3 trap are not supported. */
2348# define ARMV8_ID_AA64MMFR1_EL1_HCX_NOT_IMPL 0
2349/** HCRX_EL2 and its associated EL3 trap are supported (FEAT_HCX). */
2350# define ARMV8_ID_AA64MMFR1_EL1_HCX_SUPPORTED 1
2351/** Bit 44 - 47 - Indicates support for FPCR.{AH,FIZ,NEP}. */
2352#define ARMV8_ID_AA64MMFR1_EL1_AFP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2353#define ARMV8_ID_AA64MMFR1_EL1_AFP_SHIFT 44
2354/** The FPCR.{AH,FIZ,NEP} fields are not supported. */
2355# define ARMV8_ID_AA64MMFR1_EL1_AFP_NOT_IMPL 0
2356/** The FPCR.{AH,FIZ,NEP} fields are supported (FEAT_AFP). */
2357# define ARMV8_ID_AA64MMFR1_EL1_AFP_SUPPORTED 1
2358/** Bit 48 - 51 - Indicates support for intermediate caching of translation table walks. */
2359#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2360#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_SHIFT 48
2361/** The intermediate caching of translation table walks might include non-coherent physical translation caches. */
2362# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_NON_COHERENT 0
2363/** The intermediate caching of translation table walks does not include non-coherent physical translation caches (FEAT_nTLBPA). */
2364# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_COHERENT_ONLY 1
2365/** Bit 52 - 55 - Indicates whether SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP are implemented in AArch64 state. */
2366#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2367#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SHIFT 52
2368/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are not implemented. */
2369# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_NOT_IMPL 0
2370/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are implemented (FEAT_TIDCP1). */
2371# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SUPPORTED 1
2372/** Bit 56 - 59 - Indicates support for cache maintenance instruction permission. */
2373#define ARMV8_ID_AA64MMFR1_EL1_CMOW_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2374#define ARMV8_ID_AA64MMFR1_EL1_CMOW_SHIFT 56
2375/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are not implemented. */
2376# define ARMV8_ID_AA64MMFR1_EL1_CMOW_NOT_IMPL 0
2377/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are implemented (FEAT_CMOW). */
2378# define ARMV8_ID_AA64MMFR1_EL1_CMOW_SUPPORTED 1
2379/* Bit 60 - 63 - Reserved. */
2380/** @} */
2381
2382
2383/** @name ID_AA64MMFR2_EL1 - AArch64 Memory Model Feature Register 2.
2384 * @{ */
2385/** Bit 0 - 3 - Indicates support for Common not Private translations. */
2386#define ARMV8_ID_AA64MMFR2_EL1_CNP_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2387#define ARMV8_ID_AA64MMFR2_EL1_CNP_SHIFT 0
2388/** Common not Private translations are not supported. */
2389# define ARMV8_ID_AA64MMFR2_EL1_CNP_NOT_IMPL 0
2390/** Support for Common not Private translations (FEAT_TTCNP). */
2391# define ARMV8_ID_AA64MMFR2_EL1_CNP_SUPPORTED 1
2392/** Bit 4 - 7 - Indicates support for User Access Override. */
2393#define ARMV8_ID_AA64MMFR2_EL1_UAO_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2394#define ARMV8_ID_AA64MMFR2_EL1_UAO_SHIFT 4
2395/** User Access Override is not supported. */
2396# define ARMV8_ID_AA64MMFR2_EL1_UAO_NOT_IMPL 0
2397/** User Access Override is supported (FEAT_UAO). */
2398# define ARMV8_ID_AA64MMFR2_EL1_UAO_SUPPORTED 1
2399/** Bit 8 - 11 - Indicates support for LSMAOE and nTLSMD bits in SCTLR_ELx. */
2400#define ARMV8_ID_AA64MMFR2_EL1_LSM_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2401#define ARMV8_ID_AA64MMFR2_EL1_LSM_SHIFT 8
2402/** LSMAOE and nTLSMD bits are not supported. */
2403# define ARMV8_ID_AA64MMFR2_EL1_LSM_NOT_IMPL 0
2404/** LSMAOE and nTLSMD bits are supported (FEAT_LSMAOC). */
2405# define ARMV8_ID_AA64MMFR2_EL1_LSM_SUPPORTED 1
2406/** Bit 12 - 15 - Indicates support for the IESB bit in SCTLR_ELx registers. */
2407#define ARMV8_ID_AA64MMFR2_EL1_IESB_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2408#define ARMV8_ID_AA64MMFR2_EL1_IESB_SHIFT 12
2409/** IESB bit is not supported. */
2410# define ARMV8_ID_AA64MMFR2_EL1_IESB_NOT_IMPL 0
2411/** IESB bit is supported (FEAT_IESB). */
2412# define ARMV8_ID_AA64MMFR2_EL1_IESB_SUPPORTED 1
2413/** Bit 16 - 19 - Indicates support for larger virtual address. */
2414#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2415#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_SHIFT 16
2416/** Virtual address range is 48 bits. */
2417# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_48BITS 0
2418/** 52 bit virtual addresses supported for 64KiB granules (FEAT_LVA). */
2419# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_52BITS_64KB_GRAN 1
2420/** Bit 20 - 23 - Revised CCSIDR_EL1 register format supported. */
2421#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2422#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_SHIFT 20
2423/** CCSIDR_EL1 register format is 32-bit. */
2424# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_32BIT 0
2425/** CCSIDR_EL1 register format is 64-bit (FEAT_CCIDX). */
2426# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_64BIT 1
2427/** Bit 24 - 27 - Indicates support for nested virtualization. */
2428#define ARMV8_ID_AA64MMFR2_EL1_NV_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2429#define ARMV8_ID_AA64MMFR2_EL1_NV_SHIFT 24
2430/** Nested virtualization is not supported. */
2431# define ARMV8_ID_AA64MMFR2_EL1_NV_NOT_IMPL 0
2432/** The HCR_EL2.{AT,NV1,NV} bits are implemented (FEAT_NV). */
2433# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED 1
2434/** The VNCR_EL2 register and HCR_EL2.{NV2,AT,NV1,NV} bits are implemented (FEAT_NV2). */
2435# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED_2 2
2436/** Bit 28 - 31 - Indicates support for small translation tables. */
2437#define ARMV8_ID_AA64MMFR2_EL1_ST_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2438#define ARMV8_ID_AA64MMFR2_EL1_ST_SHIFT 28
2439/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 39. */
2440# define ARMV8_ID_AA64MMFR2_EL1_ST_NOT_IMPL 0
2441/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 48 for 4KiB and 16KiB, and 47 for 64KiB granules (FEAT_TTST). */
2442# define ARMV8_ID_AA64MMFR2_EL1_ST_SUPPORTED 1
2443/** Bit 32 - 35 - Indicates support for unaligned single-copy atomicity and atomic functions. */
2444#define ARMV8_ID_AA64MMFR2_EL1_AT_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2445#define ARMV8_ID_AA64MMFR2_EL1_AT_SHIFT 32
2446/** Unaligned single-copy atomicity and atomic functions are not supported. */
2447# define ARMV8_ID_AA64MMFR2_EL1_AT_NOT_IMPL 0
2448/** Unaligned single-copy atomicity and atomic functions are supported (FEAT_LSE2). */
2449# define ARMV8_ID_AA64MMFR2_EL1_AT_SUPPORTED 1
2450/** Bit 36 - 39 - Indicates value of ESR_ELx.EC that reports an exception generated by a read access to the feature ID space. */
2451#define ARMV8_ID_AA64MMFR2_EL1_IDS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2452#define ARMV8_ID_AA64MMFR2_EL1_IDS_SHIFT 36
2453/** ESR_ELx.EC is 0 for traps generated by a read access to the feature ID space. */
2454# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_0 0
2455/** ESR_ELx.EC is 0x18 for traps generated by a read access to the feature ID space (FEAT_IDST). */
2456# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_18H 1
2457/** Bit 40 - 43 - Indicates support for the HCR_EL2.FWB bit. */
2458#define ARMV8_ID_AA64MMFR2_EL1_FWB_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2459#define ARMV8_ID_AA64MMFR2_EL1_FWB_SHIFT 40
2460/** HCR_EL2.FWB bit is not supported. */
2461# define ARMV8_ID_AA64MMFR2_EL1_FWB_NOT_IMPL 0
2462/** HCR_EL2.FWB bit is supported (FEAT_S2FWB). */
2463# define ARMV8_ID_AA64MMFR2_EL1_FWB_SUPPORTED 1
2464/* Bit 44 - 47 - Reserved. */
2465/** Bit 48 - 51 - Indicates support for TTL field in address operations. */
2466#define ARMV8_ID_AA64MMFR2_EL1_TTL_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2467#define ARMV8_ID_AA64MMFR2_EL1_TTL_SHIFT 48
2468/** TLB maintenance instructions by address have bits [47:44] Res0. */
2469# define ARMV8_ID_AA64MMFR2_EL1_TTL_NOT_IMPL 0
2470/** TLB maintenance instructions by address have bits [47:44] holding the TTL field (FEAT_TTL). */
2471# define ARMV8_ID_AA64MMFR2_EL1_TTL_SUPPORTED 1
2472/** Bit 52 - 55 - Identifies the requirements of the hardware to have break-before-make sequences when changing the
2473 * block size for a translation. */
2474#define ARMV8_ID_AA64MMFR2_EL1_BBM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2475#define ARMV8_ID_AA64MMFR2_EL1_BBM_SHIFT 52
2476/** Level 0 support for changing block size is supported (FEAT_BBM). */
2477# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL0 0
2478/** Level 1 support for changing block size is supported (FEAT_BBM). */
2479# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL1 1
2480/** Level 2 support for changing block size is supported (FEAT_BBM). */
2481# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL2 2
2482/** Bit 56 - 59 - Indicates support for Enhanced Virtualization Traps. */
2483#define ARMV8_ID_AA64MMFR2_EL1_EVT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2484#define ARMV8_ID_AA64MMFR2_EL1_EVT_SHIFT 56
2485/** Enhanced Virtualization Traps are not supported. */
2486# define ARMV8_ID_AA64MMFR2_EL1_EVT_NOT_IMPL 0
2487/** Enhanced Virtualization Traps are supported (FEAT_EVT). */
2488# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED 1
2489/** Enhanced Virtualization Traps are supported with additional traps (FEAT_EVT). */
2490# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED_2 2
2491/** Bit 60 - 63 - Indicates support for E0PDx mechanism. */
2492#define ARMV8_ID_AA64MMFR2_EL1_E0PD_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2493#define ARMV8_ID_AA64MMFR2_EL1_E0PD_SHIFT 60
2494/** E0PDx mechanism is not supported. */
2495# define ARMV8_ID_AA64MMFR2_EL1_E0PD_NOT_IMPL 0
2496/** E0PDx mechanism is supported (FEAT_E0PD). */
2497# define ARMV8_ID_AA64MMFR2_EL1_E0PD_SUPPORTED 1
2498/** @} */
2499
2500
2501/** @name ID_AA64DFR0_EL1 - AArch64 Debug Feature Register 0.
2502 * @{ */
2503/** Bit 0 - 3 - Indicates the Debug Architecture version supported. */
2504#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2505#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_SHIFT 0
2506/** Armv8 debug architecture version. */
2507# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8 6
2508/** Armv8 debug architecture version with virtualization host extensions. */
2509# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8_VHE 7
2510/** Armv8.2 debug architecture version (FEAT_Debugv8p2). */
2511# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p2 8
2512/** Armv8.4 debug architecture version (FEAT_Debugv8p4). */
2513# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p4 9
2514/** Armv8.8 debug architecture version (FEAT_Debugv8p8). */
2515# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p8 10
2516/** Bit 4 - 7 - Indicates trace support. */
2517#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2518#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SHIFT 4
2519/** Trace unit System registers not implemented. */
2520# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_NOT_IMPL 0
2521/** Trace unit System registers supported. */
2522# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SUPPORTED 1
2523/** Bit 8 - 11 - Performance Monitors Extension version. */
2524#define ARMV8_ID_AA64DFR0_EL1_PMUVER_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2525#define ARMV8_ID_AA64DFR0_EL1_PMUVER_SHIFT 8
2526/** Performance Monitors Extension not supported. */
2527# define ARMV8_ID_AA64DFR0_EL1_PMUVER_NOT_IMPL 0
2528/** Performance Monitors Extension v3 supported (FEAT_PMUv3). */
2529# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3 1
2530/** Performance Monitors Extension v3 supported (FEAT_PMUv3p1). */
2531# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P1 4
2532/** Performance Monitors Extension v3 supported (FEAT_PMUv3p4). */
2533# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P4 5
2534/** Performance Monitors Extension v3 supported (FEAT_PMUv3p5). */
2535# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P5 6
2536/** Performance Monitors Extension v3 supported (FEAT_PMUv3p7). */
2537# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P7 7
2538/** Performance Monitors Extension v3 supported (FEAT_PMUv3p8). */
2539# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P8 8
2540/** Bit 12 - 15 - Number of breakpoints, minus 1. */
2541#define ARMV8_ID_AA64DFR0_EL1_BRPS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2542#define ARMV8_ID_AA64DFR0_EL1_BRPS_SHIFT 12
2543/* Bit 16 - 19 - Reserved 0. */
2544/** Bit 20 - 23 - Number of watchpoints, minus 1. */
2545#define ARMV8_ID_AA64DFR0_EL1_WRPS_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2546#define ARMV8_ID_AA64DFR0_EL1_WRPS_SHIFT 20
2547/* Bit 24 - 27 - Reserved 0. */
2548/** Bit 28 - 31 - Number of context-aware breakpoints, minus 1. */
2549#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2550#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_SHIFT 28
2551/** Bit 32 - 35 - Statistical Profiling Extension version. */
2552#define ARMV8_ID_AA64DFR0_EL1_PMSVER_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2553#define ARMV8_ID_AA64DFR0_EL1_PMSVER_SHIFT 32
2554/** Statistical Profiling Extension not implemented. */
2555# define ARMV8_ID_AA64DFR0_EL1_PMSVER_NOT_IMPL 0
2556/** Statistical Profiling Extension supported (FEAT_SPE). */
2557# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED 1
2558/** Statistical Profiling Extension supported, version 1.1 (FEAT_SPEv1p1). */
2559# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P1 2
2560/** Statistical Profiling Extension supported, version 1.2 (FEAT_SPEv1p2). */
2561# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P2 3
2562/** Statistical Profiling Extension supported, version 1.3 (FEAT_SPEv1p3). */
2563# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P3 4
2564/** Bit 36 - 39 - OS Double Lock implemented. */
2565#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2566#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SHIFT 36
2567/** OS Double Lock is not implemented. */
2568# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_NOT_IMPL 0xf
2569/** OS Double Lock is supported (FEAT_DoubleLock). */
2570# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SUPPORTED 0
2571/** Bit 40 - 43 - Indicates the Armv8.4 self-hosted Trace Extension. */
2572#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2573#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SHIFT 40
2574/** Armv8.4 self-hosted Trace Extension not implemented. */
2575# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_NOT_IMPL 0
2576/** Armv8.4 self-hosted Trace Extension is supported (FEAT_TRF). */
2577# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SUPPORTED 1
2578/** Bit 44 - 47 - Indicates support for the Trace Buffer Extension. */
2579#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2580#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SHIFT 44
2581/** Trace Buffer Extension is not implemented. */
2582# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_NOT_IMPL 0
2583/** Trace Buffer Extension is supported (FEAT_TRBE). */
2584# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SUPPORTED 1
2585/** Bit 48 - 51 - Indicates support for the multi-threaded PMU extension. */
2586#define ARMV8_ID_AA64DFR0_EL1_MTPMU_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2587#define ARMV8_ID_AA64DFR0_EL1_MTPMU_SHIFT 48
2588/** Multi-threaded PMU extension is not implemented. */
2589# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL 0
2590/** Multi-threaded PMU extension is supported (FEAT_MTPMU). */
2591# define ARMV8_ID_AA64DFR0_EL1_MTPMU_SUPPORTED 1
2592/** Multi-threaded PMU extension is not implemented. */
2593# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL_2 0xf
2594/** Bit 52 - 55 - Indicates support for the Branch Record Buffer extension. */
2595#define ARMV8_ID_AA64DFR0_EL1_BRBE_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2596#define ARMV8_ID_AA64DFR0_EL1_BRBE_SHIFT 52
2597/** Branch Record Buffer extension is not implemented. */
2598# define ARMV8_ID_AA64DFR0_EL1_BRBE_NOT_IMPL 0
2599/** Branch Record Buffer extension is supported (FEAT_BRBE). */
2600# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED 1
2601/** Branch Record Buffer extension is supported and supports branch recording at EL3 (FEAT_BRBEv1p1). */
2602# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED_V1P1 2
2603/* Bit 56 - 59 - Reserved. */
2604/** Bit 60 - 63 - Indicates support for Zero PMU event counters for guest operating systems. */
2605#define ARMV8_ID_AA64DFR0_EL1_HPMN0_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2606#define ARMV8_ID_AA64DFR0_EL1_HPMN0_SHIFT 60
2607/** Setting MDCR_EL2.HPMN to zero has CONSTRAINED UNPREDICTABLE behavior. */
2608# define ARMV8_ID_AA64DFR0_EL1_HPMN0_NOT_IMPL 0
2609/** Setting MDCR_EL2.HPMN to zero has defined behavior (FEAT_HPMN0). */
2610# define ARMV8_ID_AA64DFR0_EL1_HPMN0_SUPPORTED 1
2611/** @} */
2612
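/*
 * Usage sketch (illustrative only, not part of the API): the BRPS and WRPS
 * fields above encode the number of breakpoints/watchpoints minus one, so the
 * actual counts are obtained like this (helper names made up for the example):
 *
 *      static uint32_t cBreakpointsFromDfr0(uint64_t uIdAa64Dfr0)
 *      {
 *          return (uint32_t)((uIdAa64Dfr0 & ARMV8_ID_AA64DFR0_EL1_BRPS_MASK) >> ARMV8_ID_AA64DFR0_EL1_BRPS_SHIFT) + 1;
 *      }
 *
 *      static uint32_t cWatchpointsFromDfr0(uint64_t uIdAa64Dfr0)
 *      {
 *          return (uint32_t)((uIdAa64Dfr0 & ARMV8_ID_AA64DFR0_EL1_WRPS_MASK) >> ARMV8_ID_AA64DFR0_EL1_WRPS_SHIFT) + 1;
 *      }
 */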
2613
2614/** @name FPCR - AArch64 Floating Point Control Register.
2615 * @{ */
2616/** Bit 0 - Flush Inputs to Zero when FEAT_AFP is supported. */
2617#define ARMV8_FPCR_FIZ RT_BIT_64(0)
2618#define ARMV8_FPCR_FIZ_BIT 0
2619/** Bit 1 - Alternate Handling of floating-point numbers when FEAT_AFP is supported. */
2620#define ARMV8_FPCR_AH RT_BIT_64(1)
2621#define ARMV8_FPCR_AH_BIT 1
2622/** Bit 2 - Controls how the output elements other than the lowest element of the vector are determined for
2623 * Advanced SIMD scalar instructions, when FEAT_AFP is supported. */
2624#define ARMV8_FPCR_NEP RT_BIT_64(2)
2625#define ARMV8_FPCR_NEP_BIT 2
2626/* Bit 3 - 7 - Reserved.*/
2627/** Bit 8 - Invalid Operation floating-point exception trap enable. */
2628#define ARMV8_FPCR_IOE RT_BIT_64(8)
2629#define ARMV8_FPCR_IOE_BIT 8
2630/** Bit 9 - Divide by Zero floating-point exception trap enable. */
2631#define ARMV8_FPCR_DZE RT_BIT_64(9)
2632#define ARMV8_FPCR_DZE_BIT 9
2633/** Bit 10 - Overflow floating-point exception trap enable. */
2634#define ARMV8_FPCR_OFE RT_BIT_64(10)
2635#define ARMV8_FPCR_OFE_BIT 10
2636/** Bit 11 - Underflow floating-point exception trap enable. */
2637#define ARMV8_FPCR_UFE RT_BIT_64(11)
2638#define ARMV8_FPCR_UFE_BIT 11
2639/** Bit 12 - Inexact floating-point exception trap enable. */
2640#define ARMV8_FPCR_IXE RT_BIT_64(12)
2641#define ARMV8_FPCR_IXE_BIT 12
2642/** Bit 13 - Controls the numeric behavior of BFloat16 dot product calculations,
2643 * supported when FEAT_EBF16 is implemented. */
2644#define ARMV8_FPCR_EBF RT_BIT_64(13)
2645#define ARMV8_FPCR_EBF_BIT 13
2646/* Bit 14 - Reserved */
2647/** Bit 15 - Input Denormal floating-point exception trap enable. */
2648#define ARMV8_FPCR_IDE RT_BIT_64(15)
2649#define ARMV8_FPCR_IDE_BIT 15
2650/* Bit 16 - 18 - Reserved for AArch64 (Len field for AArch32). */
2651/** Bit 19 - Flushing denormalized numbers to zero control bit on half-precision data-processing instructions,
2652 * available when FEAT_FP16 is supported. */
2653#define ARMV8_FPCR_FZ16 RT_BIT_64(19)
2654#define ARMV8_FPCR_FZ16_BIT 19
2655/* Bit 20 - 21 - Reserved for AArch64 (Stride field for AArch32). */
2656/** Bit 22 - 23 - Rounding Mode control field. */
2657#define ARMV8_FPCR_RMODE_MASK (RT_BIT_64(22) | RT_BIT_64(23))
2658#define ARMV8_FPCR_RMODE_SHIFT 22
2659/** Round to Nearest (RN) mode. */
2660# define ARMV8_FPCR_RMODE_RN 0
2661/** Round towards Plus Infinity (RP) mode. */
2662# define ARMV8_FPCR_RMODE_RP 1
2663/** Round towards Minus Infinity (RM) mode. */
2664# define ARMV8_FPCR_RMODE_RM 2
2665/** Round towards Zero (RZ) mode. */
2666# define ARMV8_FPCR_RMODE_RZ 3
2667/** Bit 24 - Flushing denormalized numbers to zero control bit. */
2668#define ARMV8_FPCR_FZ RT_BIT_64(24)
2669#define ARMV8_FPCR_FZ_BIT 24
2670/** Bit 25 - Default NaN use for NaN propagation. */
2671#define ARMV8_FPCR_DN RT_BIT_64(25)
2672#define ARMV8_FPCR_DN_BIT 25
2673/** Bit 26 - Alternative half-precision control bit. */
2674#define ARMV8_FPCR_AHP RT_BIT_64(26)
2675#define ARMV8_FPCR_AHP_BIT 26
2676/* Bit 27 - 63 - Reserved. */
2677/** @} */
2678
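/*
 * Usage sketch (illustrative only, not part of the API): composing an FPCR
 * value that selects round-towards-zero, default NaN propagation and
 * flush-to-zero from the definitions above.  Writing the value to the actual
 * FPCR register (MSR FPCR, Xn) is up to the caller.
 *
 *      uint64_t const fFpcr = ((uint64_t)ARMV8_FPCR_RMODE_RZ << ARMV8_FPCR_RMODE_SHIFT)
 *                           | ARMV8_FPCR_DN
 *                           | ARMV8_FPCR_FZ;
 */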
2679
2680/** @name FPSR - AArch64 Floating Point Status Register.
2681 * @{ */
2682/** Bit 0 - Invalid Operation cumulative floating-point exception bit. */
2683#define ARMV8_FPSR_IOC RT_BIT_64(0)
2684/** Bit 1 - Divide by Zero cumulative floating-point exception bit. */
2685#define ARMV8_FPSR_DZC RT_BIT_64(1)
2686/** Bit 2 - Overflow cumulative floating-point exception bit. */
2687#define ARMV8_FPSR_OFC RT_BIT_64(2)
2688/** Bit 3 - Underflow cumulative floating-point exception bit. */
2689#define ARMV8_FPSR_UFC RT_BIT_64(3)
2690/** Bit 4 - Inexact cumulative floating-point exception bit. */
2691#define ARMV8_FPSR_IXC RT_BIT_64(4)
2692/* Bit 5 - 6 - Reserved. */
2693/** Bit 7 - Input Denormal cumulative floating-point exception bit. */
2694#define ARMV8_FPSR_IDC RT_BIT_64(7)
2695/* Bit 8 - 26 - Reserved. */
2696/** Bit 27 - Cumulative saturation bit, Advanced SIMD only. */
2697#define ARMV8_FPSR_QC RT_BIT_64(27)
2698/* Bit 28 - 31 - NZCV bits for AArch32 floating point operations. */
2699/* Bit 32 - 63 - Reserved. */
2700/** @} */
2701
2702
2703
2704/** @name SCTLR_EL1 - AArch64 System Control Register (EL1).
2705 * @{ */
2706/** Bit 0 - MMU enable for EL1 and EL0 stage 1 address translation. */
2707#define ARMV8_SCTLR_EL1_M RT_BIT_64(0)
2708/** Bit 1 - Alignment check enable for EL1 and EL0. */
2709#define ARMV8_SCTLR_EL1_A RT_BIT_64(1)
2710/** Bit 2 - Stage 1 cacheability control, for data accesses. */
2711#define ARMV8_SCTLR_EL1_C RT_BIT_64(2)
2712/** Bit 3 - SP alignment check enable. */
2713#define ARMV8_SCTLR_EL1_SA RT_BIT_64(3)
2714/** Bit 4 - SP alignment check enable for EL0. */
2715#define ARMV8_SCTLR_EL1_SA0 RT_BIT_64(4)
2716/** Bit 5 - System instruction memory barrier enable from AArch32 EL0. */
2717#define ARMV8_SCTLR_EL1_CP15BEN RT_BIT_64(5)
2718/** Bit 6 - Non-aligned access enable. */
2719#define ARMV8_SCTLR_EL1_nAA RT_BIT_64(6)
2720/** Bit 7 - IT disable, disables some uses of IT instructions at EL0 using AArch32. */
2721#define ARMV8_SCTLR_EL1_ITD RT_BIT_64(7)
2722/** Bit 8 - SETEND instruction disable, disables SETEND instructions at EL0 using AArch32. */
2723#define ARMV8_SCTLR_EL1_SED RT_BIT_64(8)
2724/** Bit 9 - User Mask Access. Traps EL0 execution of MSR and MRS instructions that access the PSTATE.{D,A,I,F} masks to EL1. */
2725#define ARMV8_SCTLR_EL1_UMA RT_BIT_64(9)
2726/** Bit 10 - Enable EL0 access to the CFP*, DVP* and CPP* instructions if FEAT_SPECRES is supported. */
2727#define ARMV8_SCTLR_EL1_EnRCTX RT_BIT_64(10)
2728/** Bit 11 - Exception Exit is Context Synchronizing (FEAT_ExS required). */
2729#define ARMV8_SCTLR_EL1_EOS RT_BIT_64(11)
2730/** Bit 12 - Stage 1 instruction access cacheability control, for access at EL0 and EL1. */
2731#define ARMV8_SCTLR_EL1_I RT_BIT_64(12)
2732/** @todo Finish (lazy developer). */
2733/** @} */
2734
2735
2736/** @name SCTLR_EL2 - AArch64 System Control Register (EL2) - 32-bit.
2737 * @{ */
2738/** Bit 0 - MMU enable for EL2. */
2739#define ARMV8_SCTLR_EL2_M RT_BIT_64(0)
2740/** Bit 1 - Alignment check enable. */
2741#define ARMV8_SCTLR_EL2_A RT_BIT_64(1)
2742/** Bit 2 - Global enable for data and unified caches. */
2743#define ARMV8_SCTLR_EL2_C RT_BIT_64(2)
2744/** Bit 3 - SP alignment check enable. */
2745#define ARMV8_SCTLR_EL2_SA RT_BIT_64(3)
2746/* Bit 4 - 11 - Reserved. */
2747/** Bit 12 - Instruction cache enable. */
2748#define ARMV8_SCTLR_EL2_I RT_BIT_64(12)
2749/* Bit 13 - 18 - Reserved. */
2750/** Bit 19 - Force treatment of all memory regions with write permissions as XN. */
2751#define ARMV8_SCTLR_EL2_WXN RT_BIT_64(19)
2752/* Bit 20 - 24 - Reserved. */
2753/** Bit 25 - Exception endianness - set means big endian, clear means little endian. */
2754#define ARMV8_SCTLR_EL2_EE RT_BIT_64(25)
2755/* Bit 26 - 31 - Reserved. */
2756/** @} */
2757
2758
2759#if (!defined(VBOX_FOR_DTRACE_LIB) && defined(__cplusplus) && !defined(ARMV8_WITHOUT_MK_INSTR)) || defined(DOXYGEN_RUNNING)
2760/** @defgroup grp_rt_armv8_mkinstr Instruction Encoding Helpers
2761 * @ingroup grp_rt_armv8
2762 *
2763 * A few inlined functions and macros for assisting in encoding common ARMv8
2764 * instructions.
2765 *
2766 * @{ */
2767
2768/** A64: Official NOP instruction. */
2769#define ARMV8_A64_INSTR_NOP UINT32_C(0xd503201f)
2770/** A64: Return instruction. */
2771#define ARMV8_A64_INSTR_RET UINT32_C(0xd65f03c0)
2772/** A64: Return instruction with LR pointer authentication using SP and key A. */
2773#define ARMV8_A64_INSTR_RETAA UINT32_C(0xd65f0bff)
2774/** A64: Return instruction with LR pointer authentication using SP and key B. */
2775#define ARMV8_A64_INSTR_RETAB UINT32_C(0xd65f0fff)
2776/** A64: Insert pointer authentication code into X17 using X16 and key B. */
2777#define ARMV8_A64_INSTR_PACIB1716 UINT32_C(0xd503215f)
2778/** A64: Insert pointer authentication code into LR using SP and key B. */
2779#define ARMV8_A64_INSTR_PACIBSP UINT32_C(0xd503237f)
2780/** A64: Insert pointer authentication code into LR using XZR and key B. */
2781#define ARMV8_A64_INSTR_PACIBZ UINT32_C(0xd503235f)
2782/** A64: Invert the carry flag (PSTATE.C). */
2783#define ARMV8_A64_INSTR_CFINV UINT32_C(0xd500401f)
2784
2785
2786/** Memory barrier: Shareability domain. */
2787typedef enum
2788{
2789 kArm64InstMbReqDomain_OuterShareable = 0,
2790 kArm64InstMbReqDomain_Nonshareable,
2791 kArm64InstMbReqDomain_InnerShareable,
2792 kArm64InstMbReqDomain_FullSystem
2793} ARM64INSTRMBREQDOMAIN;
2794
2795/** Memory barrier: Access type. */
2796typedef enum
2797{
2798 kArm64InstMbReqType_All0 = 0, /**< Special. Only used with PSSBB and SSBB. */
2799 kArm64InstMbReqType_Reads,
2800 kArm64InstMbReqType_Writes,
2801 kArm64InstMbReqType_All
2802} ARM64INSTRMBREQTYPE;
2803
2804/**
2805 * A64: DMB option
2806 */
2807DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrDmb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2808 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2809{
2810 return UINT32_C(0xd50330bf)
2811 | ((uint32_t)enmType << 8) /* CRm[1:0]: access type */
2812 | ((uint32_t)enmDomain << 10); /* CRm[3:2]: shareability domain */
2813}
2814
2815
2816/**
2817 * A64: DSB option
2818 */
2819DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrDsb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2820 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2821{
2822 return UINT32_C(0xd503309f)
2823 | ((uint32_t)enmType << 8) /* CRm[1:0]: access type */
2824 | ((uint32_t)enmDomain << 10); /* CRm[3:2]: shareability domain */
2825}
2826
2827
2828/**
2829 * A64: SSBB
2830 */
2831DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSsbb(void)
2832{
2833 return Armv8A64MkInstrDsb(kArm64InstMbReqDomain_OuterShareable, kArm64InstMbReqType_All0);
2834}
2835
2836
2837/**
2838 * A64: PSSBB
2839 */
2840DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrPSsbb(void)
2841{
2842 return Armv8A64MkInstrDsb(kArm64InstMbReqDomain_Nonshareable, kArm64InstMbReqType_All0);
2843}
2844
2845
2846/**
2847 * A64: ISB option
2848 *
2849 * @note Only the default option selection is supported, all others are
2850 * currently reserved.
2851 */
2852DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrIsb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2853 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2854{
2855 return UINT32_C(0xd50330df)
2856 | ((uint32_t)enmType << 8) /* CRm[1:0]: access type */
2857 | ((uint32_t)enmDomain << 10); /* CRm[3:2]: shareability domain */
2858}
2859
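/*
 * Usage sketch (illustrative only): emitting memory barriers with the helpers
 * above.  The returned 32-bit words are instruction encodings that the caller
 * is expected to write into an instruction buffer.
 *
 *      uint32_t const uDmbIsh = Armv8A64MkInstrDmb(kArm64InstMbReqDomain_InnerShareable); // dmb ish
 *      uint32_t const uDsbSy  = Armv8A64MkInstrDsb();                                     // dsb sy
 *      uint32_t const uIsb    = Armv8A64MkInstrIsb();                                     // isb
 */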
2860
2861typedef enum
2862{
2863 /** Add @a iImm7*sizeof(reg) to @a iBaseReg after the store/load,
2864 * and update the register. */
2865 kArm64InstrStLdPairType_PostIndex = 1,
2866 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2867 * but don't update the register. */
2868 kArm64InstrStLdPairType_Signed = 2,
2869 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2870 * and update the register. */
2871 kArm64InstrStLdPairType_PreIndex = 3
2872} ARM64INSTRSTLDPAIRTYPE;
2873
2874/**
2875 * A64: Encodes either stp (store register pair) or ldp (load register pair).
2876 *
2877 * @returns The encoded instruction.
2878 * @param fLoad true for ldp, false for stp.
2879 * @param u2Opc When @a fSimdFp is @c false:
2880 * - 0 for 32-bit GPRs (Wt).
2881 * - 1 for encoding stgp or ldpsw.
2882 * - 2 for 64-bit GPRs (Xt).
2883 * - 3 illegal.
2884 * When @a fSimdFp is @c true:
2885 * - 0 for 32-bit SIMD&FP registers (St).
2886 * - 1 for 64-bit SIMD&FP registers (Dt).
2887 * - 2 for 128-bit SIMD&FP registers (Qt).
2888 * @param enmType The instruction variant wrt addressing and updating of the
2889 * addressing register.
2890 * @param iReg1 The first register to store/load.
2891 * @param iReg2 The second register to store/load.
2892 * @param iBaseReg The base register to use when addressing. SP is allowed.
2893 * @param iImm7 Signed addressing immediate, range -64..63; it will be
2894 * multiplied (scaled) by the register size.
2895 * @param fSimdFp true for SIMD&FP registers, false for GPRs and
2896 * stgp/ldpsw instructions.
2897 */
2898DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdPair(bool fLoad, uint32_t u2Opc, ARM64INSTRSTLDPAIRTYPE enmType,
2899 uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2900 bool fSimdFp = false)
2901{
2902 Assert(u2Opc < 3); Assert(iReg1 <= 31); Assert(iReg2 <= 31); Assert(iBaseReg <= 31); Assert(iImm7 < 64 && iImm7 >= -64);
2903 return (u2Opc << 30)
2904 | UINT32_C(0x28000000) /* 0b101000000000000000000000000000 */
2905 | ((uint32_t)fSimdFp << 26) /* VR bit, see "Top-level encodings for A64" */
2906 | ((uint32_t)enmType << 23)
2907 | ((uint32_t)fLoad << 22)
2908 | (((uint32_t)iImm7 & UINT32_C(0x7f)) << 15)
2909 | (iReg2 << 10)
2910 | (iBaseReg << 5)
2911 | iReg1;
2912}
2913
2914
2915/** A64: ldp x1, x2, [x3] */
2916DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2917 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2918 bool f64Bit = true)
2919{
2920 return Armv8A64MkInstrStLdPair(true /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2921}
2922
2923
2924/** A64: stp x1, x2, [x3] */
2925DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2926 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2927 bool f64Bit = true)
2928{
2929 return Armv8A64MkInstrStLdPair(false /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2930}
2931
2932
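/*
 * Usage sketch (illustrative only): a typical prologue/epilogue pair encoded
 * with the wrappers above.  The immediate is given in register-size units
 * (here 8 bytes) and plain register numbers are used for clarity (31 = SP).
 *
 *      // stp x29, x30, [sp, #-16]!   (0xa9bf7bfd); imm7 = -16/8 = -2.
 *      uint32_t const uStp = Armv8A64MkInstrStPairGpr(29, 30, 31, -2, kArm64InstrStLdPairType_PreIndex);
 *      // ldp x29, x30, [sp], #16     (0xa8c17bfd)
 *      uint32_t const uLdp = Armv8A64MkInstrLdPairGpr(29, 30, 31,  2, kArm64InstrStLdPairType_PostIndex);
 */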
2933typedef enum /* Size VR Opc */
2934{ /* \ | / */
2935 kArmv8A64InstrLdStType_Mask_Size = 0x300,
2936 kArmv8A64InstrLdStType_Mask_VR = 0x010,
2937 kArmv8A64InstrLdStType_Mask_Opc = 0x003,
2938 kArmv8A64InstrLdStType_Shift_Size = 8,
2939 kArmv8A64InstrLdStType_Shift_VR = 4,
2940 kArmv8A64InstrLdStType_Shift_Opc = 0,
2941
2942 kArmv8A64InstrLdStType_St_Byte = 0x000,
2943 kArmv8A64InstrLdStType_Ld_Byte = 0x001,
2944 kArmv8A64InstrLdStType_Ld_SignByte64 = 0x002,
2945 kArmv8A64InstrLdStType_Ld_SignByte32 = 0x003,
2946
2947 kArmv8A64InstrLdStType_St_Half = 0x100, /**< Half = 16-bit */
2948 kArmv8A64InstrLdStType_Ld_Half = 0x101, /**< Half = 16-bit */
2949 kArmv8A64InstrLdStType_Ld_SignHalf64 = 0x102, /**< Half = 16-bit */
2950 kArmv8A64InstrLdStType_Ld_SignHalf32 = 0x103, /**< Half = 16-bit */
2951
2952 kArmv8A64InstrLdStType_St_Word = 0x200, /**< Word = 32-bit */
2953 kArmv8A64InstrLdStType_Ld_Word = 0x201, /**< Word = 32-bit */
2954 kArmv8A64InstrLdStType_Ld_SignWord64 = 0x202, /**< Word = 32-bit */
2955
2956 kArmv8A64InstrLdStType_St_Dword = 0x300, /**< Dword = 64-bit */
2957 kArmv8A64InstrLdStType_Ld_Dword = 0x301, /**< Dword = 64-bit */
2958
2959 kArmv8A64InstrLdStType_Prefetch = 0x302, /**< Not valid in all variations, check docs. */
2960
2961 kArmv8A64InstrLdStType_St_Vr_Byte = 0x010,
2962 kArmv8A64InstrLdStType_Ld_Vr_Byte = 0x011,
2963 kArmv8A64InstrLdStType_St_Vr_128 = 0x012,
2964 kArmv8A64InstrLdStType_Ld_Vr_128 = 0x013,
2965
2966 kArmv8A64InstrLdStType_St_Vr_Half = 0x110, /**< Half = 16-bit */
2967 kArmv8A64InstrLdStType_Ld_Vr_Half = 0x111, /**< Half = 16-bit */
2968
2969 kArmv8A64InstrLdStType_St_Vr_Word = 0x210, /**< Word = 32-bit */
2970 kArmv8A64InstrLdStType_Ld_Vr_Word = 0x211, /**< Word = 32-bit */
2971
2972 kArmv8A64InstrLdStType_St_Vr_Dword = 0x310, /**< Dword = 64-bit */
2973 kArmv8A64InstrLdStType_Ld_Vr_Dword = 0x311 /**< Dword = 64-bit */
2974
2975} ARMV8A64INSTRLDSTTYPE;
2976/** Checks if a ARMV8A64INSTRLDSTTYPE value is a store operation or not. */
2977#define ARMV8A64INSTRLDSTTYPE_IS_STORE(a_enmLdStType) (((unsigned)a_enmLdStType & (unsigned)kArmv8A64InstrLdStType_Mask_Opc) == 0)
2978
2979
2980/**
2981 * A64: Encodes load/store with unscaled 9-bit signed immediate.
2982 *
2983 * @returns The encoded instruction.
2984 * @param u32Opcode The base opcode value.
2985 * @param enmType The load/store instruction type. Prefetch valid (PRFUM).
2986 * @param iReg The register to load into / store.
2987 * @param iBaseReg The base register to use when addressing. SP is allowed.
2988 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2989 */
2990DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdImm9Ex(uint32_t u32Opcode, ARMV8A64INSTRLDSTTYPE enmType,
2991 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2992{
2993 Assert(i9ImmDisp >= -256 && i9ImmDisp < 256); Assert(iReg < 32); Assert(iBaseReg < 32);
2994 return u32Opcode
2995 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
2996 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
2997 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
2998 | (((uint32_t)i9ImmDisp & UINT32_C(0x1ff)) << 12)
2999 | (iBaseReg << 5)
3000 | iReg;
3001}
3002
3003
3004/**
3005 * A64: Encodes load/store with unscaled 9-bit signed immediate.
3006 *
3007 * @returns The encoded instruction.
3008 * @param enmType The load/store instruction type. Prefetch valid (PRFUM).
3009 * @param iReg The register to load into / store.
3010 * @param iBaseReg The base register to use when addressing. SP is allowed.
3011 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3012 */
3013DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSturLdur(ARMV8A64INSTRLDSTTYPE enmType,
3014 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3015{
3016 /* 3 2 1 0 */
3017 /* 10987654321098765432109876543210 */
3018 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000000) /* 0b00111000000000000000000000000000 */,
3019 enmType, iReg, iBaseReg, i9ImmDisp);
3020}
3021
3022/**
3023 * A64: Encodes load/store with unscaled 9-bit signed immediate, post-indexed.
3024 *
3025 * @returns The encoded instruction.
3026 * @param enmType The load/store instruction type. Prefetch not valid.
3027 * @param iReg The register to load into / store.
3028 * @param iBaseReg The base register to use when addressing. SP is allowed.
3029 * Written back.
3030 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3031 */
3032DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPostIndex9(ARMV8A64INSTRLDSTTYPE enmType,
3033 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3034{
3035 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
3036 /* 10987654321098765432109876543210 */
3037 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000400) /* 0b00111000000000000000010000000000 */,
3038 enmType, iReg, iBaseReg, i9ImmDisp);
3039}
3040
3041/**
3042 * A64: Encodes load/store with unscaled 9-bit signed immediate, pre-indexed.
3043 *
3044 * @returns The encoded instruction.
3045 * @param enmType The load/store instruction type. Prefetch valid (PRFUM).
3046 * @param iReg The register to load into / store.
3047 * @param iBaseReg The base register to use when addressing. SP is allowed.
3048 * Written back.
3049 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3050 */
3051DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPreIndex9(ARMV8A64INSTRLDSTTYPE enmType,
3052 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3053{
3054 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
3055 /* 10987654321098765432109876543210 */
3056 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000c00) /* 0b00111000000000000000110000000000 */,
3057 enmType, iReg, iBaseReg, i9ImmDisp);
3058}
3059
3060/**
3061 * A64: Encodes unprivileged load/store with unscaled 9-bit signed immediate.
3062 *
3063 * @returns The encoded instruction.
3064 * @param enmType The load/store instruction type. Prefetch not valid,
3065 * nor any SIMD&FP variants.
3066 * @param iReg The register to load into / store.
3067 * @param iBaseReg The base register to use when addressing. SP is allowed.
3068 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3069 */
3070DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSttrLdtr(ARMV8A64INSTRLDSTTYPE enmType,
3071 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3072{
3073 Assert(enmType != kArmv8A64InstrLdStType_Prefetch);
3074 Assert(!((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR));
3075 /* 3 2 1 0 */
3076 /* 10987654321098765432109876543210 */
3077 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000800) /* 0b00111000000000000000100000000000 */,
3078 enmType, iReg, iBaseReg, i9ImmDisp);
3079}
3080
3081
3082/**
3083 * A64: Encodes load/store w/ scaled 12-bit unsigned address displacement.
3084 *
3085 * @returns The encoded instruction.
3086 * @param enmType The load/store instruction type. Prefetch not valid,
3087 * nor any SIMD&FP variants.
3088 * @param iReg The register to load into / store.
3089 * @param iBaseReg The base register to use when addressing. SP is allowed.
3090 * @param u12ImmDisp Addressing displacement, scaled by size.
3091 */
3092DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRUOff(ARMV8A64INSTRLDSTTYPE enmType,
3093 uint32_t iReg, uint32_t iBaseReg, uint32_t u12ImmDisp)
3094{
3095 Assert(u12ImmDisp < 4096U);
3096 Assert(iReg < 32); /* 3 2 1 0 */
3097 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
3098 return UINT32_C(0x39000000) /* 0b00111001000000000000000000000000 */
3099 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
3100 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
3101 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
3102 | (u12ImmDisp << 10)
3103 | (iBaseReg << 5)
3104 | iReg;
3105}
3106
3107typedef enum
3108{
3109 kArmv8A64InstrLdStExtend_Uxtw = 2, /**< Zero-extend (32-bit) word. */
3110 kArmv8A64InstrLdStExtend_Lsl = 3, /**< Shift left (64-bit). */
3111 kArmv8A64InstrLdStExtend_Sxtw = 6, /**< Sign-extend (32-bit) word. */
3112 kArmv8A64InstrLdStExtend_Sxtx = 7 /**< Sign-extend (64-bit) dword (to 128-bit SIMD&FP reg, presumably). */
3113} ARMV8A64INSTRLDSTEXTEND;
3114
3115/**
3116 * A64: Encodes load/store w/ index register.
3117 *
3118 * @returns The encoded instruction.
3119 * @param enmType The load/store instruction type.
3120 * @param iReg The register to load into / store.
3121 * @param iBaseReg The base register to use when addressing. SP is allowed.
3122 * @param iRegIndex The index register.
3123 * @param enmExtend The extending to apply to @a iRegIndex.
3124 * @param fShifted Whether to shift the index. The shift amount corresponds
3125 * to the access size (thus irrelevant for byte accesses).
3126 */
3127DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRegIdx(ARMV8A64INSTRLDSTTYPE enmType,
3128 uint32_t iReg, uint32_t iBaseReg, uint32_t iRegIndex,
3129 ARMV8A64INSTRLDSTEXTEND enmExtend = kArmv8A64InstrLdStExtend_Lsl,
3130 bool fShifted = false)
3131{
3132 Assert(iRegIndex < 32);
3133 Assert(iReg < 32); /* 3 2 1 0 */
3134 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
3135 return UINT32_C(0x38200800) /* 0b00111000001000000000100000000000 */
3136 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
3137 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
3138 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
3139 | (iRegIndex << 16)
3140 | ((uint32_t)enmExtend << 13)
3141 | ((uint32_t)fShifted << 12)
3142 | (iBaseReg << 5)
3143 | iReg;
3144}
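
/* Usage sketch (illustrative only): enmLdStType stands for a suitable value of the
 * ARMV8A64INSTRLDSTTYPE enum defined earlier in this header (say, a 64-bit load); with the index
 * shifted by the access size this encodes the equivalent of "ldr x0, [x1, x2, lsl #3]":
 *      uint32_t const uInstr = Armv8A64MkInstrStLdRegIdx(enmLdStType, ARMV8_A64_REG_X0,
 *                                                        ARMV8_A64_REG_X1, ARMV8_A64_REG_X2,
 *                                                        kArmv8A64InstrLdStExtend_Lsl,
 *                                                        true);  // last argument = fShifted
 */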
3145
3146typedef enum /* VR Opc */
3147{ /* \ | */
3148 kArmv8A64InstrLdrLitteral_Mask_Vr = 0x10,
3149 kArmv8A64InstrLdrLitteral_Mask_Opc = 0x03,
3150 kArmv8A64InstrLdrLitteral_Shift_Vr = 4,
3151 kArmv8A64InstrLdrLitteral_Shift_Opc = 0,
3152
3153 kArmv8A64InstrLdrLitteral_Word = 0x00, /**< word = 32-bit */
3154 kArmv8A64InstrLdrLitteral_Dword = 0x01, /**< dword = 64-bit */
3155    kArmv8A64InstrLdrLitteral_SignWord64 = 0x02, /**< Loads word, sign-extending it to 64-bit */
3156 kArmv8A64InstrLdrLitteral_Prefetch = 0x03, /**< prfm */
3157
3158 kArmv8A64InstrLdrLitteral_Vr_Word = 0x10, /**< word = 32-bit */
3159 kArmv8A64InstrLdrLitteral_Vr_Dword = 0x11, /**< dword = 64-bit */
3160 kArmv8A64InstrLdrLitteral_Vr_128 = 0x12
3161} ARMV8A64INSTRLDRLITTERAL;
3162
3163
3164/**
3165 * A64: Encodes load w/ a PC relative 19-bit signed immediate.
3166 *
3167 * @returns The encoded instruction.
3168 * @param enmType The load instruction type.
3169 * @param iReg The register to load into.
3170 * @param i19Imm The signed immediate value, multiplied by 4 regardless
3171 * of access size.
3172 */
3173DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdrLitteral(ARMV8A64INSTRLDRLITTERAL enmType, uint32_t iReg, int32_t i19Imm)
3174{
3175 Assert(i19Imm >= -262144 && i19Imm < 262144);
3176 Assert(iReg < 32); /* 3 2 1 0 */
3177 /* 10987654321098765432109876543210 */
3178    return UINT32_C(0x18000000) /* 0b00011000000000000000000000000000 */
3179 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Vr) << (26 - kArmv8A64InstrLdrLitteral_Shift_Vr))
3180 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Opc) << (30 - kArmv8A64InstrLdrLitteral_Shift_Opc))
3181         | (((uint32_t)i19Imm & UINT32_C(0x0007ffff)) << 5)
3182 | iReg;
3183}
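
/* Usage sketch (illustrative only): load a 64-bit value into x0 from a literal pool entry placed
 * 8 instructions (32 bytes) after this LDR; the displacement is always in units of 4 bytes:
 *      uint32_t const uInstr = Armv8A64MkInstrLdrLitteral(kArmv8A64InstrLdrLitteral_Dword,
 *                                                         ARMV8_A64_REG_X0, 8);
 */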
3184
3185
3186typedef enum
3187{
3188    kArmv8A64InstrMovWide_Not = 0, /**< MOVN - reg = ~(imm16 << hw*16); */
3189 kArmv8A64InstrMovWide_Zero = 2, /**< MOVZ - reg = imm16 << hw*16; */
3190 kArmv8A64InstrMovWide_Keep = 3 /**< MOVK - keep the other halfwords. */
3191} ARMV8A64INSTRMOVWIDE;
3192
3193/**
3194 * A64: Encode a move wide immediate instruction.
3195 *
3196 * @returns The encoded instruction.
3197 * @param enmType The load instruction type.
3198 * @param iRegDst The register to mov the immediate into.
3199 * @param uImm16 The immediate value.
3200 * @param iHalfWord Which of the 4 (@a f64Bit = true) or 2 (16-bit) register
3201 * half-words to target:
3202 * - 0 for bits 15:00,
3203 * - 1 for bits 31:16,
3204 * - 2 for bits 47:32 (f64Bit=true only),
3205 * - 3 for bits 63:48 (f64Bit=true only).
3206 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit GPRs.
3207 */
3208DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovWide(ARMV8A64INSTRMOVWIDE enmType, uint32_t iRegDst, uint32_t uImm16,
3209 uint32_t iHalfWord = 0, bool f64Bit = true)
3210{
3211 Assert(iRegDst < 32U); Assert(uImm16 <= (uint32_t)UINT16_MAX); Assert(iHalfWord < 2U + (2U * f64Bit));
3212 return ((uint32_t)f64Bit << 31)
3213 | ((uint32_t)enmType << 29)
3214 | UINT32_C(0x12800000)
3215 | (iHalfWord << 21)
3216 | (uImm16 << 5)
3217 | iRegDst;
3218}
3219
3220/** A64: Encodes a MOVN instruction.
3221 * @see Armv8A64MkInstrMovWide for parameter details. */
3222DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovN(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3223{
3224 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Not, iRegDst, uImm16, iHalfWord, f64Bit);
3225}
3226
3227/** A64: Encodes a MOVZ instruction.
3228 * @see Armv8A64MkInstrMovWide for parameter details. */
3229DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovZ(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3230{
3231 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Zero, iRegDst, uImm16, iHalfWord, f64Bit);
3232}
3233
3234/** A64: Encodes a MOVK instruction.
3235 * @see Armv8A64MkInstrMovWide for parameter details. */
3236DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovK(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3237{
3238 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Keep, iRegDst, uImm16, iHalfWord, f64Bit);
3239}
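
/* Usage sketch (illustrative only): materialize the 32-bit constant 0x12345678 in x0 with a
 * MOVZ/MOVK pair; MOVZ clears the register, MOVK then patches in the next half-word:
 *      uint32_t au32Code[2];
 *      au32Code[0] = Armv8A64MkInstrMovZ(ARMV8_A64_REG_X0, 0x5678, 0);   // mov  x0, #0x5678
 *      au32Code[1] = Armv8A64MkInstrMovK(ARMV8_A64_REG_X0, 0x1234, 1);   // movk x0, #0x1234, lsl #16
 */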
3240
3241
3242typedef enum
3243{
3244 kArmv8A64InstrShift_Lsl = 0,
3245 kArmv8A64InstrShift_Lsr,
3246 kArmv8A64InstrShift_Asr,
3247 kArmv8A64InstrShift_Ror
3248} ARMV8A64INSTRSHIFT;
3249
3250
3251/**
3252 * A64: Encodes a logical instruction with a shifted 2nd register operand.
3253 *
3254 * @returns The encoded instruction.
3255 * @param u2Opc The logical operation to perform.
3256 * @param fNot Whether to complement the 2nd operand.
3257 * @param iRegResult The output register.
3258 * @param iReg1 The 1st register operand.
3259 * @param iReg2Shifted The 2nd register operand, to which the optional
3260 * shifting is applied.
3261 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit
3262 * GPRs.
3263 * @param offShift6 The shift amount (default: none).
3264 * @param enmShift The shift operation (default: LSL).
3265 */
3266DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalShiftedReg(uint32_t u2Opc, bool fNot,
3267 uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted,
3268 bool f64Bit, uint32_t offShift6, ARMV8A64INSTRSHIFT enmShift)
3269{
3270 Assert(u2Opc < 4); Assert(offShift6 < (f64Bit ? UINT32_C(64) : UINT32_C(32)));
3271 Assert(iRegResult < 32); Assert(iReg1 < 32); Assert(iReg2Shifted < 32);
3272 return ((uint32_t)f64Bit << 31)
3273 | (u2Opc << 29)
3274 | UINT32_C(0x0a000000)
3275 | ((uint32_t)enmShift << 22)
3276 | ((uint32_t)fNot << 21)
3277 | (iReg2Shifted << 16)
3278 | (offShift6 << 10)
3279 | (iReg1 << 5)
3280 | iRegResult;
3281}
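
/* Usage sketch (illustrative only): the wrappers below fill in u2Opc and fNot; calling the worker
 * directly with u2Opc=1 (ORR) and a shifted second operand encodes "orr x0, x1, x2, lsl #4":
 *      uint32_t const uInstr = Armv8A64MkInstrLogicalShiftedReg(1, false, ARMV8_A64_REG_X0,
 *                                                               ARMV8_A64_REG_X1, ARMV8_A64_REG_X2,
 *                                                               true, 4, kArmv8A64InstrShift_Lsl);
 */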
3282
3283
3284/** A64: Encodes an AND instruction.
3285 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3286DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnd(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3287 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3288{
3289 return Armv8A64MkInstrLogicalShiftedReg(0, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3290}
3291
3292
3293/** A64: Encodes a BIC instruction.
3294 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3295DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBic(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3296 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3297{
3298 return Armv8A64MkInstrLogicalShiftedReg(0, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3299}
3300
3301
3302/** A64: Encodes an ORR instruction.
3303 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3304DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrr(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3305 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3306{
3307 return Armv8A64MkInstrLogicalShiftedReg(1, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3308}
3309
3310
3311/** A64: Encodes a MOV instruction.
3312 * This is an alias for "orr dst, xzr, src". */
3313DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMov(uint32_t iRegResult, uint32_t idxRegSrc, bool f64Bit = true)
3314{
3315 return Armv8A64MkInstrOrr(iRegResult, ARMV8_A64_REG_XZR, idxRegSrc, f64Bit);
3316}
3317
3318
3319/** A64: Encodes an ORN instruction.
3320 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3321DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrn(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3322 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3323{
3324 return Armv8A64MkInstrLogicalShiftedReg(1, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3325}
3326
3327
3328/** A64: Encodes an EOR instruction.
3329 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3330DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEor(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3331 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3332{
3333 return Armv8A64MkInstrLogicalShiftedReg(2, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3334}
3335
3336
3337/** A64: Encodes an EON instruction.
3338 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3339DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEon(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3340 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3341{
3342 return Armv8A64MkInstrLogicalShiftedReg(2, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3343}
3344
3345
3346/** A64: Encodes an ANDS instruction.
3347 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3348DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnds(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3349 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3350{
3351 return Armv8A64MkInstrLogicalShiftedReg(3, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3352}
3353
3354
3355/** A64: Encodes a BICS instruction.
3356 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3357DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBics(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3358 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3359{
3360 return Armv8A64MkInstrLogicalShiftedReg(3, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3361}
3362
3363
3364
3365/*
3366 * Data processing instructions with two source register operands.
3367 */
3368
3369
3370/** A64: Encodes a SUBP instruction. */
3371DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubP(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
3372{
3373 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
3374 return UINT32_C(0x80000000)
3375 | UINT32_C(0x1ac00000)
3376 | (UINT32_C(0) << 10)
3377 | (iRegSubtrahend << 16)
3378 | (iRegMinuend << 5)
3379 | iRegResult;
3380}
3381
3382
3383/** A64: Encodes a SUBPS instruction. */
3384DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubPS(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
3385{
3386 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
3387 return UINT32_C(0x80000000)
3388 | UINT32_C(0x20000000)
3389 | UINT32_C(0x1ac00000)
3390 | (UINT32_C(0) << 10)
3391 | (iRegSubtrahend << 16)
3392 | (iRegMinuend << 5)
3393 | iRegResult;
3394}
3395
3396
3397/** A64: Encodes an UDIV instruction. */
3398DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
3399{
3400 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
3401 return ((uint32_t)f64Bit << 31)
3402 | UINT32_C(0x1ac00000)
3403 | (UINT32_C(2) << 10)
3404 | (iRegDivisor << 16)
3405 | (iRegDividend << 5)
3406 | iRegResult;
3407}
3408
3409
3410/** A64: Encodes an SDIV instruction. */
3411DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
3412{
3413 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
3414 return ((uint32_t)f64Bit << 31)
3415 | UINT32_C(0x1ac00000)
3416 | (UINT32_C(3) << 10)
3417 | (iRegDivisor << 16)
3418 | (iRegDividend << 5)
3419 | iRegResult;
3420}
3421
3422
3423/** A64: Encodes an IRG instruction. */
3424DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrIrg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3425{
3426 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3427 return UINT32_C(0x80000000)
3428 | UINT32_C(0x1ac00000)
3429 | (UINT32_C(4) << 10)
3430 | (iRegSrc2 << 16)
3431 | (iRegSrc1 << 5)
3432 | iRegResult;
3433}
3434
3435
3436/** A64: Encodes a GMI instruction. */
3437DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrGmi(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3438{
3439 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3440 return UINT32_C(0x80000000)
3441 | UINT32_C(0x1ac00000)
3442 | (UINT32_C(5) << 10)
3443 | (iRegSrc2 << 16)
3444 | (iRegSrc1 << 5)
3445 | iRegResult;
3446}
3447
3448
3449/** A64: Encodes an LSLV instruction. */
3450DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3451{
3452 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3453 return ((uint32_t)f64Bit << 31)
3454 | UINT32_C(0x1ac00000)
3455 | (UINT32_C(8) << 10)
3456 | (iRegCount << 16)
3457 | (iRegSrc << 5)
3458 | iRegResult;
3459}
3460
3461
3462/** A64: Encodes an LSRV instruction. */
3463DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3464{
3465 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3466 return ((uint32_t)f64Bit << 31)
3467 | UINT32_C(0x1ac00000)
3468 | (UINT32_C(9) << 10)
3469 | (iRegCount << 16)
3470 | (iRegSrc << 5)
3471 | iRegResult;
3472}
3473
3474
3475/** A64: Encodes an ASRV instruction. */
3476DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3477{
3478 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3479 return ((uint32_t)f64Bit << 31)
3480 | UINT32_C(0x1ac00000)
3481 | (UINT32_C(10) << 10)
3482 | (iRegCount << 16)
3483 | (iRegSrc << 5)
3484 | iRegResult;
3485}
3486
3487
3488/** A64: Encodes a RORV instruction. */
3489DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3490{
3491 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3492 return ((uint32_t)f64Bit << 31)
3493 | UINT32_C(0x1ac00000)
3494 | (UINT32_C(11) << 10)
3495 | (iRegCount << 16)
3496 | (iRegSrc << 5)
3497 | iRegResult;
3498}
3499
3500
3501/** A64: Encodes a PACGA instruction. */
3502DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrPacga(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3503{
3504 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3505 return UINT32_C(0x80000000)
3506 | UINT32_C(0x1ac00000)
3507 | (UINT32_C(12) << 10)
3508 | (iRegSrc2 << 16)
3509 | (iRegSrc1 << 5)
3510 | iRegResult;
3511}
3512
3513
3514/** A64: Encodes a CRC32* instruction. */
3515DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
3516{
3517 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
3518 return ((uint32_t)(uSize == 3) << 31)
3519 | UINT32_C(0x1ac00000)
3520 | (UINT32_C(16) << 10)
3521 | (uSize << 10)
3522 | (iRegValue << 16)
3523 | (iRegCrc << 5)
3524 | iRegResult;
3525}
3526
3527
3528/** A64: Encodes a CRC32B instruction. */
3529DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32B(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3530{
3531 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 0);
3532}
3533
3534
3535/** A64: Encodes a CRC32H instruction. */
3536DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32H(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3537{
3538 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 1);
3539}
3540
3541
3542/** A64: Encodes a CRC32W instruction. */
3543DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32W(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3544{
3545 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 2);
3546}
3547
3548
3549/** A64: Encodes a CRC32X instruction. */
3550DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32X(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3551{
3552 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 3);
3553}
3554
3555
3556/** A64: Encodes a CRC32C* instruction. */
3557DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32c(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
3558{
3559 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
3560 return ((uint32_t)(uSize == 3) << 31)
3561 | UINT32_C(0x1ac00000)
3562 | (UINT32_C(20) << 10)
3563 | (uSize << 10)
3564 | (iRegValue << 16)
3565 | (iRegCrc << 5)
3566 | iRegResult;
3567}
3568
3569
3570/** A64: Encodes a CRC32CB instruction. */
3571DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cB(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3572{
3573 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 0);
3574}
3575
3576
3577/** A64: Encodes a CRC32CH instruction. */
3578DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cH(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3579{
3580 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 1);
3581}
3582
3583
3584/** A64: Encodes a CRC32CW instruction. */
3585DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cW(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3586{
3587 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 2);
3588}
3589
3590
3591/** A64: Encodes a CRC32CX instruction. */
3592DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cX(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3593{
3594 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 3);
3595}
3596
3597
3598/** A64: Encodes an SMAX instruction. */
3599DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3600{
3601 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3602 return ((uint32_t)f64Bit << 31)
3603 | UINT32_C(0x1ac00000)
3604 | (UINT32_C(24) << 10)
3605 | (iRegSrc2 << 16)
3606 | (iRegSrc1 << 5)
3607 | iRegResult;
3608}
3609
3610
3611/** A64: Encodes an UMAX instruction. */
3612DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3613{
3614 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3615 return ((uint32_t)f64Bit << 31)
3616 | UINT32_C(0x1ac00000)
3617 | (UINT32_C(25) << 10)
3618 | (iRegSrc2 << 16)
3619 | (iRegSrc1 << 5)
3620 | iRegResult;
3621}
3622
3623
3624/** A64: Encodes an SMIN instruction. */
3625DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3626{
3627 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3628 return ((uint32_t)f64Bit << 31)
3629 | UINT32_C(0x1ac00000)
3630 | (UINT32_C(26) << 10)
3631 | (iRegSrc2 << 16)
3632 | (iRegSrc1 << 5)
3633 | iRegResult;
3634}
3635
3636
3637/** A64: Encodes an UMIN instruction. */
3638DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3639{
3640 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3641 return ((uint32_t)f64Bit << 31)
3642 | UINT32_C(0x1ac00000)
3643 | (UINT32_C(27) << 10)
3644 | (iRegSrc2 << 16)
3645 | (iRegSrc1 << 5)
3646 | iRegResult;
3647}
3648
3649
3650# ifdef IPRT_INCLUDED_asm_h /* don't want this to be automatically included here. */
3651
3652/**
3653 * Converts immS and immR values (to logical instructions) to a 32-bit mask.
3654 *
3655 * @returns The decoded mask.
3656 * @param uImm6SizeLen The immS value from the instruction. (No N part
3657 * here, as that must be zero for instructions
3658 * operating on 32-bit wide registers.)
3659 * @param uImm6Rotations The immR value from the instruction.
3660 */
3661DECLINLINE(uint32_t) Armv8A64ConvertImmRImmS2Mask32(uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3662{
3663 Assert(uImm6SizeLen < 64); Assert(uImm6Rotations < 64);
3664
3665 /* Determine the element size. */
3666 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm6SizeLen ^ 0x3f) - 1U;
3667 Assert(cBitsElementLog2 + 1U != 0U);
3668
3669 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3670 Assert(uImm6Rotations < cBitsElement);
3671
3672 /* Extract the number of bits set to 1: */
3673 unsigned const cBitsSetTo1 = (uImm6SizeLen & (cBitsElement - 1U)) + 1;
3674 Assert(cBitsSetTo1 < cBitsElement);
3675 uint32_t const uElement = RT_BIT_32(cBitsSetTo1) - 1U;
3676
3677 /* Produce the unrotated pattern. */
3678 static const uint32_t s_auReplicate[]
3679 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3680 uint32_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3681
3682 /* Rotate it and return. */
3683 return ASMRotateRightU32(uPattern, uImm6Rotations & (cBitsElement - 1U));
3684}
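
/* Worked example (illustrative only): immS=0x07 selects a 32-bit element with 8 bits set, so with
 * no rotation the decoded mask is 0x000000ff; immR=8 rotates that pattern right by 8 bits:
 *      uint32_t const fMask1 = Armv8A64ConvertImmRImmS2Mask32(0x07, 0);  // 0x000000ff
 *      uint32_t const fMask2 = Armv8A64ConvertImmRImmS2Mask32(0x07, 8);  // 0xff000000
 */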
3685
3686
3687/**
3688 * Converts N+immS and immR values (to logical instructions) to a 64-bit mask.
3689 *
3690 * @returns The decoded mask.
3691 * @param uImm7SizeLen The N:immS value from the instruction.
3692 * @param uImm6Rotations The immR value from the instruction.
3693 */
3694DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uImm7SizeLen, uint32_t uImm6Rotations)
3695{
3696 Assert(uImm7SizeLen < 128); Assert(uImm6Rotations < 64);
3697
3698 /* Determine the element size. */
3699 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm7SizeLen ^ 0x3f) - 1U;
3700 Assert(cBitsElementLog2 + 1U != 0U);
3701
3702 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3703 Assert(uImm6Rotations < cBitsElement);
3704
3705 /* Extract the number of bits set to 1: */
3706 unsigned const cBitsSetTo1 = (uImm7SizeLen & (cBitsElement - 1U)) + 1;
3707 Assert(cBitsSetTo1 < cBitsElement);
3708 uint64_t const uElement = RT_BIT_64(cBitsSetTo1) - 1U;
3709
3710 /* Produce the unrotated pattern. */
3711 static const uint64_t s_auReplicate[]
3712 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3713 uint64_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3714
3715 /* Rotate it and return. */
3716 return ASMRotateRightU64(uPattern, uImm6Rotations & (cBitsElement - 1U));
3717}
3718
3719
3720/**
3721 * Variant of Armv8A64ConvertImmRImmS2Mask64 where the N bit is separate from
3722 * the immS value.
3723 */
3724DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uN, uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3725{
3726 return Armv8A64ConvertImmRImmS2Mask64((uN << 6) | uImm6SizeLen, uImm6Rotations);
3727}
3728
3729
3730/**
3731 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3732 * 32-bit bitmask to a set of immediates for those instructions.
3733 *
3734 * @returns true if successful, false if not.
3735 * @param fMask The mask value to convert.
3736 * @param puImm6SizeLen Where to return the immS part (N is always zero for
3737 * 32-bit wide masks).
3738 * @param puImm6Rotations Where to return the immR.
3739 */
3740DECLINLINE(bool) Armv8A64ConvertMask32ToImmRImmS(uint32_t fMask, uint32_t *puImm6SizeLen, uint32_t *puImm6Rotations)
3741{
3742 /* Fend off 0 and UINT32_MAX as these cannot be represented. */
3743 if ((uint32_t)(fMask + 1U) <= 1)
3744 return false;
3745
3746    /* Rotate the value until we get all 1s at the bottom and the zeros at the top. */
3747 unsigned const cRor = ASMCountTrailingZerosU32(fMask);
3748 unsigned const cRol = ASMCountLeadingZerosU32(~fMask);
3749 if (cRor)
3750 fMask = ASMRotateRightU32(fMask, cRor);
3751 else
3752 fMask = ASMRotateLeftU32(fMask, cRol);
3753 Assert(fMask & RT_BIT_32(0));
3754 Assert(!(fMask & RT_BIT_32(31)));
3755
3756 /* Count the trailing ones and leading zeros. */
3757 unsigned const cOnes = ASMCountTrailingZerosU32(~fMask);
3758 unsigned const cZeros = ASMCountLeadingZerosU32(fMask);
3759
3760 /* The potential element length is then the sum of the two above. */
3761 unsigned const cBitsElement = cOnes + cZeros;
3762 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3763 return false;
3764
3765    /* Special case: 32-bit element size; no replication check needed. */
3766 if (cBitsElement == 32)
3767 *puImm6SizeLen = cOnes - 1;
3768 else
3769 {
3770 /* Extract the element bits and check that these are replicated in the whole pattern. */
3771 uint32_t const uElement = RT_BIT_32(cOnes) - 1U;
3772 unsigned const cBitsElementLog2 = ASMBitFirstSetU32(cBitsElement) - 1;
3773
3774 static const uint32_t s_auReplicate[]
3775 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3776 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3777 *puImm6SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3778 else
3779 return false;
3780 }
3781 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3782
3783 return true;
3784}
3785
3786
3787/**
3788 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3789 * 64-bit bitmask to a set of immediates for those instructions.
3790 *
3791 * @returns true if successful, false if not.
3792 * @param fMask The mask value to convert.
3793 * @param puImm7SizeLen Where to return the N:immS part.
3794 * @param puImm6Rotations Where to return the immR.
3795 */
3796DECLINLINE(bool) Armv8A64ConvertMask64ToImmRImmS(uint64_t fMask, uint32_t *puImm7SizeLen, uint32_t *puImm6Rotations)
3797{
3798 /* Fend off 0 and UINT64_MAX as these cannot be represented. */
3799 if ((uint64_t)(fMask + 1U) <= 1)
3800 return false;
3801
3802    /* Rotate the value until we get all 1s at the bottom and the zeros at the top. */
3803 unsigned const cRor = ASMCountTrailingZerosU64(fMask);
3804 unsigned const cRol = ASMCountLeadingZerosU64(~fMask);
3805 if (cRor)
3806 fMask = ASMRotateRightU64(fMask, cRor);
3807 else
3808 fMask = ASMRotateLeftU64(fMask, cRol);
3809 Assert(fMask & RT_BIT_64(0));
3810 Assert(!(fMask & RT_BIT_64(63)));
3811
3812 /* Count the trailing ones and leading zeros. */
3813 unsigned const cOnes = ASMCountTrailingZerosU64(~fMask);
3814 unsigned const cZeros = ASMCountLeadingZerosU64(fMask);
3815
3816 /* The potential element length is then the sum of the two above. */
3817 unsigned const cBitsElement = cOnes + cZeros;
3818 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3819 return false;
3820
3821    /* Special case: 64-bit element size; no replication check needed. */
3822 if (cBitsElement == 64)
3823 *puImm7SizeLen = (cOnes - 1) | 0x40 /*N*/;
3824 else
3825 {
3826 /* Extract the element bits and check that these are replicated in the whole pattern. */
3827 uint64_t const uElement = RT_BIT_64(cOnes) - 1U;
3828 unsigned const cBitsElementLog2 = ASMBitFirstSetU64(cBitsElement) - 1;
3829
3830 static const uint64_t s_auReplicate[]
3831 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3832 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3833 *puImm7SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3834 else
3835 return false;
3836 }
3837 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3838
3839 return true;
3840}
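
/* Usage sketch (illustrative only): convert a replicated mask to immR/immS at runtime and feed the
 * result to Armv8A64MkInstrAndImm (defined further down) to encode "and x0, x1, #0x00ff00ff00ff00ff":
 *      uint32_t uImm7SizeLen, uImm6Rotations, uInstr = 0;
 *      if (Armv8A64ConvertMask64ToImmRImmS(UINT64_C(0x00ff00ff00ff00ff), &uImm7SizeLen, &uImm6Rotations))
 *          uInstr = Armv8A64MkInstrAndImm(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, uImm7SizeLen, uImm6Rotations);
 */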
3841
3842# endif /* IPRT_INCLUDED_asm_h */
3843
3844/**
3845 * A64: Encodes a logical instruction with a complicated immediate mask.
3846 *
3847 * The @a uImm7SizeLen parameter specifies two things:
3848 * 1. the element size and
3849 * 2. the number of bits set to 1 in the pattern.
3850 *
3851 * The element size is extracted by NOT'ing bits 5:0 (excludes the N bit at the
3852 * top) and using the position of the first bit set as a power of two.
3853 *
3854 * | N | 5 | 4 | 3 | 2 | 1 | 0 | element size |
3855 * |---|---|---|---|---|---|---|--------------|
3856 * | 0 | 1 | 1 | 1 | 1 | 0 | x | 2 bits |
3857 * | 0 | 1 | 1 | 1 | 0 | x | x | 4 bits |
3858 * | 0 | 1 | 1 | 0 | x | x | x | 8 bits |
3859 * | 0 | 1 | 0 | x | x | x | x | 16 bits |
3860 * | 0 | 0 | x | x | x | x | x | 32 bits |
3861 * | 1 | x | x | x | x | x | x | 64 bits |
3862 *
3863 * The 'x' forms the number of 1 bits in the pattern, minus one (i.e.
3864 * there is always one zero bit in the pattern).
3865 *
3866 * The @a uImm6Rotations parameter specifies how many bits to the right,
3867 * the element pattern is rotated. The rotation count must be less than the
3868 * element bit count (size).
3869 *
3870 * @returns The encoded instruction.
3871 * @param u2Opc The logical operation to perform.
3872 * @param iRegResult The output register.
3873 * @param iRegSrc The 1st register operand.
3874 * @param uImm7SizeLen The size/pattern length. This combines the 1-bit N
3875 * field (bit 6) with the 6-bit 'imms' field.
3876 *
3877 * @param uImm6Rotations The rotation count.
3878 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3879 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3880 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3881 */
3882DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3883 uint32_t uImm7SizeLen, uint32_t uImm6Rotations, bool f64Bit)
3884{
3885 Assert(u2Opc < 4); Assert(uImm7SizeLen < (f64Bit ? UINT32_C(0x7f) : UINT32_C(0x3f)));
3886 Assert(uImm6Rotations <= UINT32_C(0x3f)); Assert(iRegResult < 32); Assert(iRegSrc < 32);
3887 return ((uint32_t)f64Bit << 31)
3888 | (u2Opc << 29)
3889 | UINT32_C(0x12000000)
3890 | ((uImm7SizeLen & UINT32_C(0x40)) << (22 - 6))
3891 | (uImm6Rotations << 16)
3892 | ((uImm7SizeLen & UINT32_C(0x3f)) << 10)
3893 | (iRegSrc << 5)
3894 | iRegResult;
3895}
3896
3897
3898/** A64: Encodes an AND instruction w/ complicated immediate mask.
3899 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3900DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndImm(uint32_t iRegResult, uint32_t iRegSrc,
3901 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3902{
3903 return Armv8A64MkInstrLogicalImm(0, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3904}
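
/* Usage sketch (illustrative only): per the table above, a 32-bit element with 8 bits set to 1 is
 * uImm7SizeLen=0x07 (N=0, '00xxxxx' with xxxxx=7), so this encodes "and w0, w1, #0xff":
 *      uint32_t const uInstr = Armv8A64MkInstrAndImm(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1,
 *                                                    0x07, 0, false);  // f64Bit=false
 */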
3905
3906
3907/** A64: Encodes an ORR instruction w/ complicated immediate mask.
3908 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3909DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrrImm(uint32_t iRegResult, uint32_t iRegSrc,
3910 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3911{
3912 return Armv8A64MkInstrLogicalImm(1, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3913}
3914
3915
3916/** A64: Encodes an EOR instruction w/ complicated immediate mask.
3917 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3918DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEorImm(uint32_t iRegResult, uint32_t iRegSrc,
3919 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3920{
3921 return Armv8A64MkInstrLogicalImm(2, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3922}
3923
3924
3925/** A64: Encodes an ANDS instruction w/ complicated immediate mask.
3926 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3927DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndsImm(uint32_t iRegResult, uint32_t iRegSrc,
3928 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3929{
3930 return Armv8A64MkInstrLogicalImm(3, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3931}
3932
3933
3934/** A64: Encodes a TST instruction w/ complicated immediate mask.
3935 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3936DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTstImm(uint32_t iRegSrc,
3937 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3938{
3939 return Armv8A64MkInstrAndsImm(ARMV8_A64_REG_XZR, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3940}
3941
3942
3943/**
3944 * A64: Encodes a bitfield instruction.
3945 *
3946 * @returns The encoded instruction.
3947 * @param u2Opc The bitfield operation to perform.
3948 * @param iRegResult The output register.
3949 * @param iRegSrc The 1st register operand.
3950 * @param cImm6Ror The right rotation count.
3951 * @param uImm6S The leftmost bit to be moved.
3952 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3953 * @param uN1 This must match @a f64Bit for all instructions
3954 * currently specified.
3955 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3956 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3957 */
3958DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBitfieldImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3959 uint32_t cImm6Ror, uint32_t uImm6S, bool f64Bit, uint32_t uN1)
3960{
3961 Assert(cImm6Ror <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegResult < 32); Assert(u2Opc < 4);
3962 Assert(uImm6S <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegSrc < 32); Assert(uN1 <= (unsigned)f64Bit);
3963 return ((uint32_t)f64Bit << 31)
3964 | (u2Opc << 29)
3965 | UINT32_C(0x13000000)
3966 | (uN1 << 22)
3967 | (cImm6Ror << 16)
3968 | (uImm6S << 10)
3969 | (iRegSrc << 5)
3970 | iRegResult;
3971}
3972
3973
3974/** A64: Encodes a SBFM instruction.
3975 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3976DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
3977 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
3978{
3979 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
3980}
3981
3982
3983/** A64: Encodes a SXTB instruction (sign-extend 8-bit value to 32/64-bit).
3984 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3985DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3986{
3987 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 7, f64Bit);
3988}
3989
3990
3991/** A64: Encodes a SXTH instruction (sign-extend 16-bit value to 32/64-bit).
3992 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3993DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3994{
3995 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 15, f64Bit);
3996}
3997
3998
3999/** A64: Encodes a SXTW instruction (sign-extend 32-bit value to 64-bit).
4000 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4001DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtw(uint32_t iRegResult, uint32_t iRegSrc)
4002{
4003 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 31, true /*f64Bit*/);
4004}
4005
4006
4007/** A64: Encodes an ASR instruction w/ immediate shift value.
4008 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4009DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4010{
4011 uint32_t const cWidth = f64Bit ? 63 : 31;
4012 Assert(cShift > 0); Assert(cShift <= cWidth);
4013 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
4014}
4015
4016
4017/** A64: Encodes a BFM instruction.
4018 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4019DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
4020 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
4021{
4022 return Armv8A64MkInstrBitfieldImm(1, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
4023}
4024
4025
4026/** A64: Encodes a BFI instruction (insert).
4027 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4028DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfi(uint32_t iRegResult, uint32_t iRegSrc,
4029 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4030{
4031 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
4032 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)-(int32_t)offFirstBit & (f64Bit ? 0x3f : 0x1f),
4033 cBitsWidth - 1, f64Bit);
4034}
4035
4036
4037/** A64: Encodes a BFC instruction (clear).
4038 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4039DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfc(uint32_t iRegResult,
4040 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4041{
4042 return Armv8A64MkInstrBfi(iRegResult, ARMV8_A64_REG_XZR, offFirstBit, cBitsWidth, f64Bit);
4043}
4044
4045
4046/** A64: Encodes a BFXIL instruction (insert low).
4047 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4048DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfxil(uint32_t iRegResult, uint32_t iRegSrc,
4049 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4050{
4051 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
4052 Assert(offFirstBit + cBitsWidth <= (f64Bit ? 64U : 32U));
4053 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
4054}
4055
4056
4057/** A64: Encodes an UBFM instruction.
4058 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4059DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
4060 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
4061{
4062 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
4063}
4064
4065
4066/** A64: Encodes an UBFX instruction (zero extending extract).
4067 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4068DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfx(uint32_t iRegResult, uint32_t iRegSrc,
4069 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4070{
4071 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
4072}
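
/* Usage sketch (illustrative only): extract bits 15:8 of x1 into the low bits of x0, i.e.
 * "ubfx x0, x1, #8, #8" (offFirstBit=8, cBitsWidth=8):
 *      uint32_t const uInstr = Armv8A64MkInstrUbfx(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, 8, 8);
 */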
4073
4074
4075/** A64: Encodes an UBFIZ instruction (zero extending extract from bit zero,
4076 * shifted into destination).
4077 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4078DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfiz(uint32_t iRegResult, uint32_t iRegSrc,
4079 uint32_t offFirstBitDst, uint32_t cBitsWidth, bool f64Bit = true)
4080{
4081 uint32_t fMask = f64Bit ? 0x3f : 0x1f;
4082 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, -(int32_t)offFirstBitDst & fMask, cBitsWidth - 1, f64Bit);
4083}
4084
4085
4086/** A64: Encodes an LSL instruction w/ immediate shift value.
4087 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4088DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4089{
4090 uint32_t const cWidth = f64Bit ? 63 : 31;
4091 Assert(cShift > 0); Assert(cShift <= cWidth);
4092 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, (uint32_t)(0 - cShift) & cWidth,
4093 cWidth - cShift /*uImm6S*/, f64Bit, f64Bit);
4094}
4095
4096
4097/** A64: Encodes an LSR instruction w/ immediate shift value.
4098 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4099DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4100{
4101 uint32_t const cWidth = f64Bit ? 63 : 31;
4102 Assert(cShift > 0); Assert(cShift <= cWidth);
4103 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
4104}
4105
4106
4107/** A64: Encodes an UXTB instruction - zero extend byte (8-bit).
4108 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4109DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
4110{
4111 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 7, f64Bit, f64Bit);
4112}
4113
4114
4115/** A64: Encodes an UXTH instruction - zero extend half word (16-bit).
4116 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4117DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
4118{
4119 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 15, f64Bit, f64Bit);
4120}
4121
4122
4123/**
4124 * A64: Encodes an EXTR instruction with an immediate.
4125 *
4126 * @returns The encoded instruction.
4127 * @param iRegResult The register to store the result in. ZR is valid.
4128 * @param iRegLow The register holding the least significant bits in the
4129 * extraction. ZR is valid.
4130 * @param iRegHigh The register holding the most significant bits in the
4131 * extraction. ZR is valid.
4132 * @param uLsb The bit number of the least significant bit, or where in
4133 * @a iRegLow to start the
4134 * extraction.
4135 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4136 */
4137DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrExtrImm(uint32_t iRegResult, uint32_t iRegLow, uint32_t iRegHigh, uint32_t uLsb,
4138 bool f64Bit = true)
4139{
4140 Assert(uLsb < (uint32_t)(f64Bit ? 64 : 32)); Assert(iRegHigh < 32); Assert(iRegLow < 32); Assert(iRegResult < 32);
4141 return ((uint32_t)f64Bit << 31)
4142 | UINT32_C(0x13800000)
4143 | ((uint32_t)f64Bit << 22) /*N*/
4144 | (iRegHigh << 16)
4145 | (uLsb << 10)
4146 | (iRegLow << 5)
4147 | iRegResult;
4148}
4149
4150
4151/** A64: Rotates the value of a register (alias for EXTR). */
4152DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4153{
4154 return Armv8A64MkInstrExtrImm(iRegResult, iRegSrc, iRegSrc, cShift, f64Bit);
4155}
4156
4157
4158/**
4159 * A64: Encodes either add, adds, sub or subs with unsigned 12-bit immediate.
4160 *
4161 * @returns The encoded instruction.
4162 * @param fSub true for sub and subs, false for add and
4163 * adds.
4164 * @param iRegResult The register to store the result in.
4165 * SP is valid when @a fSetFlags = false,
4166 * and ZR is valid otherwise.
4167 * @param iRegSrc The register containing the augend (@a fSub
4168 * = false) or minuend (@a fSub = true). SP is
4169 * a valid register for all variations.
4170 * @param uImm12AddendSubtrahend The addend (@a fSub = false) or subtrahend
4171 * (@a fSub = true).
4172 * @param f64Bit true for 64-bit GPRs (default), false for
4173 * 32-bit GPRs.
4174 * @param fSetFlags Whether to set flags (adds / subs) or not
4175 * (add / sub - default).
4176 * @param fShift12 Whether to shift uImm12AddendSubtrahend 12
4177 * bits to the left, or not (default).
4178 */
4179DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubUImm12(bool fSub, uint32_t iRegResult, uint32_t iRegSrc,
4180 uint32_t uImm12AddendSubtrahend, bool f64Bit = true,
4181 bool fSetFlags = false, bool fShift12 = false)
4182{
4183 Assert(uImm12AddendSubtrahend < 4096); Assert(iRegSrc < 32); Assert(iRegResult < 32);
4184 return ((uint32_t)f64Bit << 31)
4185 | ((uint32_t)fSub << 30)
4186 | ((uint32_t)fSetFlags << 29)
4187 | UINT32_C(0x11000000)
4188 | ((uint32_t)fShift12 << 22)
4189 | (uImm12AddendSubtrahend << 10)
4190 | (iRegSrc << 5)
4191 | iRegResult;
4192}
4193
4194
4195/** Alias for sub xzr, reg, \#uimm12. */
4196DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpUImm12(uint32_t iRegSrc, uint32_t uImm12Comprahend,
4197 bool f64Bit = true, bool fShift12 = false)
4198{
4199 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc, uImm12Comprahend,
4200 f64Bit, true /*fSetFlags*/, fShift12);
4201}
4202
4203
4204/** ADD dst, src, \#uimm12 */
4205DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Addend,
4206 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
4207{
4208 return Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iRegResult, iRegSrc, uImm12Addend, f64Bit, fSetFlags, fShift12);
4209}
4210
4211
4212/** SUB dst, src, \#uimm12 */
4213DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Subtrahend,
4214 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
4215{
4216 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, iRegResult, iRegSrc, uImm12Subtrahend, f64Bit, fSetFlags, fShift12);
4217}
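
/* Usage sketch (illustrative only): immediates that are multiples of 4096 can be encoded by passing
 * the value shifted down and setting fShift12; this encodes "add x0, x1, #1, lsl #12" (x0 = x1 + 4096):
 *      uint32_t const uInstr = Armv8A64MkInstrAddUImm12(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1, 1,
 *                                                       true, false, true);  // f64Bit, fSetFlags, fShift12
 */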
4218
4219
4220/**
4221 * A64: Encodes either add, adds, sub or subs with shifted register.
4222 *
4223 * @returns The encoded instruction.
4224 * @param fSub true for sub and subs, false for add and
4225 * adds.
4226 * @param iRegResult The register to store the result in.
4227 * SP is NOT valid, but ZR is.
4228 * @param iRegSrc1 The register containing the augend (@a fSub
4229 * = false) or minuend (@a fSub = true).
4230 * SP is NOT valid, but ZR is.
4231 * @param iRegSrc2 The register containing the addend (@a fSub
4232 * = false) or subtrahend (@a fSub = true).
4233 * SP is NOT valid, but ZR is.
4234 * @param f64Bit true for 64-bit GPRs (default), false for
4235 * 32-bit GPRs.
4236 * @param fSetFlags Whether to set flags (adds / subs) or not
4237 * (add / sub - default).
4238 * @param cShift The shift count to apply to @a iRegSrc2.
4239 * @param enmShift The shift type to apply to the @a iRegSrc2
4240 * register. kArmv8A64InstrShift_Ror is
4241 * reserved.
4242 */
4243DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubReg(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4244 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4245 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4246{
4247 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4248 Assert(cShift < (f64Bit ? 64U : 32U)); Assert(enmShift != kArmv8A64InstrShift_Ror);
4249
4250 return ((uint32_t)f64Bit << 31)
4251 | ((uint32_t)fSub << 30)
4252 | ((uint32_t)fSetFlags << 29)
4253 | UINT32_C(0x0b000000)
4254 | ((uint32_t)enmShift << 22)
4255 | (iRegSrc2 << 16)
4256 | (cShift << 10)
4257 | (iRegSrc1 << 5)
4258 | iRegResult;
4259}
4260
4261
4262/** Alias for sub xzr, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx]. */
4263DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true, uint32_t cShift = 0,
4264 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4265{
4266 return Armv8A64MkInstrAddSubReg(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc1, iRegSrc2,
4267 f64Bit, true /*fSetFlags*/, cShift, enmShift);
4268}
4269
4270
4271/** ADD dst, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx] */
4272DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4273 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4274 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4275{
4276 return Armv8A64MkInstrAddSubReg(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
4277}
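
/* Usage sketch (illustrative only): a shifted register operand is handy for scaled index math;
 * this encodes "add x0, x1, x2, lsl #3" (x0 = x1 + x2 * 8):
 *      uint32_t const uInstr = Armv8A64MkInstrAddReg(ARMV8_A64_REG_X0, ARMV8_A64_REG_X1,
 *                                                    ARMV8_A64_REG_X2, true, false, 3);
 */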
4278
4279
4280/** SUB dst, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx] */
4281DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4282 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4283 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4284{
4285 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
4286}
4287
4288
4289/** NEG dst */
4290DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrNeg(uint32_t iRegResult, bool f64Bit = true, bool fSetFlags = false)
4291{
4292 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, ARMV8_A64_REG_XZR, iRegResult, f64Bit, fSetFlags);
4293}
4294
4295
4296/** Extension option for 'extended register' instructions. */
4297typedef enum ARMV8A64INSTREXTEND
4298{
4299 kArmv8A64InstrExtend_UxtB = 0,
4300 kArmv8A64InstrExtend_UxtH,
4301 kArmv8A64InstrExtend_UxtW,
4302 kArmv8A64InstrExtend_UxtX,
4303 kArmv8A64InstrExtend_SxtB,
4304 kArmv8A64InstrExtend_SxtH,
4305 kArmv8A64InstrExtend_SxtW,
4306 kArmv8A64InstrExtend_SxtX,
4307 /** The default is either UXTW or UXTX depending on whether the instruction
4308 * is in 32-bit or 64-bit mode. Thus, this needs to be resolved according
4309 * to the f64Bit value. */
4310 kArmv8A64InstrExtend_Default
4311} ARMV8A64INSTREXTEND;
4312
4313
4314/**
4315 * A64: Encodes either add, adds, sub or subs with extended register encoding.
4316 *
4317 * @returns The encoded instruction.
4318 * @param fSub true for sub and subs, false for add and
4319 * adds.
4320 * @param iRegResult The register to store the result in.
4321 * SP is NOT valid, but ZR is.
4322 * @param iRegSrc1 The register containing the augend (@a fSub
4323 * = false) or minuend (@a fSub = true).
4324 * SP is valid, but ZR is NOT.
4325 * @param iRegSrc2 The register containing the addend (@a fSub
4326 * = false) or subtrahend (@a fSub = true).
4327 * SP is NOT valid, but ZR is.
4328 * @param f64Bit true for 64-bit GPRs (default), false for
4329 * 32-bit GPRs.
4330 * @param fSetFlags Whether to set flags (adds / subs) or not
4331 * (add / sub - default).
4332 * @param enmExtend The type of extension to apply to @a
4333 * iRegSrc2.
4334 * @param cShift The left shift count to apply to @a iRegSrc2
4335 * after enmExtend processing is done.
4336 * Max shift is 4 for some reason.
4337 */
4338DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubRegExtend(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4339 bool f64Bit = true, bool fSetFlags = false,
4340 ARMV8A64INSTREXTEND enmExtend = kArmv8A64InstrExtend_Default,
4341 uint32_t cShift = 0)
4342{
4343 if (enmExtend == kArmv8A64InstrExtend_Default)
4344        enmExtend = f64Bit ? kArmv8A64InstrExtend_UxtX : kArmv8A64InstrExtend_UxtW;
4345 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(cShift <= 4);
4346
4347 return ((uint32_t)f64Bit << 31)
4348 | ((uint32_t)fSub << 30)
4349 | ((uint32_t)fSetFlags << 29)
4350 | UINT32_C(0x0b200000)
4351 | (iRegSrc2 << 16)
4352 | ((uint32_t)enmExtend << 13)
4353 | (cShift << 10)
4354 | (iRegSrc1 << 5)
4355 | iRegResult;
4356}
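
/* Usage sketch (illustrative only): the extended-register form zero- or sign-extends the second
 * operand before an optional left shift of up to 4; this encodes "add x0, x1, w2, uxtw #3":
 *      uint32_t const uInstr = Armv8A64MkInstrAddSubRegExtend(false, ARMV8_A64_REG_X0, ARMV8_A64_REG_X1,
 *                                                             ARMV8_A64_REG_X2, true, false,
 *                                                             kArmv8A64InstrExtend_UxtW, 3);
 */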
4357
4358
4359/**
4360 * A64: Encodes either adc, adcs, sbc or sbcs with two source registers.
4361 *
4362 * @returns The encoded instruction.
4363 * @param fSub true for sbc and sbcs, false for adc and
4364 * adcs.
4365 * @param iRegResult The register to store the result in. SP is
4366 * NOT valid, but ZR is.
4367 * @param iRegSrc1 The register containing the augend (@a fSub
4368 * = false) or minuend (@a fSub = true).
4369 * SP is NOT valid, but ZR is.
4370 * @param iRegSrc2 The register containing the addend (@a fSub
4371 * = false) or subtrahend (@a fSub = true).
4372 * SP is NOT valid, but ZR is.
4373 * @param f64Bit true for 64-bit GPRs (default), false for
4374 * 32-bit GPRs.
4375 * @param fSetFlags Whether to set flags (adds / subs) or not
4376 * (add / sub - default).
4377 */
4378DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcSbc(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4379 bool f64Bit = true, bool fSetFlags = false)
4380{
4381 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4382
4383 return ((uint32_t)f64Bit << 31)
4384 | ((uint32_t)fSub << 30)
4385 | ((uint32_t)fSetFlags << 29)
4386 | UINT32_C(0x1a000000)
4387 | (iRegSrc2 << 16)
4388 | (iRegSrc1 << 5)
4389 | iRegResult;
4390}
4391
4392
4393/** ADC dst, reg1, reg2 */
4394DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4395 bool f64Bit = true, bool fSetFlags = false)
4396{
4397 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
4398}
4399
4400
4401/** ADCS dst, reg1, reg2 */
4402DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
4403{
4404 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
4405}
4406
4407
4408/** SBC dst, reg1, reg2 */
4409DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4410 bool f64Bit = true, bool fSetFlags = false)
4411{
4412 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
4413}
4414
4415
4416/** SBCS dst, reg1, reg2 */
4417DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
4418{
4419 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
4420}
4421
4422
4423/**
4424 * A64: Encodes a B (unconditional branch w/ imm) instruction.
4425 *
4426 * @returns The encoded instruction.
4427 * @param iImm26 Signed number of instructions to jump (i.e. *4).
4428 */
4429DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrB(int32_t iImm26)
4430{
4431 Assert(iImm26 >= -67108864 && iImm26 < 67108864);
4432 return UINT32_C(0x14000000) | ((uint32_t)iImm26 & UINT32_C(0x3ffffff));
4433}
4434
4435
4436/**
4437 * A64: Encodes a BL (unconditional call w/ imm) instruction.
4438 *
4439 * @returns The encoded instruction.
4440 * @param iImm26 Signed number of instructions to jump (i.e. *4).
4441 */
4442DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBl(int32_t iImm26)
4443{
4444 return Armv8A64MkInstrB(iImm26) | RT_BIT_32(31);
4445}
4446
4447
4448/**
4449 * A64: Encodes a BR (unconditional branch w/ register) instruction.
4450 *
4451 * @returns The encoded instruction.
4452 * @param iReg The register containing the target address.
4453 */
4454DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBr(uint32_t iReg)
4455{
4456 Assert(iReg < 32);
4457 return UINT32_C(0xd61f0000) | (iReg << 5);
4458}
4459
4460
4461/**
4462 * A64: Encodes a BLR instruction.
4463 *
4464 * @returns The encoded instruction.
4465 * @param iReg The register containing the target address.
4466 */
4467DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBlr(uint32_t iReg)
4468{
4469 return Armv8A64MkInstrBr(iReg) | RT_BIT_32(21);
4470}
4471
4472
4473/**
4474 * A64: Encodes CBZ and CBNZ (conditional branch w/ immediate) instructions.
4475 *
4476 * @returns The encoded instruction.
4477 * @param fJmpIfNotZero false to jump if the register is zero, true to jump
4478 * if it is not zero.
4479 * @param iImm19 Signed number of instructions to jump (i.e. *4).
4480 * @param iReg The GPR to check for zero / non-zero value.
4481 * @param f64Bit true for 64-bit register, false for 32-bit.
4482 */
4483DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbzCbnz(bool fJmpIfNotZero, int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4484{
4485 Assert(iReg < 32); Assert(iImm19 >= -262144 && iImm19 < 262144);
4486 return ((uint32_t)f64Bit << 31)
4487 | UINT32_C(0x34000000)
4488 | ((uint32_t)fJmpIfNotZero << 24)
4489 | (((uint32_t)iImm19 & 0x7ffff) << 5)
4490 | iReg;
4491}
4492
4493
4494/** A64: Encodes the CBZ instructions. */
4495DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4496{
4497 return Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
4498}
4499
4500
4501/** A64: Encodes the CBNZ instructions. */
4502DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbnz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4503{
4504 return Armv8A64MkInstrCbzCbnz(true /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
4505}
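/* Example (illustrative): skip the following instruction when x0 is zero:
       Armv8A64MkInstrCbz(2, ARMV8_A64_REG_X0);    // cbz x0, .+8
   The immediate is in units of instructions, so 2 equals a byte offset of 8. */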
4506
4507
4508/**
4509 * A64: Encodes TBZ and TBNZ (conditional branch w/ immediate) instructions.
4510 *
4511 * @returns The encoded instruction.
4512 * @param fJmpIfNotZero false to jump if the tested bit is zero, true to
4513 * jump if it is set.
4514 * @param iImm14 Signed number of instructions to jump (i.e. *4).
4515 * @param iReg The GPR containing the bit to test.
4516 * @param iBitNo The bit number to test.
4517 */
4518DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbzTbnz(bool fJmpIfNotZero, int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4519{
4520 Assert(iReg < 32); Assert(iImm14 >= -8192 && iImm14 < 8192); Assert(iBitNo < 64);
4521 return ((uint32_t)(iBitNo & 0x20) << (31-5))
4522 | UINT32_C(0x36000000)
4523 | ((uint32_t)fJmpIfNotZero << 24)
4524 | ((iBitNo & 0x1f) << 19)
4525 | (((uint32_t)iImm14 & 0x3fff) << 5)
4526 | iReg;
4527}
4528
4529
4530/**
4531 * A64: Encodes TBZ (conditional branch w/ immediate) instructions.
4532 *
4533 * @returns The encoded instruction.
4534 * @param iImm14 Signed number of instructions to jump (i.e. *4).
4535 * @param iReg The GPR containing the bit to test.
4536 * @param iBitNo The bit number to test.
4537 */
4538DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbz(int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4539{
4540 return Armv8A64MkInstrTbzTbnz(false /*fJmpIfNotZero*/, iImm14, iReg, iBitNo);
4541}
4542
4543
4544/**
4545 * A64: Encodes TBNZ (conditional branch w/ immediate) instructions.
4546 *
4547 * @returns The encoded instruction.
4548 * @param iImm14 Signed number of instructions to jump (i.e. *4).
4549 * @param iReg The GPR containing the bit to test.
4550 * @param iBitNo The bit number to test.
4551 */
4552DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbnz(int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4553{
4554 return Armv8A64MkInstrTbzTbnz(true /*fJmpIfNotZero*/, iImm14, iReg, iBitNo);
4555}
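/* Example (illustrative): branch two instructions ahead when bit 63 of x1 is set:
       Armv8A64MkInstrTbnz(2, ARMV8_A64_REG_X1, 63);   // tbnz x1, #63, .+8 */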
4556
4557
4558
4559/** Armv8 Condition codes. */
4560typedef enum ARMV8INSTRCOND
4561{
4562 kArmv8InstrCond_Eq = 0, /**< 0 - Equal - Zero set. */
4563 kArmv8InstrCond_Ne, /**< 1 - Not equal - Zero clear. */
4564
4565 kArmv8InstrCond_Cs, /**< 2 - Carry set (also known as 'HS'). */
4566 kArmv8InstrCond_Hs = kArmv8InstrCond_Cs, /**< 2 - Unsigned higher or same. */
4567 kArmv8InstrCond_Cc, /**< 3 - Carry clear (also known as 'LO'). */
4568 kArmv8InstrCond_Lo = kArmv8InstrCond_Cc, /**< 3 - Unsigned lower. */
4569
4570 kArmv8InstrCond_Mi, /**< 4 - Negative result (minus). */
4571 kArmv8InstrCond_Pl, /**< 5 - Positive or zero result (plus). */
4572
4573 kArmv8InstrCond_Vs, /**< 6 - Overflow set. */
4574 kArmv8InstrCond_Vc, /**< 7 - Overflow clear. */
4575
4576 kArmv8InstrCond_Hi, /**< 8 - Unsigned higher. */
4577 kArmv8InstrCond_Ls, /**< 9 - Unsigned lower or same. */
4578
4579 kArmv8InstrCond_Ge, /**< a - Signed greater or equal. */
4580 kArmv8InstrCond_Lt, /**< b - Signed less than. */
4581
4582 kArmv8InstrCond_Gt, /**< c - Signed greater than. */
4583 kArmv8InstrCond_Le, /**< d - Signed less or equal. */
4584
4585 kArmv8InstrCond_Al, /**< e - Condition is always true. */
4586 kArmv8InstrCond_Al1 /**< f - Condition is always true. */
4587} ARMV8INSTRCOND;
4588
4589/**
4590 * A64: Encodes conditional branch instruction w/ immediate target.
4591 *
4592 * @returns The encoded instruction.
4593 * @param enmCond The branch condition.
4594 * @param iImm19 Signed number of instructions to jump (i.e. *4).
4595 */
4596DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBCond(ARMV8INSTRCOND enmCond, int32_t iImm19)
4597{
4598 Assert((unsigned)enmCond < 16);
4599 return UINT32_C(0x54000000)
4600 | (((uint32_t)iImm19 & 0x7ffff) << 5)
4601 | (uint32_t)enmCond;
4602}
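/* Example (illustrative): branch two instructions ahead when the Z flag is set:
       Armv8A64MkInstrBCond(kArmv8InstrCond_Eq, 2);    // b.eq .+8 */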
4603
4604
4605/**
4606 * A64: Encodes the BRK instruction.
4607 *
4608 * @returns The encoded instruction.
4609 * @param uImm16 Unsigned immediate value.
4610 */
4611DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBrk(uint32_t uImm16)
4612{
4613 Assert(uImm16 < _64K);
4614 return UINT32_C(0xd4200000)
4615 | (uImm16 << 5);
4616}
4617
4618/** @name ARMA64_NZCV_F_XXX - readable NZCV mask for CCMP and friends.
4619 * @{ */
4620#define ARMA64_NZCV_F_N0_Z0_C0_V0 UINT32_C(0x0)
4621#define ARMA64_NZCV_F_N0_Z0_C0_V1 UINT32_C(0x1)
4622#define ARMA64_NZCV_F_N0_Z0_C1_V0 UINT32_C(0x2)
4623#define ARMA64_NZCV_F_N0_Z0_C1_V1 UINT32_C(0x3)
4624#define ARMA64_NZCV_F_N0_Z1_C0_V0 UINT32_C(0x4)
4625#define ARMA64_NZCV_F_N0_Z1_C0_V1 UINT32_C(0x5)
4626#define ARMA64_NZCV_F_N0_Z1_C1_V0 UINT32_C(0x6)
4627#define ARMA64_NZCV_F_N0_Z1_C1_V1 UINT32_C(0x7)
4628
4629#define ARMA64_NZCV_F_N1_Z0_C0_V0 UINT32_C(0x8)
4630#define ARMA64_NZCV_F_N1_Z0_C0_V1 UINT32_C(0x9)
4631#define ARMA64_NZCV_F_N1_Z0_C1_V0 UINT32_C(0xa)
4632#define ARMA64_NZCV_F_N1_Z0_C1_V1 UINT32_C(0xb)
4633#define ARMA64_NZCV_F_N1_Z1_C0_V0 UINT32_C(0xc)
4634#define ARMA64_NZCV_F_N1_Z1_C0_V1 UINT32_C(0xd)
4635#define ARMA64_NZCV_F_N1_Z1_C1_V0 UINT32_C(0xe)
4636#define ARMA64_NZCV_F_N1_Z1_C1_V1 UINT32_C(0xf)
4637/** @} */
4638
4639/**
4640 * A64: Encodes CCMP or CCMN with two register operands.
4641 *
4642 * @returns The encoded instruction.
4643 * @param iRegSrc1 The 1st register. SP is NOT valid, but ZR is.
4644 * @param iRegSrc2 The 2nd register. SP is NOT valid, but ZR is.
4645 * @param fNzcv The N, Z, C & V flags values to load if the condition
4646 * does not match. See ARMA64_NZCV_F_XXX.
4647 * @param enmCond The condition guarding the compare.
4648 * @param fCCmp Set for CCMP (default), clear for CCMN.
4649 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4650 */
4651DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4652 ARMV8INSTRCOND enmCond, bool fCCmp = true, bool f64Bit = true)
4653{
4654 Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(fNzcv < 16);
4655
4656 return ((uint32_t)f64Bit << 31)
4657 | ((uint32_t)fCCmp << 30)
4658 | UINT32_C(0x3a400000)
4659 | (iRegSrc2 << 16)
4660 | ((uint32_t)enmCond << 12)
4661 | (iRegSrc1 << 5)
4662 | fNzcv;
4663}
4664
4665/** CCMP w/ reg. */
4666DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4667 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4668{
4669 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4670}
4671
4672
4673/** CCMN w/ reg. */
4674DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4675 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4676{
4677 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4678}
4679
4680
4681/**
4682 * A64: Encodes CCMP or CCMN with register and 5-bit immediate.
4683 *
4684 * @returns The encoded instruction.
4685 * @param iRegSrc The register. SP is NOT valid, but ZR is.
4686 * @param uImm5 The immediate to compare iRegSrc with.
4687 * @param fNzcv The N, Z, C & V flags values to load if the condition
4688 * does not match. See ARMA64_NZCV_F_XXX.
4689 * @param enmCond The condition guarding the compare.
4690 * @param fCCmp Set for CCMP (default), clear for CCMN.
4691 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4692 */
4693DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv, ARMV8INSTRCOND enmCond,
4694 bool fCCmp = true, bool f64Bit = true)
4695{
4696 Assert(iRegSrc < 32); Assert(uImm5 < 32); Assert(fNzcv < 16);
4697
4698 return ((uint32_t)f64Bit << 31)
4699 | ((uint32_t)fCCmp << 30)
4700 | UINT32_C(0x3a400800)
4701 | (uImm5 << 16)
4702 | ((uint32_t)enmCond << 12)
4703 | (iRegSrc << 5)
4704 | fNzcv;
4705}
4706
4707/** CCMP w/ immediate. */
4708DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4709 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4710{
4711 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4712}
4713
4714
4715/** CCMN w/ immediate. */
4716DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4717 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4718{
4719 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4720}
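/* Usage sketch (illustrative): the second half of a "cmp x0, x1; ccmp x2, x3, #0, eq" chain,
   i.e. compare x2 with x3 only when the first compare was equal, otherwise load NZCV with 0:
       Armv8A64MkInstrCCmpReg(ARMV8_A64_REG_X2, ARMV8_A64_REG_X3,
                              ARMA64_NZCV_F_N0_Z0_C0_V0, kArmv8InstrCond_Eq);
*/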
4721
4722
4723/**
4724 * A64: Encodes CSEL, CSINC, CSINV and CSNEG (three registers)
4725 *
4726 * @returns The encoded instruction.
4727 * @param uOp Opcode bit 30.
4728 * @param uOp2 Opcode bits 11:10.
4729 * @param iRegResult The result register. SP is NOT valid, but ZR is.
4730 * @param iRegSrc1 The 1st source register. SP is NOT valid, but ZR is.
4731 * @param iRegSrc2 The 2nd source register. SP is NOT valid, but ZR is.
4732 * @param enmCond The condition guarding the compare.
4733 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4734 */
4735DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCondSelect(uint32_t uOp, uint32_t uOp2, uint32_t iRegResult, uint32_t iRegSrc1,
4736 uint32_t iRegSrc2, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4737{
4738 Assert(uOp <= 1); Assert(uOp2 <= 1); Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4739
4740 return ((uint32_t)f64Bit << 31)
4741 | (uOp << 30)
4742 | UINT32_C(0x1a800000)
4743 | (iRegSrc2 << 16)
4744 | ((uint32_t)enmCond << 12)
4745 | (uOp2 << 10)
4746 | (iRegSrc1 << 5)
4747 | iRegResult;
4748}
4749
4750
4751/** A64: Encodes CSEL.
4752 * @see Armv8A64MkInstrCondSelect for details. */
4753DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSel(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4754 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4755{
4756 return Armv8A64MkInstrCondSelect(0, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4757}
4758
4759
4760/** A64: Encodes CSINC.
4761 * @see Armv8A64MkInstrCondSelect for details. */
4762DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4763 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4764{
4765 return Armv8A64MkInstrCondSelect(0, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4766}
4767
4768
4769/** A64: Encodes CSET.
4770 * @see Armv8A64MkInstrCondSelect for details. */
4771DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSet(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4772{
4773 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4774 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4775 return Armv8A64MkInstrCSInc(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4776}
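/* Example (illustrative): materialize a boolean from the flags, "cset x0, eq":
       Armv8A64MkInstrCSet(ARMV8_A64_REG_X0, kArmv8InstrCond_Eq);
   As the helper above shows, this is really "csinc x0, xzr, xzr, ne" with the condition inverted. */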
4777
4778
4779/** A64: Encodes CSINV.
4780 * @see Armv8A64MkInstrCondSelect for details. */
4781DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInv(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4782 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4783{
4784 return Armv8A64MkInstrCondSelect(1, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4785}
4786
4787/** A64: Encodes CSETM.
4788 * @see Armv8A64MkInstrCondSelect for details. */
4789DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSetM(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4790{
4791 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4792 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4793 return Armv8A64MkInstrCSInv(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4794}
4795
4796
4797/** A64: Encodes CSNEG.
4798 * @see Armv8A64MkInstrCondSelect for details. */
4799DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSNeg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4800 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4801{
4802 return Armv8A64MkInstrCondSelect(1, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4803}
4804
4805
4806/**
4807 * A64: Encodes REV instruction.
4808 *
4809 * @returns The encoded instruction.
4810 * @param iRegDst The destination register. SP is NOT valid.
4811 * @param iRegSrc The source register. SP is NOT valid, but ZR is.
4812 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4813 */
4814DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4815{
4816 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4817
4818 return ((uint32_t)f64Bit << 31)
4819 | UINT32_C(0x5ac00800)
4820 | ((uint32_t)f64Bit << 10)
4821 | (iRegSrc << 5)
4822 | iRegDst;
4823}
4824
4825
4826/**
4827 * A64: Encodes REV16 instruction.
4828 *
4829 * @returns The encoded instruction.
4830 * @param iRegDst The destination register. SP is NOT valid.
4831 * @param iRegSrc The source register. SP is NOT valid, but ZR is.
4832 * @param f64Bit true for 64-bit GPRs (default), false for 32-bit GPRs.
4833 */
4834DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev16(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4835{
4836 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4837
4838 return ((uint32_t)f64Bit << 31)
4839 | UINT32_C(0x5ac00400)
4840 | (iRegSrc << 5)
4841 | iRegDst;
4842}
4843
4844
4845/**
4846 * A64: Encodes SETF8 & SETF16.
4847 *
4848 * @returns The encoded instruction.
4849 * @param iRegResult The register holding the result. SP is NOT valid.
4850 * @param f16Bit Set for SETF16, clear for SETF8.
4851 */
4852DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSetF8SetF16(uint32_t iRegResult, bool f16Bit)
4853{
4854 Assert(iRegResult < 32);
4855
4856 return UINT32_C(0x3a00080d)
4857 | ((uint32_t)f16Bit << 14)
4858 | (iRegResult << 5);
4859}
4860
4861
4862/**
4863 * A64: Encodes RMIF.
4864 *
4865 * @returns The encoded instruction.
4866 * @param iRegSrc The source register to get flags from.
4867 * @param cRotateRight The right rotate count (LSB bit offset).
4868 * @param fMask Mask of which flag bits to set:
4869 * - bit 0: V
4870 * - bit 1: C
4871 * - bit 2: Z
4872 * - bit 3: N
4873 */
4874DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRmif(uint32_t iRegSrc, uint32_t cRotateRight, uint32_t fMask)
4875{
4876 Assert(iRegSrc < 32); Assert(cRotateRight < 64); Assert(fMask <= 0xf);
4877
4878 return UINT32_C(0xba000400)
4879 | (cRotateRight << 15)
4880 | (iRegSrc << 5)
4881 | fMask;
4882}
4883
4884
4885/**
4886 * A64: Encodes MRS (for reading a system register into a GPR).
4887 *
4888 * @returns The encoded instruction.
4889 * @param iRegDst The register to put the result into. SP is NOT valid.
4890 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4891 * IPRT specific format, of the register to read.
4892 */
4893DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMrs(uint32_t iRegDst, uint32_t idSysReg)
4894{
4895 Assert(iRegDst < 32);
4896 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4897
4898 /* Note. The top bit of idSysReg must always be set and is also set in
4899 0xd5300000, otherwise we'll be encoding a different instruction. */
4900 return UINT32_C(0xd5300000)
4901 | (idSysReg << 5)
4902 | iRegDst;
4903}
4904
4905
4906/**
4907 * A64: Encodes MSR (for writing a GPR to a system register).
4908 *
4909 * @returns The encoded instruction.
4910 * @param iRegSrc The register which value to write. SP is NOT valid.
4911 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4912 * IPRT specific format, of the register to write.
4913 */
4914DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMsr(uint32_t iRegSrc, uint32_t idSysReg)
4915{
4916 Assert(iRegSrc < 32);
4917 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4918
4919 /* Note. The top bit of idSysReg must always be set and is also set in
4920 0xd5100000, otherwise we'll be encoding a different instruction. */
4921 return UINT32_C(0xd5100000)
4922 | (idSysReg << 5)
4923 | iRegSrc;
4924}
4925
4926
4927/** @} */
4928
4929
4930/** @defgroup grp_rt_armv8_mkinstr_vec Vector Instruction Encoding Helpers
4931 * @ingroup grp_rt_armv8_mkinstr
4932 *
4933 * A few inlined functions and macros for assisting in encoding common ARMv8
4934 * Neon/SIMD instructions.
4935 *
4936 * @{ */
4937
4938/** Armv8 vector logical operation. */
4939typedef enum
4940{
4941 kArmv8VecInstrLogicOp_And = 0, /**< AND */
4942 kArmv8VecInstrLogicOp_Bic = RT_BIT_32(22), /**< BIC */
4943 kArmv8VecInstrLogicOp_Orr = RT_BIT_32(23), /**< ORR */
4944 kArmv8VecInstrLogicOp_Orn = RT_BIT_32(23) | RT_BIT_32(22), /**< ORN */
4945 kArmv8VecInstrLogicOp_Eor = RT_BIT_32(29), /**< EOR */
4946 kArmv8VecInstrLogicOp_Bsl = RT_BIT_32(29) | RT_BIT_32(22), /**< BSL */
4947 kArmv8VecInstrLogicOp_Bit = RT_BIT_32(29) | RT_BIT_32(23), /**< BIT */
4948 kArmv8VecInstrLogicOp_Bif = RT_BIT_32(29) | RT_BIT_32(23) | RT_BIT_32(22) /**< BIF */
4949} ARMV8INSTRVECLOGICOP;
4950
4951
4952/**
4953 * A64: Encodes logical instruction (vector, register).
4954 *
4955 * @returns The encoded instruction.
4956 * @param enmOp The operation to encode.
4957 * @param iVecRegDst The vector register to put the result into.
4958 * @param iVecRegSrc1 The 1st source register.
4959 * @param iVecRegSrc2 The 2nd source register.
4960 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4961 * or just the low 64-bit (false).
4962 */
4963DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrLogical(ARMV8INSTRVECLOGICOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4964 bool f128Bit = true)
4965{
4966 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
4967
4968 return UINT32_C(0x0e201c00)
4969 | (uint32_t)enmOp
4970 | ((uint32_t)f128Bit << 30)
4971 | (iVecRegSrc2 << 16)
4972 | (iVecRegSrc1 << 5)
4973 | iVecRegDst;
4974}
4975
4976
4977/**
4978 * A64: Encodes ORR (vector, register).
4979 *
4980 * @returns The encoded instruction.
4981 * @param iVecRegDst The vector register to put the result into.
4982 * @param iVecRegSrc1 The 1st source register.
4983 * @param iVecRegSrc2 The 2nd source register.
4984 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4985 * or just the low 64-bit (false).
4986 */
4987DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrOrr(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4988 bool f128Bit = true)
4989{
4990 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_Orr, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
4991}
4992
4993
4994/**
4995 * A64: Encodes EOR (vector, register).
4996 *
4997 * @returns The encoded instruction.
4998 * @param iVecRegDst The vector register to put the result into.
4999 * @param iVecRegSrc1 The 1st source register.
5000 * @param iVecRegSrc2 The 2nd source register.
5001 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5002 * or just the low 64-bit (false).
5003 */
5004DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrEor(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5005 bool f128Bit = true)
5006{
5007 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_Eor, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
5008}
5009
5010
5011/**
5012 * A64: Encodes AND (vector, register).
5013 *
5014 * @returns The encoded instruction.
5015 * @param iVecRegDst The vector register to put the result into.
5016 * @param iVecRegSrc1 The 1st source register.
5017 * @param iVecRegSrc2 The 2nd source register.
5018 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5019 * or just the low 64-bit (false).
5020 */
5021DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrAnd(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5022 bool f128Bit = true)
5023{
5024 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_And, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
5025}
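/* Example (illustrative): zero a vector register by XORing it with itself, "eor v0.16b, v0.16b, v0.16b":
       Armv8A64MkVecInstrEor(0 /*iVecRegDst*/, 0 /*iVecRegSrc1*/, 0 /*iVecRegSrc2*/);
*/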
5026
5027
5028/** Armv8 UMOV/INS vector element size. */
5029typedef enum ARMV8INSTRUMOVINSSZ
5030{
5031 kArmv8InstrUmovInsSz_U8 = 0, /**< Byte. */
5032 kArmv8InstrUmovInsSz_U16 = 1, /**< Halfword. */
5033 kArmv8InstrUmovInsSz_U32 = 2, /**< 32-bit. */
5034 kArmv8InstrUmovInsSz_U64 = 3 /**< 64-bit (only valid when the destination is a 64-bit register). */
5035} ARMV8INSTRUMOVINSSZ;
5036
5037
5038/**
5039 * A64: Encodes UMOV (vector, register).
5040 *
5041 * @returns The encoded instruction.
5042 * @param iRegDst The register to put the result into.
5043 * @param iVecRegSrc The vector source register.
5044 * @param idxElem The element index.
5045 * @param enmSz Element size of the source vector register.
5046 * @param fDst64Bit Flag whether the destination register is 64-bit (true) or 32-bit (false).
5047 */
5048DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUmov(uint32_t iRegDst, uint32_t iVecRegSrc, uint8_t idxElem,
5049 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64, bool fDst64Bit = true)
5050{
5051 Assert(iRegDst < 32); Assert(iVecRegSrc < 32);
5052 Assert((fDst64Bit && enmSz == kArmv8InstrUmovInsSz_U64) || (!fDst64Bit && enmSz != kArmv8InstrUmovInsSz_U64));
5053 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
5054 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
5055 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
5056 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
5057
5058 return UINT32_C(0x0e003c00)
5059 | ((uint32_t)fDst64Bit << 30)
5060 | ((uint32_t)idxElem << (16 + enmSz + 1))
5061 | (RT_BIT_32(enmSz) << 16)
5062 | (iVecRegSrc << 5)
5063 | iRegDst;
5064}
5065
5066
5067/**
5068 * A64: Encodes INS (vector, register).
5069 *
5070 * @returns The encoded instruction.
5071 * @param iVecRegDst The vector register to put the result into.
5072 * @param iRegSrc The source register.
5073 * @param idxElem The element index for the destination.
5074 * @param enmSz Element size of the source vector register.
5075 *
5076 * @note This instruction assumes a 32-bit W<n> register for all non-64-bit element sizes.
5077 */
5078DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrIns(uint32_t iVecRegDst, uint32_t iRegSrc, uint8_t idxElem,
5079 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64)
5080{
5081 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
5082 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
5083 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
5084 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
5085 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
5086
5087 return UINT32_C(0x4e001c00)
5088 | ((uint32_t)idxElem << (16 + enmSz + 1))
5089 | (RT_BIT_32(enmSz) << 16)
5090 | (iRegSrc << 5)
5091 | iVecRegDst;
5092}
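/* Example (illustrative): extract the high 64-bit lane of v0 into x1, "umov x1, v0.d[1]":
       Armv8A64MkVecInstrUmov(ARMV8_A64_REG_X1, 0 /*iVecRegSrc*/, 1 /*idxElem*/);
*/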
5093
5094
5095/**
5096 * A64: Encodes DUP (vector, register).
5097 *
5098 * @returns The encoded instruction.
5099 * @param iVecRegDst The vector register to put the result into.
5100 * @param iRegSrc The source register (ZR is valid).
5101 * @param enmSz Element size of the source vector register.
5102 * @param f128Bit Flag whether the instruction operates on the whole 128-bit of the vector register (true) or
5103 * just the low 64-bit (false).
5104 *
5105 * @note This instruction assumes a 32-bit W<n> register for all non-64-bit element sizes.
5106 */
5107DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrDup(uint32_t iVecRegDst, uint32_t iRegSrc, ARMV8INSTRUMOVINSSZ enmSz,
5108 bool f128Bit = true)
5109{
5110 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
5111 Assert( (enmSz == kArmv8InstrUmovInsSz_U8)
5112 || (enmSz == kArmv8InstrUmovInsSz_U16)
5113 || (enmSz == kArmv8InstrUmovInsSz_U32)
5114 || (enmSz == kArmv8InstrUmovInsSz_U64));
5115
5116 return UINT32_C(0x0e000c00)
5117 | ((uint32_t)f128Bit << 30)
5118 | (RT_BIT_32(enmSz) << 16)
5119 | (iRegSrc << 5)
5120 | iVecRegDst;
5121}
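/* Example (illustrative): broadcast w2 into all four 32-bit lanes of v1, "dup v1.4s, w2":
       Armv8A64MkVecInstrDup(1 /*iVecRegDst*/, ARMV8_A64_REG_X2, kArmv8InstrUmovInsSz_U32);
*/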
5122
5123
5124/** Armv8 vector compare to zero vector element size. */
5125typedef enum ARMV8INSTRVECCMPZEROSZ
5126{
5127 kArmv8InstrCmpZeroSz_S8 = 0, /**< Byte. */
5128 kArmv8InstrCmpZeroSz_S16 = 1, /**< Halfword. */
5129 kArmv8InstrCmpZeroSz_S32 = 2, /**< 32-bit. */
5130 kArmv8InstrCmpZeroSz_S64 = 3 /**< 64-bit. */
5131} ARMV8INSTRVECCMPZEROSZ;
5132
5133
5134/** Armv8 vector compare to zero vector operation. */
5135typedef enum ARMV8INSTRVECCMPZEROOP
5136{
5137 kArmv8InstrCmpZeroOp_Gt = 0, /**< Greater than. */
5138 kArmv8InstrCmpZeroOp_Ge = RT_BIT_32(29), /**< Greater than or equal to. */
5139 kArmv8InstrCmpZeroOp_Eq = RT_BIT_32(12), /**< Equal to. */
5140 kArmv8InstrCmpZeroOp_Le = RT_BIT_32(29) | RT_BIT_32(12) /**< Lower than or equal to. */
5141} ARMV8INSTRVECCMPZEROOP;
5142
5143
5144/**
5145 * A64: Encodes CMGT, CMGE, CMEQ or CMLE against zero (vector, register).
5146 *
5147 * @returns The encoded instruction.
5148 * @param iVecRegDst The vector register to put the result into.
5149 * @param iVecRegSrc The vector source register.
5150 * @param enmSz Vector element size.
5151 * @param enmOp The compare-against-zero operation to encode.
5152 */
5153DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmpToZero(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECCMPZEROSZ enmSz,
5154 ARMV8INSTRVECCMPZEROOP enmOp)
5155{
5156 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5157
5158 return UINT32_C(0x5e208800)
5159 | ((uint32_t)enmSz << 22)
5160 | (RT_BIT_32(enmSz) << 16)
5161 | (iVecRegSrc << 5)
5162 | iVecRegDst
5163 | (uint32_t)enmOp;
5164}
5165
5166
5167/**
5168 * A64: Encodes CNT (vector, register).
5169 *
5170 * @returns The encoded instruction.
5171 * @param iVecRegDst The vector register to put the result into.
5172 * @param iVecRegSrc The vector source register.
5173 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5174 * or just the low 64-bit (false).
5175 */
5176DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCnt(uint32_t iVecRegDst, uint32_t iVecRegSrc, bool f128Bit = true)
5177{
5178 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5179
5180 return UINT32_C(0x0e205800)
5181 | ((uint32_t)f128Bit << 30)
5182 | (iVecRegSrc << 5)
5183 | iVecRegDst;
5184}
5185
5186
5187/** Armv8 vector unsigned sum long across vector element size. */
5188typedef enum ARMV8INSTRVECUADDLVSZ
5189{
5190 kArmv8InstrUAddLVSz_8B = 0, /**< 8 x 8-bit. */
5191 kArmv8InstrUAddLVSz_16B = RT_BIT_32(30), /**< 16 x 8-bit. */
5192 kArmv8InstrUAddLVSz_4H = 1, /**< 4 x 16-bit. */
5193 kArmv8InstrUAddLVSz_8H = RT_BIT_32(30) | 1, /**< 8 x 16-bit. */
5194 kArmv8InstrUAddLVSz_4S = RT_BIT_32(30) | 2 /**< 4 x 32-bit. */
5195} ARMV8INSTRVECUADDLVSZ;
5196
5197
5198/**
5199 * A64: Encodes UADDLV (vector, register).
5200 *
5201 * @returns The encoded instruction.
5202 * @param iVecRegDst The vector register to put the result into.
5203 * @param iVecRegSrc The vector source register.
5204 * @param enmSz Element size.
5205 */
5206DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUAddLV(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECUADDLVSZ enmSz)
5207{
5208 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5209
5210 return UINT32_C(0x2e303800)
5211 | ((uint32_t)enmSz)
5212 | (iVecRegSrc << 5)
5213 | iVecRegDst;
5214}
5215
5216
5217/** Armv8 USHR/USRA/URSRA/SSHR/SSRA/SRSRA vector element size. */
5218typedef enum ARMV8INSTRUSHIFTSZ
5219{
5220 kArmv8InstrShiftSz_U8 = 8, /**< Byte. */
5221 kArmv8InstrShiftSz_U16 = 16, /**< Halfword. */
5222 kArmv8InstrShiftSz_U32 = 32, /**< 32-bit. */
5223 kArmv8InstrShiftSz_U64 = 64 /**< 64-bit. */
5224} ARMV8INSTRUSHIFTSZ;
5225
5226/**
5227 * A64: Encodes USHR/USRA/URSRA/SSHR/SSRA/SRSRA (vector, register).
5228 *
5229 * @returns The encoded instruction.
5230 * @param iVecRegDst The vector register to put the result into.
5231 * @param iVecRegSrc The vector source register.
5232 * @param cShift Number of bits to shift.
5233 * @param enmSz Element size.
5234 * @param fUnsigned Flag whether this is an unsigned (true, default) or signed (false) shift.
5235 * @param fRound Flag whether this is the rounding shift variant.
5236 * @param fAccum Flag whether this is the accumulate shift variant.
5237 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5238 * or just the low 64-bit (false).
5239 */
5240DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShrImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5241 bool fUnsigned = true, bool fRound = false, bool fAccum = false, bool f128Bit = true)
5242{
5243 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5244 Assert( cShift >= 1
5245 && ( (enmSz == kArmv8InstrShiftSz_U8 && cShift <= 8)
5246 || (enmSz == kArmv8InstrShiftSz_U16 && cShift <= 16)
5247 || (enmSz == kArmv8InstrShiftSz_U32 && cShift <= 32)
5248 || (enmSz == kArmv8InstrShiftSz_U64 && cShift <= 64)));
5249
5250 return UINT32_C(0x0f000400)
5251 | ((uint32_t)f128Bit << 30)
5252 | ((uint32_t)fUnsigned << 29)
5253 | ((((uint32_t)enmSz << 1) - cShift) << 16)
5254 | ((uint32_t)fRound << 13)
5255 | ((uint32_t)fAccum << 12)
5256 | (iVecRegSrc << 5)
5257 | iVecRegDst;
5258}
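/* Example (illustrative): unsigned right shift of each 32-bit lane by 3, "ushr v2.4s, v3.4s, #3":
       Armv8A64MkVecInstrShrImm(2 /*iVecRegDst*/, 3 /*iVecRegSrc*/, 3 /*cShift*/, kArmv8InstrShiftSz_U32);
*/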
5259
5260
5261/**
5262 * A64: Encodes SHL (vector, register).
5263 *
5264 * @returns The encoded instruction.
5265 * @param iVecRegDst The vector register to put the result into.
5266 * @param iVecRegSrc The vector source register.
5267 * @param cShift Number of bits to shift.
5268 * @param enmSz Element size.
5269 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5270 * or just the low 64-bit (false).
5271 */
5272DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShlImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5273 bool f128Bit = true)
5274{
5275 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5276 Assert( (enmSz == kArmv8InstrShiftSz_U8 && cShift < 8)
5277 || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
5278 || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32)
5279 || (enmSz == kArmv8InstrShiftSz_U64 && cShift < 64));
5280
5281 return UINT32_C(0x0f005400)
5282 | ((uint32_t)f128Bit << 30)
5283 | (((uint32_t)enmSz | cShift) << 16)
5284 | (iVecRegSrc << 5)
5285 | iVecRegDst;
5286}
5287
5288
5289/**
5290 * A64: Encodes SSHLL/SSHLL2/USHLL/USHLL2 (vector, register).
5291 *
5292 * @returns The encoded instruction.
5293 * @param iVecRegDst The vector register to put the result into.
5294 * @param iVecRegSrc The vector source register.
5295 * @param cShift Number of bits to shift.
5296 * @param enmSz Element size of the source vector register; the destination vector register
5297 * element size is twice as large. kArmv8InstrShiftSz_U64 is invalid.
5298 * @param fUnsigned Flag whether this is an unsigned shift left (true, default) or signed (false).
5299 * @param fUpper Flag whether this operates on the lower half (false, default) of the source vector register
5300 * or the upper half (true).
5301 */
5302DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUShll(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5303 bool fUnsigned = true, bool fUpper = false)
5304{
5305 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5306 Assert( (enmSz == kArmv8InstrShiftSz_U8 && cShift < 8)
5307 || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
5308 || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32));
5309
5310 return UINT32_C(0x0f00a400)
5311 | ((uint32_t)fUpper << 30)
5312 | ((uint32_t)fUnsigned << 29)
5313 | (((uint32_t)enmSz | cShift) << 16)
5314 | (iVecRegSrc << 5)
5315 | iVecRegDst;
5316}
5317
5318
5319/** Armv8 vector arith ops element size. */
5320typedef enum ARMV8INSTRVECARITHSZ
5321{
5322 kArmv8VecInstrArithSz_8 = 0, /**< 8-bit. */
5323 kArmv8VecInstrArithSz_16 = 1, /**< 16-bit. */
5324 kArmv8VecInstrArithSz_32 = 2, /**< 32-bit. */
5325 kArmv8VecInstrArithSz_64 = 3 /**< 64-bit. */
5326} ARMV8INSTRVECARITHSZ;
5327
5328
5329/** Armv8 vector arithmetic operation. */
5330typedef enum
5331{
5332 kArmv8VecInstrArithOp_Add = RT_BIT_32(15), /**< ADD */
5333 kArmv8VecInstrArithOp_Sub = RT_BIT_32(29) | RT_BIT_32(15), /**< SUB */
5334 kArmv8VecInstrArithOp_UnsignSat_Add = RT_BIT_32(29) | RT_BIT_32(11), /**< UQADD */
5335 kArmv8VecInstrArithOp_UnsignSat_Sub = RT_BIT_32(29) | RT_BIT_32(13) | RT_BIT_32(11), /**< UQSUB */
5336 kArmv8VecInstrArithOp_SignSat_Add = RT_BIT_32(11), /**< SQADD */
5337 kArmv8VecInstrArithOp_SignSat_Sub = RT_BIT_32(13) | RT_BIT_32(11), /**< SQSUB */
5338 kArmv8VecInstrArithOp_Mul = RT_BIT_32(15) | RT_BIT_32(12) | RT_BIT_32(11) /**< MUL */
5339} ARMV8INSTRVECARITHOP;
5340
5341
5342/**
5343 * A64: Encodes an arithmetic operation (vector, register).
5344 *
5345 * @returns The encoded instruction.
5346 * @param enmOp The operation to encode.
5347 * @param iVecRegDst The vector register to put the result into.
5348 * @param iVecRegSrc1 The first vector source register.
5349 * @param iVecRegSrc2 The second vector source register.
5350 * @param enmSz Element size.
5351 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5352 * or just the low 64-bit (false).
5353 */
5354DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrArithOp(ARMV8INSTRVECARITHOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5355 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5356{
5357 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5358
5359 return UINT32_C(0x0e200400)
5360 | (uint32_t)enmOp
5361 | ((uint32_t)f128Bit << 30)
5362 | ((uint32_t)enmSz << 22)
5363 | (iVecRegSrc2 << 16)
5364 | (iVecRegSrc1 << 5)
5365 | iVecRegDst;
5366}
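/* Example (illustrative): packed 16-bit addition of v0 and v1 into v2, "add v2.8h, v0.8h, v1.8h":
       Armv8A64MkVecInstrArithOp(kArmv8VecInstrArithOp_Add, 2 /*iVecRegDst*/, 0, 1, kArmv8VecInstrArithSz_16);
*/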
5367
5368
5369/** Armv8 vector compare operation. */
5370typedef enum ARMV8VECINSTRCMPOP
5371{
5372 /* U insn[15:10] */
5373 kArmv8VecInstrCmpOp_Gt = UINT32_C(0x3400), /**< Greater than (>) (signed) */
5374 kArmv8VecInstrCmpOp_Ge = UINT32_C(0x3c00), /**< Greater or equal (>=) (signed) */
5375 kArmv8VecInstrCmpOp_Hi = RT_BIT_32(29) | UINT32_C(0x3400), /**< Greater than (>) (unsigned) */
5376 kArmv8VecInstrCmpOp_Hs = RT_BIT_32(29) | UINT32_C(0x3c00), /**< Greater or equal (>=) (unsigned) */
5377 kArmv8VecInstrCmpOp_Eq = RT_BIT_32(29) | UINT32_C(0x8c00) /**< Equal (==) (unsigned) */
5378} ARMV8VECINSTRCMPOP;
5379
5380/**
5381 * A64: Encodes CMEQ/CMGE/CMGT/CMHI/CMHS (register variant) (vector, register).
5382 *
5383 * @returns The encoded instruction.
5384 * @param enmOp The operation to perform.
5385 * @param iVecRegDst The vector register to put the result into.
5386 * @param iVecRegSrc1 The first vector source register.
5387 * @param iVecRegSrc2 The second vector source register.
5388 * @param enmSz Element size.
5389 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5390 * or just the low 64-bit (false).
5391 */
5392DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmp(ARMV8VECINSTRCMPOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5393 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5394{
5395 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5396
5397 return UINT32_C(0x0e200000)
5398 | ((uint32_t)f128Bit << 30)
5399 | ((uint32_t)enmSz << 22)
5400 | (iVecRegSrc2 << 16)
5401 | ((uint32_t)enmOp)
5402 | (iVecRegSrc1 << 5)
5403 | iVecRegDst;
5404}
5405
5406
5407/** Armv8 vector compare against zero operation. */
5408typedef enum ARMV8VECINSTRCMPZEROOP
5409{
5410 /* U insn[15:10] */
5411 kArmv8VecInstrCmpZeroOp_Gt = UINT32_C(0x8800), /**< Greater than zero (>) (signed) */
5412 kArmv8VecInstrCmpZeroOp_Eq = UINT32_C(0x9800), /**< Equal to zero (==) */
5413 kArmv8VecInstrCmpZeroOp_Lt = UINT32_C(0xa800), /**< Lower than zero (<) (signed) */
5414 kArmv8VecInstrCmpZeroOp_Ge = RT_BIT_32(29) | UINT32_C(0x8800), /**< Greater or equal to zero (>=) (signed) */
5415 kArmv8VecInstrCmpZeroOp_Le = RT_BIT_32(29) | UINT32_C(0x9800) /**< Lower or equal to zero (<=) (signed) */
5416} ARMV8VECINSTRCMPZEROOP;
5417
5418/**
5419 * A64: Encodes CMEQ/CMGE/CMGT/CMLE/CMLT (zero variant) (vector, register).
5420 *
5421 * @returns The encoded instruction.
5422 * @param enmOp The operation to perform.
5423 * @param iVecRegDst The vector register to put the result into.
5424 * @param iVecRegSrc The first vector source register.
5425 * @param enmSz Element size.
5426 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5427 * or just the low 64-bit (false).
5428 */
5429DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmpAgainstZero(ARMV8VECINSTRCMPZEROOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc,
5430 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5431{
5432 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5433
5434 return UINT32_C(0x0e200000)
5435 | ((uint32_t)f128Bit << 30)
5436 | ((uint32_t)enmSz << 22)
5437 | ((uint32_t)enmOp)
5438 | (iVecRegSrc << 5)
5439 | iVecRegDst;
5440}
5441
5442
5443/** Armv8 signed/unsigned saturating extract narrow operation (SQXTN, SQXTUN, UQXTN). */
5444typedef enum
5445{
5446 kArmv8VecInstrQxtnOp_Sqxtn = RT_BIT_32(14), /**< SQXTN */
5447 kArmv8VecInstrQxtnOp_Sqxtun = RT_BIT_32(29) | RT_BIT_32(13), /**< SQXTUN */
5448 kArmv8VecInstrQxtnOp_Uqxtn = RT_BIT_32(29) | RT_BIT_32(14) /**< UQXTN */
5449} ARMV8INSTRVECQXTNOP;
5450
5451/**
5452 * A64: Encodes SQXTN/SQXTN2/UQXTN/UQXTN2/SQXTUN/SQXTUN2 (vector, register).
5453 *
5454 * @returns The encoded instruction.
5455 * @param enmOp The operation to perform.
5456 * @param fUpper Flag whether to write the result to the lower (false) or upper (true) half of the destination register.
5457 * @param iVecRegDst The vector register to put the result into.
5458 * @param iVecRegSrc The first vector source register.
5459 * @param enmSz Element size.
5460 */
5461DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrQxtn(ARMV8INSTRVECQXTNOP enmOp, bool fUpper, uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECARITHSZ enmSz)
5462{
5463 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5464
5465 return UINT32_C(0x0e210800)
5466 | ((uint32_t)enmOp)
5467 | ((uint32_t)fUpper << 30)
5468 | ((uint32_t)enmSz << 22)
5469 | (iVecRegSrc << 5)
5470 | iVecRegDst;
5471}
5472
5473
5474/** Armv8 floating point size. */
5475typedef enum
5476{
5477 kArmv8VecInstrFpSz_2x_Single = 0, /**< 2x single precision values in the low 64-bit of the 128-bit register. */
5478 kArmv8VecInstrFpSz_4x_Single = RT_BIT_32(30), /**< 4x single precision values in the 128-bit register. */
5479 kArmv8VecInstrFpSz_2x_Double = RT_BIT_32(30) | RT_BIT_32(22) /**< 2x double precision values in the 128-bit register. */
5480} ARMV8INSTRVECFPSZ;
5481
5482
5483/** Armv8 3 operand floating point operation. */
5484typedef enum
5485{
5486 /* insn[29] insn[23] insn[15:11] */
5487 kArmv8VecInstrFpOp_Add = UINT32_C(0xd000), /**< FADD */
5488 kArmv8VecInstrFpOp_Sub = RT_BIT_32(23) | UINT32_C(0xd000), /**< FSUB */
5489 kArmv8VecInstrFpOp_AddPairwise = RT_BIT_32(29) | UINT32_C(0xd000), /**< FADDP */
5490 kArmv8VecInstrFpOp_Mul = RT_BIT_32(29) | UINT32_C(0xd800), /**< FMUL */
5491 kArmv8VecInstrFpOp_Div = RT_BIT_32(29) | UINT32_C(0xf800), /**< FDIV */
5492
5493 kArmv8VecInstrFpOp_Max = UINT32_C(0xf000), /**< FMAX */
5494 kArmv8VecInstrFpOp_MaxNumber = UINT32_C(0xc000), /**< FMAXNM */
5495 kArmv8VecInstrFpOp_MaxNumberPairwise = RT_BIT_32(29) | UINT32_C(0xc000), /**< FMAXNMP */
5496 kArmv8VecInstrFpOp_MaxPairwise = RT_BIT_32(29) | UINT32_C(0xf000), /**< FMAXP */
5497
5498 kArmv8VecInstrFpOp_Min = RT_BIT_32(23) | UINT32_C(0xf000), /**< FMIN */
5499 kArmv8VecInstrFpOp_MinNumber = RT_BIT_32(23) | UINT32_C(0xc000), /**< FMINNM */
5500 kArmv8VecInstrFpOp_MinNumberPairwise = RT_BIT_32(29) | RT_BIT_32(23) | UINT32_C(0xc000), /**< FMINNMP */
5501 kArmv8VecInstrFpOp_MinPairwise = RT_BIT_32(29) | RT_BIT_32(23) | UINT32_C(0xf000), /**< FMINP */
5502
5503 kArmv8VecInstrFpOp_Fmla = UINT32_C(0xc800), /**< FMLA */
5504 kArmv8VecInstrFpOp_Fmls = RT_BIT_32(23) | UINT32_C(0xc800) /**< FMLS */
5505} ARMV8INSTRVECFPOP;
5506
5507/**
5508 * A64: Encodes a 3 operand floating point operation (vector, register).
5509 *
5510 * @returns The encoded instruction.
5511 * @param enmOp The operation to perform.
5512 * @param enmSz The size to operate on.
5513 * @param iVecRegDst The vector register to put the result into.
5514 * @param iVecRegSrc1 The first vector source register.
5515 * @param iVecRegSrc2 The second vector source register.
5516 */
5517DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrFp3Op(ARMV8INSTRVECFPOP enmOp, ARMV8INSTRVECFPSZ enmSz, uint32_t iVecRegDst,
5518 uint32_t iVecRegSrc1, uint32_t iVecRegSrc2)
5519{
5520 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5521
5522 return UINT32_C(0x0e200400)
5523 | ((uint32_t)enmOp)
5524 | ((uint32_t)enmSz)
5525 | (iVecRegSrc2 << 16)
5526 | (iVecRegSrc1 << 5)
5527 | iVecRegDst;
5528}
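/* Example (illustrative): packed single precision addition of v0 and v1 into v2, "fadd v2.4s, v0.4s, v1.4s":
       Armv8A64MkVecInstrFp3Op(kArmv8VecInstrFpOp_Add, kArmv8VecInstrFpSz_4x_Single,
                               2 /*iVecRegDst*/, 0 /*iVecRegSrc1*/, 1 /*iVecRegSrc2*/);
*/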
5529
5530
5531/** @} */
5532
5533#endif /* !dtrace && __cplusplus */
5534
5535/** @} */
5536
5537#endif /* !IPRT_INCLUDED_armv8_h */
5538