VirtualBox

source: vbox/trunk/include/iprt/armv8.h

最後變更 在這個檔案是 107933,由 vboxsync 提交於 7 週 前

include/iprt/armv8.h,VMM: Drop the deprecated ARMV8_AARCH64_REG_XXX defines and replace where still being used

  • 屬性 svn:eol-style 設為 native
  • 屬性 svn:keywords 設為 Author Date Id Revision
檔案大小: 286.7 KB
 
1/** @file
2 * IPRT - ARMv8 (AArch64 and AArch32) Structures and Definitions.
3 */
4
5/*
6 * Copyright (C) 2023-2024 Oracle and/or its affiliates.
7 *
8 * This file is part of VirtualBox base platform packages, as
9 * available from https://www.alldomusa.eu.org.
10 *
11 * This program is free software; you can redistribute it and/or
12 * modify it under the terms of the GNU General Public License
13 * as published by the Free Software Foundation, in version 3 of the
14 * License.
15 *
16 * This program is distributed in the hope that it will be useful, but
17 * WITHOUT ANY WARRANTY; without even the implied warranty of
18 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
19 * General Public License for more details.
20 *
21 * You should have received a copy of the GNU General Public License
22 * along with this program; if not, see <https://www.gnu.org/licenses>.
23 *
24 * The contents of this file may alternatively be used under the terms
25 * of the Common Development and Distribution License Version 1.0
26 * (CDDL), a copy of it is provided in the "COPYING.CDDL" file included
27 * in the VirtualBox distribution, in which case the provisions of the
28 * CDDL are applicable instead of those of the GPL.
29 *
30 * You may elect to license modified versions of this file under the
31 * terms and conditions of either the GPL or the CDDL or both.
32 *
33 * SPDX-License-Identifier: GPL-3.0-only OR CDDL-1.0
34 */
35
36#ifndef IPRT_INCLUDED_armv8_h
37#define IPRT_INCLUDED_armv8_h
38#ifndef RT_WITHOUT_PRAGMA_ONCE
39# pragma once
40#endif
41
42#ifndef VBOX_FOR_DTRACE_LIB
43# include <iprt/cdefs.h>
44# ifndef RT_IN_ASSEMBLER
45# include <iprt/types.h>
46# include <iprt/assert.h>
47# endif
48# include <iprt/assertcompile.h>
49#else
50# pragma D depends_on library vbox-types.d
51#endif
52
53/** @defgroup grp_rt_armv8 ARMv8 Types and Definitions
54 * @ingroup grp_rt
55 * @{
56 */
57
58/** @name The AArch64 general purpose register encoding.
59 * @{ */
60#define ARMV8_A64_REG_X0 0
61#define ARMV8_A64_REG_X1 1
62#define ARMV8_A64_REG_X2 2
63#define ARMV8_A64_REG_X3 3
64#define ARMV8_A64_REG_X4 4
65#define ARMV8_A64_REG_X5 5
66#define ARMV8_A64_REG_X6 6
67#define ARMV8_A64_REG_X7 7
68#define ARMV8_A64_REG_X8 8
69#define ARMV8_A64_REG_X9 9
70#define ARMV8_A64_REG_X10 10
71#define ARMV8_A64_REG_X11 11
72#define ARMV8_A64_REG_X12 12
73#define ARMV8_A64_REG_X13 13
74#define ARMV8_A64_REG_X14 14
75#define ARMV8_A64_REG_X15 15
76#define ARMV8_A64_REG_X16 16
77#define ARMV8_A64_REG_X17 17
78#define ARMV8_A64_REG_X18 18
79#define ARMV8_A64_REG_X19 19
80#define ARMV8_A64_REG_X20 20
81#define ARMV8_A64_REG_X21 21
82#define ARMV8_A64_REG_X22 22
83#define ARMV8_A64_REG_X23 23
84#define ARMV8_A64_REG_X24 24
85#define ARMV8_A64_REG_X25 25
86#define ARMV8_A64_REG_X26 26
87#define ARMV8_A64_REG_X27 27
88#define ARMV8_A64_REG_X28 28
89#define ARMV8_A64_REG_X29 29
90#define ARMV8_A64_REG_X30 30
91/** @} */
92
93/** @name The AArch64 32-bit general purpose register names.
94 * @{ */
95#define ARMV8_A64_REG_W0 ARMV8_A64_REG_X0
96#define ARMV8_A64_REG_W1 ARMV8_A64_REG_X1
97#define ARMV8_A64_REG_W2 ARMV8_A64_REG_X2
98#define ARMV8_A64_REG_W3 ARMV8_A64_REG_X3
99#define ARMV8_A64_REG_W4 ARMV8_A64_REG_X4
100#define ARMV8_A64_REG_W5 ARMV8_A64_REG_X5
101#define ARMV8_A64_REG_W6 ARMV8_A64_REG_X6
102#define ARMV8_A64_REG_W7 ARMV8_A64_REG_X7
103#define ARMV8_A64_REG_W8 ARMV8_A64_REG_X8
104#define ARMV8_A64_REG_W9 ARMV8_A64_REG_X9
105#define ARMV8_A64_REG_W10 ARMV8_A64_REG_X10
106#define ARMV8_A64_REG_W11 ARMV8_A64_REG_X11
107#define ARMV8_A64_REG_W12 ARMV8_A64_REG_X12
108#define ARMV8_A64_REG_W13 ARMV8_A64_REG_X13
109#define ARMV8_A64_REG_W14 ARMV8_A64_REG_X14
110#define ARMV8_A64_REG_W15 ARMV8_A64_REG_X15
111#define ARMV8_A64_REG_W16 ARMV8_A64_REG_X16
112#define ARMV8_A64_REG_W17 ARMV8_A64_REG_X17
113#define ARMV8_A64_REG_W18 ARMV8_A64_REG_X18
114#define ARMV8_A64_REG_W19 ARMV8_A64_REG_X19
115#define ARMV8_A64_REG_W20 ARMV8_A64_REG_X20
116#define ARMV8_A64_REG_W21 ARMV8_A64_REG_X21
117#define ARMV8_A64_REG_W22 ARMV8_A64_REG_X22
118#define ARMV8_A64_REG_W23 ARMV8_A64_REG_X23
119#define ARMV8_A64_REG_W24 ARMV8_A64_REG_X24
120#define ARMV8_A64_REG_W25 ARMV8_A64_REG_X25
121#define ARMV8_A64_REG_W26 ARMV8_A64_REG_X26
122#define ARMV8_A64_REG_W27 ARMV8_A64_REG_X27
123#define ARMV8_A64_REG_W28 ARMV8_A64_REG_X28
124#define ARMV8_A64_REG_W29 ARMV8_A64_REG_X29
125#define ARMV8_A64_REG_W30 ARMV8_A64_REG_X30
126/** @} */
127
128/** @name The AArch64 NEON scalar register encoding.
129 * @{ */
130#define ARMV8_A64_REG_Q0 0
131#define ARMV8_A64_REG_Q1 1
132#define ARMV8_A64_REG_Q2 2
133#define ARMV8_A64_REG_Q3 3
134#define ARMV8_A64_REG_Q4 4
135#define ARMV8_A64_REG_Q5 5
136#define ARMV8_A64_REG_Q6 6
137#define ARMV8_A64_REG_Q7 7
138#define ARMV8_A64_REG_Q8 8
139#define ARMV8_A64_REG_Q9 9
140#define ARMV8_A64_REG_Q10 10
141#define ARMV8_A64_REG_Q11 11
142#define ARMV8_A64_REG_Q12 12
143#define ARMV8_A64_REG_Q13 13
144#define ARMV8_A64_REG_Q14 14
145#define ARMV8_A64_REG_Q15 15
146#define ARMV8_A64_REG_Q16 16
147#define ARMV8_A64_REG_Q17 17
148#define ARMV8_A64_REG_Q18 18
149#define ARMV8_A64_REG_Q19 19
150#define ARMV8_A64_REG_Q20 20
151#define ARMV8_A64_REG_Q21 21
152#define ARMV8_A64_REG_Q22 22
153#define ARMV8_A64_REG_Q23 23
154#define ARMV8_A64_REG_Q24 24
155#define ARMV8_A64_REG_Q25 25
156#define ARMV8_A64_REG_Q26 26
157#define ARMV8_A64_REG_Q27 27
158#define ARMV8_A64_REG_Q28 28
159#define ARMV8_A64_REG_Q29 29
160#define ARMV8_A64_REG_Q30 30
161#define ARMV8_A64_REG_Q31 31
162/** @} */
163
164/** @name The AArch64 NEON vector register encoding.
165 * @{ */
166#define ARMV8_A64_REG_V0 ARMV8_A64_REG_Q0
167#define ARMV8_A64_REG_V1 ARMV8_A64_REG_Q1
168#define ARMV8_A64_REG_V2 ARMV8_A64_REG_Q2
169#define ARMV8_A64_REG_V3 ARMV8_A64_REG_Q3
170#define ARMV8_A64_REG_V4 ARMV8_A64_REG_Q4
171#define ARMV8_A64_REG_V5 ARMV8_A64_REG_Q5
172#define ARMV8_A64_REG_V6 ARMV8_A64_REG_Q6
173#define ARMV8_A64_REG_V7 ARMV8_A64_REG_Q7
174#define ARMV8_A64_REG_V8 ARMV8_A64_REG_Q8
175#define ARMV8_A64_REG_V9 ARMV8_A64_REG_Q9
176#define ARMV8_A64_REG_V10 ARMV8_A64_REG_Q10
177#define ARMV8_A64_REG_V11 ARMV8_A64_REG_Q11
178#define ARMV8_A64_REG_V12 ARMV8_A64_REG_Q12
179#define ARMV8_A64_REG_V13 ARMV8_A64_REG_Q13
180#define ARMV8_A64_REG_V14 ARMV8_A64_REG_Q14
181#define ARMV8_A64_REG_V15 ARMV8_A64_REG_Q15
182#define ARMV8_A64_REG_V16 ARMV8_A64_REG_Q16
183#define ARMV8_A64_REG_V17 ARMV8_A64_REG_Q17
184#define ARMV8_A64_REG_V18 ARMV8_A64_REG_Q18
185#define ARMV8_A64_REG_V19 ARMV8_A64_REG_Q19
186#define ARMV8_A64_REG_V20 ARMV8_A64_REG_Q20
187#define ARMV8_A64_REG_V21 ARMV8_A64_REG_Q21
188#define ARMV8_A64_REG_V22 ARMV8_A64_REG_Q22
189#define ARMV8_A64_REG_V23 ARMV8_A64_REG_Q23
190#define ARMV8_A64_REG_V24 ARMV8_A64_REG_Q24
191#define ARMV8_A64_REG_V25 ARMV8_A64_REG_Q25
192#define ARMV8_A64_REG_V26 ARMV8_A64_REG_Q26
193#define ARMV8_A64_REG_V27 ARMV8_A64_REG_Q27
194#define ARMV8_A64_REG_V28 ARMV8_A64_REG_Q28
195#define ARMV8_A64_REG_V29 ARMV8_A64_REG_Q29
196#define ARMV8_A64_REG_V30 ARMV8_A64_REG_Q30
197#define ARMV8_A64_REG_V31 ARMV8_A64_REG_Q31
198/** @} */
199
200/** @name The AArch64 register 31.
201 * @note Register 31 typically refers to the zero register, but can also in
202 * select case (by instruction and opecode field) refer the to stack
203 * pointer of the current exception level. ARM typically uses \<Xn|SP\>
204 * to indicate that register 31 is taken as SP, if just \<Xn\> is used
205 * 31 will be the zero register.
206 * @{ */
207/** The stack pointer. */
208#define ARMV8_A64_REG_SP 31
209/** The zero register. Reads as zero, writes ignored. */
210#define ARMV8_A64_REG_XZR 31
211/** The zero register, the 32-bit register name. */
212#define ARMV8_A64_REG_WZR ARMV8_A64_REG_XZR
213/** @} */
214
215/** @name AArch64 register aliases
216 * @{ */
217/** The link register is typically mapped to x30 as that's the default pick of
218 * the RET instruction. */
219#define ARMV8_A64_REG_LR ARMV8_A64_REG_X30
220/** Frame base pointer is typically mapped to x29. */
221#define ARMV8_A64_REG_BP ARMV8_A64_REG_X29
222/** @} */
223
224
225/** @name System register encoding.
226 * @{
227 */
228/** Mask for the op0 part of an MSR/MRS instruction */
229#define ARMV8_AARCH64_SYSREG_OP0_MASK (RT_BIT_32(19) | RT_BIT_32(20))
230/** Shift for the op0 part of an MSR/MRS instruction */
231#define ARMV8_AARCH64_SYSREG_OP0_SHIFT 19
232/** Returns the op0 part of the given MRS/MSR instruction. */
233#define ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP0_MASK) >> ARMV8_AARCH64_SYSREG_OP0_SHIFT)
234/** Mask for the op1 part of an MSR/MRS instruction */
235#define ARMV8_AARCH64_SYSREG_OP1_MASK (RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18))
236/** Shift for the op1 part of an MSR/MRS instruction */
237#define ARMV8_AARCH64_SYSREG_OP1_SHIFT 16
238/** Returns the op1 part of the given MRS/MSR instruction. */
239#define ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP1_MASK) >> ARMV8_AARCH64_SYSREG_OP1_SHIFT)
240/** Mask for the CRn part of an MSR/MRS instruction */
241#define ARMV8_AARCH64_SYSREG_CRN_MASK ( RT_BIT_32(12) | RT_BIT_32(13) | RT_BIT_32(14) \
242 | RT_BIT_32(15) )
243/** Shift for the CRn part of an MSR/MRS instruction */
244#define ARMV8_AARCH64_SYSREG_CRN_SHIFT 12
245/** Returns the CRn part of the given MRS/MSR instruction. */
246#define ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRN_MASK) >> ARMV8_AARCH64_SYSREG_CRN_SHIFT)
247/** Mask for the CRm part of an MSR/MRS instruction */
248#define ARMV8_AARCH64_SYSREG_CRM_MASK ( RT_BIT_32(8) | RT_BIT_32(9) | RT_BIT_32(10) \
249 | RT_BIT_32(11) )
250/** Shift for the CRm part of an MSR/MRS instruction */
251#define ARMV8_AARCH64_SYSREG_CRM_SHIFT 8
252/** Returns the CRn part of the given MRS/MSR instruction. */
253#define ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_CRM_MASK) >> ARMV8_AARCH64_SYSREG_CRM_SHIFT)
254/** Mask for the op2 part of an MSR/MRS instruction */
255#define ARMV8_AARCH64_SYSREG_OP2_MASK (RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7))
256/** Shift for the op2 part of an MSR/MRS instruction */
257#define ARMV8_AARCH64_SYSREG_OP2_SHIFT 5
258/** Returns the op2 part of the given MRS/MSR instruction. */
259#define ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn) (((a_MsrMrsInsn) & ARMV8_AARCH64_SYSREG_OP2_MASK) >> ARMV8_AARCH64_SYSREG_OP2_SHIFT)
260/** Mask for all system register encoding relevant fields in an MRS/MSR instruction. */
261#define ARMV8_AARCH64_SYSREG_MASK ( ARMV8_AARCH64_SYSREG_OP0_MASK | ARMV8_AARCH64_SYSREG_OP1_MASK \
262 | ARMV8_AARCH64_SYSREG_CRN_MASK | ARMV8_AARCH64_SYSREG_CRN_MASK \
263 | ARMV8_AARCH64_SYSREG_OP2_MASK)
264/** @} */
265
266/** @name Mapping of op0:op1:CRn:CRm:op2 to a system register ID. This is
267 * IPRT specific and not part of the ARMv8 specification.
268 * @{ */
269#define ARMV8_AARCH64_SYSREG_ID_CREATE(a_Op0, a_Op1, a_CRn, a_CRm, a_Op2) \
270 (uint16_t)( (((a_Op0) & 0x3) << 14) \
271 | (((a_Op1) & 0x7) << 11) \
272 | (((a_CRn) & 0xf) << 7) \
273 | (((a_CRm) & 0xf) << 3) \
274 | ((a_Op2) & 0x7))
275/** Returns the internal system register ID from the given MRS/MSR instruction. */
276#define ARMV8_AARCH64_SYSREG_ID_FROM_MRS_MSR(a_MsrMrsInsn) \
277 ARMV8_AARCH64_SYSREG_ID_CREATE(ARMV8_AARCH64_SYSREG_OP0_GET(a_MsrMrsInsn), \
278 ARMV8_AARCH64_SYSREG_OP1_GET(a_MsrMrsInsn), \
279 ARMV8_AARCH64_SYSREG_CRN_GET(a_MsrMrsInsn), \
280 ARMV8_AARCH64_SYSREG_CRM_GET(a_MsrMrsInsn), \
281 ARMV8_AARCH64_SYSREG_OP2_GET(a_MsrMrsInsn))
282/** Encodes the given system register ID in the given MSR/MRS instruction. */
283#define ARMV8_AARCH64_SYSREG_ID_ENCODE_IN_MRS_MSR(a_MsrMrsInsn, a_SysregId) \
284 ((a_MsrMrsInsn) = ((a_MsrMrsInsn) & ~ARMV8_AARCH64_SYSREG_MASK) | (a_SysregId << ARMV8_AARCH64_SYSREG_OP2_SHIFT))
285/** @} */
286
287
288/** @name System register IDs.
289 * @{ */
290/** OSDTRRX_EL1 register - RW. */
291#define ARMV8_AARCH64_SYSREG_OSDTRRX_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 0, 2)
292/** MDSCR_EL1 - RW. */
293#define ARMV8_AARCH64_SYSREG_MDSCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 2, 2)
294/** DBGBVR<0..15>_EL1 register - RW. */
295#define ARMV8_AARCH64_SYSREG_DBGBVRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 4)
296/** DBGBCR<0..15>_EL1 register - RW. */
297#define ARMV8_AARCH64_SYSREG_DBGBCRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 5)
298/** DBGWVR<0..15>_EL1 register - RW. */
299#define ARMV8_AARCH64_SYSREG_DBGWVRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 6)
300/** DBGWCR<0..15>_EL1 register - RW. */
301#define ARMV8_AARCH64_SYSREG_DBGWCRn_EL1(a_Id) ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, (a_Id), 7)
302/** MDCCINT_EL1 register - RW. */
303#define ARMV8_AARCH64_SYSREG_MDCCINT_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 2, 0)
304/** OSDTRTX_EL1 register - RW. */
305#define ARMV8_AARCH64_SYSREG_OSDTRTX_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 3, 2)
306/** OSECCR_EL1 register - RW. */
307#define ARMV8_AARCH64_SYSREG_OSECCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 0, 6, 2)
308/** MDRAR_EL1 register - RO. */
309#define ARMV8_AARCH64_SYSREG_MDRAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 0, 0)
310/** OSLAR_EL1 register - WO. */
311#define ARMV8_AARCH64_SYSREG_OSLAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 0, 4)
312/** OSLSR_EL1 register - RO. */
313#define ARMV8_AARCH64_SYSREG_OSLSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 1, 4)
314/** OSDLR_EL1 register - RW. */
315#define ARMV8_AARCH64_SYSREG_OSDLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(2, 0, 1, 3, 4)
316
317/** MIDR_EL1 register - RO. */
318#define ARMV8_AARCH64_SYSREG_MIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 0)
319/** MIPDR_EL1 register - RO. */
320#define ARMV8_AARCH64_SYSREG_MPIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 5)
321/** REVIDR_EL1 register - RO. */
322#define ARMV8_AARCH64_SYSREG_REVIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 0, 6)
323/** ID_PFR0_EL1 register - RO. */
324#define ARMV8_AARCH64_SYSREG_ID_PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 0)
325/** ID_PFR1_EL1 register - RO. */
326#define ARMV8_AARCH64_SYSREG_ID_PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 1)
327/** ID_DFR0_EL1 register - RO. */
328#define ARMV8_AARCH64_SYSREG_ID_DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 2)
329/** ID_AFR0_EL1 register - RO. */
330#define ARMV8_AARCH64_SYSREG_ID_AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 3)
331/** ID_MMFR0_EL1 register - RO. */
332#define ARMV8_AARCH64_SYSREG_ID_MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 4)
333/** ID_MMFR1_EL1 register - RO. */
334#define ARMV8_AARCH64_SYSREG_ID_MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 5)
335/** ID_MMFR2_EL1 register - RO. */
336#define ARMV8_AARCH64_SYSREG_ID_MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 6)
337/** ID_MMFR3_EL1 register - RO. */
338#define ARMV8_AARCH64_SYSREG_ID_MMFR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 1, 7)
339
340/** ID_ISAR0_EL1 register - RO. */
341#define ARMV8_AARCH64_SYSREG_ID_ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 0)
342/** ID_ISAR1_EL1 register - RO. */
343#define ARMV8_AARCH64_SYSREG_ID_ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 1)
344/** ID_ISAR2_EL1 register - RO. */
345#define ARMV8_AARCH64_SYSREG_ID_ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 2)
346/** ID_ISAR3_EL1 register - RO. */
347#define ARMV8_AARCH64_SYSREG_ID_ISAR3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 3)
348/** ID_ISAR4_EL1 register - RO. */
349#define ARMV8_AARCH64_SYSREG_ID_ISAR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 4)
350/** ID_ISAR5_EL1 register - RO. */
351#define ARMV8_AARCH64_SYSREG_ID_ISAR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 5)
352/** ID_MMFR4_EL1 register - RO. */
353#define ARMV8_AARCH64_SYSREG_ID_MMFR4_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 6)
354/** ID_ISAR6_EL1 register - RO. */
355#define ARMV8_AARCH64_SYSREG_ID_ISAR6_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 2, 7)
356
357/** MVFR0_EL1 register - RO. */
358#define ARMV8_AARCH64_SYSREG_MVFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 0)
359/** MVFR1_EL1 register - RO. */
360#define ARMV8_AARCH64_SYSREG_MVFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 1)
361/** MVFR2_EL1 register - RO. */
362#define ARMV8_AARCH64_SYSREG_MVFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 2)
363/** ID_PFR2_EL1 register - RO. */
364#define ARMV8_AARCH64_SYSREG_ID_PFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 4)
365/** ID_DFR1_EL1 register - RO. */
366#define ARMV8_AARCH64_SYSREG_ID_DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 5)
367/** ID_MMFR5_EL1 register - RO. */
368#define ARMV8_AARCH64_SYSREG_ID_MMFR5_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 3, 6)
369
370/** ID_AA64PFR0_EL1 register - RO. */
371#define ARMV8_AARCH64_SYSREG_ID_AA64PFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 0)
372/** ID_AA64PFR0_EL1 register - RO. */
373#define ARMV8_AARCH64_SYSREG_ID_AA64PFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 1)
374/** ID_AA64ZFR0_EL1 register - RO. */
375#define ARMV8_AARCH64_SYSREG_ID_AA64ZFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 4)
376/** ID_AA64SMFR0_EL1 register - RO. */
377#define ARMV8_AARCH64_SYSREG_ID_AA64SMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 4, 5)
378
379/** ID_AA64DFR0_EL1 register - RO. */
380#define ARMV8_AARCH64_SYSREG_ID_AA64DFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 0)
381/** ID_AA64DFR0_EL1 register - RO. */
382#define ARMV8_AARCH64_SYSREG_ID_AA64DFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 1)
383/** ID_AA64AFR0_EL1 register - RO. */
384#define ARMV8_AARCH64_SYSREG_ID_AA64AFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 4)
385/** ID_AA64AFR1_EL1 register - RO. */
386#define ARMV8_AARCH64_SYSREG_ID_AA64AFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 5, 5)
387
388/** ID_AA64ISAR0_EL1 register - RO. */
389#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 0)
390/** ID_AA64ISAR1_EL1 register - RO. */
391#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 1)
392/** ID_AA64ISAR2_EL1 register - RO. */
393#define ARMV8_AARCH64_SYSREG_ID_AA64ISAR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 6, 2)
394
395/** ID_AA64MMFR0_EL1 register - RO. */
396#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 0)
397/** ID_AA64MMFR1_EL1 register - RO. */
398#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 1)
399/** ID_AA64MMFR2_EL1 register - RO. */
400#define ARMV8_AARCH64_SYSREG_ID_AA64MMFR2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 0, 7, 2)
401
402/** SCTRL_EL1 register - RW. */
403#define ARMV8_AARCH64_SYSREG_SCTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 0)
404/** ACTRL_EL1 register - RW. */
405#define ARMV8_AARCH64_SYSREG_ACTRL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 1)
406/** CPACR_EL1 register - RW. */
407#define ARMV8_AARCH64_SYSREG_CPACR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 2)
408/** RGSR_EL1 register - RW. */
409#define ARMV8_AARCH64_SYSREG_RGSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 5)
410/** GCR_EL1 register - RW. */
411#define ARMV8_AARCH64_SYSREG_GCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 0, 6)
412
413/** ZCR_EL1 register - RW. */
414#define ARMV8_AARCH64_SYSREG_ZCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 0)
415/** TRFCR_EL1 register - RW. */
416#define ARMV8_AARCH64_SYSREG_TRFCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 1)
417/** SMPRI_EL1 register - RW. */
418#define ARMV8_AARCH64_SYSREG_SMPRI_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 4)
419/** SMCR_EL1 register - RW. */
420#define ARMV8_AARCH64_SYSREG_SMCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 1, 2, 6)
421
422/** TTBR0_EL1 register - RW. */
423#define ARMV8_AARCH64_SYSREG_TTBR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 0)
424/** TTBR1_EL1 register - RW. */
425#define ARMV8_AARCH64_SYSREG_TTBR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 1)
426/** TCR_EL1 register - RW. */
427#define ARMV8_AARCH64_SYSREG_TCR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 0, 2)
428
429/** APIAKeyLo_EL1 register - RW. */
430#define ARMV8_AARCH64_SYSREG_APIAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 0)
431/** APIAKeyHi_EL1 register - RW. */
432#define ARMV8_AARCH64_SYSREG_APIAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 1)
433/** APIBKeyLo_EL1 register - RW. */
434#define ARMV8_AARCH64_SYSREG_APIBKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 2)
435/** APIBKeyHi_EL1 register - RW. */
436#define ARMV8_AARCH64_SYSREG_APIBKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 1, 3)
437
438/** APDAKeyLo_EL1 register - RW. */
439#define ARMV8_AARCH64_SYSREG_APDAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 0)
440/** APDAKeyHi_EL1 register - RW. */
441#define ARMV8_AARCH64_SYSREG_APDAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 1)
442/** APDBKeyLo_EL1 register - RW. */
443#define ARMV8_AARCH64_SYSREG_APDBKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 2)
444/** APDBKeyHi_EL1 register - RW. */
445#define ARMV8_AARCH64_SYSREG_APDBKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 2, 3)
446
447/** APGAKeyLo_EL1 register - RW. */
448#define ARMV8_AARCH64_SYSREG_APGAKeyLo_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 3, 0)
449/** APGAKeyHi_EL1 register - RW. */
450#define ARMV8_AARCH64_SYSREG_APGAKeyHi_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 2, 3, 1)
451
452/** SPSR_EL1 register - RW. */
453#define ARMV8_AARCH64_SYSREG_SPSR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 0)
454/** ELR_EL1 register - RW. */
455#define ARMV8_AARCH64_SYSREG_ELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 0, 1)
456
457/** SP_EL0 register - RW. */
458#define ARMV8_AARCH64_SYSREG_SP_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 1, 0)
459
460/** PSTATE.SPSel value. */
461#define ARMV8_AARCH64_SYSREG_SPSEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 0)
462/** PSTATE.CurrentEL value. */
463#define ARMV8_AARCH64_SYSREG_CURRENTEL ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 2)
464/** PSTATE.PAN value. */
465#define ARMV8_AARCH64_SYSREG_PAN ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 3)
466/** PSTATE.UAO value. */
467#define ARMV8_AARCH64_SYSREG_UAO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 2, 4)
468
469/** PSTATE.ALLINT value. */
470#define ARMV8_AARCH64_SYSREG_ALLINT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 3, 0)
471
472/** ICC_PMR_EL1 register - RW. */
473#define ARMV8_AARCH64_SYSREG_ICC_PMR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 4, 6, 0)
474
475/** AFSR0_EL1 register - RW. */
476#define ARMV8_AARCH64_SYSREG_AFSR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 0)
477/** AFSR1_EL1 register - RW. */
478#define ARMV8_AARCH64_SYSREG_AFSR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 1, 1)
479
480/** ESR_EL1 register - RW. */
481#define ARMV8_AARCH64_SYSREG_ESR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 2, 0)
482
483/** ERRIDR_EL1 register - RO. */
484#define ARMV8_AARCH64_SYSREG_ERRIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 0)
485/** ERRSELR_EL1 register - RW. */
486#define ARMV8_AARCH64_SYSREG_ERRSELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 5, 3, 1)
487
488/** FAR_EL1 register - RW. */
489#define ARMV8_AARCH64_SYSREG_FAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 6, 0, 0)
490
491/** PAR_EL1 register - RW. */
492#define ARMV8_AARCH64_SYSREG_PAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 7, 4, 0)
493
494/** PMINTENCLR_EL1 register - RW. */
495#define ARMV8_AARCH64_SYSREG_PMINTENCLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 9, 14, 2)
496
497/** MAIR_EL1 register - RW. */
498#define ARMV8_AARCH64_SYSREG_MAIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 10, 2, 0)
499
500/** AMAIR_EL1 register - RW. */
501#define ARMV8_AARCH64_SYSREG_AMAIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 10, 3, 0)
502
503/** VBAR_EL1 register - RW. */
504#define ARMV8_AARCH64_SYSREG_VBAR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 0, 0)
505
506/** ICC_IAR0_EL1 register - RO. */
507#define ARMV8_AARCH64_SYSREG_ICC_IAR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 0)
508/** ICC_EOIR0_EL1 register - WO. */
509#define ARMV8_AARCH64_SYSREG_ICC_EOIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 1)
510/** ICC_HPPIR0_EL1 register - WO. */
511#define ARMV8_AARCH64_SYSREG_ICC_HPPIR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 2)
512/** ICC_BPR0_EL1 register - RW. */
513#define ARMV8_AARCH64_SYSREG_ICC_BPR0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 3)
514/** ICC_AP0R0_EL1 register - RW. */
515#define ARMV8_AARCH64_SYSREG_ICC_AP0R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 4)
516/** ICC_AP0R1_EL1 register - RW. */
517#define ARMV8_AARCH64_SYSREG_ICC_AP0R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 5)
518/** ICC_AP0R2_EL1 register - RW. */
519#define ARMV8_AARCH64_SYSREG_ICC_AP0R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 6)
520/** ICC_AP0R3_EL1 register - RW. */
521#define ARMV8_AARCH64_SYSREG_ICC_AP0R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 8, 7)
522
523/** ICC_AP1R0_EL1 register - RW. */
524#define ARMV8_AARCH64_SYSREG_ICC_AP1R0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 0)
525/** ICC_AP1R1_EL1 register - RW. */
526#define ARMV8_AARCH64_SYSREG_ICC_AP1R1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 1)
527/** ICC_AP1R2_EL1 register - RW. */
528#define ARMV8_AARCH64_SYSREG_ICC_AP1R2_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 2)
529/** ICC_AP1R3_EL1 register - RW. */
530#define ARMV8_AARCH64_SYSREG_ICC_AP1R3_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 3)
531/** ICC_NMIAR1_EL1 register - RO. */
532#define ARMV8_AARCH64_SYSREG_ICC_NMIAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 9, 5)
533
534/** ICC_DIR_EL1 register - WO. */
535#define ARMV8_AARCH64_SYSREG_ICC_DIR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 1)
536/** ICC_RPR_EL1 register - RO. */
537#define ARMV8_AARCH64_SYSREG_ICC_RPR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 3)
538/** ICC_SGI1R_EL1 register - WO. */
539#define ARMV8_AARCH64_SYSREG_ICC_SGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 5)
540/** ICC_ASGI1R_EL1 register - WO. */
541#define ARMV8_AARCH64_SYSREG_ICC_ASGI1R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 6)
542/** ICC_SGI0R_EL1 register - WO. */
543#define ARMV8_AARCH64_SYSREG_ICC_SGI0R_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 11, 7)
544
545/** ICC_IAR1_EL1 register - RO. */
546#define ARMV8_AARCH64_SYSREG_ICC_IAR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 0)
547/** ICC_EOIR1_EL1 register - WO. */
548#define ARMV8_AARCH64_SYSREG_ICC_EOIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 1)
549/** ICC_HPPIR1_EL1 register - RO. */
550#define ARMV8_AARCH64_SYSREG_ICC_HPPIR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 2)
551/** ICC_BPR1_EL1 register - RW. */
552#define ARMV8_AARCH64_SYSREG_ICC_BPR1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 3)
553/** ICC_CTLR_EL1 register - RW. */
554#define ARMV8_AARCH64_SYSREG_ICC_CTLR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 4)
555/** ICC_SRE_EL1 register - RW. */
556#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 5)
557/** ICC_IGRPEN0_EL1 register - RW. */
558#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN0_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 6)
559/** ICC_IGRPEN1_EL1 register - RW. */
560#define ARMV8_AARCH64_SYSREG_ICC_IGRPEN1_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 12, 12, 7)
561
562/** CONTEXTIDR_EL1 register - RW. */
563#define ARMV8_AARCH64_SYSREG_CONTEXTIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 13, 0, 1)
564/** TPIDR_EL1 register - RW. */
565#define ARMV8_AARCH64_SYSREG_TPIDR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 13, 0, 4)
566
567/** CNTKCTL_EL1 register - RW. */
568#define ARMV8_AARCH64_SYSREG_CNTKCTL_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 0, 14, 1, 0)
569
570/** CSSELR_EL1 register - RW. */
571#define ARMV8_AARCH64_SYSREG_CSSELR_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 2, 0, 0, 0)
572
573/** CTR_EL0 - Cache Type Register - RO. */
574#define ARMV8_AARCH64_SYSREG_CTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 0, 0, 1)
575/** DCZID_EL0 - Data Cache Zero ID Register - RO. */
576#define ARMV8_AARCH64_SYSREG_DCZID_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 0, 0, 7)
577
578
579/** NZCV - Status Flags - ??. */
580#define ARMV8_AARCH64_SYSREG_NZCV ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 0)
581/** DAIF - Interrupt Mask Bits - ??. */
582#define ARMV8_AARCH64_SYSREG_DAIF ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 1)
583/** SVCR - Streaming Vector Control Register - ??. */
584#define ARMV8_AARCH64_SYSREG_SVCR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 2)
585/** DIT - Data Independent Timing - ??. */
586#define ARMV8_AARCH64_SYSREG_DIT ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 5)
587/** SSBS - Speculative Store Bypass Safe - ??. */
588#define ARMV8_AARCH64_SYSREG_SSBS ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 6)
589/** TCO - Tag Check Override - ??. */
590#define ARMV8_AARCH64_SYSREG_TCO ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 2, 7)
591
592/** FPCR register - RW. */
593#define ARMV8_AARCH64_SYSREG_FPCR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 4, 0)
594/** FPSR register - RW. */
595#define ARMV8_AARCH64_SYSREG_FPSR ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 4, 4, 1)
596
597/** PMCR_EL0 register - RW. */
598#define ARMV8_AARCH64_SYSREG_PMCR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 0)
599/** PMCNTENSET_EL0 register - RW. */
600#define ARMV8_AARCH64_SYSREG_PMCNTENSET_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 1)
601/** PMCNTENCLR_EL0 register - RW. */
602#define ARMV8_AARCH64_SYSREG_PMCNTENCLR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 2)
603/** PMOVSCLR_EL0 register - RW. */
604#define ARMV8_AARCH64_SYSREG_PMOVSCLR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 12, 3)
605
606/** PMCCNTR_EL0 register - RW. */
607#define ARMV8_AARCH64_SYSREG_PMCCNTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 13, 0)
608
609/** PMUSERENR_EL0 register - RW. */
610#define ARMV8_AARCH64_SYSREG_PMUSERENR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 9, 14, 0)
611
612/** PMCCFILTR_EL0 register - RW. */
613#define ARMV8_AARCH64_SYSREG_PMCCFILTR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 15, 7)
614
615/** ICC_SRE_EL2 register - RW. */
616#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 9, 5)
617
618/** TPIDR_EL0 register - RW. */
619#define ARMV8_AARCH64_SYSREG_TPIDR_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 13, 0, 2)
620/** TPIDRRO_EL0 register - RO. */
621#define ARMV8_AARCH64_SYSREG_TPIDRRO_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 13, 0, 3)
622
623/** CNTFRQ_EL0 register - RW. */
624#define ARMV8_AARCH64_SYSREG_CNTFRQ_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 0, 0)
625/** CNTVCT_EL0 register - RW. */
626#define ARMV8_AARCH64_SYSREG_CNTVCT_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 0, 2)
627
628/** CNTP_TVAL_EL0 register - RW. */
629#define ARMV8_AARCH64_SYSREG_CNTP_TVAL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 0)
630/** CNTP_CTL_EL0 register - RW. */
631#define ARMV8_AARCH64_SYSREG_CNTP_CTL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 1)
632/** CNTP_CVAL_EL0 register - RW. */
633#define ARMV8_AARCH64_SYSREG_CNTP_CVAL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 2, 2)
634
635/** CNTV_CTL_EL0 register - RW. */
636#define ARMV8_AARCH64_SYSREG_CNTV_CTL_EL0 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 3, 14, 3, 1)
637
638/** VPIDR_EL2 register - RW. */
639#define ARMV8_AARCH64_SYSREG_VPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 0, 0, 0)
640/** VMPIDR_EL2 register - RW. */
641#define ARMV8_AARCH64_SYSREG_VMPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 0, 0, 5)
642
643/** SCTLR_EL2 register - RW. */
644#define ARMV8_AARCH64_SYSREG_SCTLR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 0, 0)
645/** ACTLR_EL2 register - RW. */
646#define ARMV8_AARCH64_SYSREG_ACTLR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 0, 1)
647
648/** HCR_EL2 register - RW. */
649#define ARMV8_AARCH64_SYSREG_HCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 0)
650/** MDCR_EL2 register - RW. */
651#define ARMV8_AARCH64_SYSREG_MDCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 1)
652/** CPTR_EL2 register - RW. */
653#define ARMV8_AARCH64_SYSREG_CPTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 2)
654/** HSTR_EL2 register - RW. */
655#define ARMV8_AARCH64_SYSREG_HSTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 3)
656/** HFGRTR_EL2 register - RW. */
657#define ARMV8_AARCH64_SYSREG_HFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 4)
658/** HFGWTR_EL2 register - RW. */
659#define ARMV8_AARCH64_SYSREG_HFGWTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 5)
660/** HFGITR_EL2 register - RW. */
661#define ARMV8_AARCH64_SYSREG_HFGITR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 6)
662/** HACR_EL2 register - RW. */
663#define ARMV8_AARCH64_SYSREG_HACR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 1, 7)
664
665/** ZCR_EL2 register - RW. */
666#define ARMV8_AARCH64_SYSREG_ZCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 0)
667/** TRFCR_EL2 register - RW. */
668#define ARMV8_AARCH64_SYSREG_TRFCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 1)
669/** HCRX_EL2 register - RW. */
670#define ARMV8_AARCH64_SYSREG_HCRX_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 2, 2)
671
672/** SDER32_EL2 register - RW. */
673#define ARMV8_AARCH64_SYSREG_SDER32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 1, 3, 0)
674
675/** TTBR0_EL2 register - RW. */
676#define ARMV8_AARCH64_SYSREG_TTBR0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 0)
677/** TTBR1_EL2 register - RW. */
678#define ARMV8_AARCH64_SYSREG_TTBR1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 1)
679/** TCR_EL2 register - RW. */
680#define ARMV8_AARCH64_SYSREG_TCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 0, 2)
681
682/** VTTBR_EL2 register - RW. */
683#define ARMV8_AARCH64_SYSREG_VTTBR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 1, 0)
684/** VTCR_EL2 register - RW. */
685#define ARMV8_AARCH64_SYSREG_VTCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 1, 2)
686
687/** VNCR_EL2 register - RW. */
688#define ARMV8_AARCH64_SYSREG_VNCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 2, 0)
689
690/** VSTTBR_EL2 register - RW. */
691#define ARMV8_AARCH64_SYSREG_VSTTBR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 6, 0)
692/** VSTCR_EL2 register - RW. */
693#define ARMV8_AARCH64_SYSREG_VSTCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 2, 6, 2)
694
695/** DACR32_EL2 register - RW. */
696#define ARMV8_AARCH64_SYSREG_DACR32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 0, 0)
697
698/** HDFGRTR_EL2 register - RW. */
699#define ARMV8_AARCH64_SYSREG_HDFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 4)
700/** HDFGWTR_EL2 register - RW. */
701#define ARMV8_AARCH64_SYSREG_HDFGWTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 5)
702/** HAFGRTR_EL2 register - RW. */
703#define ARMV8_AARCH64_SYSREG_HAFGRTR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 3, 1, 6)
704
705/** SPSR_EL2 register - RW. */
706#define ARMV8_AARCH64_SYSREG_SPSR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 0, 0)
707/** ELR_EL2 register - RW. */
708#define ARMV8_AARCH64_SYSREG_ELR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 0, 1)
709
710/** SP_EL1 register - RW. */
711#define ARMV8_AARCH64_SYSREG_SP_EL1 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 4, 1, 0)
712
713/** IFSR32_EL2 register - RW. */
714#define ARMV8_AARCH64_SYSREG_IFSR32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 0, 1)
715
716/** AFSR0_EL2 register - RW. */
717#define ARMV8_AARCH64_SYSREG_AFSR0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 1, 0)
718/** AFSR1_EL2 register - RW. */
719#define ARMV8_AARCH64_SYSREG_AFSR1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 1, 1)
720
721/** ESR_EL2 register - RW. */
722#define ARMV8_AARCH64_SYSREG_ESR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 2, 0)
723/** VSESR_EL2 register - RW. */
724#define ARMV8_AARCH64_SYSREG_VSESR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 2, 3)
725
726/** FPEXC32_EL2 register - RW. */
727#define ARMV8_AARCH64_SYSREG_FPEXC32_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 3, 0)
728
729/** TFSR_EL2 register - RW. */
730#define ARMV8_AARCH64_SYSREG_TFSR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 5, 6, 0)
731
732/** FAR_EL2 register - RW. */
733#define ARMV8_AARCH64_SYSREG_FAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 6, 0, 0)
734/** HPFAR_EL2 register - RW. */
735#define ARMV8_AARCH64_SYSREG_HPFAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 6, 0, 4)
736
737/** PMSCR_EL2 register - RW. */
738#define ARMV8_AARCH64_SYSREG_PMSCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 9, 9, 0)
739
740/** MAIR_EL2 register - RW. */
741#define ARMV8_AARCH64_SYSREG_MAIR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 2, 0)
742
743/** AMAIR_EL2 register - RW. */
744#define ARMV8_AARCH64_SYSREG_AMAIR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 3, 0)
745
746/** MPAMHCR_EL2 register - RW. */
747#define ARMV8_AARCH64_SYSREG_MPAMHCR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 4, 0)
748/** MPAMVPMV_EL2 register - RW. */
749#define ARMV8_AARCH64_SYSREG_MPAMVPMV_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 4, 1)
750
751/** MPAM2_EL2 register - RW. */
752#define ARMV8_AARCH64_SYSREG_MPAM2_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 5, 0)
753
754/** MPAMVPM0_EL2 register - RW. */
755#define ARMV8_AARCH64_SYSREG_MPAMVPM0_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 0)
756/** MPAMVPM1_EL2 register - RW. */
757#define ARMV8_AARCH64_SYSREG_MPAMVPM1_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 1)
758/** MPAMVPM2_EL2 register - RW. */
759#define ARMV8_AARCH64_SYSREG_MPAMVPM2_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 2)
760/** MPAMVPM3_EL2 register - RW. */
761#define ARMV8_AARCH64_SYSREG_MPAMVPM3_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 3)
762/** MPAMVPM4_EL2 register - RW. */
763#define ARMV8_AARCH64_SYSREG_MPAMVPM4_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 4)
764/** MPAMVPM5_EL2 register - RW. */
765#define ARMV8_AARCH64_SYSREG_MPAMVPM5_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 5)
766/** MPAMVPM6_EL2 register - RW. */
767#define ARMV8_AARCH64_SYSREG_MPAMVPM6_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 6)
768/** MPAMVPM7_EL2 register - RW. */
769#define ARMV8_AARCH64_SYSREG_MPAMVPM7_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 10, 6, 7)
770
771/** VBAR_EL2 register - RW. */
772#define ARMV8_AARCH64_SYSREG_VBAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 0)
773/** RVBAR_EL2 register - RW. */
774#define ARMV8_AARCH64_SYSREG_RVBAR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 1)
775/** RMR_EL2 register - RW. */
776#define ARMV8_AARCH64_SYSREG_RMR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 0, 2)
777
778/** VDISR_EL2 register - RW. */
779#define ARMV8_AARCH64_SYSREG_VDISR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 12, 1, 1)
780
781/** CONTEXTIDR_EL2 register - RW. */
782#define ARMV8_AARCH64_SYSREG_CONTEXTIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 1)
783/** TPIDR_EL2 register - RW. */
784#define ARMV8_AARCH64_SYSREG_TPIDR_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 2)
785/** SCXTNUM_EL2 register - RW. */
786#define ARMV8_AARCH64_SYSREG_SCXTNUM_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 13, 0, 7)
787
788/** CNTVOFF_EL2 register - RW. */
789#define ARMV8_AARCH64_SYSREG_CNTVOFF_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 0, 3)
790/** CNTPOFF_EL2 register - RW. */
791#define ARMV8_AARCH64_SYSREG_CNTPOFF_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 0, 6)
792
793/** CNTHCTL_EL2 register - RW. */
794#define ARMV8_AARCH64_SYSREG_CNTHCTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 1, 0)
795
796/** CNTHP_TVAL_EL2 register - RW. */
797#define ARMV8_AARCH64_SYSREG_CNTHP_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 0)
798/** CNTHP_CTL_EL2 register - RW. */
799#define ARMV8_AARCH64_SYSREG_CNTHP_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 1)
800/** CNTHP_CVAL_EL2 register - RW. */
801#define ARMV8_AARCH64_SYSREG_CNTHP_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 2, 2)
802
803/** CNTHV_TVAL_EL2 register - RW. */
804#define ARMV8_AARCH64_SYSREG_CNTHV_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 0)
805/** CNTHV_CTL_EL2 register - RW. */
806#define ARMV8_AARCH64_SYSREG_CNTHV_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 1)
807/** CNTHV_CVAL_EL2 register - RW. */
808#define ARMV8_AARCH64_SYSREG_CNTHV_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 3, 2)
809
810/** CNTHVS_TVAL_EL2 register - RW. */
811#define ARMV8_AARCH64_SYSREG_CNTHVS_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 0)
812/** CNTHVS_CTL_EL2 register - RW. */
813#define ARMV8_AARCH64_SYSREG_CNTHVS_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 1)
814/** CNTHVS_CVAL_EL2 register - RW. */
815#define ARMV8_AARCH64_SYSREG_CNTHVS_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 4, 2)
816
817/** CNTHPS_TVAL_EL2 register - RW. */
818#define ARMV8_AARCH64_SYSREG_CNTHPS_TVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 0)
819/** CNTHPS_CTL_EL2 register - RW. */
820#define ARMV8_AARCH64_SYSREG_CNTHPS_CTL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 1)
821/** CNTHPS_CVAL_EL2 register - RW. */
822#define ARMV8_AARCH64_SYSREG_CNTHPS_CVAL_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 4, 14, 5, 2)
823
824/** SP_EL2 register - RW. */
825#define ARMV8_AARCH64_SYSREG_SP_EL2 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 6, 4, 1, 0)
826
827/** SP_EL2 register - RW. */
828#define ARMV8_AARCH64_SYSREG_ICC_SRE_EL3 ARMV8_AARCH64_SYSREG_ID_CREATE(3, 6, 12, 12, 5)
829/** @} */
830
831
832#ifndef RT_IN_ASSEMBLER
833/**
834 * SPSR_EL2 (according to chapter C5.2.19)
835 */
836typedef union ARMV8SPSREL2
837{
838 /** The plain unsigned view. */
839 uint64_t u;
840 /** The 8-bit view. */
841 uint8_t au8[8];
842 /** The 16-bit view. */
843 uint16_t au16[4];
844 /** The 32-bit view. */
845 uint32_t au32[2];
846 /** The 64-bit view. */
847 uint64_t u64;
848} ARMV8SPSREL2;
849/** Pointer to SPSR_EL2. */
850typedef ARMV8SPSREL2 *PARMV8SPSREL2;
851/** Pointer to const SPSR_EL2. */
852typedef const ARMV8SPSREL2 *PCXARMV8SPSREL2;
853#endif /* !RT_IN_ASSEMBLER */
854
855
856/** @name SPSR_EL2 (When exception is taken from AArch64 state)
857 * @{
858 */
859/** Bit 0 - 3 - M - AArch64 Exception level and selected stack pointer. */
860#define ARMV8_SPSR_EL2_AARCH64_M (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
861#define ARMV8_SPSR_EL2_AARCH64_GET_M(a_Spsr) ((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M)
862/** Bit 0 - SP - Selected stack pointer. */
863#define ARMV8_SPSR_EL2_AARCH64_SP RT_BIT_64(0)
864#define ARMV8_SPSR_EL2_AARCH64_SP_BIT 0
865/** Bit 1 - Reserved (read as zero). */
866#define ARMV8_SPSR_EL2_AARCH64_RSVD_1 RT_BIT_64(1)
867/** Bit 2 - 3 - EL - Exception level. */
868#define ARMV8_SPSR_EL2_AARCH64_EL (RT_BIT_64(2) | RT_BIT_64(3))
869#define ARMV8_SPSR_EL2_AARCH64_EL_SHIFT 2
870#define ARMV8_SPSR_EL2_AARCH64_GET_EL(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_EL_SHIFT) & 3)
871#define ARMV8_SPSR_EL2_AARCH64_SET_EL(a_El) ((a_El) << ARMV8_SPSR_EL2_AARCH64_EL_SHIFT)
872/** Bit 4 - M[4] - Execution state (0 means AArch64, when 1 this contains a AArch32 state). */
873#define ARMV8_SPSR_EL2_AARCH64_M4 RT_BIT_64(4)
874#define ARMV8_SPSR_EL2_AARCH64_M4_BIT 4
875/** Bit 5 - T - T32 instruction set state (only valid when ARMV8_SPSR_EL2_AARCH64_M4 is set). */
876#define ARMV8_SPSR_EL2_AARCH64_T RT_BIT_64(5)
877#define ARMV8_SPSR_EL2_AARCH64_T_BIT 5
878/** Bit 6 - I - FIQ interrupt mask. */
879#define ARMV8_SPSR_EL2_AARCH64_F RT_BIT_64(6)
880#define ARMV8_SPSR_EL2_AARCH64_F_BIT 6
881/** Bit 7 - I - IRQ interrupt mask. */
882#define ARMV8_SPSR_EL2_AARCH64_I RT_BIT_64(7)
883#define ARMV8_SPSR_EL2_AARCH64_I_BIT 7
884/** Bit 8 - A - SError interrupt mask. */
885#define ARMV8_SPSR_EL2_AARCH64_A RT_BIT_64(8)
886#define ARMV8_SPSR_EL2_AARCH64_A_BIT 8
887/** Bit 9 - D - Debug Exception mask. */
888#define ARMV8_SPSR_EL2_AARCH64_D RT_BIT_64(9)
889#define ARMV8_SPSR_EL2_AARCH64_D_BIT 9
890/** Bit 10 - 11 - BTYPE - Branch Type indicator. */
891#define ARMV8_SPSR_EL2_AARCH64_BYTPE (RT_BIT_64(10) | RT_BIT_64(11))
892#define ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT 10
893#define ARMV8_SPSR_EL2_AARCH64_GET_BYTPE(a_Spsr) (((a_Spsr) >> ARMV8_SPSR_EL2_AARCH64_BYTPE_SHIFT) & 3)
894/** Bit 12 - SSBS - Speculative Store Bypass. */
895#define ARMV8_SPSR_EL2_AARCH64_SSBS RT_BIT_64(12)
896#define ARMV8_SPSR_EL2_AARCH64_SSBS_BIT 12
897/** Bit 13 - ALLINT - All IRQ or FIQ interrupts mask. */
898#define ARMV8_SPSR_EL2_AARCH64_ALLINT RT_BIT_64(13)
899#define ARMV8_SPSR_EL2_AARCH64_ALLINT_BIT 13
900/** Bit 14 - 19 - Reserved (read as zero). */
901#define ARMV8_SPSR_EL2_AARCH64_RSVD_14_19 ( RT_BIT_64(14) | RT_BIT_64(15) | RT_BIT_64(16) \
902 | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
903/** Bit 20 - IL - Illegal Execution State flag. */
904#define ARMV8_SPSR_EL2_AARCH64_IL RT_BIT_64(20)
905#define ARMV8_SPSR_EL2_AARCH64_IL_BIT 20
906/** Bit 21 - SS - Software Step flag. */
907#define ARMV8_SPSR_EL2_AARCH64_SS RT_BIT_64(21)
908#define ARMV8_SPSR_EL2_AARCH64_SS_BIT 21
909/** Bit 22 - PAN - Privileged Access Never flag. */
910#define ARMV8_SPSR_EL2_AARCH64_PAN RT_BIT_64(25)
911#define ARMV8_SPSR_EL2_AARCH64_PAN_BIT 22
912/** Bit 23 - UAO - User Access Override flag. */
913#define ARMV8_SPSR_EL2_AARCH64_UAO RT_BIT_64(23)
914#define ARMV8_SPSR_EL2_AARCH64_UAO_BIT 23
915/** Bit 24 - DIT - Data Independent Timing flag. */
916#define ARMV8_SPSR_EL2_AARCH64_DIT RT_BIT_64(24)
917#define ARMV8_SPSR_EL2_AARCH64_DIT_BIT 24
918/** Bit 25 - TCO - Tag Check Override flag. */
919#define ARMV8_SPSR_EL2_AARCH64_TCO RT_BIT_64(25)
920#define ARMV8_SPSR_EL2_AARCH64_TCO_BIT 25
921/** Bit 26 - 27 - Reserved (read as zero). */
922#define ARMV8_SPSR_EL2_AARCH64_RSVD_26_27 (RT_BIT_64(26) | RT_BIT_64(27))
923/** Bit 28 - V - Overflow condition flag. */
924#define ARMV8_SPSR_EL2_AARCH64_V RT_BIT_64(28)
925#define ARMV8_SPSR_EL2_AARCH64_V_BIT 28
926/** Bit 29 - C - Carry condition flag. */
927#define ARMV8_SPSR_EL2_AARCH64_C RT_BIT_64(29)
928#define ARMV8_SPSR_EL2_AARCH64_C_BIT 29
929/** Bit 30 - Z - Zero condition flag. */
930#define ARMV8_SPSR_EL2_AARCH64_Z RT_BIT_64(30)
931#define ARMV8_SPSR_EL2_AARCH64_Z_BIT 30
932/** Bit 31 - N - Negative condition flag. */
933#define ARMV8_SPSR_EL2_AARCH64_N RT_BIT_64(31)
934#define ARMV8_SPSR_EL2_AARCH64_N_BIT 31
935/** Bit 32 - 63 - Reserved (read as zero). */
936#define ARMV8_SPSR_EL2_AARCH64_RSVD_32_63 (UINT64_C(0xffffffff00000000))
937/** Checks whether the given SPSR value contains a AARCH64 execution state. */
938#define ARMV8_SPSR_EL2_IS_AARCH64_STATE(a_Spsr) (!((a_Spsr) & ARMV8_SPSR_EL2_AARCH64_M4))
939/** @} */
940
941/** @name Aarch64 Exception levels
942 * @{ */
943/** Exception Level 0 - User mode. */
944#define ARMV8_AARCH64_EL_0 0
945/** Exception Level 1 - Supervisor mode. */
946#define ARMV8_AARCH64_EL_1 1
947/** Exception Level 2 - Hypervisor mode. */
948#define ARMV8_AARCH64_EL_2 2
949/** @} */
950
951
952/** @name ESR_EL2 (Exception Syndrome Register, EL2)
953 * @{
954 */
955/** Bit 0 - 24 - ISS - Instruction Specific Syndrome, encoding depends on the exception class. */
956#define ARMV8_ESR_EL2_ISS UINT64_C(0x1ffffff)
957#define ARMV8_ESR_EL2_ISS_GET(a_Esr) ((a_Esr) & ARMV8_ESR_EL2_ISS)
958/** Bit 25 - IL - Instruction length for synchronous exception (0 means 16-bit instruction, 1 32-bit instruction). */
959#define ARMV8_ESR_EL2_IL RT_BIT_64(25)
960#define ARMV8_ESR_EL2_IL_BIT 25
961#define ARMV8_ESR_EL2_IL_IS_32BIT(a_Esr) RT_BOOL((a_Esr) & ARMV8_ESR_EL2_IL)
962#define ARMV8_ESR_EL2_IL_IS_16BIT(a_Esr) (!((a_Esr) & ARMV8_ESR_EL2_IL))
963/** Bit 26 - 31 - EC - Exception class, indicates reason for the exception that this register holds information about. */
964#define ARMV8_ESR_EL2_EC ( RT_BIT_64(26) | RT_BIT_64(27) | RT_BIT_64(28) \
965 | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
966#define ARMV8_ESR_EL2_EC_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_EC) >> 26)
967/** Bit 32 - 36 - ISS2 - Only valid when FEAT_LS64_V and/or FEAT_LS64_ACCDATA is present. */
968#define ARMV8_ESR_EL2_ISS2 ( RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) \
969 | RT_BIT_64(35) | RT_BIT_64(36))
970#define ARMV8_ESR_EL2_ISS2_GET(a_Esr) (((a_Esr) & ARMV8_ESR_EL2_ISS2) >> 32)
971/** @} */
972
973
974/** @name ESR_EL2 Exception Classes (EC)
975 * @{ */
976/** Unknown exception reason. */
977#define ARMV8_ESR_EL2_EC_UNKNOWN UINT32_C(0)
978/** Trapped WF* instruction. */
979#define ARMV8_ESR_EL2_EC_TRAPPED_WFX UINT32_C(1)
980/** AArch32 - Trapped MCR or MRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
981#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_15 UINT32_C(3)
982/** AArch32 - Trapped MCRR or MRRC access (coproc == 0b1111) not reported through ARMV8_ESR_EL2_EC_UNKNOWN. */
983#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCRR_MRRC_COPROC15 UINT32_C(4)
984/** AArch32 - Trapped MCR or MRC access (coproc == 0b1110). */
985#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MCR_MRC_COPROC_14 UINT32_C(5)
986/** AArch32 - Trapped LDC or STC access. */
987#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_LDC_STC UINT32_C(6)
988/** AArch32 - Trapped access to SME, SVE or Advanced SIMD or floating point fnunctionality. */
989#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON UINT32_C(7)
990/** AArch32 - Trapped VMRS access not reported using ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_SME_SVE_NEON. */
991#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_VMRS UINT32_C(8)
992/** AArch32 - Trapped pointer authentication instruction. */
993#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_PA_INSN UINT32_C(9)
994/** FEAT_LS64 - Exception from LD64B or ST64B instruction. */
995#define ARMV8_ESR_EL2_EC_LS64_EXCEPTION UINT32_C(10)
996/** AArch32 - Trapped MRRC access (coproc == 0b1110). */
997#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_MRRC_COPROC14 UINT32_C(12)
998/** FEAT_BTI - Branch Target Exception. */
999#define ARMV8_ESR_EL2_EC_BTI_BRANCH_TARGET_EXCEPTION UINT32_C(13)
1000/** Illegal Execution State. */
1001#define ARMV8_ESR_EL2_ILLEGAL_EXECUTION_STATE UINT32_C(14)
1002/** AArch32 - SVC instruction execution. */
1003#define ARMV8_ESR_EL2_EC_AARCH32_SVC_INSN UINT32_C(17)
1004/** AArch32 - HVC instruction execution. */
1005#define ARMV8_ESR_EL2_EC_AARCH32_HVC_INSN UINT32_C(18)
1006/** AArch32 - SMC instruction execution. */
1007#define ARMV8_ESR_EL2_EC_AARCH32_SMC_INSN UINT32_C(19)
1008/** AArch64 - SVC instruction execution. */
1009#define ARMV8_ESR_EL2_EC_AARCH64_SVC_INSN UINT32_C(21)
1010/** AArch64 - HVC instruction execution. */
1011#define ARMV8_ESR_EL2_EC_AARCH64_HVC_INSN UINT32_C(22)
1012/** AArch64 - SMC instruction execution. */
1013#define ARMV8_ESR_EL2_EC_AARCH64_SMC_INSN UINT32_C(23)
1014/** AArch64 - Trapped MSR, MRS or System instruction execution in AArch64 state. */
1015#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_SYS_INSN UINT32_C(24)
1016/** FEAT_SVE - Access to SVE vunctionality not reported using ARMV8_ESR_EL2_EC_UNKNOWN. */
1017#define ARMV8_ESR_EL2_EC_SVE_TRAPPED UINT32_C(25)
1018/** FEAT_PAuth and FEAT_NV - Trapped ERET, ERETAA or ERTAB instruction. */
1019#define ARMV8_ESR_EL2_EC_PAUTH_NV_TRAPPED_ERET_ERETAA_ERETAB UINT32_C(26)
1020/** FEAT_TME - Exception from TSTART instruction. */
1021#define ARMV8_ESR_EL2_EC_TME_TSTART_INSN_EXCEPTION UINT32_C(27)
1022/** FEAT_FPAC - Exception from a Pointer Authentication instruction failure. */
1023#define ARMV8_ESR_EL2_EC_FPAC_PA_INSN_FAILURE_EXCEPTION UINT32_C(28)
1024/** FEAT_SME - Access to SME functionality trapped. */
1025#define ARMV8_ESR_EL2_EC_SME_TRAPPED_SME_ACCESS UINT32_C(29)
1026/** FEAT_RME - Exception from Granule Protection Check. */
1027#define ARMV8_ESR_EL2_EC_RME_GRANULE_PROT_CHECK_EXCEPTION UINT32_C(30)
1028/** Instruction Abort from a lower Exception level. */
1029#define ARMV8_ESR_EL2_INSN_ABORT_FROM_LOWER_EL UINT32_C(32)
1030/** Instruction Abort from the same Exception level. */
1031#define ARMV8_ESR_EL2_INSN_ABORT_FROM_EL2 UINT32_C(33)
1032/** PC alignment fault exception. */
1033#define ARMV8_ESR_EL2_PC_ALIGNMENT_EXCEPTION UINT32_C(34)
1034/** Data Abort from a lower Exception level. */
1035#define ARMV8_ESR_EL2_DATA_ABORT_FROM_LOWER_EL UINT32_C(36)
1036/** Data Abort from the same Exception level (or access associated with VNCR_EL2). */
1037#define ARMV8_ESR_EL2_DATA_ABORT_FROM_EL2 UINT32_C(37)
1038/** SP alignment fault exception. */
1039#define ARMV8_ESR_EL2_SP_ALIGNMENT_EXCEPTION UINT32_C(38)
1040/** FEAT_MOPS - Memory Operation Exception. */
1041#define ARMV8_ESR_EL2_EC_MOPS_EXCEPTION UINT32_C(39)
1042/** AArch32 - Trapped floating point exception. */
1043#define ARMV8_ESR_EL2_EC_AARCH32_TRAPPED_FP_EXCEPTION UINT32_C(40)
1044/** AArch64 - Trapped floating point exception. */
1045#define ARMV8_ESR_EL2_EC_AARCH64_TRAPPED_FP_EXCEPTION UINT32_C(44)
1046/** SError interrupt. */
1047#define ARMV8_ESR_EL2_SERROR_INTERRUPT UINT32_C(47)
1048/** Breakpoint Exception from a lower Exception level. */
1049#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_LOWER_EL UINT32_C(48)
1050/** Breakpoint Exception from the same Exception level. */
1051#define ARMV8_ESR_EL2_BKPT_EXCEPTION_FROM_EL2 UINT32_C(49)
1052/** Software Step Exception from a lower Exception level. */
1053#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_LOWER_EL UINT32_C(50)
1054/** Software Step Exception from the same Exception level. */
1055#define ARMV8_ESR_EL2_SS_EXCEPTION_FROM_EL2 UINT32_C(51)
1056/** Watchpoint Exception from a lower Exception level. */
1057#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_LOWER_EL UINT32_C(52)
1058/** Watchpoint Exception from the same Exception level. */
1059#define ARMV8_ESR_EL2_WATCHPOINT_EXCEPTION_FROM_EL2 UINT32_C(53)
1060/** AArch32 - BKPT instruction execution. */
1061#define ARMV8_ESR_EL2_EC_AARCH32_BKPT_INSN UINT32_C(56)
1062/** AArch32 - Vector Catch exception. */
1063#define ARMV8_ESR_EL2_EC_AARCH32_VEC_CATCH_EXCEPTION UINT32_C(58)
1064/** AArch64 - BRK instruction execution. */
1065#define ARMV8_ESR_EL2_EC_AARCH64_BRK_INSN UINT32_C(60)
1066/** @} */
1067
1068
1069/** @name ISS encoding for Data Abort exceptions.
1070 * @{ */
1071/** Bit 0 - 5 - DFSC - Data Fault Status Code. */
1072#define ARMV8_EC_ISS_DATA_ABRT_DFSC ( RT_BIT_32(0) | RT_BIT_32(1) | RT_BIT_32(2) \
1073 | RT_BIT_32(3) | RT_BIT_32(4) | RT_BIT_32(5))
1074#define ARMV8_EC_ISS_DATA_ABRT_DFSC_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_DFSC)
1075/** Bit 6 - WnR - Write not Read. */
1076#define ARMV8_EC_ISS_DATA_ABRT_WNR RT_BIT_32(6)
1077#define ARMV8_EC_ISS_DATA_ABRT_WNR_BIT 6
1078/** Bit 7 - S1PTW - Stage 2 translation fault for an access made for a stage 1 translation table walk. */
1079#define ARMV8_EC_ISS_DATA_ABRT_S1PTW RT_BIT_32(7)
1080#define ARMV8_EC_ISS_DATA_ABRT_S1PTW_BIT 7
1081/** Bit 8 - CM - Cache maintenance instruction. */
1082#define ARMV8_EC_ISS_DATA_ABRT_CM RT_BIT_32(8)
1083#define ARMV8_EC_ISS_DATA_ABRT_CM_BIT 8
1084/** Bit 9 - EA - External abort type. */
1085#define ARMV8_EC_ISS_DATA_ABRT_EA RT_BIT_32(9)
1086#define ARMV8_EC_ISS_DATA_ABRT_EA_BIT 9
1087/** Bit 10 - FnV - FAR not Valid. */
1088#define ARMV8_EC_ISS_DATA_ABRT_FNV RT_BIT_32(10)
1089#define ARMV8_EC_ISS_DATA_ABRT_FNV_BIT 10
1090/** Bit 11 - 12 - LST - Load/Store Type. */
1091#define ARMV8_EC_ISS_DATA_ABRT_LST (RT_BIT_32(11) | RT_BIT_32(12))
1092#define ARMV8_EC_ISS_DATA_ABRT_LST_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_LST) >> 11)
1093/** Bit 13 - VNCR - Fault came from use of VNCR_EL2 register by EL1 code. */
1094#define ARMV8_EC_ISS_DATA_ABRT_VNCR RT_BIT_32(13)
1095#define ARMV8_EC_ISS_DATA_ABRT_VNCR_BIT 13
1096/** Bit 14 - AR - Acquire/Release semantics. */
1097#define ARMV8_EC_ISS_DATA_ABRT_AR RT_BIT_32(14)
1098#define ARMV8_EC_ISS_DATA_ABRT_AR_BIT 14
1099/** Bit 15 - SF - Sixty Four bit general-purpose register transfer (only when ISV is 1). */
1100#define ARMV8_EC_ISS_DATA_ABRT_SF RT_BIT_32(15)
1101#define ARMV8_EC_ISS_DATA_ABRT_SF_BIT 15
1102/** Bit 16 - 20 - SRT - Syndrome Register Transfer. */
1103#define ARMV8_EC_ISS_DATA_ABRT_SRT ( RT_BIT_32(16) | RT_BIT_32(17) | RT_BIT_32(18) \
1104 | RT_BIT_32(19) | RT_BIT_32(20))
1105#define ARMV8_EC_ISS_DATA_ABRT_SRT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SRT) >> 16)
1106/** Bit 21 - SSE - Syndrome Sign Extend. */
1107#define ARMV8_EC_ISS_DATA_ABRT_SSE RT_BIT_32(21)
1108#define ARMV8_EC_ISS_DATA_ABRT_SSE_BIT 21
1109/** Bit 22 - 23 - SAS - Syndrome Access Size. */
1110#define ARMV8_EC_ISS_DATA_ABRT_SAS (RT_BIT_32(22) | RT_BIT_32(23))
1111#define ARMV8_EC_ISS_DATA_ABRT_SAS_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_DATA_ABRT_SAS) >> 22)
1112/** Bit 24 - ISV - Instruction Syndrome Valid. */
1113#define ARMV8_EC_ISS_DATA_ABRT_ISV RT_BIT_32(24)
1114#define ARMV8_EC_ISS_DATA_ABRT_ISV_BIT 24
1115/** @} */
1116
1117
1118/** @name Data Fault Status Code (DFSC).
1119 * @{ */
1120/** Address size fault, level 0 of translation or translation table base register. */
1121#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL0 0
1122/** Address size fault, level 1. */
1123#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL1 1
1124/** Address size fault, level 2. */
1125#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL2 2
1126/** Address size fault, level 3. */
1127#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ADDR_SIZE_FAULT_LVL3 3
1128/** Translation fault, level 0. */
1129#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL0 4
1130/** Translation fault, level 1. */
1131#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL1 5
1132/** Translation fault, level 2. */
1133#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL2 6
1134/** Translation fault, level 3. */
1135#define ARMV8_EC_ISS_DATA_ABRT_DFSC_TRANSLATION_FAULT_LVL3 7
1136/** FEAT_LPA2 - Access flag fault, level 0. */
1137#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL0 8
1138/** Access flag fault, level 1. */
1139#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL1 9
1140/** Access flag fault, level 2. */
1141#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL2 10
1142/** Access flag fault, level 3. */
1143#define ARMV8_EC_ISS_DATA_ABRT_DFSC_ACCESS_FLAG_FAULT_LVL3 11
1144/** FEAT_LPA2 - Permission fault, level 0. */
1145#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL0 12
1146/** Permission fault, level 1. */
1147#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL1 13
1148/** Permission fault, level 2. */
1149#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL2 14
1150/** Permission fault, level 3. */
1151#define ARMV8_EC_ISS_DATA_ABRT_DFSC_PERMISSION_FAULT_LVL3 15
1152/** Synchronous External abort, not a translation table walk or hardware update of translation table. */
1153#define ARMV8_EC_ISS_DATA_ABRT_DFSC_SYNC_EXTERNAL 16
1154/** FEAT_MTE2 - Synchronous Tag Check Fault. */
1155#define ARMV8_EC_ISS_DATA_ABRT_DFSC_MTE2_SYNC_TAG_CHK_FAULT 17
1156/** @todo Do the rest (lazy developer). */
1157/** @} */
1158
1159
1160/** @name SAS encoding.
1161 * @{ */
1162/** Byte access. */
1163#define ARMV8_EC_ISS_DATA_ABRT_SAS_BYTE 0
1164/** Halfword access (uint16_t). */
1165#define ARMV8_EC_ISS_DATA_ABRT_SAS_HALFWORD 1
1166/** Word access (uint32_t). */
1167#define ARMV8_EC_ISS_DATA_ABRT_SAS_WORD 2
1168/** Doubleword access (uint64_t). */
1169#define ARMV8_EC_ISS_DATA_ABRT_SAS_DWORD 3
1170/** @} */
1171
1172
1173/** @name ISS encoding for trapped MSR, MRS or System instruction exceptions.
1174 * @{ */
1175/** Bit 0 - Direction flag. */
1176#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION RT_BIT_32(0)
1177#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION_IS_READ(a_Iss) RT_BOOL((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_DIRECTION)
1178/** Bit 1 - 4 - CRm value from the instruction. */
1179#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM ( RT_BIT_32(1) | RT_BIT_32(2) | RT_BIT_32(3) \
1180 | RT_BIT_32(4))
1181#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRM) >> 1)
1182/** Bit 5 - 9 - Rt value from the instruction. */
1183#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT ( RT_BIT_32(5) | RT_BIT_32(6) | RT_BIT_32(7) \
1184 | RT_BIT_32(8) | RT_BIT_32(9))
1185#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RT) >> 5)
1186/** Bit 10 - 13 - CRn value from the instruction. */
1187#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN ( RT_BIT_32(10) | RT_BIT_32(11) | RT_BIT_32(12) \
1188 | RT_BIT_32(13))
1189#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_CRN) >> 10)
1190/** Bit 14 - 16 - Op2 value from the instruction. */
1191#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1 (RT_BIT_32(14) | RT_BIT_32(15) | RT_BIT_32(16))
1192#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP1) >> 14)
1193/** Bit 17 - 19 - Op2 value from the instruction. */
1194#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2 (RT_BIT_32(17) | RT_BIT_32(18) | RT_BIT_32(19))
1195#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP2) >> 17)
1196/** Bit 20 - 21 - Op0 value from the instruction. */
1197#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0 (RT_BIT_32(20) | RT_BIT_32(21))
1198#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0_GET(a_Iss) (((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_OP0) >> 20)
1199/** Bit 22 - 24 - Reserved. */
1200#define ARMV8_EC_ISS_AARCH64_TRAPPED_SYS_INSN_RSVD (RT_BIT_32(22) | RT_BIT_32(23) | RT_BIT_32(24))
1201/** @} */
1202
1203
1204/** @name ISS encoding for trapped HVC instruction exceptions.
1205 * @{ */
1206/** Bit 0 - 15 - imm16 value of the instruction. */
1207#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM (UINT16_C(0xffff))
1208#define ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM_GET(a_Iss) ((a_Iss) & ARMV8_EC_ISS_AARCH64_TRAPPED_HVC_INSN_IMM)
1209/** @} */
1210
1211
1212/** @name TCR_EL1 - Translation Control Register (EL1)
1213 * @{
1214 */
1215/** Bit 0 - 5 - Size offset of the memory region addressed by TTBR0_EL1 (2^(64-T0SZ)). */
1216#define ARMV8_TCR_EL1_AARCH64_T0SZ ( RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) \
1217 | RT_BIT_64(3) | RT_BIT_64(4) | RT_BIT_64(5))
1218#define ARMV8_TCR_EL1_AARCH64_T0SZ_GET(a_Tcr) ((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T1SZ)
1219/** Bit 7 - Translation table walk disable for translations using TTBR0_EL1. */
1220#define ARMV8_TCR_EL1_AARCH64_EPD0 RT_BIT_64(7)
1221#define ARMV8_TCR_EL1_AARCH64_EPD0_BIT 7
1222/** Bit 8 - 9 - Inner cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
1223#define ARMV8_TCR_EL1_AARCH64_IRGN0 (RT_BIT_64(8) | RT_BIT_64(9))
1224#define ARMV8_TCR_EL1_AARCH64_IRGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN0) >> 8)
1225/** Non cacheable. */
1226# define ARMV8_TCR_EL1_AARCH64_IRGN0_NON_CACHEABLE 0
1227/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1228# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_WA 1
1229/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1230# define ARMV8_TCR_EL1_AARCH64_IRGN0_WT_RA_NWA 2
1231/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1232# define ARMV8_TCR_EL1_AARCH64_IRGN0_WB_RA_NWA 3
1233/** Bit 27 - 26 - Outer cacheability attribute for memory associated with translation table walks using TTBR0_EL1. */
1234#define ARMV8_TCR_EL1_AARCH64_ORGN0 (RT_BIT_64(10) | RT_BIT_64(11))
1235#define ARMV8_TCR_EL1_AARCH64_ORGN0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN0) >> 10)
1236/** Non cacheable. */
1237# define ARMV8_TCR_EL1_AARCH64_ORGN0_NON_CACHEABLE 0
1238/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1239# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_WA 1
1240/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1241# define ARMV8_TCR_EL1_AARCH64_ORGN0_WT_RA_NWA 2
1242/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1243# define ARMV8_TCR_EL1_AARCH64_ORGN0_WB_RA_NWA 3
1244/** Bit 12 - 13 - Shareability attribute memory associated with translation table walks using TTBR0_EL1. */
1245#define ARMV8_TCR_EL1_AARCH64_SH0 (RT_BIT_64(12) | RT_BIT_64(13))
1246#define ARMV8_TCR_EL1_AARCH64_SH0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH0) >> 12)
1247/** Non shareable. */
1248# define ARMV8_TCR_EL1_AARCH64_SH0_NON_SHAREABLE 0
1249/** Invalid value. */
1250# define ARMV8_TCR_EL1_AARCH64_SH0_INVALID 1
1251/** Outer Shareable. */
1252# define ARMV8_TCR_EL1_AARCH64_SH0_OUTER_SHAREABLE 2
1253/** Inner Shareable. */
1254# define ARMV8_TCR_EL1_AARCH64_SH0_INNER_SHAREABLE 3
1255/** Bit 14 - 15 - Translation Granule Size for TTBR0_EL1. */
1256#define ARMV8_TCR_EL1_AARCH64_TG0 (RT_BIT_64(14) | RT_BIT_64(15))
1257#define ARMV8_TCR_EL1_AARCH64_TG0_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG0) >> 14)
1258/** Invalid granule size. */
1259# define ARMV8_TCR_EL1_AARCH64_TG0_INVALID 0
1260/** 16KiB granule size. */
1261# define ARMV8_TCR_EL1_AARCH64_TG0_16KB 1
1262/** 4KiB granule size. */
1263# define ARMV8_TCR_EL1_AARCH64_TG0_4KB 2
1264/** 64KiB granule size. */
1265# define ARMV8_TCR_EL1_AARCH64_TG0_64KB 3
1266/** Bit 16 - 21 - Size offset of the memory region addressed by TTBR1_EL1 (2^(64-T1SZ)). */
1267#define ARMV8_TCR_EL1_AARCH64_T1SZ ( RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) \
1268 | RT_BIT_64(19) | RT_BIT_64(20) | RT_BIT_64(21))
1269#define ARMV8_TCR_EL1_AARCH64_T1SZ_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_T1SZ) >> 16)
1270/** Bit 22 - Selects whether TTBR0_EL1 (0) or TTBR1_EL1 (1) defines the ASID. */
1271#define ARMV8_TCR_EL1_AARCH64_A1 RT_BIT_64(22)
1272#define ARMV8_TCR_EL1_AARCH64_A1_BIT 22
1273/** Bit 23 - Translation table walk disable for translations using TTBR1_EL1. */
1274#define ARMV8_TCR_EL1_AARCH64_EPD1 RT_BIT_64(23)
1275#define ARMV8_TCR_EL1_AARCH64_EPD1_BIT 23
1276/** Bit 24 - 25 - Inner cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
1277#define ARMV8_TCR_EL1_AARCH64_IRGN1 (RT_BIT_64(24) | RT_BIT_64(25))
1278#define ARMV8_TCR_EL1_AARCH64_IRGN1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IRGN1) >> 26)
1279/** Non cacheable. */
1280# define ARMV8_TCR_EL1_AARCH64_IRGN1_NON_CACHEABLE 0
1281/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1282# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_WA 1
1283/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1284# define ARMV8_TCR_EL1_AARCH64_IRGN1_WT_RA_NWA 2
1285/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1286# define ARMV8_TCR_EL1_AARCH64_IRGN1_WB_RA_NWA 3
1287/** Bit 27 - 26 - Outer cacheability attribute for memory associated with translation table walks using TTBR1_EL1. */
1288#define ARMV8_TCR_EL1_AARCH64_ORGN1 (RT_BIT_64(26) | RT_BIT_64(27))
1289#define ARMV8_TCR_EL1_AARCH64_ORGN1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_ORGN1) >> 26)
1290/** Non cacheable. */
1291# define ARMV8_TCR_EL1_AARCH64_ORGN1_NON_CACHEABLE 0
1292/** Write-Back, Read-Allocate, Write-Allocate Cacheable. */
1293# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_WA 1
1294/** Write-Through, Read-Allocate, No Write-Allocate Cacheable. */
1295# define ARMV8_TCR_EL1_AARCH64_ORGN1_WT_RA_NWA 2
1296/** Write-Back, Read-Allocate, No Write-Allocate Cacheable. */
1297# define ARMV8_TCR_EL1_AARCH64_ORGN1_WB_RA_NWA 3
1298/** Bit 28 - 29 - Shareability attribute memory associated with translation table walks using TTBR1_EL1. */
1299#define ARMV8_TCR_EL1_AARCH64_SH1 (RT_BIT_64(28) | RT_BIT_64(29))
1300#define ARMV8_TCR_EL1_AARCH64_SH1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_SH1) >> 28)
1301/** Non shareable. */
1302# define ARMV8_TCR_EL1_AARCH64_SH1_NON_SHAREABLE 0
1303/** Invalid value. */
1304# define ARMV8_TCR_EL1_AARCH64_SH1_INVALID 1
1305/** Outer Shareable. */
1306# define ARMV8_TCR_EL1_AARCH64_SH1_OUTER_SHAREABLE 2
1307/** Inner Shareable. */
1308# define ARMV8_TCR_EL1_AARCH64_SH1_INNER_SHAREABLE 3
1309/** Bit 30 - 31 - Translation Granule Size for TTBR1_EL1. */
1310#define ARMV8_TCR_EL1_AARCH64_TG1 (RT_BIT_64(30) | RT_BIT_64(31))
1311#define ARMV8_TCR_EL1_AARCH64_TG1_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_TG1) >> 30)
1312/** Invalid granule size. */
1313# define ARMV8_TCR_EL1_AARCH64_TG1_INVALID 0
1314/** 16KiB granule size. */
1315# define ARMV8_TCR_EL1_AARCH64_TG1_16KB 1
1316/** 4KiB granule size. */
1317# define ARMV8_TCR_EL1_AARCH64_TG1_4KB 2
1318/** 64KiB granule size. */
1319# define ARMV8_TCR_EL1_AARCH64_TG1_64KB 3
1320/** Bit 32 - 34 - Intermediate Physical Address Size. */
1321#define ARMV8_TCR_EL1_AARCH64_IPS (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34))
1322#define ARMV8_TCR_EL1_AARCH64_IPS_GET(a_Tcr) (((a_Tcr) & ARMV8_TCR_EL1_AARCH64_IPS) >> 32)
1323/** IPA - 32 bits, 4GiB. */
1324# define ARMV8_TCR_EL1_AARCH64_IPS_32BITS 0
1325/** IPA - 36 bits, 64GiB. */
1326# define ARMV8_TCR_EL1_AARCH64_IPS_36BITS 1
1327/** IPA - 40 bits, 1TiB. */
1328# define ARMV8_TCR_EL1_AARCH64_IPS_40BITS 2
1329/** IPA - 42 bits, 4TiB. */
1330# define ARMV8_TCR_EL1_AARCH64_IPS_42BITS 3
1331/** IPA - 44 bits, 16TiB. */
1332# define ARMV8_TCR_EL1_AARCH64_IPS_44BITS 4
1333/** IPA - 48 bits, 256TiB. */
1334# define ARMV8_TCR_EL1_AARCH64_IPS_48BITS 5
1335/** IPA - 52 bits, 4PiB. */
1336# define ARMV8_TCR_EL1_AARCH64_IPS_52BITS 6
1337/** Bit 36 - ASID Size (0 - 8 bit, 1 - 16 bit). */
1338#define ARMV8_TCR_EL1_AARCH64_AS RT_BIT_64(36)
1339#define ARMV8_TCR_EL1_AARCH64_AS_BIT 36
1340/** Bit 37 - Top Byte Ignore for translations from TTBR0_EL1. */
1341#define ARMV8_TCR_EL1_AARCH64_TBI0 RT_BIT_64(37)
1342#define ARMV8_TCR_EL1_AARCH64_TBI0_BIT 37
1343/** Bit 38 - Top Byte Ignore for translations from TTBR1_EL1. */
1344#define ARMV8_TCR_EL1_AARCH64_TBI1 RT_BIT_64(38)
1345#define ARMV8_TCR_EL1_AARCH64_TBI1_BIT 38
1346/** Bit 39 - Hardware Access flag update in stage 1 translations from EL0 and EL1. */
1347#define ARMV8_TCR_EL1_AARCH64_HA RT_BIT_64(39)
1348#define ARMV8_TCR_EL1_AARCH64_HA_BIT 39
1349/** Bit 40 - Hardware management of dirty state in stage 1 translations from EL0 and EL1. */
1350#define ARMV8_TCR_EL1_AARCH64_HD RT_BIT_64(40)
1351#define ARMV8_TCR_EL1_AARCH64_HD_BIT 40
1352/** Bit 41 - Hierarchical Permission Disables for TTBR0_EL1. */
1353#define ARMV8_TCR_EL1_AARCH64_HPD0 RT_BIT_64(41)
1354#define ARMV8_TCR_EL1_AARCH64_HPD0_BIT 41
1355/** Bit 42 - Hierarchical Permission Disables for TTBR1_EL1. */
1356#define ARMV8_TCR_EL1_AARCH64_HPD1 RT_BIT_64(42)
1357#define ARMV8_TCR_EL1_AARCH64_HPD1_BIT 42
1358/** Bit 43 - Bit[59] Hardware Use for translations using TTBR0_EL1. */
1359#define ARMV8_TCR_EL1_AARCH64_HWU059 RT_BIT_64(43)
1360#define ARMV8_TCR_EL1_AARCH64_HWU059_BIT 43
1361/** Bit 44 - Bit[60] Hardware Use for translations using TTBR0_EL1. */
1362#define ARMV8_TCR_EL1_AARCH64_HWU060 RT_BIT_64(44)
1363#define ARMV8_TCR_EL1_AARCH64_HWU060_BIT 44
1364/** Bit 46 - Bit[61] Hardware Use for translations using TTBR0_EL1. */
1365#define ARMV8_TCR_EL1_AARCH64_HWU061 RT_BIT_64(45)
1366#define ARMV8_TCR_EL1_AARCH64_HWU061_BIT 45
1367/** Bit 46 - Bit[62] Hardware Use for translations using TTBR0_EL1. */
1368#define ARMV8_TCR_EL1_AARCH64_HWU062 RT_BIT_64(46)
1369#define ARMV8_TCR_EL1_AARCH64_HWU062_BIT 46
1370/** Bit 47 - Bit[59] Hardware Use for translations using TTBR1_EL1. */
1371#define ARMV8_TCR_EL1_AARCH64_HWU159 RT_BIT_64(47)
1372#define ARMV8_TCR_EL1_AARCH64_HWU159_BIT 47
1373/** Bit 48 - Bit[60] Hardware Use for translations using TTBR1_EL1. */
1374#define ARMV8_TCR_EL1_AARCH64_HWU160 RT_BIT_64(48)
1375#define ARMV8_TCR_EL1_AARCH64_HWU160_BIT 48
1376/** Bit 49 - Bit[61] Hardware Use for translations using TTBR1_EL1. */
1377#define ARMV8_TCR_EL1_AARCH64_HWU161 RT_BIT_64(49)
1378#define ARMV8_TCR_EL1_AARCH64_HWU161_BIT 49
1379/** Bit 50 - Bit[62] Hardware Use for translations using TTBR1_EL1. */
1380#define ARMV8_TCR_EL1_AARCH64_HWU162 RT_BIT_64(50)
1381#define ARMV8_TCR_EL1_AARCH64_HWU162_BIT 50
1382/** Bit 51 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR0_EL1. */
1383#define ARMV8_TCR_EL1_AARCH64_TBID0 RT_BIT_64(51)
1384#define ARMV8_TCR_EL1_AARCH64_TBID0_BIT 51
1385/** Bit 52 - Control the use of the top byte of instruction addresses for address matching for translations using TTBR1_EL1. */
1386#define ARMV8_TCR_EL1_AARCH64_TBID1 RT_BIT_64(52)
1387#define ARMV8_TCR_EL1_AARCH64_TBID1_BIT 52
1388/** Bit 53 - Non fault translation table walk disable for stage 1 translations using TTBR0_EL1. */
1389#define ARMV8_TCR_EL1_AARCH64_NFD0 RT_BIT_64(53)
1390#define ARMV8_TCR_EL1_AARCH64_NFD0_BIT 53
1391/** Bit 54 - Non fault translation table walk disable for stage 1 translations using TTBR1_EL1. */
1392#define ARMV8_TCR_EL1_AARCH64_NFD1 RT_BIT_64(54)
1393#define ARMV8_TCR_EL1_AARCH64_NFD1_BIT 54
1394/** Bit 55 - Faulting Control for Unprivileged access to any address translated by TTBR0_EL1. */
1395#define ARMV8_TCR_EL1_AARCH64_E0PD0 RT_BIT_64(55)
1396#define ARMV8_TCR_EL1_AARCH64_E0PD0_BIT 55
1397/** Bit 56 - Faulting Control for Unprivileged access to any address translated by TTBR1_EL1. */
1398#define ARMV8_TCR_EL1_AARCH64_E0PD1 RT_BIT_64(56)
1399#define ARMV8_TCR_EL1_AARCH64_E0PD1_BIT 56
1400/** Bit 57 - TCMA0 */
1401#define ARMV8_TCR_EL1_AARCH64_TCMA0 RT_BIT_64(57)
1402#define ARMV8_TCR_EL1_AARCH64_TCMA0_BIT 57
1403/** Bit 58 - TCMA1 */
1404#define ARMV8_TCR_EL1_AARCH64_TCMA1 RT_BIT_64(58)
1405#define ARMV8_TCR_EL1_AARCH64_TCMA1_BIT 58
1406/** Bit 59 - Data Sharing(?). */
1407#define ARMV8_TCR_EL1_AARCH64_DS RT_BIT_64(59)
1408#define ARMV8_TCR_EL1_AARCH64_DS_BIT 59
1409/** @} */
1410
1411
1412/** @name TTBR<0,1>_EL1 - Translation Table Base Register <0,1> (EL1)
1413 * @{
1414 */
1415/** Bit 0 - Common not Private (FEAT_TTCNP). */
1416#define ARMV8_TTBR_EL1_AARCH64_CNP RT_BIT_64(0)
1417#define ARMV8_TTBR_EL1_AARCH64_CNP_BIT 0
1418/** Bit 1 - 47 - Translation table base address. */
1419#define ARMV8_TTBR_EL1_AARCH64_BADDR UINT64_C(0x0000fffffffffffe)
1420#define ARMV8_TTBR_EL1_AARCH64_BADDR_GET(a_Ttbr) ((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_BADDR)
1421/** Bit 48 - 63 - ASID. */
1422#define ARMV8_TTBR_EL1_AARCH64_ASID UINT64_C(0xffff000000000000)
1423#define ARMV8_TTBR_EL1_AARCH64_ASID_GET(a_Ttbr) (((a_Ttbr) & ARMV8_TTBR_EL1_AARCH64_ASID) >> 48)
1424/** @} */
1425
1426
1427/** @name MDSCR_EL1 - MOnitor Debug System Control Register (EL1).
1428 * @{ */
1429/** Bit 0 - SS - Software step control bit. */
1430#define ARMV8_MDSCR_EL1_AARCH64_SS RT_BIT_64(0)
1431#define ARMV8_MDSCR_EL1_AARCH64_SS_BIT 0
1432/** @} */
1433
1434
1435/** @name ICC_PMR_EL1 - Interrupt Controller Interrupt Priority Mask Register
1436 * @{ */
1437/** Bit 0 - 7 - Priority - The priority mask level for the CPU interface. */
1438#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY UINT64_C(0xff)
1439#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_GET(a_Pmr) ((a_Pmr) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1440#define ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY_SET(a_Prio) ((a_Prio) & ARMV8_ICC_PMR_EL1_AARCH64_PRIORITY)
1441/** @} */
1442
1443
1444/** @name ICC_BPR0_EL1 - The group priority for Group 0 interrupts.
1445 * @{ */
1446/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1447#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1448#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_GET(a_Bpr0) ((a_Bpr0) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1449#define ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR0_EL1_AARCH64_BINARYPOINT)
1450/** @} */
1451
1452
1453/** @name ICC_BPR1_EL1 - The group priority for Group 1 interrupts.
1454 * @{ */
1455/** Bit 0 - 2 - BinaryPoint - Controls how the 8-bit interrupt priority field is split into a group priority and subpriority field. */
1456#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2))
1457#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_GET(a_Bpr1) ((a_Bpr1) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1458#define ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT_SET(a_BinaryPt) ((a_BinaryPt) & ARMV8_ICC_BPR1_EL1_AARCH64_BINARYPOINT)
1459/** @} */
1460
1461
1462/** @name ICC_CTLR_EL1 - Interrupt Controller Control Register (EL1)
1463 * @{ */
1464/** Bit 0 - Common Binary Pointer Register - RW. */
1465#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR RT_BIT_64(0)
1466#define ARMV8_ICC_CTLR_EL1_AARCH64_CBPR_BIT 0
1467/** Bit 1 - EOI mode for current security state, when set ICC_DIR_EL1 provides interrupt deactivation functionality - RW. */
1468#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE RT_BIT_64(1)
1469#define ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE_BIT 1
1470/** Bit 7 - Priority Mask Hint Enable - RW (under circumstances). */
1471#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE RT_BIT_64(7)
1472#define ARMV8_ICC_CTLR_EL1_AARCH64_PMHE_BIT 7
1473/** Bit 8 - 10 - Priority bits - RO. */
1474#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10))
1475#define ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS_SET(a_PriBits) (((a_PriBits) << 8) & ARMV8_ICC_CTLR_EL1_AARCH64_PRIBITS)
1476/** Bit 11 - 13 - Interrupt identifier bits - RO. */
1477#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS (RT_BIT_64(11) | RT_BIT_64(12) | RT_BIT_64(13))
1478#define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_SET(a_IdBits) (((a_IdBits) << 11) & ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS)
1479/** INTIDS are 16-bit wide. */
1480# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_16BITS 0
1481/** INTIDS are 24-bit wide. */
1482# define ARMV8_ICC_CTLR_EL1_AARCH64_IDBITS_24BITS 1
1483/** Bit 14 - SEI Supported - RO. */
1484#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS RT_BIT_64(14)
1485#define ARMV8_ICC_CTLR_EL1_AARCH64_SEIS_BIT 14
1486/** Bit 15 - Affinity 3 Valid - RO. */
1487#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V RT_BIT_64(15)
1488#define ARMV8_ICC_CTLR_EL1_AARCH64_A3V_BIT 15
1489/** Bit 18 - Range Selector Support - RO. */
1490#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS RT_BIT_64(18)
1491#define ARMV8_ICC_CTLR_EL1_AARCH64_RSS_BIT 18
1492/** Bit 19 - Extended INTID range supported - RO. */
1493#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE RT_BIT_64(19)
1494#define ARMV8_ICC_CTLR_EL1_AARCH64_EXTRANGE_BIT 19
1495/** All RW bits. */
1496#define ARMV8_ICC_CTLR_EL1_RW (ARMV8_ICC_CTLR_EL1_AARCH64_CBPR | ARMV8_ICC_CTLR_EL1_AARCH64_EOIMODE | ARMV8_ICC_CTLR_EL1_AARCH64_PMHE)
1497/** All RO bits (including Res0). */
1498#define ARMV8_ICC_CTLR_EL1_RO ~ARMV8_ICC_CTLR_EL1_RW
1499/** @} */
1500
1501
1502/** @name ICC_IGRPEN0_EL1 - Interrupt Controller Interrupt Group 0 Enable Register (EL1)
1503 * @{ */
1504/** Bit 0 - Enables Group 0 interrupts for the current Security state. */
1505#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE RT_BIT_64(0)
1506#define ARMV8_ICC_IGRPEN0_EL1_AARCH64_ENABLE_BIT 0
1507/** @} */
1508
1509
1510/** @name ICC_IGRPEN1_EL1 - Interrupt Controller Interrupt Group 1 Enable Register (EL1)
1511 * @{ */
1512/** Bit 0 - Enables Group 1 interrupts for the current Security state. */
1513#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE RT_BIT_64(0)
1514#define ARMV8_ICC_IGRPEN1_EL1_AARCH64_ENABLE_BIT 0
1515/** @} */
1516
1517
1518/** @name ICC_SGI1R_EL1 - Interrupt Controller Software Generated Interrupt Group 1 Register (EL1) - WO
1519 * @{ */
1520/** Bit 0 - 15 - Target List, the set of PEs for which SGI interrupts will be generated. */
1521#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST (UINT64_C(0x000000000000ffff))
1522#define ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST_GET(a_Sgi1R) ((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_TARGET_LIST)
1523/** Bit 16 - 23 - The affinity 1 of the affinity path of the cluster for which SGI interrupts will be generated. */
1524#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1 (UINT64_C(0x00000000007f0000))
1525#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF1) >> 16)
1526/** Bit 24 - 27 - The INTID of the SGI. */
1527#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1528#define ARMV8_ICC_SGI1R_EL1_AARCH64_INTID_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_INTID) >> 24)
1529/* Bit 28 - 31 - Reserved. */
1530/** Bit 32 - 39 - The affinity 2 of the affinity path of the cluster for which SGI interrupts will be generated. */
1531#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2 (UINT64_C(0x000000ff00000000))
1532#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF2) >> 32)
1533/** Bit 40 - Interrupt Routing Mode - 1 means interrupts to all PEs in the system excluding the generating PE. */
1534#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM RT_BIT_64(40)
1535#define ARMV8_ICC_SGI1R_EL1_AARCH64_IRM_BIT 40
1536/* Bit 41 - 43 - Reserved. */
1537/** Bit 44 - 47 - Range selector. */
1538#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1539#define ARMV8_ICC_SGI1R_EL1_AARCH64_RS_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_RS) >> 44)
1540/** Bit 48 - 55 - The affinity 3 of the affinity path of the cluster for which SGI interrupts will be generated. */
1541#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3 (UINT64_C(0x00ff000000000000))
1542#define ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3_GET(a_Sgi1R) (((a_Sgi1R) & ARMV8_ICC_SGI1R_EL1_AARCH64_AFF3) >> 48)
1543/* Bit 56 - 63 - Reserved. */
1544/** @} */
1545
1546
1547/** @name CNTV_CTL_EL0 - Counter-timer Virtual Timer Control register.
1548 * @{ */
1549/** Bit 0 - Enables the timer. */
1550#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE RT_BIT_64(0)
1551#define ARMV8_CNTV_CTL_EL0_AARCH64_ENABLE_BIT 0
1552/** Bit 1 - Timer interrupt mask bit. */
1553#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK RT_BIT_64(1)
1554#define ARMV8_CNTV_CTL_EL0_AARCH64_IMASK_BIT 1
1555/** Bit 2 - Timer status bit. */
1556#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS RT_BIT_64(2)
1557#define ARMV8_CNTV_CTL_EL0_AARCH64_ISTATUS_BIT 2
1558/** @} */
1559
1560
1561/** @name OSLAR_EL1 - OS Lock Access Register.
1562 * @{ */
1563/** Bit 0 - The OS Lock status bit. */
1564#define ARMV8_OSLAR_EL1_AARCH64_OSLK RT_BIT_64(0)
1565#define ARMV8_OSLAR_EL1_AARCH64_OSLK_BIT 0
1566/** @} */
1567
1568
1569/** @name OSLSR_EL1 - OS Lock Status Register.
1570 * @{ */
1571/** Bit 0 - OSLM[0] Bit 0 of OS Lock model implemented. */
1572#define ARMV8_OSLSR_EL1_AARCH64_OSLM0 RT_BIT_64(0)
1573#define ARMV8_OSLSR_EL1_AARCH64_OSLM0_BIT 0
1574/** Bit 1 - The OS Lock status bit. */
1575#define ARMV8_OSLSR_EL1_AARCH64_OSLK RT_BIT_64(1)
1576#define ARMV8_OSLSR_EL1_AARCH64_OSLK_BIT 1
1577/** Bit 2 - Not 32-bit access. */
1578#define ARMV8_OSLSR_EL1_AARCH64_NTT RT_BIT_64(2)
1579#define ARMV8_OSLSR_EL1_AARCH64_NTT_BIT 2
1580/** Bit 0 - OSLM[1] Bit 1 of OS Lock model implemented. */
1581#define ARMV8_OSLSR_EL1_AARCH64_OSLM1 RT_BIT_64(3)
1582#define ARMV8_OSLSR_EL1_AARCH64_OSLM1_BIT 3
1583/** @} */
1584
1585
1586/** @name ID_AA64ISAR0_EL1 - AArch64 Instruction Set Attribute Register 0.
1587 * @{ */
1588/* Bit 0 - 3 - Reserved. */
1589/** Bit 4 - 7 - Indicates support for AES instructions in AArch64 state. */
1590#define ARMV8_ID_AA64ISAR0_EL1_AES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1591#define ARMV8_ID_AA64ISAR0_EL1_AES_SHIFT 4
1592/** No AES instructions implemented. */
1593# define ARMV8_ID_AA64ISAR0_EL1_AES_NOT_IMPL 0
1594/** AES, AESD, AESMC and AESIMC instructions implemented (FEAT_AES). */
1595# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED 1
1596/** AES, AESD, AESMC and AESIMC instructions implemented and PMULL and PMULL2 instructions operating on 64bit source elements (FEAT_PMULL). */
1597# define ARMV8_ID_AA64ISAR0_EL1_AES_SUPPORTED_PMULL 2
1598/** Bit 8 - 11 - Indicates support for SHA1 instructions in AArch64 state. */
1599#define ARMV8_ID_AA64ISAR0_EL1_SHA1_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1600#define ARMV8_ID_AA64ISAR0_EL1_SHA1_SHIFT 8
1601/** No SHA1 instructions implemented. */
1602# define ARMV8_ID_AA64ISAR0_EL1_SHA1_NOT_IMPL 0
1603/** SHA1C, SHA1P, SHA1M, SHA1H, SHA1SU0 and SHA1SU1 instructions implemented (FEAT_SHA1). */
1604# define ARMV8_ID_AA64ISAR0_EL1_SHA1_SUPPORTED 1
1605/** Bit 12 - 15 - Indicates support for SHA2 instructions in AArch64 state. */
1606#define ARMV8_ID_AA64ISAR0_EL1_SHA2_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1607#define ARMV8_ID_AA64ISAR0_EL1_SHA2_SHIFT 12
1608/** No SHA2 instructions implemented. */
1609# define ARMV8_ID_AA64ISAR0_EL1_SHA2_NOT_IMPL 0
1610/** SHA256 instructions implemented (FEAT_SHA256). */
1611# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256 1
1612/** SHA256 and SHA512 instructions implemented (FEAT_SHA512). */
1613# define ARMV8_ID_AA64ISAR0_EL1_SHA2_SUPPORTED_SHA256_SHA512 2
1614/** Bit 16 - 19 - Indicates support for CRC32 instructions in AArch64 state. */
1615#define ARMV8_ID_AA64ISAR0_EL1_CRC32_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1616#define ARMV8_ID_AA64ISAR0_EL1_CRC32_SHIFT 16
1617/** No CRC32 instructions implemented. */
1618# define ARMV8_ID_AA64ISAR0_EL1_CRC32_NOT_IMPL 0
1619/** CRC32 instructions implemented (FEAT_CRC32). */
1620# define ARMV8_ID_AA64ISAR0_EL1_CRC32_SUPPORTED 1
1621/** Bit 20 - 23 - Indicates support for Atomic instructions in AArch64 state. */
1622#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1623#define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SHIFT 20
1624/** No Atomic instructions implemented. */
1625# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_NOT_IMPL 0
1626/** Atomic instructions implemented (FEAT_LSE). */
1627# define ARMV8_ID_AA64ISAR0_EL1_ATOMIC_SUPPORTED 2
1628/** Bit 24 - 27 - Indicates support for TME instructions. */
1629#define ARMV8_ID_AA64ISAR0_EL1_TME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1630#define ARMV8_ID_AA64ISAR0_EL1_TME_SHIFT 24
1631/** TME instructions are not implemented. */
1632# define ARMV8_ID_AA64ISAR0_EL1_TME_NOT_IMPL 0
1633/** TME instructions are implemented. */
1634# define ARMV8_ID_AA64ISAR0_EL1_TME_SUPPORTED 1
1635/** Bit 28 - 31 - Indicates support for SQRDMLAH and SQRDMLSH instructions in AArch64 state. */
1636#define ARMV8_ID_AA64ISAR0_EL1_RDM_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1637#define ARMV8_ID_AA64ISAR0_EL1_RDM_SHIFT 28
1638/** No RDMA instructions implemented. */
1639# define ARMV8_ID_AA64ISAR0_EL1_RDM_NOT_IMPL 0
1640/** SQRDMLAH and SQRDMLSH instructions implemented (FEAT_RDM). */
1641# define ARMV8_ID_AA64ISAR0_EL1_RDM_SUPPORTED 1
1642/** Bit 32 - 35 - Indicates support for SHA3 instructions in AArch64 state. */
1643#define ARMV8_ID_AA64ISAR0_EL1_SHA3_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1644#define ARMV8_ID_AA64ISAR0_EL1_SHA3_SHIFT 32
1645/** No SHA3 instructions implemented. */
1646# define ARMV8_ID_AA64ISAR0_EL1_SHA3_NOT_IMPL 0
1647/** EOR3, RAX1, XAR and BCAX instructions implemented (FEAT_SHA3). */
1648# define ARMV8_ID_AA64ISAR0_EL1_SHA3_SUPPORTED 1
1649/** Bit 36 - 39 - Indicates support for SM3 instructions in AArch64 state. */
1650#define ARMV8_ID_AA64ISAR0_EL1_SM3_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1651#define ARMV8_ID_AA64ISAR0_EL1_SM3_SHIFT 36
1652/** No SM3 instructions implemented. */
1653# define ARMV8_ID_AA64ISAR0_EL1_SM3_NOT_IMPL 0
1654/** SM3 instructions implemented (FEAT_SM3). */
1655# define ARMV8_ID_AA64ISAR0_EL1_SM3_SUPPORTED 1
1656/** Bit 40 - 43 - Indicates support for SM4 instructions in AArch64 state. */
1657#define ARMV8_ID_AA64ISAR0_EL1_SM4_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1658#define ARMV8_ID_AA64ISAR0_EL1_SM4_SHIFT 40
1659/** No SM4 instructions implemented. */
1660# define ARMV8_ID_AA64ISAR0_EL1_SM4_NOT_IMPL 0
1661/** SM4 instructions implemented (FEAT_SM4). */
1662# define ARMV8_ID_AA64ISAR0_EL1_SM4_SUPPORTED 1
1663/** Bit 44 - 47 - Indicates support for Dot Product instructions in AArch64 state. */
1664#define ARMV8_ID_AA64ISAR0_EL1_DP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1665#define ARMV8_ID_AA64ISAR0_EL1_DP_SHIFT 44
1666/** No Dot Product instructions implemented. */
1667# define ARMV8_ID_AA64ISAR0_EL1_DP_NOT_IMPL 0
1668/** UDOT and SDOT instructions implemented (FEAT_DotProd). */
1669# define ARMV8_ID_AA64ISAR0_EL1_DP_SUPPORTED 1
1670/** Bit 48 - 51 - Indicates support for FMLAL and FMLSL instructions. */
1671#define ARMV8_ID_AA64ISAR0_EL1_FHM_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1672#define ARMV8_ID_AA64ISAR0_EL1_FHM_SHIFT 48
1673/** FMLAL and FMLSL instructions are not implemented. */
1674# define ARMV8_ID_AA64ISAR0_EL1_FHM_NOT_IMPL 0
1675/** FMLAL and FMLSL instructions are implemented (FEAT_FHM). */
1676# define ARMV8_ID_AA64ISAR0_EL1_FHM_SUPPORTED 1
1677/** Bit 52 - 55 - Indicates support for flag manipulation instructions. */
1678#define ARMV8_ID_AA64ISAR0_EL1_TS_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1679#define ARMV8_ID_AA64ISAR0_EL1_TS_SHIFT 52
1680/** No flag manipulation instructions implemented. */
1681# define ARMV8_ID_AA64ISAR0_EL1_TS_NOT_IMPL 0
1682/** CFINV, RMIF, SETF16 and SETF8 instrutions are implemented (FEAT_FlagM). */
1683# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED 1
1684/** CFINV, RMIF, SETF16, SETF8, AXFLAG and XAFLAG instrutions are implemented (FEAT_FlagM2). */
1685# define ARMV8_ID_AA64ISAR0_EL1_TS_SUPPORTED_2 2
1686/** Bit 56 - 59 - Indicates support for Outer Shareable and TLB range maintenance instructions. */
1687#define ARMV8_ID_AA64ISAR0_EL1_TLB_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1688#define ARMV8_ID_AA64ISAR0_EL1_TLB_SHIFT 56
1689/** Outer Sahreable and TLB range maintenance instructions are not implemented. */
1690# define ARMV8_ID_AA64ISAR0_EL1_TLB_NOT_IMPL 0
1691/** Outer Shareable TLB maintenance instructions are implemented (FEAT_TLBIOS). */
1692# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED 1
1693/** Outer Shareable and TLB range maintenance instructions are implemented (FEAT_TLBIRANGE). */
1694# define ARMV8_ID_AA64ISAR0_EL1_TLB_SUPPORTED_RANGE 2
1695/** Bit 60 - 63 - Indicates support for Random Number instructons in AArch64 state. */
1696#define ARMV8_ID_AA64ISAR0_EL1_RNDR_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1697#define ARMV8_ID_AA64ISAR0_EL1_RNDR_SHIFT 60
1698/** No Random Number instructions implemented. */
1699# define ARMV8_ID_AA64ISAR0_EL1_RNDR_NOT_IMPL 0
1700/** RNDR and RDNRRS registers are implemented . */
1701# define ARMV8_ID_AA64ISAR0_EL1_RNDR_SUPPORTED 1
1702/** @} */
1703
1704
1705/** @name ID_AA64ISAR1_EL1 - AArch64 Instruction Set Attribute Register 0.
1706 * @{ */
1707/** Bit 0 - 3 - Indicates support for Data Persistence writeback instructions in AArch64 state. */
1708#define ARMV8_ID_AA64ISAR1_EL1_DPB_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1709#define ARMV8_ID_AA64ISAR1_EL1_DPB_SHIFT 0
1710/** DC CVAP not supported. */
1711# define ARMV8_ID_AA64ISAR1_EL1_DPB_NOT_IMPL 0
1712/** DC CVAP supported (FEAT_DPB). */
1713# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED 1
1714/** DC CVAP and DC CVADP supported (FEAT_DPB2). */
1715# define ARMV8_ID_AA64ISAR1_EL1_DPB_SUPPORTED_2 2
1716/** Bit 4 - 7 - Indicates whether QARMA5 algorithm is implemented in the PE for address authentication. */
1717#define ARMV8_ID_AA64ISAR1_EL1_APA_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1718#define ARMV8_ID_AA64ISAR1_EL1_APA_SHIFT 4
1719/** Address Authentication using the QARMA5 algorithm is not implemented. */
1720# define ARMV8_ID_AA64ISAR1_EL1_APA_NOT_IMPL 0
1721/** Address Authentication using the QARMA5 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA5). */
1722# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH 1
1723/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA5). */
1724# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_EPAC 2
1725/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA5). */
1726# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_PAUTH2 3
1727/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA5). */
1728# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPAC 4
1729/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA5). */
1730# define ARMV8_ID_AA64ISAR1_EL1_APA_SUPPORTED_FPACCOMBINE 5
1731/** Bit 8 - 11 - Indicates whether an implementation defined algorithm is implemented in the PE for address authentication. */
1732#define ARMV8_ID_AA64ISAR1_EL1_API_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1733#define ARMV8_ID_AA64ISAR1_EL1_API_SHIFT 8
1734/** Address Authentication using the QARMA5 algorithm is not implemented. */
1735# define ARMV8_ID_AA64ISAR1_EL1_API_NOT_IMPL 0
1736/** Address Authentication using the QARMA5 algorithm is implemented (FEAT_PAuth, FEAT_PACIMP). */
1737# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH 1
1738/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACIMP). */
1739# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_EPAC 2
1740/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACIMP). */
1741# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_PAUTH2 3
1742/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACIMP). */
1743# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPAC 4
1744/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACIMP). */
1745# define ARMV8_ID_AA64ISAR1_EL1_API_SUPPORTED_FPACCOMBINE 5
1746/** Bit 12 - 15 - Indicates support for JavaScript conversion from double precision floating values to integers in AArch64 state. */
1747#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1748#define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SHIFT 12
1749/** No FJCVTZS instruction implemented. */
1750# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_NOT_IMPL 0
1751/** FJCVTZS instruction implemented (FEAT_JSCVT). */
1752# define ARMV8_ID_AA64ISAR1_EL1_FJCVTZS_SUPPORTED 1
1753/** Bit 16 - 19 - Indicates support for CRC32 instructions in AArch64 state. */
1754#define ARMV8_ID_AA64ISAR1_EL1_FCMA_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1755#define ARMV8_ID_AA64ISAR1_EL1_FCMA_SHIFT 16
1756/** No FCMLA and FCADD instructions implemented. */
1757# define ARMV8_ID_AA64ISAR1_EL1_FCMA_NOT_IMPL 0
1758/** FCMLA and FCADD instructions implemented (FEAT_FCMA). */
1759# define ARMV8_ID_AA64ISAR1_EL1_FCMA_SUPPORTED 1
1760/** Bit 20 - 23 - Indicates support for weaker release consistency, RCpc, based model. */
1761#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1762#define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SHIFT 20
1763/** No RCpc instructions implemented. */
1764# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_NOT_IMPL 0
1765/** The no offset LDAPR, LDAPRB and LDAPRH instructions are implemented (FEAT_LRCPC). */
1766# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED 1
1767/** The no offset LDAPR, LDAPRB, LDAPRH, LDAPR and STLR instructions are implemented (FEAT_LRCPC2). */
1768# define ARMV8_ID_AA64ISAR1_EL1_LRCPC_SUPPORTED_2 2
1769/** Bit 24 - 27 - Indicates whether the QARMA5 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1770#define ARMV8_ID_AA64ISAR1_EL1_GPA_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1771#define ARMV8_ID_AA64ISAR1_EL1_GPA_SHIFT 24
1772/** Generic Authentication using the QARMA5 algorithm is not implemented. */
1773# define ARMV8_ID_AA64ISAR1_EL1_GPA_NOT_IMPL 0
1774/** Generic Authentication using the QARMA5 algorithm is implemented (FEAT_PACQARMA5). */
1775# define ARMV8_ID_AA64ISAR1_EL1_GPA_SUPPORTED 1
1776/** Bit 28 - 31 - Indicates whether an implementation defined algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1777#define ARMV8_ID_AA64ISAR1_EL1_GPI_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1778#define ARMV8_ID_AA64ISAR1_EL1_GPI_SHIFT 28
1779/** Generic Authentication using an implementation defined algorithm is not implemented. */
1780# define ARMV8_ID_AA64ISAR1_EL1_GPI_NOT_IMPL 0
1781/** Generic Authentication using an implementation defined algorithm is implemented (FEAT_PACIMP). */
1782# define ARMV8_ID_AA64ISAR1_EL1_GPI_SUPPORTED 1
1783/** Bit 32 - 35 - Indicates support for SHA3 instructions in AArch64 state. */
1784#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1785#define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SHIFT 32
1786/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are not implemented. */
1787# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_NOT_IMPL 0
1788/** FRINT32Z, FRINT32X, FRINT64Z and FRINT64X instructions are implemented (FEAT_FRINTTS). */
1789# define ARMV8_ID_AA64ISAR1_EL1_FRINTTS_SUPPORTED 1
1790/** Bit 36 - 39 - Indicates support for SB instructions in AArch64 state. */
1791#define ARMV8_ID_AA64ISAR1_EL1_SB_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1792#define ARMV8_ID_AA64ISAR1_EL1_SB_SHIFT 36
1793/** No SB instructions implemented. */
1794# define ARMV8_ID_AA64ISAR1_EL1_SB_NOT_IMPL 0
1795/** SB instructions implemented (FEAT_SB). */
1796# define ARMV8_ID_AA64ISAR1_EL1_SB_SUPPORTED 1
1797/** Bit 40 - 43 - Indicates support for prediction invalidation instructions in AArch64 state. */
1798#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1799#define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SHIFT 40
1800/** Prediction invalidation instructions are not implemented. */
1801# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_NOT_IMPL 0
1802/** Prediction invalidation instructions are implemented (FEAT_SPECRES). */
1803# define ARMV8_ID_AA64ISAR1_EL1_SPECRES_SUPPORTED 1
1804/** Bit 44 - 47 - Indicates support for Advanced SIMD and Floating-point BFloat16 instructions in AArch64 state. */
1805#define ARMV8_ID_AA64ISAR1_EL1_BF16_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
1806#define ARMV8_ID_AA64ISAR1_EL1_BF16_SHIFT 44
1807/** BFloat16 instructions are not implemented. */
1808# define ARMV8_ID_AA64ISAR1_EL1_BF16_NOT_IMPL 0
1809/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented (FEAT_BF16). */
1810# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_BF16 1
1811/** BFCVT, BFCVTN, BFCVTN2, BFDOT, BFMLALB, BFMLALT and BFMMLA instructions are implemented and FPCR.EBF is supported (FEAT_EBF16). */
1812# define ARMV8_ID_AA64ISAR1_EL1_BF16_SUPPORTED_EBF16 2
1813/** Bit 48 - 51 - Indicates support for Data Gathering Hint instructions. */
1814#define ARMV8_ID_AA64ISAR1_EL1_DGH_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
1815#define ARMV8_ID_AA64ISAR1_EL1_DGH_SHIFT 48
1816/** Data Gathering Hint instructions are not implemented. */
1817# define ARMV8_ID_AA64ISAR1_EL1_DGH_NOT_IMPL 0
1818/** Data Gathering Hint instructions are implemented (FEAT_DGH). */
1819# define ARMV8_ID_AA64ISAR1_EL1_DGH_SUPPORTED 1
1820/** Bit 52 - 55 - Indicates support for Advanced SIMD and Floating-point Int8 matri multiplication instructions. */
1821#define ARMV8_ID_AA64ISAR1_EL1_I8MM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
1822#define ARMV8_ID_AA64ISAR1_EL1_I8MM_SHIFT 52
1823/** No Int8 matrix multiplication instructions implemented. */
1824# define ARMV8_ID_AA64ISAR1_EL1_I8MM_NOT_IMPL 0
1825/** SMMLA, SUDOT, UMMLA, USMMLA and USDOT instrutions are implemented (FEAT_I8MM). */
1826# define ARMV8_ID_AA64ISAR1_EL1_I8MM_SUPPORTED 1
1827/** Bit 56 - 59 - Indicates support for the XS attribute, the TLBI and DSB insturctions with the nXS qualifier in AArch64 state. */
1828#define ARMV8_ID_AA64ISAR1_EL1_XS_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
1829#define ARMV8_ID_AA64ISAR1_EL1_XS_SHIFT 56
1830/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are not supported. */
1831# define ARMV8_ID_AA64ISAR1_EL1_XS_NOT_IMPL 0
1832/** The XS attribute and the TLBI and DSB instructions with the nXS qualifier are supported (FEAT_XS). */
1833# define ARMV8_ID_AA64ISAR1_EL1_XS_SUPPORTED 1
1834/** Bit 60 - 63 - Indicates support LD64B and ST64B* instructons and the ACCDATA_EL1 register. */
1835#define ARMV8_ID_AA64ISAR1_EL1_LS64_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
1836#define ARMV8_ID_AA64ISAR1_EL1_LS64_SHIFT 60
1837/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are not supported. */
1838# define ARMV8_ID_AA64ISAR1_EL1_LS64_NOT_IMPL 0
1839/** The LD64B and ST64B instructions are supported (FEAT_LS64). */
1840# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED 1
1841/** The LD64B, ST64B, ST64BV and associated traps are not supported (FEAT_LS64_V). */
1842# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_V 2
1843/** The LD64B, ST64B, ST64BV and ST64BV0 instructions, the ACCDATA_EL1 register and associated traps are supported (FEAT_LS64_ACCDATA). */
1844# define ARMV8_ID_AA64ISAR1_EL1_LS64_SUPPORTED_ACCDATA 3
1845/** @} */
1846
1847
1848/** @name ID_AA64ISAR2_EL1 - AArch64 Instruction Set Attribute Register 0.
1849 * @{ */
1850/** Bit 0 - 3 - Indicates support for WFET and WFIT instructions in AArch64 state. */
1851#define ARMV8_ID_AA64ISAR2_EL1_WFXT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1852#define ARMV8_ID_AA64ISAR2_EL1_WFXT_SHIFT 0
1853/** WFET and WFIT are not supported. */
1854# define ARMV8_ID_AA64ISAR2_EL1_WFXT_NOT_IMPL 0
1855/** WFET and WFIT are supported (FEAT_WFxT). */
1856# define ARMV8_ID_AA64ISAR2_EL1_WFXT_SUPPORTED 2
1857/** Bit 4 - 7 - Indicates support for 12 bits of mantissa in reciprocal and reciprocal square root instructions in AArch64 state, when FPCR.AH is 1. */
1858#define ARMV8_ID_AA64ISAR2_EL1_RPRES_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1859#define ARMV8_ID_AA64ISAR2_EL1_RPRES_SHIFT 4
1860/** Reciprocal and reciprocal square root estimates give 8 bits of mantissa when FPCR.AH is 1. */
1861# define ARMV8_ID_AA64ISAR2_EL1_RPRES_NOT_IMPL 0
1862/** Reciprocal and reciprocal square root estimates give 12 bits of mantissa when FPCR.AH is 1 (FEAT_RPRES). */
1863# define ARMV8_ID_AA64ISAR2_EL1_RPRES_SUPPORTED 1
1864/** Bit 8 - 11 - Indicates whether the QARMA3 algorithm is implemented in the PE for generic code authentication in AArch64 state. */
1865#define ARMV8_ID_AA64ISAR2_EL1_GPA3_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1866#define ARMV8_ID_AA64ISAR2_EL1_GPA3_SHIFT 8
1867/** Generic Authentication using the QARMA3 algorithm is not implemented. */
1868# define ARMV8_ID_AA64ISAR2_EL1_GPA3_NOT_IMPL 0
1869/** Generic Authentication using the QARMA3 algorithm is implemented (FEAT_PACQARMA3). */
1870# define ARMV8_ID_AA64ISAR2_EL1_GPA3_SUPPORTED 1
1871/** Bit 12 - 15 - Indicates whether QARMA3 algorithm is implemented in the PE for address authentication. */
1872#define ARMV8_ID_AA64ISAR2_EL1_APA3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1873#define ARMV8_ID_AA64ISAR2_EL1_APA3_SHIFT 12
1874/** Address Authentication using the QARMA3 algorithm is not implemented. */
1875# define ARMV8_ID_AA64ISAR2_EL1_APA3_NOT_IMPL 0
1876/** Address Authentication using the QARMA5 algorithm is implemented (FEAT_PAuth, FEAT_PACQARMA3). */
1877# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH 1
1878/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC is supported (FEAT_EPAC, FEAT_PACQARMA3). */
1879# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_EPAC 2
1880/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 is supported (FEAT_PAuth2, FEAT_PACQARMA3). */
1881# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_PAUTH2 3
1882/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and FPAC are supported (FEAT_FPAC, FEAT_PACQARMA3). */
1883# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPAC 4
1884/** Address Authentication using the QARMA5 algorithm is implemented and enhanced PAC 2 and combined FPAC are supported (FEAT_FPACCOMBINE, FEAT_PACQARMA3). */
1885# define ARMV8_ID_AA64ISAR2_EL1_APA3_SUPPORTED_FPACCOMBINE 5
1886/** Bit 16 - 19 - Indicates support for Memory Copy and Memory Set instructions in AArch64 state. */
1887#define ARMV8_ID_AA64ISAR2_EL1_MOPS_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1888#define ARMV8_ID_AA64ISAR2_EL1_MOPS_SHIFT 16
1889/** No Memory Copy and Memory Set instructions implemented. */
1890# define ARMV8_ID_AA64ISAR2_EL1_MOPS_NOT_IMPL 0
1891/** Memory Copy and Memory Set instructions implemented (FEAT_MOPS). */
1892# define ARMV8_ID_AA64ISAR2_EL1_MOPS_SUPPORTED 1
1893/** Bit 20 - 23 - Indicates support for weaker release consistency, RCpc, based model. */
1894#define ARMV8_ID_AA64ISAR2_EL1_BC_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1895#define ARMV8_ID_AA64ISAR2_EL1_BC_SHIFT 20
1896/** BC instruction is not implemented. */
1897# define ARMV8_ID_AA64ISAR2_EL1_BC_NOT_IMPL 0
1898/** BC instruction is implemented (FEAT_HBC). */
1899# define ARMV8_ID_AA64ISAR2_EL1_BC_SUPPORTED 1
1900/** Bit 24 - 27 - Indicates whether the ConstPACField() functions used as part of PAC additions returns FALSE or TRUE. */
1901#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1902#define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_SHIFT 24
1903/** ConstPACField() returns FALSE. */
1904# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_FALSE 0
1905/** ConstPACField() returns TRUE (FEAT_CONSTPACFIELD). */
1906# define ARMV8_ID_AA64ISAR2_EL1_PACFRAC_TRUE 1
1907/* Bit 28 - 63 - Reserved. */
1908/** @} */
1909
1910
1911/** @name ID_AA64PFR0_EL1 - AArch64 Processor Feature Register 0.
1912 * @{ */
1913/** Bit 0 - 3 - EL0 Exception level handling. */
1914#define ARMV8_ID_AA64PFR0_EL1_EL0_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
1915#define ARMV8_ID_AA64PFR0_EL1_EL0_SHIFT 0
1916/** EL0 can be executed in AArch64 state only. */
1917# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_ONLY 1
1918/** EL0 can be executed in AArch64 and AArch32 state. */
1919# define ARMV8_ID_AA64PFR0_EL1_EL0_AARCH64_AARCH32 2
1920/** Bit 4 - 7 - EL1 Exception level handling. */
1921#define ARMV8_ID_AA64PFR0_EL1_EL1_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
1922#define ARMV8_ID_AA64PFR0_EL1_EL1_SHIFT 4
1923/** EL1 can be executed in AArch64 state only. */
1924# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_ONLY 1
1925/** EL1 can be executed in AArch64 and AArch32 state. */
1926# define ARMV8_ID_AA64PFR0_EL1_EL1_AARCH64_AARCH32 2
1927/** Bit 8 - 11 - EL2 Exception level handling. */
1928#define ARMV8_ID_AA64PFR0_EL1_EL2_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
1929#define ARMV8_ID_AA64PFR0_EL1_EL2_SHIFT 8
1930/** EL2 is not implemented. */
1931# define ARMV8_ID_AA64PFR0_EL1_EL2_NOT_IMPL 0
1932/** EL2 can be executed in AArch64 state only. */
1933# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_ONLY 1
1934/** EL2 can be executed in AArch64 and AArch32 state. */
1935# define ARMV8_ID_AA64PFR0_EL1_EL2_AARCH64_AARCH32 2
1936/** Bit 12 - 15 - EL3 Exception level handling. */
1937#define ARMV8_ID_AA64PFR0_EL1_EL3_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
1938#define ARMV8_ID_AA64PFR0_EL1_EL3_SHIFT 12
1939/** EL3 is not implemented. */
1940# define ARMV8_ID_AA64PFR0_EL1_EL3_NOT_IMPL 0
1941/** EL3 can be executed in AArch64 state only. */
1942# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_ONLY 1
1943/** EL3 can be executed in AArch64 and AArch32 state. */
1944# define ARMV8_ID_AA64PFR0_EL1_EL3_AARCH64_AARCH32 2
1945/** Bit 16 - 19 - Floating-point support. */
1946#define ARMV8_ID_AA64PFR0_EL1_FP_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
1947#define ARMV8_ID_AA64PFR0_EL1_FP_SHIFT 16
1948/** Floating-point is implemented and support single and double precision. */
1949# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP 0
1950/** Floating-point is implemented and support single, double and half precision. */
1951# define ARMV8_ID_AA64PFR0_EL1_FP_IMPL_SP_DP_HP 1
1952/** Floating-point is not implemented. */
1953# define ARMV8_ID_AA64PFR0_EL1_FP_NOT_IMPL 0xf
1954/** Bit 20 - 23 - Advanced SIMD support. */
1955#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
1956#define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_SHIFT 20
1957/** Advanced SIMD is implemented and support single and double precision. */
1958# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP 0
1959/** Advanced SIMD is implemented and support single, double and half precision. */
1960# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_IMPL_SP_DP_HP 1
1961/** Advanced SIMD is not implemented. */
1962# define ARMV8_ID_AA64PFR0_EL1_ADVSIMD_NOT_IMPL 0xf
1963/** Bit 24 - 27 - System register GIC CPU interface support. */
1964#define ARMV8_ID_AA64PFR0_EL1_GIC_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
1965#define ARMV8_ID_AA64PFR0_EL1_GIC_SHIFT 24
1966/** GIC CPU interface system registers are not implemented. */
1967# define ARMV8_ID_AA64PFR0_EL1_GIC_NOT_IMPL 0
1968/** System register interface to versions 3.0 and 4.0 of the GIC CPU interface is supported. */
1969# define ARMV8_ID_AA64PFR0_EL1_GIC_V3_V4 1
1970/** System register interface to version 4.1 of the GIC CPU interface is supported. */
1971# define ARMV8_ID_AA64PFR0_EL1_GIC_V4_1 3
1972/** Bit 28 - 31 - RAS Extension version. */
1973#define ARMV8_ID_AA64PFR0_EL1_RAS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
1974#define ARMV8_ID_AA64PFR0_EL1_RAS_SHIFT 28
1975/** No RAS extension. */
1976# define ARMV8_ID_AA64PFR0_EL1_RAS_NOT_IMPL 0
1977/** RAS Extension implemented. */
1978# define ARMV8_ID_AA64PFR0_EL1_RAS_SUPPORTED 1
1979/** FEAT_RASv1p1 implemented. */
1980# define ARMV8_ID_AA64PFR0_EL1_RAS_V1P1 2
1981/** Bit 32 - 35 - Scalable Vector Extension (SVE) support. */
1982#define ARMV8_ID_AA64PFR0_EL1_SVE_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
1983#define ARMV8_ID_AA64PFR0_EL1_SVE_SHIFT 32
1984/** SVE is not supported. */
1985# define ARMV8_ID_AA64PFR0_EL1_SVE_NOT_IMPL 0
1986/** SVE is supported. */
1987# define ARMV8_ID_AA64PFR0_EL1_SVE_SUPPORTED 1
1988/** Bit 36 - 39 - Secure EL2 support. */
1989#define ARMV8_ID_AA64PFR0_EL1_SEL2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
1990#define ARMV8_ID_AA64PFR0_EL1_SEL2_SHIFT 36
1991/** Secure EL2 is not supported. */
1992# define ARMV8_ID_AA64PFR0_EL1_SEL2_NOT_IMPL 0
1993/** Secure EL2 is implemented. */
1994# define ARMV8_ID_AA64PFR0_EL1_SEL2_SUPPORTED 1
1995/** Bit 40 - 43 - MPAM support. */
1996#define ARMV8_ID_AA64PFR0_EL1_MPAM_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
1997#define ARMV8_ID_AA64PFR0_EL1_MPAM_SHIFT 40
1998/** MPAM extension major version number is 0. */
1999# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V0 0
2000/** MPAM extension major version number is 1. */
2001# define ARMV8_ID_AA64PFR0_EL1_MPAM_MAJOR_V1 1
2002/** Bit 44 - 47 - Activity Monitor Extension support. */
2003#define ARMV8_ID_AA64PFR0_EL1_AMU_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2004#define ARMV8_ID_AA64PFR0_EL1_AMU_SHIFT 44
2005/** Activity Monitor extension is not implemented. */
2006# define ARMV8_ID_AA64PFR0_EL1_AMU_NOT_IMPL 0
2007/** Activity Monitor extension is implemented as of FEAT_AMUv1. */
2008# define ARMV8_ID_AA64PFR0_EL1_AMU_V1 1
2009/** Activity Monitor extension is implemented as of FEAT_AMUv1p1 including virtualization support. */
2010# define ARMV8_ID_AA64PFR0_EL1_AMU_V1P1 2
2011/** Bit 48 - 51 - Data Independent Timing support. */
2012#define ARMV8_ID_AA64PFR0_EL1_DIT_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2013#define ARMV8_ID_AA64PFR0_EL1_DIT_SHIFT 48
2014/** AArch64 does not guarantee constant execution time of any instructions. */
2015# define ARMV8_ID_AA64PFR0_EL1_DIT_NOT_IMPL 0
2016/** AArch64 provides the PSTATE.DIT mechanism to guarantee constant execution time of certain instructions (FEAT_DIT). */
2017# define ARMV8_ID_AA64PFR0_EL1_DIT_SUPPORTED 1
2018/** Bit 52 - 55 - Realm Management Extension support. */
2019#define ARMV8_ID_AA64PFR0_EL1_RME_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2020#define ARMV8_ID_AA64PFR0_EL1_RME_SHIFT 52
2021/** Realm Management Extension not implemented. */
2022# define ARMV8_ID_AA64PFR0_EL1_RME_NOT_IMPL 0
2023/** RMEv1 is implemented (FEAT_RME). */
2024# define ARMV8_ID_AA64PFR0_EL1_RME_SUPPORTED 1
2025/** Bit 56 - 59 - Speculative use out of context branch targets support. */
2026#define ARMV8_ID_AA64PFR0_EL1_CSV2_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2027#define ARMV8_ID_AA64PFR0_EL1_CSV2_SHIFT 56
2028/** Implementation does not disclose whether FEAT_CSV2 is implemented. */
2029# define ARMV8_ID_AA64PFR0_EL1_CSV2_NOT_EXPOSED 0
2030/** FEAT_CSV2 is implemented. */
2031# define ARMV8_ID_AA64PFR0_EL1_CSV2_SUPPORTED 1
2032/** FEAT_CSV2_2 is implemented. */
2033# define ARMV8_ID_AA64PFR0_EL1_CSV2_2_SUPPORTED 2
2034/** FEAT_CSV2_3 is implemented. */
2035# define ARMV8_ID_AA64PFR0_EL1_CSV2_3_SUPPORTED 3
2036/** Bit 60 - 63 - Speculative use of faulting data support. */
2037#define ARMV8_ID_AA64PFR0_EL1_CSV3_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2038#define ARMV8_ID_AA64PFR0_EL1_CSV3_SHIFT 60
2039/** Implementation does not disclose whether data loaded under speculation with a permission or domain fault can be used. */
2040# define ARMV8_ID_AA64PFR0_EL1_CSV3_NOT_EXPOSED 0
2041/** FEAT_CSV3 is supported . */
2042# define ARMV8_ID_AA64PFR0_EL1_CSV3_SUPPORTED 1
2043/** @} */
2044
2045
2046/** @name ID_AA64PFR1_EL1 - AArch64 Processor Feature Register 1.
2047 * @{ */
2048/** Bit 0 - 3 - Branch Target Identification support. */
2049#define ARMV8_ID_AA64PFR1_EL1_BT_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2050#define ARMV8_ID_AA64PFR1_EL1_BT_SHIFT 0
2051/** The Branch Target Identification mechanism is not implemented. */
2052# define ARMV8_ID_AA64PFR1_EL1_BT_NOT_IMPL 0
2053/** The Branch Target Identifcation mechanism is implemented. */
2054# define ARMV8_ID_AA64PFR1_EL1_BT_SUPPORTED 1
2055/** Bit 4 - 7 - Speculative Store Bypassing control support. */
2056#define ARMV8_ID_AA64PFR1_EL1_SSBS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2057#define ARMV8_ID_AA64PFR1_EL1_SSBS_SHIFT 4
2058/** AArch64 provides no mechanism to control the use of Speculative Store Bypassing. */
2059# define ARMV8_ID_AA64PFR1_EL1_SSBS_NOT_IMPL 0
2060/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe. */
2061# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED 1
2062/** AArch64 provides the PSTATE.SSBS mechanism to mark regions that are Speculative Store Bypass Safe and adds MSR and MRS instructions
2063 * to directly read and write the PSTATE.SSBS field. */
2064# define ARMV8_ID_AA64PFR1_EL1_SSBS_SUPPORTED_MSR_MRS 2
2065/** Bit 8 - 11 - Memory Tagging Extension support. */
2066#define ARMV8_ID_AA64PFR1_EL1_MTE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2067#define ARMV8_ID_AA64PFR1_EL1_MTE_SHIFT 8
2068/** MTE is not implemented. */
2069# define ARMV8_ID_AA64PFR1_EL1_MTE_NOT_IMPL 0
2070/** Instruction only Memory Tagging Extensions implemented. */
2071# define ARMV8_ID_AA64PFR1_EL1_MTE_INSN_ONLY 1
2072/** Full Memory Tagging Extension implemented. */
2073# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL 2
2074/** Full Memory Tagging Extension with asymmetric Tag Check Fault handling implemented. */
2075# define ARMV8_ID_AA64PFR1_EL1_MTE_FULL_ASYM_TAG_FAULT_CHK 3
2076/** Bit 12 - 15 - RAS Extension fractional field. */
2077#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2078#define ARMV8_ID_AA64PFR1_EL1_RASFRAC_SHIFT 12
2079/** RAS Extension is implemented. */
2080# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_IMPL 0
2081/** FEAT_RASv1p1 is implemented. */
2082# define ARMV8_ID_AA64PFR1_EL1_RASFRAC_RASV1P1 1
2083/** Bit 16 - 19 - MPAM minor version number. */
2084#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2085#define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_SHIFT 16
2086/** The minor version of number of the MPAM extension is 0. */
2087# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_0 0
2088/** The minor version of number of the MPAM extension is 1. */
2089# define ARMV8_ID_AA64PFR1_EL1_MPAMFRAC_1 1
2090/* Bit 20 - 23 - Reserved. */
2091/** Bit 24 - 27 - Scalable Matrix Extension support. */
2092#define ARMV8_ID_AA64PFR1_EL1_SME_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2093#define ARMV8_ID_AA64PFR1_EL1_SME_SHIFT 24
2094/** Scalable Matrix Extensions are not implemented. */
2095# define ARMV8_ID_AA64PFR1_EL1_SME_NOT_IMPL 0
2096/** Scalable Matrix Extensions are implemented (FEAT_SME). */
2097# define ARMV8_ID_AA64PFR1_EL1_SME_SUPPORTED 1
2098/** Scalable Matrix Extensions are implemented + SME2 ZT0 register(FEAT_SME2). */
2099# define ARMV8_ID_AA64PFR1_EL1_SME_SME2 2
2100/** Bit 28 - 31 - Random Number trap to EL3 support. */
2101#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2102#define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SHIFT 28
2103/** Trapping of RNDR and RNDRRS to EL3 is not supported. */
2104# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_NOT_IMPL 0
2105/** Trapping of RNDR and RDNRRS to EL3 is supported. */
2106# define ARMV8_ID_AA64PFR1_EL1_RNDRTRAP_SUPPORTED 1
2107/** Bit 32 - 35 - CSV2 fractional field. */
2108#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2109#define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_SHIFT 32
2110/** Either CSV2 not exposed or implementation does not expose whether FEAT_CSV2_1p1 is implemented. */
2111# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_NOT_EXPOSED 0
2112/** FEAT_CSV2_1p1 is implemented. */
2113# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P1 1
2114/** FEAT_CSV2_1p2 is implemented. */
2115# define ARMV8_ID_AA64PFR1_EL1_CSV2FRAC_1P2 2
2116/** Bit 36 - 39 - Non-maskable Interrupt support. */
2117#define ARMV8_ID_AA64PFR1_EL1_NMI_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2118#define ARMV8_ID_AA64PFR1_EL1_NMI_SHIFT 36
2119/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are not supported. */
2120# define ARMV8_ID_AA64PFR1_EL1_NMI_NOT_IMPL 0
2121/** SCTLR_ELx.{SPINTMASK, NMI} and PSTATE.ALLINT and associated instructions are supported (FEAT_NMI). */
2122# define ARMV8_ID_AA64PFR1_EL1_NMI_SUPPORTED 1
2123/** @} */
2124
2125
2126/** @name ID_AA64MMFR0_EL1 - AArch64 Memory Model Feature Register 0.
2127 * @{ */
2128/** Bit 0 - 3 - Physical Address range supported. */
2129#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2130#define ARMV8_ID_AA64MMFR0_EL1_PARANGE_SHIFT 0
2131/** Physical Address range is 32 bits, 4GiB. */
2132# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_32BITS 0
2133/** Physical Address range is 36 bits, 64GiB. */
2134# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_36BITS 1
2135/** Physical Address range is 40 bits, 1TiB. */
2136# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_40BITS 2
2137/** Physical Address range is 42 bits, 4TiB. */
2138# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_42BITS 3
2139/** Physical Address range is 44 bits, 16TiB. */
2140# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_44BITS 4
2141/** Physical Address range is 48 bits, 256TiB. */
2142# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_48BITS 5
2143/** Physical Address range is 52 bits, 4PiB. */
2144# define ARMV8_ID_AA64MMFR0_EL1_PARANGE_52BITS 6
2145/** Bit 4 - 7 - Number of ASID bits. */
2146#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2147#define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_SHIFT 4
2148/** ASID bits is 8. */
2149# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_8 0
2150/** ASID bits is 16. */
2151# define ARMV8_ID_AA64MMFR0_EL1_ASIDBITS_16 2
2152/** Bit 8 - 11 - Indicates support for mixed-endian configuration. */
2153#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2154#define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SHIFT 8
2155/** No mixed-endian support. */
2156# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_NOT_IMPL 0
2157/** Mixed-endian supported. */
2158# define ARMV8_ID_AA64MMFR0_EL1_BIGEND_SUPPORTED 1
2159/** Bit 12 - 15 - Indicates support for a distinction between Secure and Non-secure Memory. */
2160#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2161#define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SHIFT 12
2162/** No distinction between Secure and Non-secure Memory supported. */
2163# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_NOT_IMPL 0
2164/** Distinction between Secure and Non-secure Memory supported. */
2165# define ARMV8_ID_AA64MMFR0_EL1_SNSMEM_SUPPORTED 1
2166/** Bit 16 - 19 - Indicates support for mixed-endian at EL0 only. */
2167#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2168#define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SHIFT 16
2169/** No mixed-endian support at EL0. */
2170# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_NOT_IMPL 0
2171/** Mixed-endian support at EL0. */
2172# define ARMV8_ID_AA64MMFR0_EL1_BIGENDEL0_SUPPORTED 1
2173/** Bit 20 - 23 - Indicates support for 16KiB memory translation granule size. */
2174#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2175#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SHIFT 20
2176/** 16KiB granule size not supported. */
2177# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_NOT_IMPL 0
2178/** 16KiB granule size is supported. */
2179# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED 1
2180/** 16KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2181# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_SUPPORTED_52BIT 2
2182/** Bit 24 - 27 - Indicates support for 64KiB memory translation granule size. */
2183#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2184#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SHIFT 24
2185/** 64KiB granule supported. */
2186# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_SUPPORTED 0
2187/** 64KiB granule not supported. */
2188# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_NOT_IMPL 0xf
2189/** Bit 28 - 31 - Indicates support for 4KiB memory translation granule size. */
2190#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2191#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SHIFT 28
2192/** 4KiB granule supported. */
2193# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED 0
2194/** 4KiB granule size is supported and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2195# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_SUPPORTED_52BIT 1
2196/** 4KiB granule not supported. */
2197# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_NOT_IMPL 0xf
2198/** Bit 32 - 35 - Indicates support for 16KiB granule size at stage 2. */
2199#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2200#define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SHIFT 32
2201/** Support for 16KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran16 field. */
2202# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORT_BY_TGRAN16 0
2203/** 16KiB granule not supported at stage 2. */
2204# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_NOT_IMPL 1
2205/** 16KiB granule supported at stage 2. */
2206# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED 2
2207/** 16KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2208# define ARMV8_ID_AA64MMFR0_EL1_TGRAN16_2_SUPPORTED_52BIT 3
2209/** Bit 36 - 39 - Indicates support for 64KiB granule size at stage 2. */
2210#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2211#define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SHIFT 36
2212/** Support for 64KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran64 field. */
2213# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORT_BY_TGRAN64 0
2214/** 64KiB granule not supported at stage 2. */
2215# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_NOT_IMPL 1
2216/** 64KiB granule supported at stage 2. */
2217# define ARMV8_ID_AA64MMFR0_EL1_TGRAN64_2_SUPPORTED 2
2218/** Bit 40 - 43 - Indicates HCRX_EL2 and its associated EL3 trap support. */
2219#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2220#define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SHIFT 40
2221/** Support for 4KiB granule at stage 2 is identified in the ID_AA64MMFR0_EL1.TGran4 field. */
2222# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORT_BY_TGRAN16 0
2223/** 4KiB granule not supported at stage 2. */
2224# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_NOT_IMPL 1
2225/** 4KiB granule supported at stage 2. */
2226# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED 2
2227/** 4KiB granule supported at stage 2 and supports 52-bit input addresses and can describe 52-bit output addresses (FEAT_LPA2). */
2228# define ARMV8_ID_AA64MMFR0_EL1_TGRAN4_2_SUPPORTED_52BIT 3
2229/** Bit 44 - 47 - Indicates support for disabling context synchronizing exception entry and exit. */
2230#define ARMV8_ID_AA64MMFR0_EL1_EXS_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2231#define ARMV8_ID_AA64MMFR0_EL1_EXS_SHIFT 44
2232/** All exception entries and exits are context synchronization events. */
2233# define ARMV8_ID_AA64MMFR0_EL1_EXS_NOT_IMPL 0
2234/** Non-context synchronizing exception entry and exit are supported (FEAT_ExS). */
2235# define ARMV8_ID_AA64MMFR0_EL1_EXS_SUPPORTED 1
2236/* Bit 48 - 55 - Reserved. */
2237/** Bit 56 - 59 - Indicates the presence of the Fine-Grained Trap controls. */
2238#define ARMV8_ID_AA64MMFR0_EL1_FGT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2239#define ARMV8_ID_AA64MMFR0_EL1_FGT_SHIFT 56
2240/** Fine-grained trap controls are not implemented. */
2241# define ARMV8_ID_AA64MMFR0_EL1_FGT_NOT_IMPL 0
2242/** Fine-grained trap controls are implemented (FEAT_FGT). */
2243# define ARMV8_ID_AA64MMFR0_EL1_FGT_SUPPORTED 1
2244/** Bit 60 - 63 - Indicates the presence of Enhanced Counter Virtualization. */
2245#define ARMV8_ID_AA64MMFR0_EL1_ECV_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2246#define ARMV8_ID_AA64MMFR0_EL1_ECV_SHIFT 60
2247/** Enhanced Counter Virtualization is not implemented. */
2248# define ARMV8_ID_AA64MMFR0_EL1_ECV_NOT_IMPL 0
2249/** Enhanced Counter Virtualization is implemented (FEAT_ECV). */
2250# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED 1
2251/** Enhanced Counter Virtualization is implemented and includes support for CNTHCTL_EL2.ECV and CNTPOFF_EL2 (FEAT_ECV). */
2252# define ARMV8_ID_AA64MMFR0_EL1_ECV_SUPPORTED_2 2
2253/** @} */
2254
2255
2256/** @name ID_AA64MMFR1_EL1 - AArch64 Memory Model Feature Register 1.
2257 * @{ */
2258/** Bit 0 - 3 - Hardware updates to Access flag and Dirty state in translation tables. */
2259#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2260#define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SHIFT 0
2261/** Hardware update of the Access flag and dirty state are not supported. */
2262# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_NOT_IMPL 0
2263/** Support for hardware update of the Access flag for Block and Page descriptors. */
2264# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_SUPPORTED 1
2265/** Support for hardware update of the Access flag for Block and Page descriptors, hardware update of dirty state supported. */
2266# define ARMV8_ID_AA64MMFR1_EL1_HAFDBS_DIRTY_SUPPORTED 2
2267/** Bit 4 - 7 - EL1 Exception level handling. */
2268#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2269#define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_SHIFT 4
2270/** VMID bits is 8. */
2271# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_8 0
2272/** VMID bits is 16 (FEAT_VMID16). */
2273# define ARMV8_ID_AA64MMFR1_EL1_VMIDBITS_16 2
2274/** Bit 8 - 11 - Virtualization Host Extensions support. */
2275#define ARMV8_ID_AA64MMFR1_EL1_VHE_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2276#define ARMV8_ID_AA64MMFR1_EL1_VHE_SHIFT 8
2277/** Virtualization Host Extensions are not supported. */
2278# define ARMV8_ID_AA64MMFR1_EL1_VHE_NOT_IMPL 0
2279/** Virtualization Host Extensions are supported. */
2280# define ARMV8_ID_AA64MMFR1_EL1_VHE_SUPPORTED 1
2281/** Bit 12 - 15 - Hierarchical Permission Disables. */
2282#define ARMV8_ID_AA64MMFR1_EL1_HPDS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2283#define ARMV8_ID_AA64MMFR1_EL1_HPDS_SHIFT 12
2284/** Disabling of hierarchical controls not supported. */
2285# define ARMV8_ID_AA64MMFR1_EL1_HPDS_NOT_IMPL 0
2286/** Disabling of hierarchical controls supported (FEAT_HPDS). */
2287# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED 1
2288/** FEAT_HPDS + possible hardware allocation of bits[62:59] of the translation table descriptors from the final lookup level (FEAT_HPDS2). */
2289# define ARMV8_ID_AA64MMFR1_EL1_HPDS_SUPPORTED_2 2
2290/** Bit 16 - 19 - LORegions support. */
2291#define ARMV8_ID_AA64MMFR1_EL1_LO_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2292#define ARMV8_ID_AA64MMFR1_EL1_LO_SHIFT 16
2293/** LORegions not supported. */
2294# define ARMV8_ID_AA64MMFR1_EL1_LO_NOT_IMPL 0
2295/** LORegions supported. */
2296# define ARMV8_ID_AA64MMFR1_EL1_LO_SUPPORTED 1
2297/** Bit 20 - 23 - Privileged Access Never support. */
2298#define ARMV8_ID_AA64MMFR1_EL1_PAN_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2299#define ARMV8_ID_AA64MMFR1_EL1_PAN_SHIFT 20
2300/** PAN not supported. */
2301# define ARMV8_ID_AA64MMFR1_EL1_PAN_NOT_IMPL 0
2302/** PAN supported (FEAT_PAN). */
2303# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED 1
2304/** PAN supported and AT S1E1RP and AT S1E1WP instructions supported (FEAT_PAN2). */
2305# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_2 2
2306/** PAN supported and AT S1E1RP and AT S1E1WP instructions and SCTRL_EL1.EPAN and SCTRL_EL2.EPAN supported (FEAT_PAN3). */
2307# define ARMV8_ID_AA64MMFR1_EL1_PAN_SUPPORTED_3 3
2308/** Bit 24 - 27 - Describes whether the PE can generate SError interrupt exceptions. */
2309#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2310#define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SHIFT 24
2311/** The PE never generates an SError interrupt due to an External abort on a speculative read. */
2312# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_NOT_IMPL 0
2313/** The PE might generate an SError interrupt due to an External abort on a speculative read. */
2314# define ARMV8_ID_AA64MMFR1_EL1_SPECSEI_SUPPORTED 1
2315/** Bit 28 - 31 - Indicates support for execute-never control distinction by Exception level at stage 2. */
2316#define ARMV8_ID_AA64MMFR1_EL1_XNX_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2317#define ARMV8_ID_AA64MMFR1_EL1_XNX_SHIFT 28
2318/** Distinction between EL0 and EL1 execute-never control at stage 2 not supported. */
2319# define ARMV8_ID_AA64MMFR1_EL1_XNX_NOT_IMPL 0
2320/** Distinction between EL0 and EL1 execute-never control at stage 2 supported (FEAT_XNX). */
2321# define ARMV8_ID_AA64MMFR1_EL1_XNX_SUPPORTED 1
2322/** Bit 32 - 35 - Indicates support for the configurable delayed trapping of WFE. */
2323#define ARMV8_ID_AA64MMFR1_EL1_TWED_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2324#define ARMV8_ID_AA64MMFR1_EL1_TWED_SHIFT 32
2325/** Configurable delayed trapping of WFE is not supported. */
2326# define ARMV8_ID_AA64MMFR1_EL1_TWED_NOT_IMPL 0
2327/** Configurable delayed trapping of WFE is supported (FEAT_TWED). */
2328# define ARMV8_ID_AA64MMFR1_EL1_TWED_SUPPORTED 1
2329/** Bit 36 - 39 - Indicates support for Enhanced Translation Synchronization. */
2330#define ARMV8_ID_AA64MMFR1_EL1_ETS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2331#define ARMV8_ID_AA64MMFR1_EL1_ETS_SHIFT 36
2332/** Enhanced Translation Synchronization is not supported. */
2333# define ARMV8_ID_AA64MMFR1_EL1_ETS_NOT_IMPL 0
2334/** Enhanced Translation Synchronization is implemented. */
2335# define ARMV8_ID_AA64MMFR1_EL1_ETS_SUPPORTED 1
2336/** Bit 40 - 43 - Indicates HCRX_EL2 and its associated EL3 trap support. */
2337#define ARMV8_ID_AA64MMFR1_EL1_HCX_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2338#define ARMV8_ID_AA64MMFR1_EL1_HCX_SHIFT 40
2339/** HCRX_EL2 and its associated EL3 trap are not supported. */
2340# define ARMV8_ID_AA64MMFR1_EL1_HCX_NOT_IMPL 0
2341/** HCRX_EL2 and its associated EL3 trap are supported (FEAT_HCX). */
2342# define ARMV8_ID_AA64MMFR1_EL1_HCX_SUPPORTED 1
2343/** Bit 44 - 47 - Indicates support for FPCR.{AH,FIZ,NEP}. */
2344#define ARMV8_ID_AA64MMFR1_EL1_AFP_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2345#define ARMV8_ID_AA64MMFR1_EL1_AFP_SHIFT 44
2346/** The FPCR.{AH,FIZ,NEP} fields are not supported. */
2347# define ARMV8_ID_AA64MMFR1_EL1_AFP_NOT_IMPL 0
2348/** The FPCR.{AH,FIZ,NEP} fields are supported (FEAT_AFP). */
2349# define ARMV8_ID_AA64MMFR1_EL1_AFP_SUPPORTED 1
2350/** Bit 48 - 51 - Indicates support for intermediate caching of translation table walks. */
2351#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2352#define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_SHIFT 48
2353/** The intermediate caching of translation table walks might include non-coherent physical translation caches. */
2354# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_NON_COHERENT 0
2355/** The intermediate caching of translation table walks does not include non-coherent physical translation caches (FEAT_nTLBPA). */
2356# define ARMV8_ID_AA64MMFR1_EL1_NTLBPA_INCLUDE_COHERENT_ONLY 1
2357/** Bit 52 - 55 - Indicates whether SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP are implemented in AArch64 state. */
2358#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2359#define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SHIFT 52
2360/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are not implemented. */
2361# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_NOT_IMPL 0
2362/** SCTLR_EL1.TIDCP and SCTLR_EL2.TIDCP bits are implemented (FEAT_TIDCP1). */
2363# define ARMV8_ID_AA64MMFR1_EL1_TIDCP1_SUPPORTED 1
2364/** Bit 56 - 59 - Indicates support for cache maintenance instruction permission. */
2365#define ARMV8_ID_AA64MMFR1_EL1_CMOW_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2366#define ARMV8_ID_AA64MMFR1_EL1_CMOW_SHIFT 56
2367/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are not implemented. */
2368# define ARMV8_ID_AA64MMFR1_EL1_CMOW_NOT_IMPL 0
2369/** SCTLR_EL1.CMOW, SCTLR_EL2.CMOW and HCRX_EL2.CMOW bits are implemented (FEAT_CMOW). */
2370# define ARMV8_ID_AA64MMFR1_EL1_CMOW_SUPPORTED 1
2371/* Bit 60 - 63 - Reserved. */
2372/** @} */
2373
2374
2375/** @name ID_AA64MMFR2_EL1 - AArch64 Memory Model Feature Register 2.
2376 * @{ */
2377/** Bit 0 - 3 - Indicates support for Common not Private translations. */
2378#define ARMV8_ID_AA64MMFR2_EL1_CNP_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2379#define ARMV8_ID_AA64MMFR2_EL1_CNP_SHIFT 0
2380/** Common not Private translations are not supported. */
2381# define ARMV8_ID_AA64MMFR2_EL1_CNP_NOT_IMPL 0
2382/** Support for Common not Private translations (FEAT_TTNCP). */
2383# define ARMV8_ID_AA64MMFR2_EL1_CNP_SUPPORTED 1
2384/** Bit 4 - 7 - Indicates support for User Access Override. */
2385#define ARMV8_ID_AA64MMFR2_EL1_UAO_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2386#define ARMV8_ID_AA64MMFR2_EL1_UAO_SHIFT 4
2387/** User Access Override is not supported. */
2388# define ARMV8_ID_AA64MMFR2_EL1_UAO_NOT_IMPL 0
2389/** User Access Override is supported (FEAT_UAO). */
2390# define ARMV8_ID_AA64MMFR2_EL1_UAO_SUPPORTED 1
2391/** Bit 8 - 11 - Indicates support for LSMAOE and nTLSMD bits in SCTLR_ELx. */
2392#define ARMV8_ID_AA64MMFR2_EL1_LSM_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2393#define ARMV8_ID_AA64MMFR2_EL1_LSM_SHIFT 8
2394/** LSMAOE and nTLSMD bits are not supported. */
2395# define ARMV8_ID_AA64MMFR2_EL1_LSM_NOT_IMPL 0
2396/** LSMAOE and nTLSMD bits are supported (FEAT_LSMAOC). */
2397# define ARMV8_ID_AA64MMFR2_EL1_LSM_SUPPORTED 1
2398/** Bit 12 - 15 - Indicates support for the IESB bit in SCTLR_ELx registers. */
2399#define ARMV8_ID_AA64MMFR2_EL1_IESB_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2400#define ARMV8_ID_AA64MMFR2_EL1_IESB_SHIFT 12
2401/** IESB bit is not supported. */
2402# define ARMV8_ID_AA64MMFR2_EL1_IESB_NOT_IMPL 0
2403/** IESB bit is supported (FEAT_IESB). */
2404# define ARMV8_ID_AA64MMFR2_EL1_IESB_SUPPORTED 1
2405/** Bit 16 - 19 - Indicates support for larger virtual address. */
2406#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_MASK (RT_BIT_64(16) | RT_BIT_64(17) | RT_BIT_64(18) | RT_BIT_64(19))
2407#define ARMV8_ID_AA64MMFR2_EL1_VARANGE_SHIFT 16
2408/** Virtual address range is 48 bits. */
2409# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_48BITS 0
2410/** 52 bit virtual addresses supported for 64KiB granules (FEAT_LVA). */
2411# define ARMV8_ID_AA64MMFR2_EL1_VARANGE_52BITS_64KB_GRAN 1
2412/** Bit 20 - 23 - Revised CCSIDR_EL1 register format supported. */
2413#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2414#define ARMV8_ID_AA64MMFR2_EL1_CCIDX_SHIFT 20
2415/** CCSIDR_EL1 register format is 32-bit. */
2416# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_32BIT 0
2417/** CCSIDR_EL1 register format is 64-bit (FEAT_CCIDX). */
2418# define ARMV8_ID_AA64MMFR2_EL1_CCIDX_64BIT 1
2419/** Bit 24 - 27 - Indicates support for nested virtualization. */
2420#define ARMV8_ID_AA64MMFR2_EL1_NV_MASK (RT_BIT_64(24) | RT_BIT_64(25) | RT_BIT_64(26) | RT_BIT_64(27))
2421#define ARMV8_ID_AA64MMFR2_EL1_NV_SHIFT 24
2422/** Nested virtualization is not supported. */
2423# define ARMV8_ID_AA64MMFR2_EL1_NV_NOT_IMPL 0
2424/** The HCR_EL2.{AT,NV1,NV} bits are implemented (FEAT_NV). */
2425# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED 1
2426/** The VNCR_EL2 register and HCR_EL2.{NV2,AT,NV1,NV} bits are implemented (FEAT_NV2). */
2427# define ARMV8_ID_AA64MMFR2_EL1_NV_SUPPORTED_2 2
2428/** Bit 28 - 31 - Indicates support for small translation tables. */
2429#define ARMV8_ID_AA64MMFR2_EL1_ST_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2430#define ARMV8_ID_AA64MMFR2_EL1_ST_SHIFT 28
2431/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 39. */
2432# define ARMV8_ID_AA64MMFR2_EL1_ST_NOT_IMPL 0
2433/** The maximum value of TCR_ELx.{T0SZ,T1SZ} is 48 for 4KiB and 16KiB, and 47 for 64KiB granules (FEAT_TTST). */
2434# define ARMV8_ID_AA64MMFR2_EL1_ST_SUPPORTED 1
2435/** Bit 32 - 35 - Indicates support for unaligned single-copy atomicity and atomic functions. */
2436#define ARMV8_ID_AA64MMFR2_EL1_AT_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2437#define ARMV8_ID_AA64MMFR2_EL1_AT_SHIFT 32
2438/** Unaligned single-copy atomicity and atomic functions are not supported. */
2439# define ARMV8_ID_AA64MMFR2_EL1_AT_NOT_IMPL 0
2440/** Unaligned single-copy atomicity and atomic functions are supported (FEAT_LSE2). */
2441# define ARMV8_ID_AA64MMFR2_EL1_AT_SUPPORTED 1
2442/** Bit 36 - 39 - Indicates value of ESR_ELx.EC that reports an exception generated by a read access to the feature ID space. */
2443#define ARMV8_ID_AA64MMFR2_EL1_IDS_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2444#define ARMV8_ID_AA64MMFR2_EL1_IDS_SHIFT 36
2445/** ESR_ELx.EC is 0 for traps generated by a read access to the feature ID space. */
2446# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_0 0
2447/** ESR_ELx.EC is 0x18 for traps generated by a read access to the feature ID space (FEAT_IDST). */
2448# define ARMV8_ID_AA64MMFR2_EL1_IDS_EC_18H 1
2449/** Bit 40 - 43 - Indicates support for the HCR_EL2.FWB bit. */
2450#define ARMV8_ID_AA64MMFR2_EL1_FWB_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2451#define ARMV8_ID_AA64MMFR2_EL1_FWB_SHIFT 40
2452/** HCR_EL2.FWB bit is not supported. */
2453# define ARMV8_ID_AA64MMFR2_EL1_FWB_NOT_IMPL 0
2454/** HCR_EL2.FWB bit is supported (FEAT_S2FWB). */
2455# define ARMV8_ID_AA64MMFR2_EL1_FWB_SUPPORTED 1
2456/* Bit 44 - 47 - Reserved. */
2457/** Bit 48 - 51 - Indicates support for TTL field in address operations. */
2458#define ARMV8_ID_AA64MMFR2_EL1_TTL_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2459#define ARMV8_ID_AA64MMFR2_EL1_TTL_SHIFT 48
2460/** TLB maintenance instructions by address have bits [47:44] Res0. */
2461# define ARMV8_ID_AA64MMFR2_EL1_TTL_NOT_IMPL 0
2462/** TLB maintenance instructions by address have bits [47:44] holding the TTL field (FEAT_TTL). */
2463# define ARMV8_ID_AA64MMFR2_EL1_TTL_SUPPORTED 1
2464/** Bit 52 - 55 - Identification of the hardware requirements of the hardware to have break-before-make sequences when
2465 * changing block size for a translation. */
2466#define ARMV8_ID_AA64MMFR2_EL1_BBM_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2467#define ARMV8_ID_AA64MMFR2_EL1_BBM_SHIFT 52
2468/** Level 0 support for changing block size is supported (FEAT_BBM). */
2469# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL0 0
2470/** Level 1 support for changing block size is supported (FEAT_BBM). */
2471# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL1 1
2472/** Level 2 support for changing block size is supported (FEAT_BBM). */
2473# define ARMV8_ID_AA64MMFR2_EL1_BBM_LVL2 2
2474/** Bit 56 - 59 - Indicates support for Enhanced Virtualization Traps. */
2475#define ARMV8_ID_AA64MMFR2_EL1_EVT_MASK (RT_BIT_64(56) | RT_BIT_64(57) | RT_BIT_64(58) | RT_BIT_64(59))
2476#define ARMV8_ID_AA64MMFR2_EL1_EVT_SHIFT 56
2477/** Enhanced Virtualization Traps are not supported. */
2478# define ARMV8_ID_AA64MMFR2_EL1_EVT_NOT_IMPL 0
2479/** Enhanced Virtualization Traps are supported (FEAT_EVT). */
2480# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED 1
2481/** Enhanced Virtualization Traps are supported with additional traps (FEAT_EVT). */
2482# define ARMV8_ID_AA64MMFR2_EL1_EVT_SUPPORTED_2 2
2483/** Bit 60 - 63 - Indicates support for E0PDx mechanism. */
2484#define ARMV8_ID_AA64MMFR2_EL1_E0PD_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2485#define ARMV8_ID_AA64MMFR2_EL1_E0PD_SHIFT 60
2486/** E0PDx mechanism is not supported. */
2487# define ARMV8_ID_AA64MMFR2_EL1_E0PD_NOT_IMPL 0
2488/** E0PDx mechanism is supported (FEAT_E0PD). */
2489# define ARMV8_ID_AA64MMFR2_EL1_E0PD_SUPPORTED 1
2490/** @} */
2491
2492
2493/** @name ID_AA64DFR0_EL1 - AArch64 Debug Feature Register 0.
2494 * @{ */
2495/** Bit 0 - 3 - Indicates the Debug Architecture version supported. */
2496#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_MASK (RT_BIT_64(0) | RT_BIT_64(1) | RT_BIT_64(2) | RT_BIT_64(3))
2497#define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_SHIFT 0
2498/** Armv8 debug architecture version. */
2499# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8 6
2500/** Armv8 debug architecture version with virtualization host extensions. */
2501# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8_VHE 7
2502/** Armv8.2 debug architecture version (FEAT_Debugv8p2). */
2503# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p2 8
2504/** Armv8.4 debug architecture version (FEAT_Debugv8p4). */
2505# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p4 9
2506/** Armv8.8 debug architecture version (FEAT_Debugv8p8). */
2507# define ARMV8_ID_AA64DFR0_EL1_DEBUGVER_ARMV8p8 10
2508/** Bit 4 - 7 - Indicates trace support. */
2509#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_MASK (RT_BIT_64(4) | RT_BIT_64(5) | RT_BIT_64(6) | RT_BIT_64(7))
2510#define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SHIFT 4
2511/** Trace unit System registers not implemented. */
2512# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_NOT_IMPL 0
2513/** Trace unit System registers supported. */
2514# define ARMV8_ID_AA64DFR0_EL1_TRACEVER_SUPPORTED 1
2515/** Bit 8 - 11 - Performance Monitors Extension version. */
2516#define ARMV8_ID_AA64DFR0_EL1_PMUVER_MASK (RT_BIT_64(8) | RT_BIT_64(9) | RT_BIT_64(10) | RT_BIT_64(11))
2517#define ARMV8_ID_AA64DFR0_EL1_PMUVER_SHIFT 8
2518/** Performance Monitors Extension not supported. */
2519# define ARMV8_ID_AA64DFR0_EL1_PMUVER_NOT_IMPL 0
2520/** Performance Monitors Extension v3 supported (FEAT_PMUv3). */
2521# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3 1
2522/** Performance Monitors Extension v3 supported (FEAT_PMUv3p1). */
2523# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P1 4
2524/** Performance Monitors Extension v3 supported (FEAT_PMUv3p4). */
2525# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P4 5
2526/** Performance Monitors Extension v3 supported (FEAT_PMUv3p5). */
2527# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P5 6
2528/** Performance Monitors Extension v3 supported (FEAT_PMUv3p7). */
2529# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P7 7
2530/** Performance Monitors Extension v3 supported (FEAT_PMUv3p8). */
2531# define ARMV8_ID_AA64DFR0_EL1_PMUVER_SUPPORTED_V3P8 8
2532/** Bit 12 - 15 - Number of breakpoints, minus 1. */
2533#define ARMV8_ID_AA64DFR0_EL1_BRPS_MASK (RT_BIT_64(12) | RT_BIT_64(13) | RT_BIT_64(14) | RT_BIT_64(15))
2534#define ARMV8_ID_AA64DFR0_EL1_BRPS_SHIFT 12
2535/* Bit 16 - 19 - Reserved 0. */
2536/** Bit 20 - 23 - Number of watchpoints, minus 1. */
2537#define ARMV8_ID_AA64DFR0_EL1_WRPS_MASK (RT_BIT_64(20) | RT_BIT_64(21) | RT_BIT_64(22) | RT_BIT_64(23))
2538#define ARMV8_ID_AA64DFR0_EL1_WRPS_SHIFT 20
2539/* Bit 24 - 27 - Reserved 0. */
2540/** Bit 28 - 31 - Number of context-aware breakpoints. */
2541#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_MASK (RT_BIT_64(28) | RT_BIT_64(29) | RT_BIT_64(30) | RT_BIT_64(31))
2542#define ARMV8_ID_AA64DFR0_EL1_CTXCMPS_SHIFT 28
2543/** Bit 32 - 35 - Statistical Profiling Extension version. */
2544#define ARMV8_ID_AA64DFR0_EL1_PMSVER_MASK (RT_BIT_64(32) | RT_BIT_64(33) | RT_BIT_64(34) | RT_BIT_64(35))
2545#define ARMV8_ID_AA64DFR0_EL1_PMSVER_SHIFT 32
2546/** Statistical Profiling Extension not implemented. */
2547# define ARMV8_ID_AA64DFR0_EL1_PMSVER_NOT_IMPL 0
2548/** Statistical Profiling Extension supported (FEAT_SPE). */
2549# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED 1
2550/** Statistical Profiling Extension supported, version 1.1 (FEAT_SPEv1p1). */
2551# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P1 2
2552/** Statistical Profiling Extension supported, version 1.2 (FEAT_SPEv1p2). */
2553# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P2 3
2554/** Statistical Profiling Extension supported, version 1.2 (FEAT_SPEv1p3). */
2555# define ARMV8_ID_AA64DFR0_EL1_PMSVER_SUPPORTED_V1P3 4
2556/** Bit 36 - 39 - OS Double Lock implemented. */
2557#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_MASK (RT_BIT_64(36) | RT_BIT_64(37) | RT_BIT_64(38) | RT_BIT_64(39))
2558#define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SHIFT 36
2559/** OS Double Lock is not implemented. */
2560# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_NOT_IMPL 0xf
2561/** OS Double Lock is supported (FEAT_DoubleLock). */
2562# define ARMV8_ID_AA64DFR0_EL1_DOUBLELOCK_SUPPORTED 0
2563/** Bit 40 - 43 - Indicates the Armv8.4 self-hosted Trace Extension. */
2564#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_MASK (RT_BIT_64(40) | RT_BIT_64(41) | RT_BIT_64(42) | RT_BIT_64(43))
2565#define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SHIFT 40
2566/** Armv8.4 self-hosted Trace Extension not implemented. */
2567# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_NOT_IMPL 0
2568/** Armv8.4 self-hosted Trace Extension is supported (FEAT_TRF). */
2569# define ARMV8_ID_AA64DFR0_EL1_TRACEFILT_SUPPORTED 1
2570/** Bit 44 - 47 - Indicates support for the Trace Buffer Extension. */
2571#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_MASK (RT_BIT_64(44) | RT_BIT_64(45) | RT_BIT_64(46) | RT_BIT_64(47))
2572#define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SHIFT 44
2573/** Trace Buffer Extension is not implemented. */
2574# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_NOT_IMPL 0
2575/** Trace Buffer Extension is supported (FEAT_TRBE). */
2576# define ARMV8_ID_AA64DFR0_EL1_TRACEBUFFER_SUPPORTED 1
2577/** Bit 48 - 51 - Indicates support for the multi-threaded PMU extension. */
2578#define ARMV8_ID_AA64DFR0_EL1_MTPMU_MASK (RT_BIT_64(48) | RT_BIT_64(49) | RT_BIT_64(50) | RT_BIT_64(51))
2579#define ARMV8_ID_AA64DFR0_EL1_MTPMU_SHIFT 48
2580/** Multi-threaded PMU extension is not implemented. */
2581# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL 0
2582/** Multi-threaded PMU extension is supported (FEAT_MTPMU). */
2583# define ARMV8_ID_AA64DFR0_EL1_MTPMU_SUPPORTED 1
2584/** Multi-threaded PMU extension is not implemented. */
2585# define ARMV8_ID_AA64DFR0_EL1_MTPMU_NOT_IMPL_2 0xf
2586/** Bit 52 - 55 - Indicates support for the Branch Record Buffer extension. */
2587#define ARMV8_ID_AA64DFR0_EL1_BRBE_MASK (RT_BIT_64(52) | RT_BIT_64(53) | RT_BIT_64(54) | RT_BIT_64(55))
2588#define ARMV8_ID_AA64DFR0_EL1_BRBE_SHIFT 52
2589/** Branch Record Buffer extension is not implemented. */
2590# define ARMV8_ID_AA64DFR0_EL1_BRBE_NOT_IMPL 0
2591/** Branch Record Buffer extension is supported (FEAT_BRBE). */
2592# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED 1
2593/** Branch Record Buffer extension is supported and supports branch recording at EL3 (FEAT_BRBEv1p1). */
2594# define ARMV8_ID_AA64DFR0_EL1_BRBE_SUPPORTED_V1P1 2
2595/* Bit 56 - 59 - Reserved. */
2596/** Bit 60 - 63 - Indicates support for Zero PMU event counters for guest operating systems. */
2597#define ARMV8_ID_AA64DFR0_EL1_HPMN0_MASK (RT_BIT_64(60) | RT_BIT_64(61) | RT_BIT_64(62) | RT_BIT_64(63))
2598#define ARMV8_ID_AA64DFR0_EL1_HPMN0_SHIFT 60
2599/** Setting MDCE_EL2.HPMN to zero has CONSTRAINED UNPREDICTABLE behavior. */
2600# define ARMV8_ID_AA64DFR0_EL1_HPMN0_NOT_IMPL 0
2601/** Setting MDCE_EL2.HPMN to zero has defined behavior (FEAT_HPMN0). */
2602# define ARMV8_ID_AA64DFR0_EL1_HPMN0_SUPPORTED 1
2603/** @} */
2604
2605
2606/** @name FPCR - AArch64 Floating Point Control Register.
2607 * @{ */
2608/** Bit 0 - Flush Inputs to Zero when FEAT_AFP is supported. */
2609#define ARMV8_FPCR_FIZ RT_BIT_64(0)
2610#define ARMV8_FPCR_FIZ_BIT 0
2611/** Bit 1 - Alternate Handling of floating-point numbers when FEAT_AFP is supported. */
2612#define ARMV8_FPCR_AH RT_BIT_64(1)
2613#define ARMV8_FPCR_AH_BIT 1
2614/** Bit 2 - Controls how the output elements other than the lowest element of the vector are determined for
2615 * Advanced SIMD scalar instructions, when FEAT_AFP is supported. */
2616#define ARMV8_FPCR_NEP RT_BIT_64(2)
2617#define ARMV8_FPCR_NEP_BIT 2
2618/* Bit 3 - 7 - Reserved.*/
2619/** Bit 8 - Invalid Operation floating-point exception trap enable. */
2620#define ARMV8_FPCR_IOE RT_BIT_64(8)
2621#define ARMV8_FPCR_IOE_BIT 8
2622/** Bit 9 - Divide by Zero floating-point exception trap enable. */
2623#define ARMV8_FPCR_DZE RT_BIT_64(9)
2624#define ARMV8_FPCR_DZE_BIT 9
2625/** Bit 10 - Overflow floating-point exception trap enable. */
2626#define ARMV8_FPCR_OFE RT_BIT_64(10)
2627#define ARMV8_FPCR_OFE_BIT 10
2628/** Bit 11 - Underflow floating-point exception trap enable. */
2629#define ARMV8_FPCR_UFE RT_BIT_64(11)
2630#define ARMV8_FPCR_UFE_BIT 11
2631/** Bit 12 - Inexact floating-point exception trap enable. */
2632#define ARMV8_FPCR_IXE RT_BIT_64(12)
2633#define ARMV8_FPCR_IXE_BIT 12
2634/** Bit 13 - Controls numeric behavior of BFloat16 dot productions calculations performed,
2635 * supported when FEAT_EBF16 is supported. */
2636#define ARMV8_FPCR_EBF RT_BIT_64(13)
2637#define ARMV8_FPCR_EBF_BIT 13
2638/* Bit 14 - Reserved */
2639/** Bit 15 - Input Denormal floating-point exception trap enable. */
2640#define ARMV8_FPCR_IDE RT_BIT_64(15)
2641#define ARMV8_FPCR_IDE_BIT 15
2642/* Bit 16 - 18 - Reserved for AArch64 (Len field for AArch32). */
2643/** Bit 19 - Flushing denormalized numbers to zero control bit on half-precision data-processing instructions,
2644 * available when FEAT_FP16 is supported. */
2645#define ARMV8_FPCR_FZ16 RT_BIT_64(19)
2646#define ARMV8_FPCR_FZ16_BIT 19
2647/* Bit 20 - 21 - Reserved for AArch64 (Stride field dor AArch32). */
2648/** Bit 22 - 23 - Rounding Mode control field. */
2649#define ARMV8_FPCR_RMODE_MASK (RT_BIT_64(22) | RT_BIT_64(23))
2650#define ARMV8_FPCR_RMODE_SHIFT 22
2651/** Round to Nearest (RN) mode. */
2652# define ARMV8_FPCR_RMODE_RN 0
2653/** Round towards Plus Infinity (RP) mode. */
2654# define ARMV8_FPCR_RMODE_RP 1
2655/** Round towards Minus Infinity (RM) mode. */
2656# define ARMV8_FPCR_RMODE_RM 2
2657/** Round towards Zero (RZ) mode. */
2658# define ARMV8_FPCR_RMODE_RZ 3
2659/** Bit 24 - Flushing denormalized numbers to zero control bit. */
2660#define ARMV8_FPCR_FZ RT_BIT_64(24)
2661#define ARMV8_FPCR_FZ_BIT 24
2662/** Bit 25 - Default NaN use for NaN propagation. */
2663#define ARMV8_FPCR_DN RT_BIT_64(25)
2664#define ARMV8_FPCR_DN_BIT 25
2665/** Bit 26 - Alternative half-precision control bit. */
2666#define ARMV8_FPCR_AHP RT_BIT_64(26)
2667#define ARMV8_FPCR_AHP_BIT 26
2668/* Bit 27 - 63 - Reserved. */
2669/** @} */
2670
2671
2672/** @name FPSR - AArch64 Floating Point Status Register.
2673 * @{ */
2674/** Bit 0 - Invalid Operation cumulative floating-point exception bit. */
2675#define ARMV8_FPSR_IOC RT_BIT_64(0)
2676/** Bit 1 - Divide by Zero cumulative floating-point exception bit. */
2677#define ARMV8_FPSR_DZC RT_BIT_64(1)
2678/** Bit 2 - Overflow cumulative floating-point exception bit. */
2679#define ARMV8_FPSR_OFC RT_BIT_64(2)
2680/** Bit 3 - Underflow cumulative floating-point exception bit. */
2681#define ARMV8_FPSR_UFC RT_BIT_64(3)
2682/** Bit 4 - Inexact cumulative floating-point exception bit. */
2683#define ARMV8_FPSR_IXC RT_BIT_64(4)
2684/* Bit 5 - 6 - Reserved. */
2685/** Bit 7 - Input Denormal cumulative floating-point exception bit. */
2686#define ARMV8_FPSR_IDC RT_BIT_64(7)
2687/* Bit 8 - 26 - Reserved. */
2688/** Bit 27 - Cumulative saturation bit, Advanced SIMD only. */
2689#define ARMV8_FPSR_QC RT_BIT_64(27)
2690/* Bit 28 - 31 - NZCV bits for AArch32 floating point operations. */
2691/* Bit 32 - 63 - Reserved. */
2692/** @} */
2693
2694
2695
2696/** @name SCTLR_EL1 - AArch64 System Control Register (EL1).
2697 * @{ */
2698/** Bit 0 - MMU enable for EL1 and EL0 stage 1 address translation. */
2699#define ARMV8_SCTLR_EL1_M RT_BIT_64(0)
2700/** Bit 1 - Alignment check enable for EL1 and EL0. */
2701#define ARMV8_SCTLR_EL1_A RT_BIT_64(1)
2702/** Bit 2 - Stage 1 cacheability control, for data accesses. */
2703#define ARMV8_SCTLR_EL1_C RT_BIT_64(2)
2704/** Bit 3 - SP alignment check enable. */
2705#define ARMV8_SCTLR_EL1_SA RT_BIT_64(3)
2706/** Bit 4 - SP alignment check enable for EL0. */
2707#define ARMV8_SCTLR_EL1_SA0 RT_BIT_64(4)
2708/** Bit 5 - System instruction memory barrier enable from AArch32 EL0. */
2709#define ARMV8_SCTLR_EL1_CP15BEN RT_BIT_64(5)
2710/** Bit 6 - Non-aligned access enable. */
2711#define ARMV8_SCTLR_EL1_nAA RT_BIT_64(6)
2712/** Bit 7 - IT disable, disables some uses of IT instructions at EL0 using AArch32. */
2713#define ARMV8_SCTLR_EL1_ITD RT_BIT_64(7)
2714/** Bit 8 - SETEND instruction disable, disables SETEND instructions at EL0 using AArch32. */
2715#define ARMV8_SCTLR_EL1_SED RT_BIT_64(8)
2716/** Bit 9 - User Mask Access. Traps EL0 execution of MSR and MRS instructions that access the PSTATE.{D,A,I,F} masks to EL1. */
2717#define ARMV8_SCTLR_EL1_UMA RT_BIT_64(9)
2718/** Bit 10 - Enable EL0 acccess to the CFP*, DVP* and CPP* instructions if FEAT_SPECRES is supported. */
2719#define ARMV8_SCTLR_EL1_EnRCTX RT_BIT_64(10)
2720/** Bit 11 - Exception Exit is Context Synchronizing (FEAT_ExS required). */
2721#define ARMV8_SCTLR_EL1_EOS RT_BIT_64(11)
2722/** Bit 12 - Stage 1 instruction access cacheability control, for access at EL0 and EL1. */
2723#define ARMV8_SCTLR_EL1_I RT_BIT_64(12)
2724/** @todo Finish (lazy developer). */
2725/** @} */
2726
2727
2728/** @name SCTLR_EL2 - AArch64 System Control Register (EL2) - 32-bit.
2729 * @{ */
2730/** Bit 0 - MMU enable for EL2. */
2731#define ARMV8_SCTLR_EL2_M RT_BIT_64(0)
2732/** Bit 1 - Alignment check enable. */
2733#define ARMV8_SCTLR_EL2_A RT_BIT_64(1)
2734/** Bit 2 - Global enable for data and unified caches. */
2735#define ARMV8_SCTLR_EL2_C RT_BIT_64(2)
2736/** Bit 3 - SP alignment check enable. */
2737#define ARMV8_SCTLR_EL2_SA RT_BIT_64(3)
2738/* Bit 4 - 11 - Reserved. */
2739/** Bit 12 - Instruction cache enable. */
2740#define ARMV8_SCTLR_EL2_I RT_BIT_64(12)
2741/* Bit 13 - 18 - Reserved. */
2742/** Bit 19 - Force treatment of all memory regions with write permissions as XN. */
2743#define ARMV8_SCTLR_EL2_WXN RT_BIT_64(19)
2744/* Bit 20 - 24 - Reserved. */
2745/** Bit 25 - Exception endianess - set means big endian, clear little endian. */
2746#define ARMV8_SCTLR_EL2_EE RT_BIT_64(25)
2747/* Bit 26 - 31 - Reserved. */
2748/** @} */
2749
2750
2751#if (!defined(VBOX_FOR_DTRACE_LIB) && defined(__cplusplus) && !defined(ARMV8_WITHOUT_MK_INSTR)) || defined(DOXYGEN_RUNNING)
2752/** @defgroup grp_rt_armv8_mkinstr Instruction Encoding Helpers
2753 * @ingroup grp_rt_armv8
2754 *
2755 * A few inlined functions and macros for assiting in encoding common ARMv8
2756 * instructions.
2757 *
2758 * @{ */
2759
2760/** A64: Official NOP instruction. */
2761#define ARMV8_A64_INSTR_NOP UINT32_C(0xd503201f)
2762/** A64: Return instruction. */
2763#define ARMV8_A64_INSTR_RET UINT32_C(0xd65f03c0)
2764/** A64: Return instruction with LR pointer authentication using SP and key A. */
2765#define ARMV8_A64_INSTR_RETAA UINT32_C(0xd65f0bff)
2766/** A64: Return instruction with LR pointer authentication using SP and key B. */
2767#define ARMV8_A64_INSTR_RETAB UINT32_C(0xd65f0fff)
2768/** A64: Insert pointer authentication code into X17 using X16 and key B. */
2769#define ARMV8_A64_INSTR_PACIB1716 UINT32_C(0xd503215f)
2770/** A64: Insert pointer authentication code into LR using SP and key B. */
2771#define ARMV8_A64_INSTR_PACIBSP UINT32_C(0xd503237f)
2772/** A64: Insert pointer authentication code into LR using XZR and key B. */
2773#define ARMV8_A64_INSTR_PACIBZ UINT32_C(0xd503235f)
2774/** A64: Invert the carry flag (PSTATE.C). */
2775#define ARMV8_A64_INSTR_CFINV UINT32_C(0xd500401f)
2776
2777
2778/** Memory barrier: Shareability domain. */
2779typedef enum
2780{
2781 kArm64InstMbReqDomain_OuterShareable = 0,
2782 kArm64InstMbReqDomain_Nonshareable,
2783 kArm64InstMbReqDomain_InnerShareable,
2784 kArm64InstMbReqDomain_FullSystem
2785} ARM64INSTRMBREQDOMAIN;
2786
2787/** Memory barrier: Access type. */
2788typedef enum
2789{
2790 kArm64InstMbReqType_All0 = 0, /**< Special. Only used with PSSBB and SSBB. */
2791 kArm64InstMbReqType_Reads,
2792 kArm64InstMbReqType_Writes,
2793 kArm64InstMbReqType_All
2794} ARM64INSTRMBREQTYPE;
2795
2796/**
2797 * A64: DMB option
2798 */
2799DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrDmb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2800 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2801{
2802 return UINT32_C(0xd50330bf)
2803 | ((uint32_t)enmDomain << 8)
2804 | ((uint32_t)enmType << 10);
2805}
2806
2807
2808/**
2809 * A64: DSB option
2810 */
2811DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrDsb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2812 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2813{
2814 return UINT32_C(0xd503309f)
2815 | ((uint32_t)enmDomain << 8)
2816 | ((uint32_t)enmType << 10);
2817}
2818
2819
2820/**
2821 * A64: SSBB
2822 */
2823DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSsbb(void)
2824{
2825 return Armv8A64MkInstrDsb(kArm64InstMbReqDomain_OuterShareable, kArm64InstMbReqType_All0);
2826}
2827
2828
2829/**
2830 * A64: PSSBB
2831 */
2832DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrPSsbb(void)
2833{
2834 return Armv8A64MkInstrDsb(kArm64InstMbReqDomain_Nonshareable, kArm64InstMbReqType_All0);
2835}
2836
2837
2838/**
2839 * A64: ISB option
2840 *
2841 * @note Only the default option selection is supported, all others are
2842 * currently reserved.
2843 */
2844DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrIsb(ARM64INSTRMBREQDOMAIN enmDomain = kArm64InstMbReqDomain_FullSystem,
2845 ARM64INSTRMBREQTYPE enmType = kArm64InstMbReqType_All)
2846{
2847 return UINT32_C(0xd50330df)
2848 | ((uint32_t)enmDomain << 8)
2849 | ((uint32_t)enmType << 10);
2850}
2851
2852
2853typedef enum
2854{
2855 /** Add @a iImm7*sizeof(reg) to @a iBaseReg after the store/load,
2856 * and update the register. */
2857 kArm64InstrStLdPairType_PostIndex = 1,
2858 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2859 * but don't update the register. */
2860 kArm64InstrStLdPairType_Signed = 2,
2861 /** Add @a iImm7*sizeof(reg) to @a iBaseReg before the store/load,
2862 * and update the register. */
2863 kArm64InstrStLdPairType_PreIndex = 3
2864} ARM64INSTRSTLDPAIRTYPE;
2865
2866/**
2867 * A64: Encodes either stp (store register pair) or ldp (load register pair).
2868 *
2869 * @returns The encoded instruction.
2870 * @param fLoad true for ldp, false of stp.
2871 * @param u2Opc When @a fSimdFp is @c false:
2872 * - 0 for 32-bit GPRs (Wt).
2873 * - 1 for encoding stgp or ldpsw.
2874 * - 2 for 64-bit GRPs (Xt).
2875 * - 3 illegal.
2876 * When @a fSimdFp is @c true:
2877 * - 0 for 32-bit SIMD&FP registers (St).
2878 * - 1 for 64-bit SIMD&FP registers (Dt).
2879 * - 2 for 128-bit SIMD&FP regsiters (Qt).
2880 * @param enmType The instruction variant wrt addressing and updating of the
2881 * addressing register.
2882 * @param iReg1 The first register to store/load.
2883 * @param iReg2 The second register to store/load.
2884 * @param iBaseReg The base register to use when addressing. SP is allowed.
2885 * @param iImm7 Signed addressing immediate value scaled, range -64..63,
2886 * will be multiplied by the register size.
2887 * @param fSimdFp true for SIMD&FP registers, false for GPRs and
2888 * stgp/ldpsw instructions.
2889 */
2890DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdPair(bool fLoad, uint32_t u2Opc, ARM64INSTRSTLDPAIRTYPE enmType,
2891 uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2892 bool fSimdFp = false)
2893{
2894 Assert(u2Opc < 3); Assert(iReg1 <= 31); Assert(iReg2 <= 31); Assert(iBaseReg <= 31); Assert(iImm7 < 64 && iImm7 >= -64);
2895 return (u2Opc << 30)
2896 | UINT32_C(0x28000000) /* 0b101000000000000000000000000000 */
2897 | ((uint32_t)fSimdFp << 26) /* VR bit, see "Top-level encodings for A64" */
2898 | ((uint32_t)enmType << 23)
2899 | ((uint32_t)fLoad << 22)
2900 | (((uint32_t)iImm7 & UINT32_C(0x7f)) << 15)
2901 | (iReg2 << 10)
2902 | (iBaseReg << 5)
2903 | iReg1;
2904}
2905
2906
2907/** A64: ldp x1, x2, [x3] */
2908DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2909 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2910 bool f64Bit = true)
2911{
2912 return Armv8A64MkInstrStLdPair(true /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2913}
2914
2915
2916/** A64: stp x1, x2, [x3] */
2917DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStPairGpr(uint32_t iReg1, uint32_t iReg2, uint32_t iBaseReg, int32_t iImm7 = 0,
2918 ARM64INSTRSTLDPAIRTYPE enmType = kArm64InstrStLdPairType_Signed,
2919 bool f64Bit = true)
2920{
2921 return Armv8A64MkInstrStLdPair(false /*fLoad*/, f64Bit ? 2 : 0, enmType, iReg1, iReg2, iBaseReg, iImm7);
2922}
2923
2924
2925typedef enum /* Size VR Opc */
2926{ /* \ | / */
2927 kArmv8A64InstrLdStType_Mask_Size = 0x300,
2928 kArmv8A64InstrLdStType_Mask_VR = 0x010,
2929 kArmv8A64InstrLdStType_Mask_Opc = 0x003,
2930 kArmv8A64InstrLdStType_Shift_Size = 8,
2931 kArmv8A64InstrLdStType_Shift_VR = 4,
2932 kArmv8A64InstrLdStType_Shift_Opc = 0,
2933
2934 kArmv8A64InstrLdStType_St_Byte = 0x000,
2935 kArmv8A64InstrLdStType_Ld_Byte = 0x001,
2936 kArmv8A64InstrLdStType_Ld_SignByte64 = 0x002,
2937 kArmv8A64InstrLdStType_Ld_SignByte32 = 0x003,
2938
2939 kArmv8A64InstrLdStType_St_Half = 0x100, /**< Half = 16-bit */
2940 kArmv8A64InstrLdStType_Ld_Half = 0x101, /**< Half = 16-bit */
2941 kArmv8A64InstrLdStType_Ld_SignHalf64 = 0x102, /**< Half = 16-bit */
2942 kArmv8A64InstrLdStType_Ld_SignHalf32 = 0x103, /**< Half = 16-bit */
2943
2944 kArmv8A64InstrLdStType_St_Word = 0x200, /**< Word = 32-bit */
2945 kArmv8A64InstrLdStType_Ld_Word = 0x201, /**< Word = 32-bit */
2946 kArmv8A64InstrLdStType_Ld_SignWord64 = 0x202, /**< Word = 32-bit */
2947
2948 kArmv8A64InstrLdStType_St_Dword = 0x300, /**< Dword = 64-bit */
2949 kArmv8A64InstrLdStType_Ld_Dword = 0x301, /**< Dword = 64-bit */
2950
2951 kArmv8A64InstrLdStType_Prefetch = 0x302, /**< Not valid in all variations, check docs. */
2952
2953 kArmv8A64InstrLdStType_St_Vr_Byte = 0x010,
2954 kArmv8A64InstrLdStType_Ld_Vr_Byte = 0x011,
2955 kArmv8A64InstrLdStType_St_Vr_128 = 0x012,
2956 kArmv8A64InstrLdStType_Ld_Vr_128 = 0x013,
2957
2958 kArmv8A64InstrLdStType_St_Vr_Half = 0x110, /**< Half = 16-bit */
2959 kArmv8A64InstrLdStType_Ld_Vr_Half = 0x111, /**< Half = 16-bit */
2960
2961 kArmv8A64InstrLdStType_St_Vr_Word = 0x210, /**< Word = 32-bit */
2962 kArmv8A64InstrLdStType_Ld_Vr_Word = 0x211, /**< Word = 32-bit */
2963
2964 kArmv8A64InstrLdStType_St_Vr_Dword = 0x310, /**< Dword = 64-bit */
2965 kArmv8A64InstrLdStType_Ld_Vr_Dword = 0x311 /**< Dword = 64-bit */
2966
2967} ARMV8A64INSTRLDSTTYPE;
2968/** Checks if a ARMV8A64INSTRLDSTTYPE value is a store operation or not. */
2969#define ARMV8A64INSTRLDSTTYPE_IS_STORE(a_enmLdStType) (((unsigned)a_enmLdStType & (unsigned)kArmv8A64InstrLdStType_Mask_Opc) == 0)
2970
2971
2972/**
2973 * A64: Encodes load/store with unscaled 9-bit signed immediate.
2974 *
2975 * @returns The encoded instruction.
2976 * @param u32Opcode The base opcode value.
2977 * @param enmType The load/store instruction type. Prefech valid (PRFUM)
2978 * @param iReg The register to load into / store.
2979 * @param iBaseReg The base register to use when addressing. SP is allowed.
2980 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
2981 */
2982DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdImm9Ex(uint32_t u32Opcode, ARMV8A64INSTRLDSTTYPE enmType,
2983 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
2984{
2985 Assert(i9ImmDisp >= -256 && i9ImmDisp < 256); Assert(iReg < 32); Assert(iBaseReg < 32);
2986 return u32Opcode
2987 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
2988 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
2989 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
2990 | (((uint32_t)i9ImmDisp & UINT32_C(0x1ff)) << 12)
2991 | (iBaseReg << 5)
2992 | iReg;
2993}
2994
2995
2996/**
2997 * A64: Encodes load/store with unscaled 9-bit signed immediate.
2998 *
2999 * @returns The encoded instruction.
3000 * @param enmType The load/store instruction type. Prefech valid (PRFUM)
3001 * @param iReg The register to load into / store.
3002 * @param iBaseReg The base register to use when addressing. SP is allowed.
3003 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3004 */
3005DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSturLdur(ARMV8A64INSTRLDSTTYPE enmType,
3006 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3007{
3008 /* 3 2 1 0 */
3009 /* 10987654321098765432109876543210 */
3010 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000000) /* 0b00111000000000000000000000000000 */,
3011 enmType, iReg, iBaseReg, i9ImmDisp);
3012}
3013
3014/**
3015 * A64: Encodes load/store with unscaled 9-bit signed immediate, post-indexed.
3016 *
3017 * @returns The encoded instruction.
3018 * @param enmType The load/store instruction type. Prefech not valid.
3019 * @param iReg The register to load into / store.
3020 * @param iBaseReg The base register to use when addressing. SP is allowed.
3021 * Written back.
3022 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3023 */
3024DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPostIndex9(ARMV8A64INSTRLDSTTYPE enmType,
3025 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3026{
3027 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
3028 /* 10987654321098765432109876543210 */
3029 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000400) /* 0b00111000000000000000010000000000 */,
3030 enmType, iReg, iBaseReg, i9ImmDisp);
3031}
3032
3033/**
3034 * A64: Encodes load/store with unscaled 9-bit signed immediate, pre-indexed
3035 *
3036 * @returns The encoded instruction.
3037 * @param enmType The load/store instruction type. Prefech valid (PRFUM)
3038 * @param iReg The register to load into / store.
3039 * @param iBaseReg The base register to use when addressing. SP is allowed.
3040 * Written back.
3041 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3042 */
3043DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStrLdrPreIndex9(ARMV8A64INSTRLDSTTYPE enmType,
3044 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3045{
3046 Assert(enmType != kArmv8A64InstrLdStType_Prefetch); /* 3 2 1 0 */
3047 /* 10987654321098765432109876543210 */
3048 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000c00) /* 0b00111000000000000000110000000000 */,
3049 enmType, iReg, iBaseReg, i9ImmDisp);
3050}
3051
3052/**
3053 * A64: Encodes unprivileged load/store with unscaled 9-bit signed immediate.
3054 *
3055 * @returns The encoded instruction.
3056 * @param enmType The load/store instruction type. Prefech not valid,
3057 * nor any SIMD&FP variants.
3058 * @param iReg The register to load into / store.
3059 * @param iBaseReg The base register to use when addressing. SP is allowed.
3060 * @param i9ImmDisp The 9-bit signed addressing displacement. Unscaled.
3061 */
3062DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSttrLdtr(ARMV8A64INSTRLDSTTYPE enmType,
3063 uint32_t iReg, uint32_t iBaseReg, int32_t i9ImmDisp = 0)
3064{
3065 Assert(enmType != kArmv8A64InstrLdStType_Prefetch);
3066 Assert(!((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR));
3067 /* 3 2 1 0 */
3068 /* 10987654321098765432109876543210 */
3069 return Armv8A64MkInstrStLdImm9Ex(UINT32_C(0x38000800) /* 0b00111000000000000000100000000000 */,
3070 enmType, iReg, iBaseReg, i9ImmDisp);
3071}
3072
3073
3074/**
3075 * A64: Encodes load/store w/ scaled 12-bit unsigned address displacement.
3076 *
3077 * @returns The encoded instruction.
3078 * @param enmType The load/store instruction type. Prefech not valid,
3079 * nor any SIMD&FP variants.
3080 * @param iReg The register to load into / store.
3081 * @param iBaseReg The base register to use when addressing. SP is allowed.
3082 * @param u12ImmDisp Addressing displacement, scaled by size.
3083 */
3084DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRUOff(ARMV8A64INSTRLDSTTYPE enmType,
3085 uint32_t iReg, uint32_t iBaseReg, uint32_t u12ImmDisp)
3086{
3087 Assert(u12ImmDisp < 4096U);
3088 Assert(iReg < 32); /* 3 2 1 0 */
3089 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
3090 return UINT32_C(0x39000000) /* 0b00111001000000000000000000000000 */
3091 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
3092 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
3093 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
3094 | (u12ImmDisp << 10)
3095 | (iBaseReg << 5)
3096 | iReg;
3097}
3098
3099typedef enum
3100{
3101 kArmv8A64InstrLdStExtend_Uxtw = 2, /**< Zero-extend (32-bit) word. */
3102 kArmv8A64InstrLdStExtend_Lsl = 3, /**< Shift left (64-bit). */
3103 kArmv8A64InstrLdStExtend_Sxtw = 6, /**< Sign-extend (32-bit) word. */
3104 kArmv8A64InstrLdStExtend_Sxtx = 7 /**< Sign-extend (64-bit) dword (to 128-bit SIMD&FP reg, presumably). */
3105} ARMV8A64INSTRLDSTEXTEND;
3106
3107/**
3108 * A64: Encodes load/store w/ index register.
3109 *
3110 * @returns The encoded instruction.
3111 * @param enmType The load/store instruction type.
3112 * @param iReg The register to load into / store.
3113 * @param iBaseReg The base register to use when addressing. SP is allowed.
3114 * @param iRegIndex The index register.
3115 * @param enmExtend The extending to apply to @a iRegIndex.
3116 * @param fShifted Whether to shift the index. The shift amount corresponds
3117 * to the access size (thus irrelevant for byte accesses).
3118 */
3119DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrStLdRegIdx(ARMV8A64INSTRLDSTTYPE enmType,
3120 uint32_t iReg, uint32_t iBaseReg, uint32_t iRegIndex,
3121 ARMV8A64INSTRLDSTEXTEND enmExtend = kArmv8A64InstrLdStExtend_Lsl,
3122 bool fShifted = false)
3123{
3124 Assert(iRegIndex < 32);
3125 Assert(iReg < 32); /* 3 2 1 0 */
3126 Assert(iBaseReg < 32); /* 10987654321098765432109876543210 */
3127 return UINT32_C(0x38200800) /* 0b00111000001000000000100000000000 */
3128 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Size) << (30 - kArmv8A64InstrLdStType_Shift_Size))
3129 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_VR) << (26 - kArmv8A64InstrLdStType_Shift_VR))
3130 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdStType_Mask_Opc) << (22 - kArmv8A64InstrLdStType_Shift_Opc))
3131 | (iRegIndex << 16)
3132 | ((uint32_t)enmExtend << 13)
3133 | ((uint32_t)fShifted << 12)
3134 | (iBaseReg << 5)
3135 | iReg;
3136}
3137
3138typedef enum /* VR Opc */
3139{ /* \ | */
3140 kArmv8A64InstrLdrLitteral_Mask_Vr = 0x10,
3141 kArmv8A64InstrLdrLitteral_Mask_Opc = 0x03,
3142 kArmv8A64InstrLdrLitteral_Shift_Vr = 4,
3143 kArmv8A64InstrLdrLitteral_Shift_Opc = 0,
3144
3145 kArmv8A64InstrLdrLitteral_Word = 0x00, /**< word = 32-bit */
3146 kArmv8A64InstrLdrLitteral_Dword = 0x01, /**< dword = 64-bit */
3147 kArmv8A64InstrLdrLitteral_SignWord64 = 0x02, /**< Loads word, signextending it to 64-bit */
3148 kArmv8A64InstrLdrLitteral_Prefetch = 0x03, /**< prfm */
3149
3150 kArmv8A64InstrLdrLitteral_Vr_Word = 0x10, /**< word = 32-bit */
3151 kArmv8A64InstrLdrLitteral_Vr_Dword = 0x11, /**< dword = 64-bit */
3152 kArmv8A64InstrLdrLitteral_Vr_128 = 0x12
3153} ARMV8A64INSTRLDRLITTERAL;
3154
3155
3156/**
3157 * A64: Encodes load w/ a PC relative 19-bit signed immediate.
3158 *
3159 * @returns The encoded instruction.
3160 * @param enmType The load instruction type.
3161 * @param iReg The register to load into.
3162 * @param i19Imm The signed immediate value, multiplied by 4 regardless
3163 * of access size.
3164 */
3165DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLdrLitteral(ARMV8A64INSTRLDRLITTERAL enmType, uint32_t iReg, int32_t i19Imm)
3166{
3167 Assert(i19Imm >= -262144 && i19Imm < 262144);
3168 Assert(iReg < 32); /* 3 2 1 0 */
3169 /* 10987654321098765432109876543210 */
3170 return UINT32_C(0x30000000) /* 0b00110000000000000000000000000000 */
3171 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Vr) << (26 - kArmv8A64InstrLdrLitteral_Shift_Vr))
3172 | (((uint32_t)enmType & (uint32_t)kArmv8A64InstrLdrLitteral_Mask_Opc) << (30 - kArmv8A64InstrLdrLitteral_Shift_Opc))
3173 | (((uint32_t)i19Imm & UINT32_C(0x00ffffe0)) << 5)
3174 | iReg;
3175}
3176
3177
3178typedef enum
3179{
3180 kArmv8A64InstrMovWide_Not = 0, /**< MOVN - reg = ~(imm16 << hw*16; */
3181 kArmv8A64InstrMovWide_Zero = 2, /**< MOVZ - reg = imm16 << hw*16; */
3182 kArmv8A64InstrMovWide_Keep = 3 /**< MOVK - keep the other halfwords. */
3183} ARMV8A64INSTRMOVWIDE;
3184
3185/**
3186 * A64: Encode a move wide immediate instruction.
3187 *
3188 * @returns The encoded instruction.
3189 * @param enmType The load instruction type.
3190 * @param iRegDst The register to mov the immediate into.
3191 * @param uImm16 The immediate value.
3192 * @param iHalfWord Which of the 4 (@a f64Bit = true) or 2 register (16-bit)
3193 * half-words to target:
3194 * - 0 for bits 15:00,
3195 * - 1 for bits 31:16,
3196 * - 2 for bits 47:32 (f64Bit=true only),
3197 * - 3 for bits 63:48 (f64Bit=true only).
3198 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit GPRs.
3199 */
3200DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovWide(ARMV8A64INSTRMOVWIDE enmType, uint32_t iRegDst, uint32_t uImm16,
3201 uint32_t iHalfWord = 0, bool f64Bit = true)
3202{
3203 Assert(iRegDst < 32U); Assert(uImm16 <= (uint32_t)UINT16_MAX); Assert(iHalfWord < 2U + (2U * f64Bit));
3204 return ((uint32_t)f64Bit << 31)
3205 | ((uint32_t)enmType << 29)
3206 | UINT32_C(0x12800000)
3207 | (iHalfWord << 21)
3208 | (uImm16 << 5)
3209 | iRegDst;
3210}
3211
3212/** A64: Encodes a MOVN instruction.
3213 * @see Armv8A64MkInstrMovWide for parameter details. */
3214DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovN(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3215{
3216 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Not, iRegDst, uImm16, iHalfWord, f64Bit);
3217}
3218
3219/** A64: Encodes a MOVZ instruction.
3220 * @see Armv8A64MkInstrMovWide for parameter details. */
3221DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovZ(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3222{
3223 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Zero, iRegDst, uImm16, iHalfWord, f64Bit);
3224}
3225
3226/** A64: Encodes a MOVK instruction.
3227 * @see Armv8A64MkInstrMovWide for parameter details. */
3228DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMovK(uint32_t iRegDst, uint32_t uImm16, uint32_t iHalfWord = 0, bool f64Bit = true)
3229{
3230 return Armv8A64MkInstrMovWide(kArmv8A64InstrMovWide_Keep, iRegDst, uImm16, iHalfWord, f64Bit);
3231}
3232
3233
3234typedef enum
3235{
3236 kArmv8A64InstrShift_Lsl = 0,
3237 kArmv8A64InstrShift_Lsr,
3238 kArmv8A64InstrShift_Asr,
3239 kArmv8A64InstrShift_Ror
3240} ARMV8A64INSTRSHIFT;
3241
3242
3243/**
3244 * A64: Encodes a logical instruction with a shifted 2nd register operand.
3245 *
3246 * @returns The encoded instruction.
3247 * @param u2Opc The logical operation to perform.
3248 * @param fNot Whether to complement the 2nd operand.
3249 * @param iRegResult The output register.
3250 * @param iReg1 The 1st register operand.
3251 * @param iReg2Shifted The 2nd register operand, to which the optional
3252 * shifting is applied.
3253 * @param f64Bit true for 64-bit GPRs (default), @c false for 32-bit
3254 * GPRs.
3255 * @param offShift6 The shift amount (default: none).
3256 * @param enmShift The shift operation (default: LSL).
3257 */
3258DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalShiftedReg(uint32_t u2Opc, bool fNot,
3259 uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted,
3260 bool f64Bit, uint32_t offShift6, ARMV8A64INSTRSHIFT enmShift)
3261{
3262 Assert(u2Opc < 4); Assert(offShift6 < (f64Bit ? UINT32_C(64) : UINT32_C(32)));
3263 Assert(iRegResult < 32); Assert(iReg1 < 32); Assert(iReg2Shifted < 32);
3264 return ((uint32_t)f64Bit << 31)
3265 | (u2Opc << 29)
3266 | UINT32_C(0x0a000000)
3267 | ((uint32_t)enmShift << 22)
3268 | ((uint32_t)fNot << 21)
3269 | (iReg2Shifted << 16)
3270 | (offShift6 << 10)
3271 | (iReg1 << 5)
3272 | iRegResult;
3273}
3274
3275
3276/** A64: Encodes an AND instruction.
3277 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3278DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnd(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3279 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3280{
3281 return Armv8A64MkInstrLogicalShiftedReg(0, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3282}
3283
3284
3285/** A64: Encodes an BIC instruction.
3286 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3287DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBic(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3288 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3289{
3290 return Armv8A64MkInstrLogicalShiftedReg(0, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3291}
3292
3293
3294/** A64: Encodes an ORR instruction.
3295 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3296DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrr(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3297 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3298{
3299 return Armv8A64MkInstrLogicalShiftedReg(1, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3300}
3301
3302
3303/** A64: Encodes an MOV instruction.
3304 * This is an alias for "orr dst, xzr, src". */
3305DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMov(uint32_t iRegResult, uint32_t idxRegSrc, bool f64Bit = true)
3306{
3307 return Armv8A64MkInstrOrr(iRegResult, ARMV8_A64_REG_XZR, idxRegSrc, f64Bit);
3308}
3309
3310
3311/** A64: Encodes an ORN instruction.
3312 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3313DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrn(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3314 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3315{
3316 return Armv8A64MkInstrLogicalShiftedReg(1, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3317}
3318
3319
3320/** A64: Encodes an EOR instruction.
3321 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3322DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEor(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3323 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3324{
3325 return Armv8A64MkInstrLogicalShiftedReg(2, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3326}
3327
3328
3329/** A64: Encodes an EON instruction.
3330 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3331DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEon(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3332 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3333{
3334 return Armv8A64MkInstrLogicalShiftedReg(2, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3335}
3336
3337
3338/** A64: Encodes an ANDS instruction.
3339 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3340DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAnds(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3341 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3342{
3343 return Armv8A64MkInstrLogicalShiftedReg(3, false /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3344}
3345
3346
3347/** A64: Encodes an BICS instruction.
3348 * @see Armv8A64MkInstrLogicalShiftedReg for parameter details. */
3349DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBics(uint32_t iRegResult, uint32_t iReg1, uint32_t iReg2Shifted, bool f64Bit = true,
3350 uint32_t offShift6 = 0, ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
3351{
3352 return Armv8A64MkInstrLogicalShiftedReg(3, true /*fNot*/, iRegResult, iReg1, iReg2Shifted, f64Bit, offShift6, enmShift);
3353}
3354
3355
3356
3357/*
3358 * Data processing instructions with two source register operands.
3359 */
3360
3361
3362/** A64: Encodes an SUBP instruction. */
3363DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubP(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
3364{
3365 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
3366 return UINT32_C(0x80000000)
3367 | UINT32_C(0x1ac00000)
3368 | (UINT32_C(0) << 10)
3369 | (iRegSubtrahend << 16)
3370 | (iRegMinuend << 5)
3371 | iRegResult;
3372}
3373
3374
3375/** A64: Encodes an SUBPS instruction. */
3376DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubPS(uint32_t iRegResult, uint32_t iRegMinuend, uint32_t iRegSubtrahend)
3377{
3378 Assert(iRegResult < 32); Assert(iRegMinuend < 32); Assert(iRegSubtrahend < 32);
3379 return UINT32_C(0x80000000)
3380 | UINT32_C(0x20000000)
3381 | UINT32_C(0x1ac00000)
3382 | (UINT32_C(0) << 10)
3383 | (iRegSubtrahend << 16)
3384 | (iRegMinuend << 5)
3385 | iRegResult;
3386}
3387
3388
3389/** A64: Encodes an UDIV instruction. */
3390DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
3391{
3392 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
3393 return ((uint32_t)f64Bit << 31)
3394 | UINT32_C(0x1ac00000)
3395 | (UINT32_C(2) << 10)
3396 | (iRegDivisor << 16)
3397 | (iRegDividend << 5)
3398 | iRegResult;
3399}
3400
3401
3402/** A64: Encodes an SDIV instruction. */
3403DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSDiv(uint32_t iRegResult, uint32_t iRegDividend, uint32_t iRegDivisor, bool f64Bit = true)
3404{
3405 Assert(iRegResult < 32); Assert(iRegDividend < 32); Assert(iRegDivisor < 32);
3406 return ((uint32_t)f64Bit << 31)
3407 | UINT32_C(0x1ac00000)
3408 | (UINT32_C(3) << 10)
3409 | (iRegDivisor << 16)
3410 | (iRegDividend << 5)
3411 | iRegResult;
3412}
3413
3414
3415/** A64: Encodes an IRG instruction. */
3416DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrIrg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3417{
3418 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3419 return UINT32_C(0x80000000)
3420 | UINT32_C(0x1ac00000)
3421 | (UINT32_C(4) << 10)
3422 | (iRegSrc2 << 16)
3423 | (iRegSrc1 << 5)
3424 | iRegResult;
3425}
3426
3427
3428/** A64: Encodes a GMI instruction. */
3429DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrGmi(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3430{
3431 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3432 return UINT32_C(0x80000000)
3433 | UINT32_C(0x1ac00000)
3434 | (UINT32_C(5) << 10)
3435 | (iRegSrc2 << 16)
3436 | (iRegSrc1 << 5)
3437 | iRegResult;
3438}
3439
3440
3441/** A64: Encodes an LSLV instruction. */
3442DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3443{
3444 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3445 return ((uint32_t)f64Bit << 31)
3446 | UINT32_C(0x1ac00000)
3447 | (UINT32_C(8) << 10)
3448 | (iRegCount << 16)
3449 | (iRegSrc << 5)
3450 | iRegResult;
3451}
3452
3453
3454/** A64: Encodes an LSRV instruction. */
3455DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3456{
3457 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3458 return ((uint32_t)f64Bit << 31)
3459 | UINT32_C(0x1ac00000)
3460 | (UINT32_C(9) << 10)
3461 | (iRegCount << 16)
3462 | (iRegSrc << 5)
3463 | iRegResult;
3464}
3465
3466
3467/** A64: Encodes an ASRV instruction. */
3468DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3469{
3470 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3471 return ((uint32_t)f64Bit << 31)
3472 | UINT32_C(0x1ac00000)
3473 | (UINT32_C(10) << 10)
3474 | (iRegCount << 16)
3475 | (iRegSrc << 5)
3476 | iRegResult;
3477}
3478
3479
3480/** A64: Encodes a RORV instruction. */
3481DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorv(uint32_t iRegResult, uint32_t iRegSrc, uint32_t iRegCount, bool f64Bit = true)
3482{
3483 Assert(iRegResult < 32); Assert(iRegSrc < 32); Assert(iRegCount < 32);
3484 return ((uint32_t)f64Bit << 31)
3485 | UINT32_C(0x1ac00000)
3486 | (UINT32_C(11) << 10)
3487 | (iRegCount << 16)
3488 | (iRegSrc << 5)
3489 | iRegResult;
3490}
3491
3492
3493/** A64: Encodes a PACGA instruction. */
3494DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrPacga(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2)
3495{
3496 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3497 return UINT32_C(0x80000000)
3498 | UINT32_C(0x1ac00000)
3499 | (UINT32_C(12) << 10)
3500 | (iRegSrc2 << 16)
3501 | (iRegSrc1 << 5)
3502 | iRegResult;
3503}
3504
3505
3506/** A64: Encodes a CRC32* instruction. */
3507DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
3508{
3509 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
3510 return ((uint32_t)(uSize == 3) << 31)
3511 | UINT32_C(0x1ac00000)
3512 | (UINT32_C(16) << 10)
3513 | (uSize << 10)
3514 | (iRegValue << 16)
3515 | (iRegCrc << 5)
3516 | iRegResult;
3517}
3518
3519
3520/** A64: Encodes a CRC32B instruction. */
3521DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32B(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3522{
3523 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 0);
3524}
3525
3526
3527/** A64: Encodes a CRC32H instruction. */
3528DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32H(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3529{
3530 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 1);
3531}
3532
3533
3534/** A64: Encodes a CRC32W instruction. */
3535DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32W(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3536{
3537 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 2);
3538}
3539
3540
3541/** A64: Encodes a CRC32X instruction. */
3542DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32X(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3543{
3544 return Armv8A64MkInstrCrc32(iRegResult, iRegCrc, iRegValue, 3);
3545}
3546
3547
3548/** A64: Encodes a CRC32C* instruction. */
3549DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32c(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue, uint32_t uSize)
3550{
3551 Assert(iRegResult < 32); Assert(iRegCrc < 32); Assert(iRegValue < 32); Assert(uSize < 4);
3552 return ((uint32_t)(uSize == 3) << 31)
3553 | UINT32_C(0x1ac00000)
3554 | (UINT32_C(20) << 10)
3555 | (uSize << 10)
3556 | (iRegValue << 16)
3557 | (iRegCrc << 5)
3558 | iRegResult;
3559}
3560
3561
3562/** A64: Encodes a CRC32B instruction. */
3563DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cB(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3564{
3565 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 0);
3566}
3567
3568
3569/** A64: Encodes a CRC32CH instruction. */
3570DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cH(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3571{
3572 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 1);
3573}
3574
3575
3576/** A64: Encodes a CRC32CW instruction. */
3577DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cW(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3578{
3579 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 2);
3580}
3581
3582
3583/** A64: Encodes a CRC32CX instruction. */
3584DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCrc32cX(uint32_t iRegResult, uint32_t iRegCrc, uint32_t iRegValue)
3585{
3586 return Armv8A64MkInstrCrc32c(iRegResult, iRegCrc, iRegValue, 3);
3587}
3588
3589
3590/** A64: Encodes an SMAX instruction. */
3591DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3592{
3593 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3594 return ((uint32_t)f64Bit << 31)
3595 | UINT32_C(0x1ac00000)
3596 | (UINT32_C(24) << 10)
3597 | (iRegSrc2 << 16)
3598 | (iRegSrc1 << 5)
3599 | iRegResult;
3600}
3601
3602
3603/** A64: Encodes an UMAX instruction. */
3604DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMax(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3605{
3606 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3607 return ((uint32_t)f64Bit << 31)
3608 | UINT32_C(0x1ac00000)
3609 | (UINT32_C(25) << 10)
3610 | (iRegSrc2 << 16)
3611 | (iRegSrc1 << 5)
3612 | iRegResult;
3613}
3614
3615
3616/** A64: Encodes an SMIN instruction. */
3617DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3618{
3619 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3620 return ((uint32_t)f64Bit << 31)
3621 | UINT32_C(0x1ac00000)
3622 | (UINT32_C(26) << 10)
3623 | (iRegSrc2 << 16)
3624 | (iRegSrc1 << 5)
3625 | iRegResult;
3626}
3627
3628
3629/** A64: Encodes an UMIN instruction. */
3630DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUMin(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
3631{
3632 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
3633 return ((uint32_t)f64Bit << 31)
3634 | UINT32_C(0x1ac00000)
3635 | (UINT32_C(27) << 10)
3636 | (iRegSrc2 << 16)
3637 | (iRegSrc1 << 5)
3638 | iRegResult;
3639}
3640
3641
3642# ifdef IPRT_INCLUDED_asm_h /* don't want this to be automatically included here. */
3643
3644/**
3645 * Converts immS and immR values (to logical instructions) to a 32-bit mask.
3646 *
3647 * @returns The decoded mask.
3648 * @param uImm6SizeLen The immS value from the instruction. (No N part
3649 * here, as that must be zero for instructions
3650 * operating on 32-bit wide registers.)
3651 * @param uImm6Rotations The immR value from the instruction.
3652 */
3653DECLINLINE(uint32_t) Armv8A64ConvertImmRImmS2Mask32(uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3654{
3655 Assert(uImm6SizeLen < 64); Assert(uImm6Rotations < 64);
3656
3657 /* Determine the element size. */
3658 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm6SizeLen ^ 0x3f) - 1U;
3659 Assert(cBitsElementLog2 + 1U != 0U);
3660
3661 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3662 Assert(uImm6Rotations < cBitsElement);
3663
3664 /* Extract the number of bits set to 1: */
3665 unsigned const cBitsSetTo1 = (uImm6SizeLen & (cBitsElement - 1U)) + 1;
3666 Assert(cBitsSetTo1 < cBitsElement);
3667 uint32_t const uElement = RT_BIT_32(cBitsSetTo1) - 1U;
3668
3669 /* Produce the unrotated pattern. */
3670 static const uint32_t s_auReplicate[]
3671 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3672 uint32_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3673
3674 /* Rotate it and return. */
3675 return ASMRotateRightU32(uPattern, uImm6Rotations & (cBitsElement - 1U));
3676}
3677
3678
3679/**
3680 * Converts N+immS and immR values (to logical instructions) to a 64-bit mask.
3681 *
3682 * @returns The decoded mask.
3683 * @param uImm7SizeLen The N:immS value from the instruction.
3684 * @param uImm6Rotations The immR value from the instruction.
3685 */
3686DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uImm7SizeLen, uint32_t uImm6Rotations)
3687{
3688 Assert(uImm7SizeLen < 128); Assert(uImm6Rotations < 64);
3689
3690 /* Determine the element size. */
3691 unsigned const cBitsElementLog2 = ASMBitLastSetU32(uImm7SizeLen ^ 0x3f) - 1U;
3692 Assert(cBitsElementLog2 + 1U != 0U);
3693
3694 unsigned const cBitsElement = RT_BIT_32(cBitsElementLog2);
3695 Assert(uImm6Rotations < cBitsElement);
3696
3697 /* Extract the number of bits set to 1: */
3698 unsigned const cBitsSetTo1 = (uImm7SizeLen & (cBitsElement - 1U)) + 1;
3699 Assert(cBitsSetTo1 < cBitsElement);
3700 uint64_t const uElement = RT_BIT_64(cBitsSetTo1) - 1U;
3701
3702 /* Produce the unrotated pattern. */
3703 static const uint64_t s_auReplicate[]
3704 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3705 uint64_t const uPattern = s_auReplicate[cBitsElementLog2] * uElement;
3706
3707 /* Rotate it and return. */
3708 return ASMRotateRightU64(uPattern, uImm6Rotations & (cBitsElement - 1U));
3709}
3710
3711
3712/**
3713 * Variant of Armv8A64ConvertImmRImmS2Mask64 where the N bit is separate from
3714 * the immS value.
3715 */
3716DECLINLINE(uint64_t) Armv8A64ConvertImmRImmS2Mask64(uint32_t uN, uint32_t uImm6SizeLen, uint32_t uImm6Rotations)
3717{
3718 return Armv8A64ConvertImmRImmS2Mask64((uN << 6) | uImm6SizeLen, uImm6Rotations);
3719}
3720
3721
3722/**
3723 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3724 * 32-bit bitmask to a set of immediates for those instructions.
3725 *
3726 * @returns true if successful, false if not.
3727 * @param fMask The mask value to convert.
3728 * @param puImm6SizeLen Where to return the immS part (N is always zero for
3729 * 32-bit wide masks).
3730 * @param puImm6Rotations Where to return the immR.
3731 */
3732DECLINLINE(bool) Armv8A64ConvertMask32ToImmRImmS(uint32_t fMask, uint32_t *puImm6SizeLen, uint32_t *puImm6Rotations)
3733{
3734 /* Fend off 0 and UINT32_MAX as these cannot be represented. */
3735 if ((uint32_t)(fMask + 1U) <= 1)
3736 return false;
3737
3738 /* Rotate the value will we get all 1s at the bottom and the zeros at the top. */
3739 unsigned const cRor = ASMCountTrailingZerosU32(fMask);
3740 unsigned const cRol = ASMCountLeadingZerosU32(~fMask);
3741 if (cRor)
3742 fMask = ASMRotateRightU32(fMask, cRor);
3743 else
3744 fMask = ASMRotateLeftU32(fMask, cRol);
3745 Assert(fMask & RT_BIT_32(0));
3746 Assert(!(fMask & RT_BIT_32(31)));
3747
3748 /* Count the trailing ones and leading zeros. */
3749 unsigned const cOnes = ASMCountTrailingZerosU32(~fMask);
3750 unsigned const cZeros = ASMCountLeadingZerosU32(fMask);
3751
3752 /* The potential element length is then the sum of the two above. */
3753 unsigned const cBitsElement = cOnes + cZeros;
3754 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3755 return false;
3756
3757 /* Special case: 32 bits element size. Since we're done here. */
3758 if (cBitsElement == 32)
3759 *puImm6SizeLen = cOnes - 1;
3760 else
3761 {
3762 /* Extract the element bits and check that these are replicated in the whole pattern. */
3763 uint32_t const uElement = RT_BIT_32(cOnes) - 1U;
3764 unsigned const cBitsElementLog2 = ASMBitFirstSetU32(cBitsElement) - 1;
3765
3766 static const uint32_t s_auReplicate[]
3767 = { UINT32_MAX, UINT32_MAX / 3, UINT32_MAX / 15, UINT32_MAX / 255, UINT32_MAX / 65535, 1 };
3768 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3769 *puImm6SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3770 else
3771 return false;
3772 }
3773 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3774
3775 return true;
3776}
3777
3778
3779/**
3780 * Helper for Armv8A64MkInstrLogicalImm and friends that tries to convert a
3781 * 64-bit bitmask to a set of immediates for those instructions.
3782 *
3783 * @returns true if successful, false if not.
3784 * @param fMask The mask value to convert.
3785 * @param puImm7SizeLen Where to return the N:immS part.
3786 * @param puImm6Rotations Where to return the immR.
3787 */
3788DECLINLINE(bool) Armv8A64ConvertMask64ToImmRImmS(uint64_t fMask, uint32_t *puImm7SizeLen, uint32_t *puImm6Rotations)
3789{
3790 /* Fend off 0 and UINT64_MAX as these cannot be represented. */
3791 if ((uint64_t)(fMask + 1U) <= 1)
3792 return false;
3793
3794 /* Rotate the value will we get all 1s at the bottom and the zeros at the top. */
3795 unsigned const cRor = ASMCountTrailingZerosU64(fMask);
3796 unsigned const cRol = ASMCountLeadingZerosU64(~fMask);
3797 if (cRor)
3798 fMask = ASMRotateRightU64(fMask, cRor);
3799 else
3800 fMask = ASMRotateLeftU64(fMask, cRol);
3801 Assert(fMask & RT_BIT_64(0));
3802 Assert(!(fMask & RT_BIT_64(63)));
3803
3804 /* Count the trailing ones and leading zeros. */
3805 unsigned const cOnes = ASMCountTrailingZerosU64(~fMask);
3806 unsigned const cZeros = ASMCountLeadingZerosU64(fMask);
3807
3808 /* The potential element length is then the sum of the two above. */
3809 unsigned const cBitsElement = cOnes + cZeros;
3810 if (!RT_IS_POWER_OF_TWO(cBitsElement) || cBitsElement < 2)
3811 return false;
3812
3813 /* Special case: 64 bits element size. Since we're done here. */
3814 if (cBitsElement == 64)
3815 *puImm7SizeLen = (cOnes - 1) | 0x40 /*N*/;
3816 else
3817 {
3818 /* Extract the element bits and check that these are replicated in the whole pattern. */
3819 uint64_t const uElement = RT_BIT_64(cOnes) - 1U;
3820 unsigned const cBitsElementLog2 = ASMBitFirstSetU64(cBitsElement) - 1;
3821
3822 static const uint64_t s_auReplicate[]
3823 = { UINT64_MAX, UINT64_MAX / 3, UINT64_MAX / 15, UINT64_MAX / 255, UINT64_MAX / 65535, UINT64_MAX / UINT32_MAX, 1 };
3824 if (s_auReplicate[cBitsElementLog2] * uElement == fMask)
3825 *puImm7SizeLen = (cOnes - 1) | ((0x3e << cBitsElementLog2) & 0x3f);
3826 else
3827 return false;
3828 }
3829 *puImm6Rotations = cRor ? cBitsElement - cRor : cRol;
3830
3831 return true;
3832}
3833
3834# endif /* IPRT_INCLUDED_asm_h */
3835
3836/**
3837 * A64: Encodes a logical instruction with an complicated immediate mask.
3838 *
3839 * The @a uImm7SizeLen parameter specifies two things:
3840 * 1. the element size and
3841 * 2. the number of bits set to 1 in the pattern.
3842 *
3843 * The element size is extracted by NOT'ing bits 5:0 (excludes the N bit at the
3844 * top) and using the position of the first bit set as a power of two.
3845 *
3846 * | N | 5 | 4 | 3 | 2 | 1 | 0 | element size |
3847 * |---|---|---|---|---|---|---|--------------|
3848 * | 0 | 1 | 1 | 1 | 1 | 0 | x | 2 bits |
3849 * | 0 | 1 | 1 | 1 | 0 | x | x | 4 bits |
3850 * | 0 | 1 | 1 | 0 | x | x | x | 8 bits |
3851 * | 0 | 1 | 0 | x | x | x | x | 16 bits |
3852 * | 0 | 0 | x | x | x | x | x | 32 bits |
3853 * | 1 | x | x | x | x | x | x | 64 bits |
3854 *
3855 * The 'x' forms the number of 1 bits in the pattern, minus one (i.e.
3856 * there is always one zero bit in the pattern).
3857 *
3858 * The @a uImm6Rotations parameter specifies how many bits to the right,
3859 * the element pattern is rotated. The rotation count must be less than the
3860 * element bit count (size).
3861 *
3862 * @returns The encoded instruction.
3863 * @param u2Opc The logical operation to perform.
3864 * @param iRegResult The output register.
3865 * @param iRegSrc The 1st register operand.
3866 * @param uImm7SizeLen The size/pattern length. We've combined the 1-bit N
3867 * field at the top of the 6-bit 'imms' field.
3868 *
3869 * @param uImm6Rotations The rotation count.
3870 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3871 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3872 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3873 */
3874DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLogicalImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3875 uint32_t uImm7SizeLen, uint32_t uImm6Rotations, bool f64Bit)
3876{
3877 Assert(u2Opc < 4); Assert(uImm7SizeLen < (f64Bit ? UINT32_C(0x7f) : UINT32_C(0x3f)));
3878 Assert(uImm6Rotations <= UINT32_C(0x3f)); Assert(iRegResult < 32); Assert(iRegSrc < 32);
3879 return ((uint32_t)f64Bit << 31)
3880 | (u2Opc << 29)
3881 | UINT32_C(0x12000000)
3882 | ((uImm7SizeLen & UINT32_C(0x40)) << (22 - 6))
3883 | (uImm6Rotations << 16)
3884 | ((uImm7SizeLen & UINT32_C(0x3f)) << 10)
3885 | (iRegSrc << 5)
3886 | iRegResult;
3887}
3888
3889
3890/** A64: Encodes an AND instruction w/ complicated immediate mask.
3891 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3892DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndImm(uint32_t iRegResult, uint32_t iRegSrc,
3893 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3894{
3895 return Armv8A64MkInstrLogicalImm(0, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3896}
3897
3898
3899/** A64: Encodes an ORR instruction w/ complicated immediate mask.
3900 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3901DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrOrrImm(uint32_t iRegResult, uint32_t iRegSrc,
3902 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3903{
3904 return Armv8A64MkInstrLogicalImm(1, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3905}
3906
3907
3908/** A64: Encodes an EOR instruction w/ complicated immediate mask.
3909 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3910DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrEorImm(uint32_t iRegResult, uint32_t iRegSrc,
3911 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3912{
3913 return Armv8A64MkInstrLogicalImm(2, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3914}
3915
3916
3917/** A64: Encodes an ANDS instruction w/ complicated immediate mask.
3918 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3919DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAndsImm(uint32_t iRegResult, uint32_t iRegSrc,
3920 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3921{
3922 return Armv8A64MkInstrLogicalImm(3, iRegResult, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3923}
3924
3925
3926/** A64: Encodes an TST instruction w/ complicated immediate mask.
3927 * @see Armv8A64MkInstrLogicalImm for parameter details. */
3928DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTstImm(uint32_t iRegSrc,
3929 uint32_t uImm7SizeLen, uint32_t uImm6Rotations = 0, bool f64Bit = true)
3930{
3931 return Armv8A64MkInstrAndsImm(ARMV8_A64_REG_XZR, iRegSrc, uImm7SizeLen, uImm6Rotations, f64Bit);
3932}
3933
3934
3935/**
3936 * A64: Encodes a bitfield instruction.
3937 *
3938 * @returns The encoded instruction.
3939 * @param u2Opc The bitfield operation to perform.
3940 * @param iRegResult The output register.
3941 * @param iRegSrc The 1st register operand.
3942 * @param cImm6Ror The right rotation count.
3943 * @param uImm6S The leftmost bit to be moved.
3944 * @param f64Bit true for 64-bit GPRs, @c false for 32-bit GPRs.
3945 * @param uN1 This must match @a f64Bit for all instructions
3946 * currently specified.
3947 * @see https://dinfuehr.github.io/blog/encoding-of-immediate-values-on-aarch64/
3948 * https://gist.githubusercontent.com/dinfuehr/51a01ac58c0b23e4de9aac313ed6a06a/raw/1892a274aa3238d55f83eec5b3828da2aec5f229/aarch64-logical-immediates.txt
3949 */
3950DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBitfieldImm(uint32_t u2Opc, uint32_t iRegResult, uint32_t iRegSrc,
3951 uint32_t cImm6Ror, uint32_t uImm6S, bool f64Bit, uint32_t uN1)
3952{
3953 Assert(cImm6Ror <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegResult < 32); Assert(u2Opc < 4);
3954 Assert(uImm6S <= (f64Bit ? UINT32_C(0x3f) : UINT32_C(0x1f))); Assert(iRegSrc < 32); Assert(uN1 <= (unsigned)f64Bit);
3955 return ((uint32_t)f64Bit << 31)
3956 | (u2Opc << 29)
3957 | UINT32_C(0x13000000)
3958 | (uN1 << 22)
3959 | (cImm6Ror << 16)
3960 | (uImm6S << 10)
3961 | (iRegSrc << 5)
3962 | iRegResult;
3963}
3964
3965
3966/** A64: Encodes a SBFM instruction.
3967 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3968DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
3969 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
3970{
3971 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
3972}
3973
3974
3975/** A64: Encodes a SXTB instruction (sign-extend 8-bit value to 32/64-bit).
3976 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3977DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3978{
3979 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 7, f64Bit);
3980}
3981
3982
3983/** A64: Encodes a SXTH instruction (sign-extend 16-bit value to 32/64-bit).
3984 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3985DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = true)
3986{
3987 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 15, f64Bit);
3988}
3989
3990
3991/** A64: Encodes a SXTH instruction (sign-extend 32-bit value to 64-bit).
3992 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
3993DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSxtw(uint32_t iRegResult, uint32_t iRegSrc)
3994{
3995 return Armv8A64MkInstrSbfm(iRegResult, iRegSrc, 0, 31, true /*f64Bit*/);
3996}
3997
3998
3999/** A64: Encodes an ASR instruction w/ immediate shift value.
4000 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4001DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4002{
4003 uint32_t const cWidth = f64Bit ? 63 : 31;
4004 Assert(cShift > 0); Assert(cShift <= cWidth);
4005 return Armv8A64MkInstrBitfieldImm(0, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
4006}
4007
4008
4009/** A64: Encodes a BFM instruction.
4010 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4011DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
4012 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
4013{
4014 return Armv8A64MkInstrBitfieldImm(1, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
4015}
4016
4017
4018/** A64: Encodes a BFI instruction (insert).
4019 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4020DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfi(uint32_t iRegResult, uint32_t iRegSrc,
4021 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4022{
4023 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
4024 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)-(int32_t)offFirstBit & (f64Bit ? 0x3f : 0x1f),
4025 cBitsWidth - 1, f64Bit);
4026}
4027
4028
4029/** A64: Encodes a BFC instruction (clear).
4030 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4031DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfc(uint32_t iRegResult,
4032 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4033{
4034 return Armv8A64MkInstrBfi(iRegResult, ARMV8_A64_REG_XZR, offFirstBit, cBitsWidth, f64Bit);
4035}
4036
4037
4038/** A64: Encodes a BFXIL instruction (insert low).
4039 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4040DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBfxil(uint32_t iRegResult, uint32_t iRegSrc,
4041 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4042{
4043 Assert(cBitsWidth > 0U); Assert(cBitsWidth < (f64Bit ? 64U : 32U)); Assert(offFirstBit < (f64Bit ? 64U : 32U));
4044 Assert(offFirstBit + cBitsWidth <= (f64Bit ? 64U : 32U));
4045 return Armv8A64MkInstrBfm(iRegResult, iRegSrc, (uint32_t)offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
4046}
4047
4048
4049/** A64: Encodes an UBFM instruction.
4050 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4051DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cImm6Ror, uint32_t uImm6S,
4052 bool f64Bit = true, uint32_t uN1 = UINT32_MAX)
4053{
4054 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cImm6Ror, uImm6S, f64Bit, uN1 == UINT32_MAX ? f64Bit : uN1);
4055}
4056
4057
4058/** A64: Encodes an UBFX instruction (zero extending extract).
4059 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4060DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfx(uint32_t iRegResult, uint32_t iRegSrc,
4061 uint32_t offFirstBit, uint32_t cBitsWidth, bool f64Bit = true)
4062{
4063 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, offFirstBit, offFirstBit + cBitsWidth - 1, f64Bit);
4064}
4065
4066
4067/** A64: Encodes an UBFIZ instruction (zero extending extract from bit zero,
4068 * shifted into destination).
4069 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4070DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUbfiz(uint32_t iRegResult, uint32_t iRegSrc,
4071 uint32_t offFirstBitDst, uint32_t cBitsWidth, bool f64Bit = true)
4072{
4073 uint32_t fMask = f64Bit ? 0x3f : 0x1f;
4074 return Armv8A64MkInstrUbfm(iRegResult, iRegSrc, -(int32_t)offFirstBitDst & fMask, cBitsWidth - 1, f64Bit);
4075}
4076
4077
4078/** A64: Encodes an LSL instruction w/ immediate shift value.
4079 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4080DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLslImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4081{
4082 uint32_t const cWidth = f64Bit ? 63 : 31;
4083 Assert(cShift > 0); Assert(cShift <= cWidth);
4084 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, (uint32_t)(0 - cShift) & cWidth,
4085 cWidth - cShift /*uImm6S*/, f64Bit, f64Bit);
4086}
4087
4088
4089/** A64: Encodes an LSR instruction w/ immediate shift value.
4090 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4091DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrLsrImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4092{
4093 uint32_t const cWidth = f64Bit ? 63 : 31;
4094 Assert(cShift > 0); Assert(cShift <= cWidth);
4095 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, cShift, cWidth /*uImm6S*/, f64Bit, f64Bit);
4096}
4097
4098
4099/** A64: Encodes an UXTB instruction - zero extend byte (8-bit).
4100 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4101DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxtb(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
4102{
4103 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 7, f64Bit, f64Bit);
4104}
4105
4106
4107/** A64: Encodes an UXTH instruction - zero extend half word (16-bit).
4108 * @see Armv8A64MkInstrBitfieldImm for parameter details. */
4109DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrUxth(uint32_t iRegResult, uint32_t iRegSrc, bool f64Bit = false)
4110{
4111 return Armv8A64MkInstrBitfieldImm(2, iRegResult, iRegSrc, 0, 15, f64Bit, f64Bit);
4112}
4113
4114
4115/**
4116 * A64: Encodes an EXTR instruction with an immediate.
4117 *
4118 * @returns The encoded instruction.
4119 * @param iRegResult The register to store the result in. ZR is valid.
4120 * @param iRegLow The register holding the least significant bits in the
4121 * extraction. ZR is valid.
4122 * @param iRegHigh The register holding the most significant bits in the
4123 * extraction. ZR is valid.
4124 * @param uLsb The bit number of the least significant bit, or where in
4125 * @a iRegLow to start the
4126 * extraction.
4127 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4128 */
4129DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrExtrImm(uint32_t iRegResult, uint32_t iRegLow, uint32_t iRegHigh, uint32_t uLsb,
4130 bool f64Bit = true)
4131{
4132 Assert(uLsb < (uint32_t)(f64Bit ? 64 : 32)); Assert(iRegHigh < 32); Assert(iRegLow < 32); Assert(iRegResult < 32);
4133 return ((uint32_t)f64Bit << 31)
4134 | UINT32_C(0x13800000)
4135 | ((uint32_t)f64Bit << 22) /*N*/
4136 | (iRegHigh << 16)
4137 | (uLsb << 10)
4138 | (iRegLow << 5)
4139 | iRegResult;
4140}
4141
4142
4143/** A64: Rotates the value of a register (alias for EXTR). */
4144DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRorImm(uint32_t iRegResult, uint32_t iRegSrc, uint32_t cShift, bool f64Bit = true)
4145{
4146 return Armv8A64MkInstrExtrImm(iRegResult, iRegSrc, iRegSrc, cShift, f64Bit);
4147}
4148
4149
4150/**
4151 * A64: Encodes either add, adds, sub or subs with unsigned 12-bit immediate.
4152 *
4153 * @returns The encoded instruction.
4154 * @param fSub true for sub and subs, false for add and
4155 * adds.
4156 * @param iRegResult The register to store the result in.
4157 * SP is valid when @a fSetFlags = false,
4158 * and ZR is valid otherwise.
4159 * @param iRegSrc The register containing the augend (@a fSub
4160 * = false) or minuend (@a fSub = true). SP is
4161 * a valid registers for all variations.
4162 * @param uImm12AddendSubtrahend The addend (@a fSub = false) or subtrahend
4163 * (@a fSub = true).
4164 * @param f64Bit true for 64-bit GRPs (default), false for
4165 * 32-bit GPRs.
4166 * @param fSetFlags Whether to set flags (adds / subs) or not
4167 * (add / sub - default).
4168 * @param fShift12 Whether to shift uImm12AddendSubtrahend 12
4169 * bits to the left, or not (default).
4170 */
4171DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubUImm12(bool fSub, uint32_t iRegResult, uint32_t iRegSrc,
4172 uint32_t uImm12AddendSubtrahend, bool f64Bit = true,
4173 bool fSetFlags = false, bool fShift12 = false)
4174{
4175 Assert(uImm12AddendSubtrahend < 4096); Assert(iRegSrc < 32); Assert(iRegResult < 32);
4176 return ((uint32_t)f64Bit << 31)
4177 | ((uint32_t)fSub << 30)
4178 | ((uint32_t)fSetFlags << 29)
4179 | UINT32_C(0x11000000)
4180 | ((uint32_t)fShift12 << 22)
4181 | (uImm12AddendSubtrahend << 10)
4182 | (iRegSrc << 5)
4183 | iRegResult;
4184}
4185
4186
4187/** Alias for sub zxr, reg, \#uimm12. */
4188DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpUImm12(uint32_t iRegSrc, uint32_t uImm12Comprahend,
4189 bool f64Bit = true, bool fShift12 = false)
4190{
4191 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc, uImm12Comprahend,
4192 f64Bit, true /*fSetFlags*/, fShift12);
4193}
4194
4195
4196/** ADD dst, src, \#uimm12 */
4197DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Addend,
4198 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
4199{
4200 return Armv8A64MkInstrAddSubUImm12(false /*fSub*/, iRegResult, iRegSrc, uImm12Addend, f64Bit, fSetFlags, fShift12);
4201}
4202
4203
4204/** SUB dst, src, \#uimm12 */
4205DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubUImm12(uint32_t iRegResult, uint32_t iRegSrc, uint32_t uImm12Subtrahend,
4206 bool f64Bit = true, bool fSetFlags = false, bool fShift12 = false)
4207{
4208 return Armv8A64MkInstrAddSubUImm12(true /*fSub*/, iRegResult, iRegSrc, uImm12Subtrahend, f64Bit, fSetFlags, fShift12);
4209}
4210
4211
4212/**
4213 * A64: Encodes either add, adds, sub or subs with shifted register.
4214 *
4215 * @returns The encoded instruction.
4216 * @param fSub true for sub and subs, false for add and
4217 * adds.
4218 * @param iRegResult The register to store the result in.
4219 * SP is NOT valid, but ZR is.
4220 * @param iRegSrc1 The register containing the augend (@a fSub
4221 * = false) or minuend (@a fSub = true).
4222 * SP is NOT valid, but ZR is.
4223 * @param iRegSrc2 The register containing the addened (@a fSub
4224 * = false) or subtrahend (@a fSub = true).
4225 * SP is NOT valid, but ZR is.
4226 * @param f64Bit true for 64-bit GRPs (default), false for
4227 * 32-bit GPRs.
4228 * @param fSetFlags Whether to set flags (adds / subs) or not
4229 * (add / sub - default).
4230 * @param cShift The shift count to apply to @a iRegSrc2.
4231 * @param enmShift The shift type to apply to the @a iRegSrc2
4232 * register. kArmv8A64InstrShift_Ror is
4233 * reserved.
4234 */
4235DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubReg(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4236 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4237 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4238{
4239 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4240 Assert(cShift < (f64Bit ? 64U : 32U)); Assert(enmShift != kArmv8A64InstrShift_Ror);
4241
4242 return ((uint32_t)f64Bit << 31)
4243 | ((uint32_t)fSub << 30)
4244 | ((uint32_t)fSetFlags << 29)
4245 | UINT32_C(0x0b000000)
4246 | ((uint32_t)enmShift << 22)
4247 | (iRegSrc2 << 16)
4248 | (cShift << 10)
4249 | (iRegSrc1 << 5)
4250 | iRegResult;
4251}
4252
4253
4254/** Alias for sub zxr, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx]. */
4255DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true, uint32_t cShift = 0,
4256 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4257{
4258 return Armv8A64MkInstrAddSubReg(true /*fSub*/, ARMV8_A64_REG_XZR, iRegSrc1, iRegSrc2,
4259 f64Bit, true /*fSetFlags*/, cShift, enmShift);
4260}
4261
4262
4263/** ADD dst, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx] */
4264DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4265 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4266 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4267{
4268 return Armv8A64MkInstrAddSubReg(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
4269}
4270
4271
4272/** SUB dst, reg1, reg2 [, LSL/LSR/ASR/ROR \#xx] */
4273DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSubReg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4274 bool f64Bit = true, bool fSetFlags = false, uint32_t cShift = 0,
4275 ARMV8A64INSTRSHIFT enmShift = kArmv8A64InstrShift_Lsl)
4276{
4277 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags, cShift, enmShift);
4278}
4279
4280
4281/** NEG dst */
4282DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrNeg(uint32_t iRegResult, bool f64Bit = true, bool fSetFlags = false)
4283{
4284 return Armv8A64MkInstrAddSubReg(true /*fSub*/, iRegResult, ARMV8_A64_REG_XZR, iRegResult, f64Bit, fSetFlags);
4285}
4286
4287
4288/** Extension option for 'extended register' instructions. */
4289typedef enum ARMV8A64INSTREXTEND
4290{
4291 kArmv8A64InstrExtend_UxtB = 0,
4292 kArmv8A64InstrExtend_UxtH,
4293 kArmv8A64InstrExtend_UxtW,
4294 kArmv8A64InstrExtend_UxtX,
4295 kArmv8A64InstrExtend_SxtB,
4296 kArmv8A64InstrExtend_SxtH,
4297 kArmv8A64InstrExtend_SxtW,
4298 kArmv8A64InstrExtend_SxtX,
4299 /** The default is either UXTW or UXTX depending on whether the instruction
4300 * is in 32-bit or 64-bit mode. Thus, this needs to be resolved according
4301 * to the f64Bit value. */
4302 kArmv8A64InstrExtend_Default
4303} ARMV8A64INSTREXTEND;
4304
4305
4306/**
4307 * A64: Encodes either add, adds, sub or subs with extended register encoding.
4308 *
4309 * @returns The encoded instruction.
4310 * @param fSub true for sub and subs, false for add and
4311 * adds.
4312 * @param iRegResult The register to store the result in.
4313 * SP is NOT valid, but ZR is.
4314 * @param iRegSrc1 The register containing the augend (@a fSub
4315 * = false) or minuend (@a fSub = true).
4316 * SP is valid, but ZR is NOT.
4317 * @param iRegSrc2 The register containing the addened (@a fSub
4318 * = false) or subtrahend (@a fSub = true).
4319 * SP is NOT valid, but ZR is.
4320 * @param f64Bit true for 64-bit GRPs (default), false for
4321 * 32-bit GPRs.
4322 * @param fSetFlags Whether to set flags (adds / subs) or not
4323 * (add / sub - default).
4324 * @param enmExtend The type of extension to apply to @a
4325 * iRegSrc2.
4326 * @param cShift The left shift count to apply to @a iRegSrc2
4327 * after enmExtend processing is done.
4328 * Max shift is 4 for some reason.
4329 */
4330DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAddSubRegExtend(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4331 bool f64Bit = true, bool fSetFlags = false,
4332 ARMV8A64INSTREXTEND enmExtend = kArmv8A64InstrExtend_Default,
4333 uint32_t cShift = 0)
4334{
4335 if (enmExtend == kArmv8A64InstrExtend_Default)
4336 enmExtend = f64Bit ? kArmv8A64InstrExtend_UxtW : kArmv8A64InstrExtend_UxtX;
4337 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(cShift <= 4);
4338
4339 return ((uint32_t)f64Bit << 31)
4340 | ((uint32_t)fSub << 30)
4341 | ((uint32_t)fSetFlags << 29)
4342 | UINT32_C(0x0b200000)
4343 | (iRegSrc2 << 16)
4344 | ((uint32_t)enmExtend << 13)
4345 | (cShift << 10)
4346 | (iRegSrc1 << 5)
4347 | iRegResult;
4348}
4349
4350
4351/**
4352 * A64: Encodes either adc, adcs, sbc or sbcs with two source registers.
4353 *
4354 * @returns The encoded instruction.
4355 * @param fSub true for sbc and sbcs, false for adc and
4356 * adcs.
4357 * @param iRegResult The register to store the result in. SP is
4358 * NOT valid, but ZR is.
4359 * @param iRegSrc1 The register containing the augend (@a fSub
4360 * = false) or minuend (@a fSub = true).
4361 * SP is NOT valid, but ZR is.
4362 * @param iRegSrc2 The register containing the addened (@a fSub
4363 * = false) or subtrahend (@a fSub = true).
4364 * SP is NOT valid, but ZR is.
4365 * @param f64Bit true for 64-bit GRPs (default), false for
4366 * 32-bit GPRs.
4367 * @param fSetFlags Whether to set flags (adds / subs) or not
4368 * (add / sub - default).
4369 */
4370DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcSbc(bool fSub, uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4371 bool f64Bit = true, bool fSetFlags = false)
4372{
4373 Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4374
4375 return ((uint32_t)f64Bit << 31)
4376 | ((uint32_t)fSub << 30)
4377 | ((uint32_t)fSetFlags << 29)
4378 | UINT32_C(0x1a000000)
4379 | (iRegSrc2 << 16)
4380 | (iRegSrc1 << 5)
4381 | iRegResult;
4382}
4383
4384
4385/** ADC dst, reg1, reg2 */
4386DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4387 bool f64Bit = true, bool fSetFlags = false)
4388{
4389 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
4390}
4391
4392
4393/** ADCS dst, reg1, reg2 */
4394DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrAdcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
4395{
4396 return Armv8A64MkInstrAdcSbc(false /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
4397}
4398
4399
4400/** SBC dst, reg1, reg2 */
4401DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4402 bool f64Bit = true, bool fSetFlags = false)
4403{
4404 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, fSetFlags);
4405}
4406
4407
4408/** SBCS dst, reg1, reg2 */
4409DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSbcs(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2, bool f64Bit = true)
4410{
4411 return Armv8A64MkInstrAdcSbc(true /*fSub*/, iRegResult, iRegSrc1, iRegSrc2, f64Bit, true /*fSetFlags*/);
4412}
4413
4414
4415/**
4416 * A64: Encodes a B (unconditional branch w/ imm) instruction.
4417 *
4418 * @returns The encoded instruction.
4419 * @param iImm26 Signed number of instruction to jump (i.e. *4).
4420 */
4421DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrB(int32_t iImm26)
4422{
4423 Assert(iImm26 >= -67108864 && iImm26 < 67108864);
4424 return UINT32_C(0x14000000) | ((uint32_t)iImm26 & UINT32_C(0x3ffffff));
4425}
4426
4427
4428/**
4429 * A64: Encodes a BL (unconditional call w/ imm) instruction.
4430 *
4431 * @returns The encoded instruction.
4432 * @param iImm26 Signed number of instruction to jump (i.e. *4).
4433 */
4434DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBl(int32_t iImm26)
4435{
4436 return Armv8A64MkInstrB(iImm26) | RT_BIT_32(31);
4437}
4438
4439
4440/**
4441 * A64: Encodes a BR (unconditional branch w/ register) instruction.
4442 *
4443 * @returns The encoded instruction.
4444 * @param iReg The register containing the target address.
4445 */
4446DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBr(uint32_t iReg)
4447{
4448 Assert(iReg < 32);
4449 return UINT32_C(0xd61f0000) | (iReg << 5);
4450}
4451
4452
4453/**
4454 * A64: Encodes a BLR instruction.
4455 *
4456 * @returns The encoded instruction.
4457 * @param iReg The register containing the target address.
4458 */
4459DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBlr(uint32_t iReg)
4460{
4461 return Armv8A64MkInstrBr(iReg) | RT_BIT_32(21);
4462}
4463
4464
4465/**
4466 * A64: Encodes CBZ and CBNZ (conditional branch w/ immediate) instructions.
4467 *
4468 * @returns The encoded instruction.
4469 * @param fJmpIfNotZero false to jump if register is zero, true to jump if
4470 * its not zero.
4471 * @param iImm19 Signed number of instruction to jump (i.e. *4).
4472 * @param iReg The GPR to check for zero / non-zero value.
4473 * @param f64Bit true for 64-bit register, false for 32-bit.
4474 */
4475DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbzCbnz(bool fJmpIfNotZero, int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4476{
4477 Assert(iReg < 32); Assert(iImm19 >= -262144 && iImm19 < 262144);
4478 return ((uint32_t)f64Bit << 31)
4479 | UINT32_C(0x34000000)
4480 | ((uint32_t)fJmpIfNotZero << 24)
4481 | (((uint32_t)iImm19 & 0x7ffff) << 5)
4482 | iReg;
4483}
4484
4485
4486/** A64: Encodes the CBZ instructions. */
4487DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4488{
4489 return Armv8A64MkInstrCbzCbnz(false /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
4490}
4491
4492
4493/** A64: Encodes the CBNZ instructions. */
4494DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCbnz(int32_t iImm19, uint32_t iReg, bool f64Bit = true)
4495{
4496 return Armv8A64MkInstrCbzCbnz(true /*fJmpIfNotZero*/, iImm19, iReg, f64Bit);
4497}
4498
4499
4500/**
4501 * A64: Encodes TBZ and TBNZ (conditional branch w/ immediate) instructions.
4502 *
4503 * @returns The encoded instruction.
4504 * @param fJmpIfNotZero false to jump if register is zero, true to jump if
4505 * its not zero.
4506 * @param iImm14 Signed number of instruction to jump (i.e. *4).
4507 * @param iReg The GPR to check for zero / non-zero value.
4508 * @param iBitNo The bit to test for.
4509 */
4510DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbzTbnz(bool fJmpIfNotZero, int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4511{
4512 Assert(iReg < 32); Assert(iImm14 >= -8192 && iImm14 < 8192); Assert(iBitNo < 64);
4513 return ((uint32_t)(iBitNo & 0x20) << (31-5))
4514 | UINT32_C(0x36000000)
4515 | ((uint32_t)fJmpIfNotZero << 24)
4516 | ((iBitNo & 0x1f) << 19)
4517 | (((uint32_t)iImm14 & 0x3fff) << 5)
4518 | iReg;
4519}
4520
4521
4522/**
4523 * A64: Encodes TBZ (conditional branch w/ immediate) instructions.
4524 *
4525 * @returns The encoded instruction.
4526 * @param iImm14 Signed number of instruction to jump (i.e. *4).
4527 * @param iReg The GPR to check for zero / non-zero value.
4528 * @param iBitNo The bit to test for.
4529 */
4530DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbz(int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4531{
4532 return Armv8A64MkInstrTbzTbnz(false /*fJmpIfNotZero*/, iImm14, iReg, iBitNo);
4533}
4534
4535
4536/**
4537 * A64: Encodes TBNZ (conditional branch w/ immediate) instructions.
4538 *
4539 * @returns The encoded instruction.
4540 * @param iImm14 Signed number of instruction to jump (i.e. *4).
4541 * @param iReg The GPR to check for zero / non-zero value.
4542 * @param iBitNo The bit to test for.
4543 */
4544DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrTbnz(int32_t iImm14, uint32_t iReg, uint32_t iBitNo)
4545{
4546 return Armv8A64MkInstrTbzTbnz(true /*fJmpIfNotZero*/, iImm14, iReg, iBitNo);
4547}
4548
4549
4550
4551/** Armv8 Condition codes. */
4552typedef enum ARMV8INSTRCOND
4553{
4554 kArmv8InstrCond_Eq = 0, /**< 0 - Equal - Zero set. */
4555 kArmv8InstrCond_Ne, /**< 1 - Not equal - Zero clear. */
4556
4557 kArmv8InstrCond_Cs, /**< 2 - Carry set (also known as 'HS'). */
4558 kArmv8InstrCond_Hs = kArmv8InstrCond_Cs, /**< 2 - Unsigned higher or same. */
4559 kArmv8InstrCond_Cc, /**< 3 - Carry clear (also known as 'LO'). */
4560 kArmv8InstrCond_Lo = kArmv8InstrCond_Cc, /**< 3 - Unsigned lower. */
4561
4562 kArmv8InstrCond_Mi, /**< 4 - Negative result (minus). */
4563 kArmv8InstrCond_Pl, /**< 5 - Positive or zero result (plus). */
4564
4565 kArmv8InstrCond_Vs, /**< 6 - Overflow set. */
4566 kArmv8InstrCond_Vc, /**< 7 - Overflow clear. */
4567
4568 kArmv8InstrCond_Hi, /**< 8 - Unsigned higher. */
4569 kArmv8InstrCond_Ls, /**< 9 - Unsigned lower or same. */
4570
4571 kArmv8InstrCond_Ge, /**< a - Signed greater or equal. */
4572 kArmv8InstrCond_Lt, /**< b - Signed less than. */
4573
4574 kArmv8InstrCond_Gt, /**< c - Signed greater than. */
4575 kArmv8InstrCond_Le, /**< d - Signed less or equal. */
4576
4577 kArmv8InstrCond_Al, /**< e - Condition is always true. */
4578 kArmv8InstrCond_Al1 /**< f - Condition is always true. */
4579} ARMV8INSTRCOND;
4580
4581/**
4582 * A64: Encodes conditional branch instruction w/ immediate target.
4583 *
4584 * @returns The encoded instruction.
4585 * @param enmCond The branch condition.
4586 * @param iImm19 Signed number of instruction to jump (i.e. *4).
4587 */
4588DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBCond(ARMV8INSTRCOND enmCond, int32_t iImm19)
4589{
4590 Assert((unsigned)enmCond < 16);
4591 return UINT32_C(0x54000000)
4592 | (((uint32_t)iImm19 & 0x7ffff) << 5)
4593 | (uint32_t)enmCond;
4594}
4595
4596
4597/**
4598 * A64: Encodes the BRK instruction.
4599 *
4600 * @returns The encoded instruction.
4601 * @param uImm16 Unsigned immediate value.
4602 */
4603DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrBrk(uint32_t uImm16)
4604{
4605 Assert(uImm16 < _64K);
4606 return UINT32_C(0xd4200000)
4607 | (uImm16 << 5);
4608}
4609
4610/** @name RMA64_NZCV_F_XXX - readable NZCV mask for CCMP and friends.
4611 * @{ */
4612#define ARMA64_NZCV_F_N0_Z0_C0_V0 UINT32_C(0x0)
4613#define ARMA64_NZCV_F_N0_Z0_C0_V1 UINT32_C(0x1)
4614#define ARMA64_NZCV_F_N0_Z0_C1_V0 UINT32_C(0x2)
4615#define ARMA64_NZCV_F_N0_Z0_C1_V1 UINT32_C(0x3)
4616#define ARMA64_NZCV_F_N0_Z1_C0_V0 UINT32_C(0x4)
4617#define ARMA64_NZCV_F_N0_Z1_C0_V1 UINT32_C(0x5)
4618#define ARMA64_NZCV_F_N0_Z1_C1_V0 UINT32_C(0x6)
4619#define ARMA64_NZCV_F_N0_Z1_C1_V1 UINT32_C(0x7)
4620
4621#define ARMA64_NZCV_F_N1_Z0_C0_V0 UINT32_C(0x8)
4622#define ARMA64_NZCV_F_N1_Z0_C0_V1 UINT32_C(0x9)
4623#define ARMA64_NZCV_F_N1_Z0_C1_V0 UINT32_C(0xa)
4624#define ARMA64_NZCV_F_N1_Z0_C1_V1 UINT32_C(0xb)
4625#define ARMA64_NZCV_F_N1_Z1_C0_V0 UINT32_C(0xc)
4626#define ARMA64_NZCV_F_N1_Z1_C0_V1 UINT32_C(0xd)
4627#define ARMA64_NZCV_F_N1_Z1_C1_V0 UINT32_C(0xe)
4628#define ARMA64_NZCV_F_N1_Z1_C1_V1 UINT32_C(0xf)
4629/** @} */
4630
4631/**
4632 * A64: Encodes CCMP or CCMN with two register operands.
4633 *
4634 * @returns The encoded instruction.
4635 * @param iRegSrc1 The 1st register. SP is NOT valid, but ZR is.
4636 * @param iRegSrc2 The 2nd register. SP is NOT valid, but ZR is.
4637 * @param fNzcv The N, Z, C & V flags values to load if the condition
4638 * does not match. See RMA64_NZCV_F_XXX.
4639 * @param enmCond The condition guarding the compare.
4640 * @param fCCmp Set for CCMP (default), clear for CCMN.
4641 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4642 */
4643DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4644 ARMV8INSTRCOND enmCond, bool fCCmp = true, bool f64Bit = true)
4645{
4646 Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32); Assert(fNzcv < 16);
4647
4648 return ((uint32_t)f64Bit << 31)
4649 | ((uint32_t)fCCmp << 30)
4650 | UINT32_C(0x3a400000)
4651 | (iRegSrc2 << 16)
4652 | ((uint32_t)enmCond << 12)
4653 | (iRegSrc1 << 5)
4654 | fNzcv;
4655}
4656
4657/** CCMP w/ reg. */
4658DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4659 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4660{
4661 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4662}
4663
4664
4665/** CCMN w/ reg. */
4666DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnReg(uint32_t iRegSrc1, uint32_t iRegSrc2, uint32_t fNzcv,
4667 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4668{
4669 return Armv8A64MkInstrCCmpCmnReg(iRegSrc1, iRegSrc2, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4670}
4671
4672
4673/**
4674 * A64: Encodes CCMP or CCMN with register and 5-bit immediate.
4675 *
4676 * @returns The encoded instruction.
4677 * @param iRegSrc The register. SP is NOT valid, but ZR is.
4678 * @param uImm5 The immediate, to compare iRegSrc with.
4679 * @param fNzcv The N, Z, C & V flags values to load if the condition
4680 * does not match. See RMA64_NZCV_F_XXX.
4681 * @param enmCond The condition guarding the compare.
4682 * @param fCCmp Set for CCMP (default), clear for CCMN.
4683 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4684 */
4685DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv, ARMV8INSTRCOND enmCond,
4686 bool fCCmp = true, bool f64Bit = true)
4687{
4688 Assert(iRegSrc < 32); Assert(uImm5 < 32); Assert(fNzcv < 16);
4689
4690 return ((uint32_t)f64Bit << 31)
4691 | ((uint32_t)fCCmp << 30)
4692 | UINT32_C(0x3a400800)
4693 | (uImm5 << 16)
4694 | ((uint32_t)enmCond << 12)
4695 | (iRegSrc << 5)
4696 | fNzcv;
4697}
4698
4699/** CCMP w/ immediate. */
4700DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmpImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4701 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4702{
4703 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, true /*fCCmp*/, f64Bit);
4704}
4705
4706
4707/** CCMN w/ immediate. */
4708DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCCmnImm(uint32_t iRegSrc, uint32_t uImm5, uint32_t fNzcv,
4709 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4710{
4711 return Armv8A64MkInstrCCmpCmnImm(iRegSrc, uImm5, fNzcv, enmCond, false /*fCCmp*/, f64Bit);
4712}
4713
4714
4715/**
4716 * A64: Encodes CSEL, CSINC, CSINV and CSNEG (three registers)
4717 *
4718 * @returns The encoded instruction.
4719 * @param uOp Opcode bit 30.
4720 * @param uOp2 Opcode bits 11:10.
4721 * @param iRegResult The result register. SP is NOT valid, but ZR is.
4722 * @param iRegSrc1 The 1st source register. SP is NOT valid, but ZR is.
4723 * @param iRegSrc2 The 2nd source register. SP is NOT valid, but ZR is.
4724 * @param enmCond The condition guarding the compare.
4725 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4726 */
4727DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCondSelect(uint32_t uOp, uint32_t uOp2, uint32_t iRegResult, uint32_t iRegSrc1,
4728 uint32_t iRegSrc2, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4729{
4730 Assert(uOp <= 1); Assert(uOp2 <= 1); Assert(iRegResult < 32); Assert(iRegSrc1 < 32); Assert(iRegSrc2 < 32);
4731
4732 return ((uint32_t)f64Bit << 31)
4733 | (uOp << 30)
4734 | UINT32_C(0x1a800000)
4735 | (iRegSrc2 << 16)
4736 | ((uint32_t)enmCond << 12)
4737 | (uOp2 << 10)
4738 | (iRegSrc1 << 5)
4739 | iRegResult;
4740}
4741
4742
4743/** A64: Encodes CSEL.
4744 * @see Armv8A64MkInstrCondSelect for details. */
4745DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSel(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4746 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4747{
4748 return Armv8A64MkInstrCondSelect(0, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4749}
4750
4751
4752/** A64: Encodes CSINC.
4753 * @see Armv8A64MkInstrCondSelect for details. */
4754DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInc(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4755 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4756{
4757 return Armv8A64MkInstrCondSelect(0, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4758}
4759
4760
4761/** A64: Encodes CSET.
4762 * @see Armv8A64MkInstrCondSelect for details. */
4763DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSet(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4764{
4765 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4766 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4767 return Armv8A64MkInstrCSInc(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4768}
4769
4770
4771/** A64: Encodes CSINV.
4772 * @see Armv8A64MkInstrCondSelect for details. */
4773DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSInv(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4774 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4775{
4776 return Armv8A64MkInstrCondSelect(1, 0, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4777}
4778
4779/** A64: Encodes CSETM.
4780 * @see Armv8A64MkInstrCondSelect for details. */
4781DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSetM(uint32_t iRegResult, ARMV8INSTRCOND enmCond, bool f64Bit = true)
4782{
4783 Assert(enmCond != kArmv8InstrCond_Al && enmCond != kArmv8InstrCond_Al1);
4784 enmCond = (ARMV8INSTRCOND)((uint32_t)enmCond ^ 1);
4785 return Armv8A64MkInstrCSInv(iRegResult, ARMV8_A64_REG_XZR, ARMV8_A64_REG_XZR, enmCond, f64Bit);
4786}
4787
4788
4789/** A64: Encodes CSNEG.
4790 * @see Armv8A64MkInstrCondSelect for details. */
4791DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrCSNeg(uint32_t iRegResult, uint32_t iRegSrc1, uint32_t iRegSrc2,
4792 ARMV8INSTRCOND enmCond, bool f64Bit = true)
4793{
4794 return Armv8A64MkInstrCondSelect(1, 1, iRegResult, iRegSrc1, iRegSrc2, enmCond, f64Bit);
4795}
4796
4797
4798/**
4799 * A64: Encodes REV instruction.
4800 *
4801 * @returns The encoded instruction.
4802 * @param iRegDst The destination register. SP is NOT valid.
4803 * @param iRegSrc The source register. SP is NOT valid, but ZR is
4804 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4805 */
4806DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4807{
4808 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4809
4810 return ((uint32_t)f64Bit << 31)
4811 | UINT32_C(0x5ac00800)
4812 | ((uint32_t)f64Bit << 10)
4813 | (iRegSrc << 5)
4814 | iRegDst;
4815}
4816
4817
4818/**
4819 * A64: Encodes REV16 instruction.
4820 *
4821 * @returns The encoded instruction.
4822 * @param iRegDst The destination register. SP is NOT valid.
4823 * @param iRegSrc The source register. SP is NOT valid, but ZR is
4824 * @param f64Bit true for 64-bit GRPs (default), false for 32-bit GPRs.
4825 */
4826DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRev16(uint32_t iRegDst, uint32_t iRegSrc, bool f64Bit = true)
4827{
4828 Assert(iRegDst < 32); Assert(iRegSrc < 32);
4829
4830 return ((uint32_t)f64Bit << 31)
4831 | UINT32_C(0x5ac00400)
4832 | (iRegSrc << 5)
4833 | iRegDst;
4834}
4835
4836
4837/**
4838 * A64: Encodes SETF8 & SETF16.
4839 *
4840 * @returns The encoded instruction.
4841 * @param iRegResult The register holding the result. SP is NOT valid.
4842 * @param f16Bit Set for SETF16, clear for SETF8.
4843 */
4844DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrSetF8SetF16(uint32_t iRegResult, bool f16Bit)
4845{
4846 Assert(iRegResult < 32);
4847
4848 return UINT32_C(0x3a00080d)
4849 | ((uint32_t)f16Bit << 14)
4850 | (iRegResult << 5);
4851}
4852
4853
4854/**
4855 * A64: Encodes RMIF.
4856 *
4857 * @returns The encoded instruction.
4858 * @param iRegSrc The source register to get flags from.
4859 * @param cRotateRight The right rotate count (LSB bit offset).
4860 * @param fMask Mask of which flag bits to set:
4861 * - bit 0: V
4862 * - bit 1: C
4863 * - bit 2: Z
4864 * - bit 3: N
4865 */
4866DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrRmif(uint32_t iRegSrc, uint32_t cRotateRight, uint32_t fMask)
4867{
4868 Assert(iRegSrc < 32); Assert(cRotateRight < 64); Assert(fMask <= 0xf);
4869
4870 return UINT32_C(0xba000400)
4871 | (cRotateRight << 15)
4872 | (iRegSrc << 5)
4873 | fMask;
4874}
4875
4876
4877/**
4878 * A64: Encodes MRS (for reading a system register into a GPR).
4879 *
4880 * @returns The encoded instruction.
4881 * @param iRegDst The register to put the result into. SP is NOT valid.
4882 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4883 * IPRT specific format, of the register to read.
4884 */
4885DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMrs(uint32_t iRegDst, uint32_t idSysReg)
4886{
4887 Assert(iRegDst < 32);
4888 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4889
4890 /* Note. The top bit of idSysReg must always be set and is also set in
4891 0xd5300000, otherwise we'll be encoding a different instruction. */
4892 return UINT32_C(0xd5300000)
4893 | (idSysReg << 5)
4894 | iRegDst;
4895}
4896
4897
4898/**
4899 * A64: Encodes MSR (for writing a GPR to a system register).
4900 *
4901 * @returns The encoded instruction.
4902 * @param iRegSrc The register which value to write. SP is NOT valid.
4903 * @param idSysReg The system register ID (ARMV8_AARCH64_SYSREG_XXX),
4904 * IPRT specific format, of the register to write.
4905 */
4906DECL_FORCE_INLINE(uint32_t) Armv8A64MkInstrMsr(uint32_t iRegSrc, uint32_t idSysReg)
4907{
4908 Assert(iRegSrc < 32);
4909 Assert(idSysReg < RT_BIT_32(16) && (idSysReg & RT_BIT_32(15)));
4910
4911 /* Note. The top bit of idSysReg must always be set and is also set in
4912 0xd5100000, otherwise we'll be encoding a different instruction. */
4913 return UINT32_C(0xd5100000)
4914 | (idSysReg << 5)
4915 | iRegSrc;
4916}
4917
4918
4919/** @} */
4920
4921
4922/** @defgroup grp_rt_armv8_mkinstr_vec Vector Instruction Encoding Helpers
4923 * @ingroup grp_rt_armv8_mkinstr
4924 *
4925 * A few inlined functions and macros for assisting in encoding common ARMv8
4926 * Neon/SIMD instructions.
4927 *
4928 * @{ */
4929
4930/** Armv8 vector logical operation. */
4931typedef enum
4932{
4933 kArmv8VecInstrLogicOp_And = 0, /**< AND */
4934 kArmv8VecInstrLogicOp_Bic = RT_BIT_32(22), /**< BIC */
4935 kArmv8VecInstrLogicOp_Orr = RT_BIT_32(23), /**< ORR */
4936 kArmv8VecInstrLogicOp_Orn = RT_BIT_32(23) | RT_BIT_32(22), /**< ORN */
4937 kArmv8VecInstrLogicOp_Eor = RT_BIT_32(29), /**< EOR */
4938 kArmv8VecInstrLogicOp_Bsl = RT_BIT_32(29) | RT_BIT_32(22), /**< BSL */
4939 kArmv8VecInstrLogicOp_Bit = RT_BIT_32(29) | RT_BIT_32(23), /**< BIT */
4940 kArmv8VecInstrLogicOp_Bif = RT_BIT_32(29) | RT_BIT_32(23) | RT_BIT_32(22) /**< BIF */
4941} ARMV8INSTRVECLOGICOP;
4942
4943
4944/**
4945 * A64: Encodes logical instruction (vector, register).
4946 *
4947 * @returns The encoded instruction.
4948 * @param enmOp The operation to encode.
4949 * @param iVecRegDst The vector register to put the result into.
4950 * @param iVecRegSrc1 The 1st source register.
4951 * @param iVecRegSrc2 The 2nd source register.
4952 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4953 * or just the low 64-bit (false).
4954 */
4955DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrLogical(ARMV8INSTRVECLOGICOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4956 bool f128Bit = true)
4957{
4958 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
4959
4960 return UINT32_C(0x0e201c00)
4961 | (uint32_t)enmOp
4962 | ((uint32_t)f128Bit << 30)
4963 | (iVecRegSrc2 << 16)
4964 | (iVecRegSrc1 << 5)
4965 | iVecRegDst;
4966}
4967
4968
4969/**
4970 * A64: Encodes ORR (vector, register).
4971 *
4972 * @returns The encoded instruction.
4973 * @param iVecRegDst The vector register to put the result into.
4974 * @param iVecRegSrc1 The 1st source register.
4975 * @param iVecRegSrc2 The 2nd source register.
4976 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4977 * or just the low 64-bit (false).
4978 */
4979DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrOrr(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4980 bool f128Bit = true)
4981{
4982 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_Orr, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
4983}
4984
4985
4986/**
4987 * A64: Encodes EOR (vector, register).
4988 *
4989 * @returns The encoded instruction.
4990 * @param iVecRegDst The vector register to put the result into.
4991 * @param iVecRegSrc1 The 1st source register.
4992 * @param iVecRegSrc2 The 2nd source register.
4993 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
4994 * or just the low 64-bit (false).
4995 */
4996DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrEor(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
4997 bool f128Bit = true)
4998{
4999 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_Eor, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
5000}
5001
5002
5003/**
5004 * A64: Encodes AND (vector, register).
5005 *
5006 * @returns The encoded instruction.
5007 * @param iVecRegDst The vector register to put the result into.
5008 * @param iVecRegSrc1 The 1st source register.
5009 * @param iVecRegSrc2 The 2nd source register.
5010 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5011 * or just the low 64-bit (false).
5012 */
5013DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrAnd(uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5014 bool f128Bit = true)
5015{
5016 return Armv8A64MkVecInstrLogical(kArmv8VecInstrLogicOp_And, iVecRegDst, iVecRegSrc1, iVecRegSrc2, f128Bit);
5017}
5018
5019
5020/** Armv8 UMOV/INS vector element size. */
5021typedef enum ARMV8INSTRUMOVINSSZ
5022{
5023 kArmv8InstrUmovInsSz_U8 = 0, /**< Byte. */
5024 kArmv8InstrUmovInsSz_U16 = 1, /**< Halfword. */
5025 kArmv8InstrUmovInsSz_U32 = 2, /**< 32-bit. */
5026 kArmv8InstrUmovInsSz_U64 = 3 /**< 64-bit (only valid when the destination is a 64-bit register. */
5027} ARMV8INSTRUMOVINSSZ;
5028
5029
5030/**
5031 * A64: Encodes UMOV (vector, register).
5032 *
5033 * @returns The encoded instruction.
5034 * @param iRegDst The register to put the result into.
5035 * @param iVecRegSrc The vector source register.
5036 * @param idxElem The element index.
5037 * @param enmSz Element size of the source vector register.
5038 * @param fDst64Bit Flag whether the destination register is 64-bit (true) or 32-bit (false).
5039 */
5040DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUmov(uint32_t iRegDst, uint32_t iVecRegSrc, uint8_t idxElem,
5041 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64, bool fDst64Bit = true)
5042{
5043 Assert(iRegDst < 32); Assert(iVecRegSrc < 32);
5044 Assert((fDst64Bit && enmSz == kArmv8InstrUmovInsSz_U64) || (!fDst64Bit && enmSz != kArmv8InstrUmovInsSz_U64));
5045 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
5046 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
5047 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
5048 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
5049
5050 return UINT32_C(0x0e003c00)
5051 | ((uint32_t)fDst64Bit << 30)
5052 | ((uint32_t)idxElem << (16 + enmSz + 1))
5053 | (RT_BIT_32(enmSz) << 16)
5054 | (iVecRegSrc << 5)
5055 | iRegDst;
5056}
5057
5058
5059/**
5060 * A64: Encodes INS (vector, register).
5061 *
5062 * @returns The encoded instruction.
5063 * @param iVecRegDst The vector register to put the result into.
5064 * @param iRegSrc The source register.
5065 * @param idxElem The element index for the destination.
5066 * @param enmSz Element size of the source vector register.
5067 *
5068 * @note This instruction assumes a 32-bit W<n> register for all non 64bit vector sizes.
5069 */
5070DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrIns(uint32_t iVecRegDst, uint32_t iRegSrc, uint8_t idxElem,
5071 ARMV8INSTRUMOVINSSZ enmSz = kArmv8InstrUmovInsSz_U64)
5072{
5073 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
5074 Assert( (enmSz == kArmv8InstrUmovInsSz_U8 && idxElem < 16)
5075 || (enmSz == kArmv8InstrUmovInsSz_U16 && idxElem < 8)
5076 || (enmSz == kArmv8InstrUmovInsSz_U32 && idxElem < 4)
5077 || (enmSz == kArmv8InstrUmovInsSz_U64 && idxElem < 2));
5078
5079 return UINT32_C(0x4e001c00)
5080 | ((uint32_t)idxElem << (16 + enmSz + 1))
5081 | (RT_BIT_32(enmSz) << 16)
5082 | (iRegSrc << 5)
5083 | iVecRegDst;
5084}
5085
5086
5087/**
5088 * A64: Encodes DUP (vector, register).
5089 *
5090 * @returns The encoded instruction.
5091 * @param iVecRegDst The vector register to put the result into.
5092 * @param iRegSrc The source register (ZR is valid).
5093 * @param enmSz Element size of the source vector register.
5094 * @param f128Bit Flag whether the instruction operates on the whole 128-bit of the vector register (true) or
5095 * just the low 64-bit (false).
5096 *
5097 * @note This instruction assumes a 32-bit W<n> register for all non 64bit vector sizes.
5098 */
5099DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrDup(uint32_t iVecRegDst, uint32_t iRegSrc, ARMV8INSTRUMOVINSSZ enmSz,
5100 bool f128Bit = true)
5101{
5102 Assert(iRegSrc < 32); Assert(iVecRegDst < 32);
5103 Assert( (enmSz == kArmv8InstrUmovInsSz_U8)
5104 || (enmSz == kArmv8InstrUmovInsSz_U16)
5105 || (enmSz == kArmv8InstrUmovInsSz_U32)
5106 || (enmSz == kArmv8InstrUmovInsSz_U64));
5107
5108 return UINT32_C(0x0e000c00)
5109 | ((uint32_t)f128Bit << 30)
5110 | (RT_BIT_32(enmSz) << 16)
5111 | (iRegSrc << 5)
5112 | iVecRegDst;
5113}
5114
5115
5116/** Armv8 vector compare to zero vector element size. */
5117typedef enum ARMV8INSTRVECCMPZEROSZ
5118{
5119 kArmv8InstrCmpZeroSz_S8 = 0, /**< Byte. */
5120 kArmv8InstrCmpZeroSz_S16 = 1, /**< Halfword. */
5121 kArmv8InstrCmpZeroSz_S32 = 2, /**< 32-bit. */
5122 kArmv8InstrCmpZeroSz_S64 = 3 /**< 64-bit. */
5123} ARMV8INSTRVECCMPZEROSZ;
5124
5125
5126/** Armv8 vector compare to zero vector operation. */
5127typedef enum ARMV8INSTRVECCMPZEROOP
5128{
5129 kArmv8InstrCmpZeroOp_Gt = 0, /**< Greater than. */
5130 kArmv8InstrCmpZeroOp_Ge = RT_BIT_32(29), /**< Greater than or equal to. */
5131 kArmv8InstrCmpZeroOp_Eq = RT_BIT_32(12), /**< Equal to. */
5132 kArmv8InstrCmpZeroOp_Le = RT_BIT_32(29) | RT_BIT_32(12) /**< Lower than or equal to. */
5133} ARMV8INSTRVECCMPZEROOP;
5134
5135
5136/**
5137 * A64: Encodes CMGT, CMGE, CMEQ or CMLE against zero (vector, register).
5138 *
5139 * @returns The encoded instruction.
5140 * @param iVecRegDst The vector register to put the result into.
5141 * @param iVecRegSrc The vector source register.
5142 * @param enmSz Vector element size.
5143 * @param enmOp The compare operation against to encode.
5144 */
5145DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmpToZero(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECCMPZEROSZ enmSz,
5146 ARMV8INSTRVECCMPZEROOP enmOp)
5147{
5148 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5149
5150 return UINT32_C(0x5e208800)
5151 | ((uint32_t)enmSz << 22)
5152 | (RT_BIT_32(enmSz) << 16)
5153 | (iVecRegSrc << 5)
5154 | iVecRegDst
5155 | (uint32_t)enmOp;
5156}
5157
5158
5159/**
5160 * A64: Encodes CNT (vector, register).
5161 *
5162 * @returns The encoded instruction.
5163 * @param iVecRegDst The vector register to put the result into.
5164 * @param iVecRegSrc The vector source register.
5165 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5166 * or just the low 64-bit (false).
5167 */
5168DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCnt(uint32_t iVecRegDst, uint32_t iVecRegSrc, bool f128Bit = true)
5169{
5170 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5171
5172 return UINT32_C(0x0e205800)
5173 | ((uint32_t)f128Bit << 30)
5174 | (iVecRegSrc << 5)
5175 | iVecRegDst;
5176}
5177
5178
5179/** Armv8 vector unsigned sum long across vector element size. */
5180typedef enum ARMV8INSTRVECUADDLVSZ
5181{
5182 kArmv8InstrUAddLVSz_8B = 0, /**< 8 x 8-bit. */
5183 kArmv8InstrUAddLVSz_16B = RT_BIT_32(30), /**< 16 x 8-bit. */
5184 kArmv8InstrUAddLVSz_4H = 1, /**< 4 x 16-bit. */
5185 kArmv8InstrUAddLVSz_8H = RT_BIT_32(30) | 1, /**< 8 x 16-bit. */
5186 kArmv8InstrUAddLVSz_4S = RT_BIT_32(30) | 2 /**< 4 x 32-bit. */
5187} ARMV8INSTRVECUADDLVSZ;
5188
5189
5190/**
5191 * A64: Encodes UADDLV (vector, register).
5192 *
5193 * @returns The encoded instruction.
5194 * @param iVecRegDst The vector register to put the result into.
5195 * @param iVecRegSrc The vector source register.
5196 * @param enmSz Element size.
5197 */
5198DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUAddLV(uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECUADDLVSZ enmSz)
5199{
5200 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5201
5202 return UINT32_C(0x2e303800)
5203 | ((uint32_t)enmSz)
5204 | (iVecRegSrc << 5)
5205 | iVecRegDst;
5206}
5207
5208
5209/** Armv8 USHR/USRA/URSRA/SSHR/SRSA/SSHR vector element size. */
5210typedef enum ARMV8INSTRUSHIFTSZ
5211{
5212 kArmv8InstrShiftSz_U8 = 8, /**< Byte. */
5213 kArmv8InstrShiftSz_U16 = 16, /**< Halfword. */
5214 kArmv8InstrShiftSz_U32 = 32, /**< 32-bit. */
5215 kArmv8InstrShiftSz_U64 = 64 /**< 64-bit. */
5216} ARMV8INSTRUSHIFTSZ;
5217
5218/**
5219 * A64: Encodes USHR/USRA/URSRA/SSHR/SRSA/SSHR (vector, register).
5220 *
5221 * @returns The encoded instruction.
5222 * @param iVecRegDst The vector register to put the result into.
5223 * @param iVecRegSrc The vector source register.
5224 * @param cShift Number of bits to shift.
5225 * @param enmSz Element size.
5226 * @param fUnsigned Flag whether this a signed or unsigned shift,
5227 * @param fRound Flag whether this is the rounding shift variant.
5228 * @param fAccum Flag whether this is the accumulate shift variant.
5229 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5230 * or just the low 64-bit (false).
5231 */
5232DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShrImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5233 bool fUnsigned = true, bool fRound = false, bool fAccum = false, bool f128Bit = true)
5234{
5235 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5236 Assert( cShift >= 1
5237 && ( (enmSz == kArmv8InstrShiftSz_U8 && cShift <= 8)
5238 || (enmSz == kArmv8InstrShiftSz_U16 && cShift <= 16)
5239 || (enmSz == kArmv8InstrShiftSz_U32 && cShift <= 32)
5240 || (enmSz == kArmv8InstrShiftSz_U64 && cShift <= 64)));
5241
5242 return UINT32_C(0x0f000400)
5243 | ((uint32_t)f128Bit << 30)
5244 | ((uint32_t)fUnsigned << 29)
5245 | ((((uint32_t)enmSz << 1) - cShift) << 16)
5246 | ((uint32_t)fRound << 13)
5247 | ((uint32_t)fAccum << 12)
5248 | (iVecRegSrc << 5)
5249 | iVecRegDst;
5250}
5251
5252
5253/**
5254 * A64: Encodes SHL (vector, register).
5255 *
5256 * @returns The encoded instruction.
5257 * @param iVecRegDst The vector register to put the result into.
5258 * @param iVecRegSrc The vector source register.
5259 * @param cShift Number of bits to shift.
5260 * @param enmSz Element size.
5261 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5262 * or just the low 64-bit (false).
5263 */
5264DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrShlImm(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5265 bool f128Bit = true)
5266{
5267 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5268 Assert( (enmSz == kArmv8InstrShiftSz_U8 && cShift < 8)
5269 || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
5270 || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32)
5271 || (enmSz == kArmv8InstrShiftSz_U64 && cShift < 64));
5272
5273 return UINT32_C(0x0f005400)
5274 | ((uint32_t)f128Bit << 30)
5275 | (((uint32_t)enmSz | cShift) << 16)
5276 | (iVecRegSrc << 5)
5277 | iVecRegDst;
5278}
5279
5280
5281/**
5282 * A64: Encodes SHLL/SHLL2/USHLL/USHLL2 (vector, register).
5283 *
5284 * @returns The encoded instruction.
5285 * @param iVecRegDst The vector register to put the result into.
5286 * @param iVecRegSrc The vector source register.
5287 * @param cShift Number of bits to shift.
5288 * @param enmSz Element size of the source vector register, the destination vector register
5289 * element size is twice as large, kArmv8InstrShiftSz_U64 is invalid.
5290 * @param fUnsigned Flag whether this is an unsigned shift left (true, default) or signed (false).
5291 * @param fUpper Flag whether this operates on the lower half (false, default) of the source vector register
5292 * or the upper half (true).
5293 */
5294DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrUShll(uint32_t iVecRegDst, uint32_t iVecRegSrc, uint8_t cShift, ARMV8INSTRUSHIFTSZ enmSz,
5295 bool fUnsigned = true, bool fUpper = false)
5296{
5297 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5298 Assert( (enmSz == kArmv8InstrShiftSz_U8 && cShift < 8)
5299 || (enmSz == kArmv8InstrShiftSz_U16 && cShift < 16)
5300 || (enmSz == kArmv8InstrShiftSz_U32 && cShift < 32));
5301
5302 return UINT32_C(0x0f00a400)
5303 | ((uint32_t)fUpper << 30)
5304 | ((uint32_t)fUnsigned << 29)
5305 | (((uint32_t)enmSz | cShift) << 16)
5306 | (iVecRegSrc << 5)
5307 | iVecRegDst;
5308}
5309
5310
5311/** Armv8 vector arith ops element size. */
5312typedef enum ARMV8INSTRVECARITHSZ
5313{
5314 kArmv8VecInstrArithSz_8 = 0, /**< 8-bit. */
5315 kArmv8VecInstrArithSz_16 = 1, /**< 16-bit. */
5316 kArmv8VecInstrArithSz_32 = 2, /**< 32-bit. */
5317 kArmv8VecInstrArithSz_64 = 3 /**< 64-bit. */
5318} ARMV8INSTRVECARITHSZ;
5319
5320
5321/** Armv8 vector arithmetic operation. */
5322typedef enum
5323{
5324 kArmv8VecInstrArithOp_Add = RT_BIT_32(15), /**< ADD */
5325 kArmv8VecInstrArithOp_Sub = RT_BIT_32(29) | RT_BIT_32(15), /**< SUB */
5326 kArmv8VecInstrArithOp_UnsignSat_Add = RT_BIT_32(29) | RT_BIT_32(11), /**< UQADD */
5327 kArmv8VecInstrArithOp_UnsignSat_Sub = RT_BIT_32(29) | RT_BIT_32(13) | RT_BIT_32(11), /**< UQSUB */
5328 kArmv8VecInstrArithOp_SignSat_Add = RT_BIT_32(11), /**< SQADD */
5329 kArmv8VecInstrArithOp_SignSat_Sub = RT_BIT_32(13) | RT_BIT_32(11), /**< SQSUB */
5330 kArmv8VecInstrArithOp_Mul = RT_BIT_32(15) | RT_BIT_32(12) | RT_BIT_32(11) /**< MUL */
5331} ARMV8INSTRVECARITHOP;
5332
5333
5334/**
5335 * A64: Encodes an arithmetic operation (vector, register).
5336 *
5337 * @returns The encoded instruction.
5338 * @param enmOp The operation to encode.
5339 * @param iVecRegDst The vector register to put the result into.
5340 * @param iVecRegSrc1 The first vector source register.
5341 * @param iVecRegSrc2 The second vector source register.
5342 * @param enmSz Element size.
5343 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5344 * or just the low 64-bit (false).
5345 */
5346DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrArithOp(ARMV8INSTRVECARITHOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5347 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5348{
5349 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5350
5351 return UINT32_C(0x0e200400)
5352 | (uint32_t)enmOp
5353 | ((uint32_t)f128Bit << 30)
5354 | ((uint32_t)enmSz << 22)
5355 | (iVecRegSrc2 << 16)
5356 | (iVecRegSrc1 << 5)
5357 | iVecRegDst;
5358}
5359
5360
5361/** Armv8 vector compare operation. */
5362typedef enum ARMV8VECINSTRCMPOP
5363{
5364 /* U insn[15:10] */
5365 kArmv8VecInstrCmpOp_Gt = UINT32_C(0x3400), /**< Greater than (>) (signed) */
5366 kArmv8VecInstrCmpOp_Ge = UINT32_C(0x3c00), /**< Greater or equal (>=) (signed) */
5367 kArmv8VecInstrCmpOp_Hi = RT_BIT_32(29) | UINT32_C(0x3400), /**< Greater than (>) (unsigned) */
5368 kArmv8VecInstrCmpOp_Hs = RT_BIT_32(29) | UINT32_C(0x3c00), /**< Greater or equal (>=) (unsigned) */
5369 kArmv8VecInstrCmpOp_Eq = RT_BIT_32(29) | UINT32_C(0x8c00) /**< Equal (==) (unsigned) */
5370} ARMV8VECINSTRCMPOP;
5371
5372/**
5373 * A64: Encodes CMEQ/CMGE/CMGT/CMHI/CMHS (register variant) (vector, register).
5374 *
5375 * @returns The encoded instruction.
5376 * @param enmOp The operation to perform.
5377 * @param iVecRegDst The vector register to put the result into.
5378 * @param iVecRegSrc1 The first vector source register.
5379 * @param iVecRegSrc2 The second vector source register.
5380 * @param enmSz Element size.
5381 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5382 * or just the low 64-bit (false).
5383 */
5384DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmp(ARMV8VECINSTRCMPOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc1, uint32_t iVecRegSrc2,
5385 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5386{
5387 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5388
5389 return UINT32_C(0x0e200000)
5390 | ((uint32_t)f128Bit << 30)
5391 | ((uint32_t)enmSz << 22)
5392 | (iVecRegSrc2 << 16)
5393 | ((uint32_t)enmOp)
5394 | (iVecRegSrc1 << 5)
5395 | iVecRegDst;
5396}
5397
5398
5399/** Armv8 vector compare against zero operation. */
5400typedef enum ARMV8VECINSTRCMPZEROOP
5401{
5402 /* U insn[15:10] */
5403 kArmv8VecInstrCmpZeroOp_Gt = UINT32_C(0x8800), /**< Greater than zero (>) (signed) */
5404 kArmv8VecInstrCmpZeroOp_Eq = UINT32_C(0x9800), /**< Equal to zero (==) */
5405 kArmv8VecInstrCmpZeroOp_Lt = UINT32_C(0xa800), /**< Lower than zero (>=) (signed) */
5406 kArmv8VecInstrCmpZeroOp_Ge = RT_BIT_32(29) | UINT32_C(0x8800), /**< Greater or equal to zero (>=) (signed) */
5407 kArmv8VecInstrCmpZeroOp_Le = RT_BIT_32(29) | UINT32_C(0x9800) /**< Lower or equal to zero (<=) (signed) */
5408} ARMV8VECINSTRCMPZEROOP;
5409
5410/**
5411 * A64: Encodes CMEQ/CMGE/CMGT/CMLE/CMLT (zero variant) (vector, register).
5412 *
5413 * @returns The encoded instruction.
5414 * @param enmOp The operation to perform.
5415 * @param iVecRegDst The vector register to put the result into.
5416 * @param iVecRegSrc The first vector source register.
5417 * @param enmSz Element size.
5418 * @param f128Bit Flag whether this operates on the full 128-bit (true, default) of the vector register
5419 * or just the low 64-bit (false).
5420 */
5421DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrCmpAgainstZero(ARMV8VECINSTRCMPOP enmOp, uint32_t iVecRegDst, uint32_t iVecRegSrc,
5422 ARMV8INSTRVECARITHSZ enmSz, bool f128Bit = true)
5423{
5424 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5425
5426 return UINT32_C(0x0e200000)
5427 | ((uint32_t)f128Bit << 30)
5428 | ((uint32_t)enmSz << 22)
5429 | ((uint32_t)enmOp)
5430 | (iVecRegSrc << 5)
5431 | iVecRegDst;
5432}
5433
5434
5435/** Armv8 [Signed,Unsigned] Extract {Unsigned} operation. */
5436typedef enum
5437{
5438 kArmv8VecInstrQxtnOp_Sqxtn = RT_BIT_32(14), /**< SQXTN */
5439 kArmv8VecInstrQxtnOp_Sqxtun = RT_BIT_32(29) | RT_BIT_32(13), /**< SQXTUN */
5440 kArmv8VecInstrQxtnOp_Uqxtn = RT_BIT_32(29) | RT_BIT_32(14) /**< UQXTN */
5441} ARMV8INSTRVECQXTNOP;
5442
5443/**
5444 * A64: Encodes SQXTN/SQXTN2/UQXTN/UQXTN2/SQXTUN/SQXTUN2 (vector, register).
5445 *
5446 * @returns The encoded instruction.
5447 * @param enmOp The operation to perform.
5448 * @param fUpper Flag whether to write the result to the lower (false) or upper (true) half of the destinatiom register.
5449 * @param iVecRegDst The vector register to put the result into.
5450 * @param iVecRegSrc The first vector source register.
5451 * @param enmSz Element size.
5452 */
5453DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrQxtn(ARMV8INSTRVECQXTNOP enmOp, bool fUpper, uint32_t iVecRegDst, uint32_t iVecRegSrc, ARMV8INSTRVECARITHSZ enmSz)
5454{
5455 Assert(iVecRegDst < 32); Assert(iVecRegSrc < 32);
5456
5457 return UINT32_C(0x0e210800)
5458 | ((uint32_t)enmOp)
5459 | ((uint32_t)fUpper << 30)
5460 | ((uint32_t)enmSz << 22)
5461 | (iVecRegSrc << 5)
5462 | iVecRegDst;
5463}
5464
5465
5466/** Armv8 floating point size. */
5467typedef enum
5468{
5469 kArmv8VecInstrFpSz_2x_Single = 0, /**< 2x single precision values in the low 64-bit of the 128-bit register. */
5470 kArmv8VecInstrFpSz_4x_Single = RT_BIT_32(30), /**< 4x single precision values in the 128-bit register. */
5471 kArmv8VecInstrFpSz_2x_Double = RT_BIT_32(30) | RT_BIT_32(22) /**< 2x double precision values in the 128-bit register. */
5472} ARMV8INSTRVECFPSZ;
5473
5474
5475/** Armv8 3 operand floating point operation. */
5476typedef enum
5477{
5478 /* insn[29] insn[23] insn[15:11] */
5479 kArmv8VecInstrFpOp_Add = UINT32_C(0xd000), /**< FADD */
5480 kArmv8VecInstrFpOp_Sub = RT_BIT_32(23) | UINT32_C(0xd000), /**< FADD */
5481 kArmv8VecInstrFpOp_AddPairwise = RT_BIT_32(29) | UINT32_C(0xd000), /**< FADDP */
5482 kArmv8VecInstrFpOp_Mul = RT_BIT_32(29) | UINT32_C(0xd800), /**< FMUL */
5483 kArmv8VecInstrFpOp_Div = RT_BIT_32(29) | UINT32_C(0xf800), /**< FDIV */
5484
5485 kArmv8VecInstrFpOp_Max = UINT32_C(0xf000), /**< FMAX */
5486 kArmv8VecInstrFpOp_MaxNumber = UINT32_C(0xc000), /**< FMAXNM */
5487 kArmv8VecInstrFpOp_MaxNumberPairwise = RT_BIT_32(29) | UINT32_C(0xc000), /**< FMAXNMP */
5488 kArmv8VecInstrFpOp_MaxPairwise = RT_BIT_32(29) | UINT32_C(0xf000), /**< FMAXP */
5489
5490 kArmv8VecInstrFpOp_Min = RT_BIT_32(23) | UINT32_C(0xf000), /**< FMIN */
5491 kArmv8VecInstrFpOp_MinNumber = RT_BIT_32(23) | UINT32_C(0xc000), /**< FMINNM */
5492 kArmv8VecInstrFpOp_MinNumberPairwise = RT_BIT_32(29) | RT_BIT_32(23) | UINT32_C(0xc000), /**< FMINNMP */
5493 kArmv8VecInstrFpOp_MinPairwise = RT_BIT_32(29) | RT_BIT_32(23) | UINT32_C(0xf000), /**< FMINP */
5494
5495 kArmv8VecInstrFpOp_Fmla = UINT32_C(0xc800), /**< FMLA */
5496 kArmv8VecInstrFpOp_Fmls = RT_BIT_32(23) | UINT32_C(0xc800) /**< FMLS */
5497} ARMV8INSTRVECFPOP;
5498
5499/**
5500 * A64: Encodes a 3 operand floating point operation (vector, register).
5501 *
5502 * @returns The encoded instruction.
5503 * @param enmOp The operation to perform.
5504 * @param enmSz The size to operate on.
5505 * @param iVecRegDst The vector register to put the result into.
5506 * @param iVecRegSrc1 The first vector source register.
5507 * @param iVecRegSrc2 The second vector source register.
5508 */
5509DECL_FORCE_INLINE(uint32_t) Armv8A64MkVecInstrFp3Op(ARMV8INSTRVECFPOP enmOp, ARMV8INSTRVECFPSZ enmSz, uint32_t iVecRegDst,
5510 uint32_t iVecRegSrc1, uint32_t iVecRegSrc2)
5511{
5512 Assert(iVecRegDst < 32); Assert(iVecRegSrc1 < 32); Assert(iVecRegSrc2 < 32);
5513
5514 return UINT32_C(0x0e200400)
5515 | ((uint32_t)enmOp)
5516 | ((uint32_t)enmSz)
5517 | (iVecRegSrc2 << 16)
5518 | (iVecRegSrc1 << 5)
5519 | iVecRegDst;
5520}
5521
5522
5523/** @} */
5524
5525#endif /* !dtrace && __cplusplus */
5526
5527/** @} */
5528
5529#endif /* !IPRT_INCLUDED_armv8_h */
5530
注意: 瀏覽 TracBrowser 來幫助您使用儲存庫瀏覽器

© 2025 Oracle Support Privacy / Do Not Sell My Info Terms of Use Trademark Policy Automated Access Etiquette