Date:      Mon, 17 Feb 2025 16:36:58 GMT
From:      Andrew Turner <andrew@FreeBSD.org>
To:        src-committers@FreeBSD.org, dev-commits-src-all@FreeBSD.org, dev-commits-src-main@FreeBSD.org
Subject:   git: 4daaee441311 - main - arm64: update and align armreg.h with ARMv8.9 features
Message-ID:  <202502171636.51HGawkx082816@gitrepo.freebsd.org>

The branch main has been updated by andrew:

URL: https://cgit.FreeBSD.org/src/commit/?id=4daaee441311833ca2bef8ddae0089f66a900844

commit 4daaee441311833ca2bef8ddae0089f66a900844
Author:     Harry Moulton <harry.moulton@arm.com>
AuthorDate: 2025-02-17 16:00:19 +0000
Commit:     Andrew Turner <andrew@FreeBSD.org>
CommitDate: 2025-02-17 16:07:36 +0000

    arm64: update and align armreg.h with ARMv8.9 features
    
    Update armreg.h to contain feature bit definitions for all architecture
    versions up to and including ARMv8.9. This also corrects several
    existing definitions: the PAC_frac field shift, the PAN EPAN value, and
    the ETS values.
    
    Reviewed by:    andrew
    Sponsored by:   Arm Ltd
    Differential Revision:  https://reviews.freebsd.org/D48815
    Signed-off-by: Harry Moulton <harry.moulton@arm.com>
---
 sys/arm64/include/armreg.h | 215 +++++++++++++++++++++++++++++++++++++--------
 1 file changed, 180 insertions(+), 35 deletions(-)
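All of the ID register fields below follow the header's common pattern: a
_SHIFT, a 4-bit _WIDTH, a _MASK, a _VAL(x) macro that masks the field in
place, and named values that are pre-shifted. As a minimal sketch (not part
of this commit) of how a consumer might test one of the new fields — the
cpu_has_cssc() helper is hypothetical, and READ_SPECIALREG() is assumed to
be armreg.h's usual MRS wrapper:

    #include <sys/types.h>
    #include <machine/armreg.h>

    /*
     * Sketch only. ID_AA64ISAR2_CSSC_VAL() keeps the field shifted in
     * place, so it is compared against the pre-shifted named values
     * rather than against raw field numbers.
     */
    static bool
    cpu_has_cssc(void)
    {
    	uint64_t isar2;

    	isar2 = READ_SPECIALREG(id_aa64isar2_el1);
    	/* CSSC is a monotonic field, so >= expresses "at least IMPL". */
    	return (ID_AA64ISAR2_CSSC_VAL(isar2) >= ID_AA64ISAR2_CSSC_IMPL);
    }
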

diff --git a/sys/arm64/include/armreg.h b/sys/arm64/include/armreg.h
index 2a2c8b23e0a4..a72e1ea99793 100644
--- a/sys/arm64/include/armreg.h
+++ b/sys/arm64/include/armreg.h
@@ -795,6 +795,7 @@
 #define	 ID_AA64DFR0_DebugVer_8_2	(UL(0x8) << ID_AA64DFR0_DebugVer_SHIFT)
 #define	 ID_AA64DFR0_DebugVer_8_4	(UL(0x9) << ID_AA64DFR0_DebugVer_SHIFT)
 #define	 ID_AA64DFR0_DebugVer_8_8	(UL(0xa) << ID_AA64DFR0_DebugVer_SHIFT)
+#define	 ID_AA64DFR0_DebugVer_8_9	(UL(0xb) << ID_AA64DFR0_DebugVer_SHIFT)
 #define	ID_AA64DFR0_TraceVer_SHIFT	4
 #define	ID_AA64DFR0_TraceVer_WIDTH	4
 #define	ID_AA64DFR0_TraceVer_MASK	(UL(0xf) << ID_AA64DFR0_TraceVer_SHIFT)
@@ -812,6 +813,7 @@
 #define	 ID_AA64DFR0_PMUVer_3_5		(UL(0x6) << ID_AA64DFR0_PMUVer_SHIFT)
 #define	 ID_AA64DFR0_PMUVer_3_7		(UL(0x7) << ID_AA64DFR0_PMUVer_SHIFT)
 #define	 ID_AA64DFR0_PMUVer_3_8		(UL(0x8) << ID_AA64DFR0_PMUVer_SHIFT)
+#define	 ID_AA64DFR0_PMUVer_3_9		(UL(0x9) << ID_AA64DFR0_PMUVer_SHIFT)
 #define	 ID_AA64DFR0_PMUVer_IMPL	(UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT)
 #define	ID_AA64DFR0_BRPs_SHIFT		12
 #define	ID_AA64DFR0_BRPs_WIDTH		4
@@ -843,6 +845,7 @@
 #define	 ID_AA64DFR0_PMSVer_SPE_1_1	(UL(0x2) << ID_AA64DFR0_PMSVer_SHIFT)
 #define	 ID_AA64DFR0_PMSVer_SPE_1_2	(UL(0x3) << ID_AA64DFR0_PMSVer_SHIFT)
 #define	 ID_AA64DFR0_PMSVer_SPE_1_3	(UL(0x4) << ID_AA64DFR0_PMSVer_SHIFT)
+#define	 ID_AA64DFR0_PMSVer_SPE_1_4	(UL(0x5) << ID_AA64DFR0_PMSVer_SHIFT)
 #define	ID_AA64DFR0_DoubleLock_SHIFT	36
 #define	ID_AA64DFR0_DoubleLock_WIDTH	4
 #define	ID_AA64DFR0_DoubleLock_MASK	(UL(0xf) << ID_AA64DFR0_DoubleLock_SHIFT)
@@ -890,6 +893,24 @@
 #define	ID_AA64DFR1_EL1_CRn		0
 #define	ID_AA64DFR1_EL1_CRm		5
 #define	ID_AA64DFR1_EL1_op2		1
+#define	ID_AA64DFR1_SPMU_SHIFT		32
+#define	ID_AA64DFR1_SPMU_WIDTH		4
+#define	ID_AA64DFR1_SPMU_MASK		(UL(0xf) << ID_AA64DFR1_SPMU_SHIFT)
+#define	ID_AA64DFR1_SPMU_VAL(x)		((x) & ID_AA64DFR1_SPMU_MASK)
+#define	 ID_AA64DFR1_SPMU_NONE		(UL(0x0) << ID_AA64DFR1_SPMU_SHIFT)
+#define	 ID_AA64DFR1_SPMU_IMPL		(UL(0x1) << ID_AA64DFR1_SPMU_SHIFT)
+#define	ID_AA64DFR1_PMICNTR_SHIFT	36
+#define	ID_AA64DFR1_PMICNTR_WIDTH	4
+#define	ID_AA64DFR1_PMICNTR_MASK	(UL(0xf) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define	ID_AA64DFR1_PMICNTR_VAL(x)	((x) & ID_AA64DFR1_PMICNTR_MASK)
+#define	 ID_AA64DFR1_PMICNTR_NONE	(UL(0x0) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define	 ID_AA64DFR1_PMICNTR_IMPL	(UL(0x1) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define	ID_AA64DFR1_DPFZS_SHIFT		52
+#define	ID_AA64DFR1_DPFZS_WIDTH		4
+#define	ID_AA64DFR1_DPFZS_MASK		(UL(0xf) << ID_AA64DFR1_DPFZS_SHIFT)
+#define	ID_AA64DFR1_DPFZS_VAL(x)	((x) & ID_AA64DFR1_DPFZS_MASK)
+#define	 ID_AA64DFR1_DPFZS_NONE		(UL(0x0) << ID_AA64DFR1_DPFZS_SHIFT)
+#define	 ID_AA64DFR1_DPFZS_IMPL		(UL(0x1) << ID_AA64DFR1_DPFZS_SHIFT)
 
 /* ID_AA64ISAR0_EL1 */
 #define	ID_AA64ISAR0_EL1		MRS_REG(ID_AA64ISAR0_EL1)
@@ -1076,7 +1097,8 @@
 #define	ID_AA64ISAR1_SPECRES_MASK	(UL(0xf) << ID_AA64ISAR1_SPECRES_SHIFT)
 #define	ID_AA64ISAR1_SPECRES_VAL(x)	((x) & ID_AA64ISAR1_SPECRES_MASK)
 #define	 ID_AA64ISAR1_SPECRES_NONE	(UL(0x0) << ID_AA64ISAR1_SPECRES_SHIFT)
-#define	 ID_AA64ISAR1_SPECRES_IMPL	(UL(0x1) << ID_AA64ISAR1_SPECRES_SHIFT)
+#define	 ID_AA64ISAR1_SPECRES_8_5	(UL(0x1) << ID_AA64ISAR1_SPECRES_SHIFT)
+#define	 ID_AA64ISAR1_SPECRES_8_9	(UL(0x2) << ID_AA64ISAR1_SPECRES_SHIFT)
 #define	ID_AA64ISAR1_BF16_SHIFT		44
 #define	ID_AA64ISAR1_BF16_WIDTH		4
 #define	ID_AA64ISAR1_BF16_MASK		(UL(0xf) << ID_AA64ISAR1_BF16_SHIFT)
@@ -1159,12 +1181,42 @@
 #define	ID_AA64ISAR2_BC_VAL(x)		((x) & ID_AA64ISAR2_BC_MASK)
 #define	 ID_AA64ISAR2_BC_NONE		(UL(0x0) << ID_AA64ISAR2_BC_SHIFT)
 #define	 ID_AA64ISAR2_BC_IMPL		(UL(0x1) << ID_AA64ISAR2_BC_SHIFT)
-#define	ID_AA64ISAR2_PAC_frac_SHIFT	28
+#define	ID_AA64ISAR2_PAC_frac_SHIFT	24
 #define	ID_AA64ISAR2_PAC_frac_WIDTH	4
 #define	ID_AA64ISAR2_PAC_frac_MASK	(UL(0xf) << ID_AA64ISAR2_PAC_frac_SHIFT)
 #define	ID_AA64ISAR2_PAC_frac_VAL(x)	((x) & ID_AA64ISAR2_PAC_frac_MASK)
 #define	 ID_AA64ISAR2_PAC_frac_NONE	(UL(0x0) << ID_AA64ISAR2_PAC_frac_SHIFT)
 #define	 ID_AA64ISAR2_PAC_frac_IMPL	(UL(0x1) << ID_AA64ISAR2_PAC_frac_SHIFT)
+#define	ID_AA64ISAR2_CLRBHB_SHIFT	28
+#define	ID_AA64ISAR2_CLRBHB_WIDTH	4
+#define	ID_AA64ISAR2_CLRBHB_MASK	(UL(0xf) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define	ID_AA64ISAR2_CLRBHB_VAL(x)	((x) & ID_AA64ISAR2_CLRBHB_MASK)
+#define	 ID_AA64ISAR2_CLRBHB_NONE	(UL(0x0) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define	 ID_AA64ISAR2_CLRBHB_IMPL	(UL(0x1) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define	ID_AA64ISAR2_PRFMSLC_SHIFT	40
+#define	ID_AA64ISAR2_PRFMSLC_WIDTH	4
+#define	ID_AA64ISAR2_PRFMSLC_MASK	(UL(0xf) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define	ID_AA64ISAR2_PRFMSLC_VAL(x)	((x) & ID_AA64ISAR2_PRFMSLC_MASK)
+#define	 ID_AA64ISAR2_PRFMSLC_NONE	(UL(0x0) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define	 ID_AA64ISAR2_PRFMSLC_IMPL	(UL(0x1) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define	ID_AA64ISAR2_RPRFM_SHIFT	48
+#define	ID_AA64ISAR2_RPRFM_WIDTH	4
+#define	ID_AA64ISAR2_RPRFM_MASK		(UL(0xf) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define	ID_AA64ISAR2_RPRFM_VAL(x)	((x) & ID_AA64ISAR2_RPRFM_MASK)
+#define	 ID_AA64ISAR2_RPRFM_NONE	(UL(0x0) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define	 ID_AA64ISAR2_RPRFM_IMPL	(UL(0x1) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define	ID_AA64ISAR2_CSSC_SHIFT		52
+#define	ID_AA64ISAR2_CSSC_WIDTH		4
+#define	ID_AA64ISAR2_CSSC_MASK		(UL(0xf) << ID_AA64ISAR2_CSSC_SHIFT)
+#define	ID_AA64ISAR2_CSSC_VAL(x)	((x) & ID_AA64ISAR2_CSSC_MASK)
+#define	 ID_AA64ISAR2_CSSC_NONE		(UL(0x0) << ID_AA64ISAR2_CSSC_SHIFT)
+#define	 ID_AA64ISAR2_CSSC_IMPL		(UL(0x1) << ID_AA64ISAR2_CSSC_SHIFT)
+#define	ID_AA64ISAR2_ATS1A_SHIFT	60
+#define	ID_AA64ISAR2_ATS1A_WIDTH	4
+#define	ID_AA64ISAR2_ATS1A_MASK		(UL(0xf) << ID_AA64ISAR2_ATS1A_SHIFT)
+#define	ID_AA64ISAR2_ATS1A_VAL(x)	((x) & ID_AA64ISAR2_ATS1A_MASK)
+#define	 ID_AA64ISAR2_ATS1A_NONE	(UL(0x0) << ID_AA64ISAR2_ATS1A_SHIFT)
+#define	 ID_AA64ISAR2_ATS1A_IMPL	(UL(0x1) << ID_AA64ISAR2_ATS1A_SHIFT)
 
 /* ID_AA64MMFR0_EL1 */
 #define	ID_AA64MMFR0_EL1		MRS_REG(ID_AA64MMFR0_EL1)
@@ -1263,7 +1315,8 @@
 #define	ID_AA64MMFR0_FGT_MASK		(UL(0xf) << ID_AA64MMFR0_FGT_SHIFT)
 #define	ID_AA64MMFR0_FGT_VAL(x)		((x) & ID_AA64MMFR0_FGT_MASK)
 #define	 ID_AA64MMFR0_FGT_NONE		(UL(0x0) << ID_AA64MMFR0_FGT_SHIFT)
-#define	 ID_AA64MMFR0_FGT_IMPL		(UL(0x1) << ID_AA64MMFR0_FGT_SHIFT)
+#define	 ID_AA64MMFR0_FGT_8_6		(UL(0x1) << ID_AA64MMFR0_FGT_SHIFT)
+#define	 ID_AA64MMFR0_FGT_8_9		(UL(0x2) << ID_AA64MMFR0_FGT_SHIFT)
 #define	ID_AA64MMFR0_ECV_SHIFT		60
 #define	ID_AA64MMFR0_ECV_WIDTH		4
 #define	ID_AA64MMFR0_ECV_MASK		(UL(0xf) << ID_AA64MMFR0_ECV_SHIFT)
@@ -1319,7 +1372,7 @@
 #define	 ID_AA64MMFR1_PAN_NONE		(UL(0x0) << ID_AA64MMFR1_PAN_SHIFT)
 #define	 ID_AA64MMFR1_PAN_IMPL		(UL(0x1) << ID_AA64MMFR1_PAN_SHIFT)
 #define	 ID_AA64MMFR1_PAN_ATS1E1	(UL(0x2) << ID_AA64MMFR1_PAN_SHIFT)
-#define	 ID_AA64MMFR1_PAN_EPAN		(UL(0x2) << ID_AA64MMFR1_PAN_SHIFT)
+#define	 ID_AA64MMFR1_PAN_EPAN		(UL(0x3) << ID_AA64MMFR1_PAN_SHIFT)
 #define	ID_AA64MMFR1_SpecSEI_SHIFT	24
 #define	ID_AA64MMFR1_SpecSEI_WIDTH	4
 #define	ID_AA64MMFR1_SpecSEI_MASK	(UL(0xf) << ID_AA64MMFR1_SpecSEI_SHIFT)
@@ -1343,7 +1396,8 @@
 #define	ID_AA64MMFR1_ETS_MASK		(UL(0xf) << ID_AA64MMFR1_ETS_SHIFT)
 #define	ID_AA64MMFR1_ETS_VAL(x)		((x) & ID_AA64MMFR1_ETS_MASK)
 #define	 ID_AA64MMFR1_ETS_NONE		(UL(0x0) << ID_AA64MMFR1_ETS_SHIFT)
-#define	 ID_AA64MMFR1_ETS_IMPL		(UL(0x1) << ID_AA64MMFR1_ETS_SHIFT)
+#define	 ID_AA64MMFR1_ETS_NONE2		(UL(0x1) << ID_AA64MMFR1_ETS_SHIFT)
+#define	 ID_AA64MMFR1_ETS_IMPL		(UL(0x2) << ID_AA64MMFR1_ETS_SHIFT)
 #define	ID_AA64MMFR1_HCX_SHIFT		40
 #define	ID_AA64MMFR1_HCX_WIDTH		4
 #define	ID_AA64MMFR1_HCX_MASK		(UL(0xf) << ID_AA64MMFR1_HCX_SHIFT)
@@ -1374,6 +1428,12 @@
 #define	ID_AA64MMFR1_CMOVW_VAL(x)	((x) & ID_AA64MMFR1_CMOVW_MASK)
 #define	 ID_AA64MMFR1_CMOVW_NONE	(UL(0x0) << ID_AA64MMFR1_CMOVW_SHIFT)
 #define	 ID_AA64MMFR1_CMOVW_IMPL	(UL(0x1) << ID_AA64MMFR1_CMOVW_SHIFT)
+#define	ID_AA64MMFR1_ECBHB_SHIFT	60
+#define	ID_AA64MMFR1_ECBHB_WIDTH	4
+#define	ID_AA64MMFR1_ECBHB_MASK		(UL(0xf) << ID_AA64MMFR1_ECBHB_SHIFT)
+#define	ID_AA64MMFR1_ECBHB_VAL(x)	((x) & ID_AA64MMFR1_ECBHB_MASK)
+#define	 ID_AA64MMFR1_ECBHB_NONE	(UL(0x0) << ID_AA64MMFR1_ECBHB_SHIFT)
+#define	 ID_AA64MMFR1_ECBHB_IMPL	(UL(0x1) << ID_AA64MMFR1_ECBHB_SHIFT)
 
 /* ID_AA64MMFR2_EL1 */
 #define	ID_AA64MMFR2_EL1		MRS_REG(ID_AA64MMFR2_EL1)
@@ -1497,12 +1557,66 @@
 #define	ID_AA64MMFR3_SCTLRX_VAL(x)	((x) & ID_AA64MMFR3_SCTLRX_MASK)
 #define	 ID_AA64MMFR3_SCTLRX_NONE	(UL(0x0) << ID_AA64MMFR3_SCTLRX_SHIFT)
 #define	 ID_AA64MMFR3_SCTLRX_IMPL	(UL(0x1) << ID_AA64MMFR3_SCTLRX_SHIFT)
+#define	ID_AA64MMFR3_S1PIE_SHIFT	8
+#define	ID_AA64MMFR3_S1PIE_WIDTH	4
+#define	ID_AA64MMFR3_S1PIE_MASK		(UL(0xf) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define	ID_AA64MMFR3_S1PIE_VAL(x)	((x) & ID_AA64MMFR3_S1PIE_MASK)
+#define	 ID_AA64MMFR3_S1PIE_NONE	(UL(0x0) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define	 ID_AA64MMFR3_S1PIE_IMPL	(UL(0x1) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define	ID_AA64MMFR3_S2PIE_SHIFT	12
+#define	ID_AA64MMFR3_S2PIE_WIDTH	4
+#define	ID_AA64MMFR3_S2PIE_MASK		(UL(0xf) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define	ID_AA64MMFR3_S2PIE_VAL(x)	((x) & ID_AA64MMFR3_S2PIE_MASK)
+#define	 ID_AA64MMFR3_S2PIE_NONE	(UL(0x0) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define	 ID_AA64MMFR3_S2PIE_IMPL	(UL(0x1) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define	ID_AA64MMFR3_S1POE_SHIFT	16
+#define	ID_AA64MMFR3_S1POE_WIDTH	4
+#define	ID_AA64MMFR3_S1POE_MASK		(UL(0xf) << ID_AA64MMFR3_S1POE_SHIFT)
+#define	ID_AA64MMFR3_S1POE_VAL(x)	((x) & ID_AA64MMFR3_S1POE_MASK)
+#define	 ID_AA64MMFR3_S1POE_NONE	(UL(0x0) << ID_AA64MMFR3_S1POE_SHIFT)
+#define	 ID_AA64MMFR3_S1POE_IMPL	(UL(0x1) << ID_AA64MMFR3_S1POE_SHIFT)
+#define	ID_AA64MMFR3_S2POE_SHIFT	20
+#define	ID_AA64MMFR3_S2POE_WIDTH	4
+#define	ID_AA64MMFR3_S2POE_MASK		(UL(0xf) << ID_AA64MMFR3_S2POE_SHIFT)
+#define	ID_AA64MMFR3_S2POE_VAL(x)	((x) & ID_AA64MMFR3_S2POE_MASK)
+#define	 ID_AA64MMFR3_S2POE_NONE	(UL(0x0) << ID_AA64MMFR3_S2POE_SHIFT)
+#define	 ID_AA64MMFR3_S2POE_IMPL	(UL(0x1) << ID_AA64MMFR3_S2POE_SHIFT)
+#define	ID_AA64MMFR3_AIE_SHIFT		24
+#define	ID_AA64MMFR3_AIE_WIDTH		4
+#define	ID_AA64MMFR3_AIE_MASK		(UL(0xf) << ID_AA64MMFR3_AIE_SHIFT)
+#define	ID_AA64MMFR3_AIE_VAL(x)		((x) & ID_AA64MMFR3_AIE_MASK)
+#define	 ID_AA64MMFR3_AIE_NONE		(UL(0x0) << ID_AA64MMFR3_AIE_SHIFT)
+#define	 ID_AA64MMFR3_AIE_IMPL		(UL(0x1) << ID_AA64MMFR3_AIE_SHIFT)
 #define	ID_AA64MMFR3_MEC_SHIFT		28
 #define	ID_AA64MMFR3_MEC_WIDTH		4
 #define	ID_AA64MMFR3_MEC_MASK		(UL(0xf) << ID_AA64MMFR3_MEC_SHIFT)
 #define	ID_AA64MMFR3_MEC_VAL(x)	((x) & ID_AA64MMFR3_MEC_MASK)
 #define	 ID_AA64MMFR3_MEC_NONE		(UL(0x0) << ID_AA64MMFR3_MEC_SHIFT)
 #define	 ID_AA64MMFR3_MEC_IMPL		(UL(0x1) << ID_AA64MMFR3_MEC_SHIFT)
+#define	ID_AA64MMFR3_SNERR_SHIFT	40
+#define	ID_AA64MMFR3_SNERR_WIDTH	4
+#define	ID_AA64MMFR3_SNERR_MASK		(UL(0xf) << ID_AA64MMFR3_SNERR_SHIFT)
+#define	ID_AA64MMFR3_SNERR_VAL(x)	((x) & ID_AA64MMFR3_SNERR_MASK)
+#define	 ID_AA64MMFR3_SNERR_NONE	(UL(0x0) << ID_AA64MMFR3_SNERR_SHIFT)
+#define	 ID_AA64MMFR3_SNERR_ALL		(UL(0x1) << ID_AA64MMFR3_SNERR_SHIFT)
+#define	ID_AA64MMFR3_ANERR_SHIFT	44
+#define	ID_AA64MMFR3_ANERR_WIDTH	4
+#define	ID_AA64MMFR3_ANERR_MASK		(UL(0xf) << ID_AA64MMFR3_ANERR_SHIFT)
+#define	ID_AA64MMFR3_ANERR_VAL(x)	((x) & ID_AA64MMFR3_ANERR_MASK)
+#define	 ID_AA64MMFR3_ANERR_NONE	(UL(0x0) << ID_AA64MMFR3_ANERR_SHIFT)
+#define	 ID_AA64MMFR3_ANERR_SOME	(UL(0x1) << ID_AA64MMFR3_ANERR_SHIFT)
+#define	ID_AA64MMFR3_SDERR_SHIFT	52
+#define	ID_AA64MMFR3_SDERR_WIDTH	4
+#define	ID_AA64MMFR3_SDERR_MASK		(UL(0xf) << ID_AA64MMFR3_SDERR_SHIFT)
+#define	ID_AA64MMFR3_SDERR_VAL(x)	((x) & ID_AA64MMFR3_SDERR_MASK)
+#define	 ID_AA64MMFR3_SDERR_NONE	(UL(0x0) << ID_AA64MMFR3_SDERR_SHIFT)
+#define	 ID_AA64MMFR3_SDERR_ALL		(UL(0x1) << ID_AA64MMFR3_SDERR_SHIFT)
+#define	ID_AA64MMFR3_ADERR_SHIFT	56
+#define	ID_AA64MMFR3_ADERR_WIDTH	4
+#define	ID_AA64MMFR3_ADERR_MASK		(UL(0xf) << ID_AA64MMFR3_ADERR_SHIFT)
+#define	ID_AA64MMFR3_ADERR_VAL(x)	((x) & ID_AA64MMFR3_ADERR_MASK)
+#define	 ID_AA64MMFR3_ADERR_NONE	(UL(0x0) << ID_AA64MMFR3_ADERR_SHIFT)
+#define	 ID_AA64MMFR3_ADERR_SOME	(UL(0x1) << ID_AA64MMFR3_ADERR_SHIFT)
 #define	ID_AA64MMFR3_Spec_FPACC_SHIFT	60
 #define	ID_AA64MMFR3_Spec_FPACC_WIDTH	4
 #define	ID_AA64MMFR3_Spec_FPACC_MASK	(UL(0xf) << ID_AA64MMFR3_Spec_FPACC_SHIFT)
@@ -1582,6 +1696,7 @@
 #define	 ID_AA64PFR0_RAS_NONE		(UL(0x0) << ID_AA64PFR0_RAS_SHIFT)
 #define	 ID_AA64PFR0_RAS_IMPL		(UL(0x1) << ID_AA64PFR0_RAS_SHIFT)
 #define	 ID_AA64PFR0_RAS_8_4		(UL(0x2) << ID_AA64PFR0_RAS_SHIFT)
+#define	 ID_AA64PFR0_RAS_8_9		(UL(0x3) << ID_AA64PFR0_RAS_SHIFT)
 #define	ID_AA64PFR0_SVE_SHIFT		32
 #define	ID_AA64PFR0_SVE_WIDTH		4
 #define	ID_AA64PFR0_SVE_MASK		(UL(0xf) << ID_AA64PFR0_SVE_SHIFT)
@@ -1701,6 +1816,36 @@
 #define	ID_AA64PFR1_NMI_VAL(x)		((x) & ID_AA64PFR1_NMI_MASK)
 #define	 ID_AA64PFR1_NMI_NONE		(UL(0x0) << ID_AA64PFR1_NMI_SHIFT)
 #define	 ID_AA64PFR1_NMI_IMPL		(UL(0x1) << ID_AA64PFR1_NMI_SHIFT)
+#define	ID_AA64PFR1_MTE_frac_SHIFT	40
+#define	ID_AA64PFR1_MTE_frac_WIDTH	4
+#define	ID_AA64PFR1_MTE_frac_MASK	(UL(0xf) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define	ID_AA64PFR1_MTE_frac_VAL(x)	((x) & ID_AA64PFR1_MTE_frac_MASK)
+#define	 ID_AA64PFR1_MTE_frac_IMPL	(UL(0x0) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define	 ID_AA64PFR1_MTE_frac_NONE	(UL(0xf) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define	ID_AA64PFR1_THE_SHIFT		48
+#define	ID_AA64PFR1_THE_WIDTH		4
+#define	ID_AA64PFR1_THE_MASK		(UL(0xf) << ID_AA64PFR1_THE_SHIFT)
+#define	ID_AA64PFR1_THE_VAL(x)		((x) & ID_AA64PFR1_THE_MASK)
+#define	 ID_AA64PFR1_THE_NONE		(UL(0x0) << ID_AA64PFR1_THE_SHIFT)
+#define	 ID_AA64PFR1_THE_IMPL		(UL(0x1) << ID_AA64PFR1_THE_SHIFT)
+#define	ID_AA64PFR1_MTEX_SHIFT		52
+#define	ID_AA64PFR1_MTEX_WIDTH		4
+#define	ID_AA64PFR1_MTEX_MASK		(UL(0xf) << ID_AA64PFR1_MTEX_SHIFT)
+#define	ID_AA64PFR1_MTEX_VAL(x)		((x) & ID_AA64PFR1_MTEX_MASK)
+#define	 ID_AA64PFR1_MTEX_NONE		(UL(0x0) << ID_AA64PFR1_MTEX_SHIFT)
+#define	 ID_AA64PFR1_MTEX_IMPL		(UL(0x1) << ID_AA64PFR1_MTEX_SHIFT)
+#define	ID_AA64PFR1_DF2_SHIFT		56
+#define	ID_AA64PFR1_DF2_WIDTH		4
+#define	ID_AA64PFR1_DF2_MASK		(UL(0xf) << ID_AA64PFR1_DF2_SHIFT)
+#define	ID_AA64PFR1_DF2_VAL(x)		((x) & ID_AA64PFR1_DF2_MASK)
+#define	 ID_AA64PFR1_DF2_NONE		(UL(0x0) << ID_AA64PFR1_DF2_SHIFT)
+#define	 ID_AA64PFR1_DF2_IMPL		(UL(0x1) << ID_AA64PFR1_DF2_SHIFT)
+#define	ID_AA64PFR1_PFAR_SHIFT		60
+#define	ID_AA64PFR1_PFAR_WIDTH		4
+#define	ID_AA64PFR1_PFAR_MASK		(UL(0xf) << ID_AA64PFR1_PFAR_SHIFT)
+#define	ID_AA64PFR1_PFAR_VAL(x)		((x) & ID_AA64PFR1_PFAR_MASK)
+#define	 ID_AA64PFR1_PFAR_NONE		(UL(0x0) << ID_AA64PFR1_PFAR_SHIFT)
+#define	 ID_AA64PFR1_PFAR_IMPL		(UL(0x1) << ID_AA64PFR1_PFAR_SHIFT)
 
 /* ID_AA64PFR2_EL1 */
 #define	ID_AA64PFR2_EL1			MRS_REG(ID_AA64PFR2_EL1)
@@ -1722,60 +1867,60 @@
 #define	ID_AA64ZFR0_SVEver_SHIFT	0
 #define	ID_AA64ZFR0_SVEver_WIDTH	4
 #define	ID_AA64ZFR0_SVEver_MASK		(UL(0xf) << ID_AA64ZFR0_SVEver_SHIFT)
-#define	ID_AA64ZFR0_SVEver_VAL(x)	((x) & ID_AA64ZFR0_SVEver_MASK
-#define	ID_AA64ZFR0_SVEver_SVE1		(UL(0x0) << ID_AA64ZFR0_SVEver_SHIFT)
-#define	ID_AA64ZFR0_SVEver_SVE2		(UL(0x1) << ID_AA64ZFR0_SVEver_SHIFT)
-#define	ID_AA64ZFR0_SVEver_SVE2P1	(UL(0x2) << ID_AA64ZFR0_SVEver_SHIFT)
+#define	ID_AA64ZFR0_SVEver_VAL(x)	((x) & ID_AA64ZFR0_SVEver_MASK)
+#define	 ID_AA64ZFR0_SVEver_SVE1	(UL(0x0) << ID_AA64ZFR0_SVEver_SHIFT)
+#define	 ID_AA64ZFR0_SVEver_SVE2	(UL(0x1) << ID_AA64ZFR0_SVEver_SHIFT)
+#define	 ID_AA64ZFR0_SVEver_SVE2P1	(UL(0x2) << ID_AA64ZFR0_SVEver_SHIFT)
 #define	ID_AA64ZFR0_AES_SHIFT		4
 #define	ID_AA64ZFR0_AES_WIDTH		4
 #define	ID_AA64ZFR0_AES_MASK		(UL(0xf) << ID_AA64ZFR0_AES_SHIFT)
-#define	ID_AA64ZFR0_AES_VAL(x)		((x) & ID_AA64ZFR0_AES_MASK
-#define	ID_AA64ZFR0_AES_NONE		(UL(0x0) << ID_AA64ZFR0_AES_SHIFT)
-#define	ID_AA64ZFR0_AES_BASE		(UL(0x1) << ID_AA64ZFR0_AES_SHIFT)
-#define	ID_AA64ZFR0_AES_PMULL		(UL(0x2) << ID_AA64ZFR0_AES_SHIFT)
+#define	ID_AA64ZFR0_AES_VAL(x)		((x) & ID_AA64ZFR0_AES_MASK)
+#define	 ID_AA64ZFR0_AES_NONE		(UL(0x0) << ID_AA64ZFR0_AES_SHIFT)
+#define	 ID_AA64ZFR0_AES_BASE		(UL(0x1) << ID_AA64ZFR0_AES_SHIFT)
+#define	 ID_AA64ZFR0_AES_PMULL		(UL(0x2) << ID_AA64ZFR0_AES_SHIFT)
 #define	ID_AA64ZFR0_BitPerm_SHIFT	16
 #define	ID_AA64ZFR0_BitPerm_WIDTH	4
 #define	ID_AA64ZFR0_BitPerm_MASK	(UL(0xf) << ID_AA64ZFR0_BitPerm_SHIFT)
-#define	ID_AA64ZFR0_BitPerm_VAL(x)	((x) & ID_AA64ZFR0_BitPerm_MASK
-#define	ID_AA64ZFR0_BitPerm_NONE	(UL(0x0) << ID_AA64ZFR0_BitPerm_SHIFT)
-#define	ID_AA64ZFR0_BitPerm_IMPL	(UL(0x1) << ID_AA64ZFR0_BitPerm_SHIFT)
+#define	ID_AA64ZFR0_BitPerm_VAL(x)	((x) & ID_AA64ZFR0_BitPerm_MASK)
+#define	 ID_AA64ZFR0_BitPerm_NONE	(UL(0x0) << ID_AA64ZFR0_BitPerm_SHIFT)
+#define	 ID_AA64ZFR0_BitPerm_IMPL	(UL(0x1) << ID_AA64ZFR0_BitPerm_SHIFT)
 #define	ID_AA64ZFR0_BF16_SHIFT		20
 #define	ID_AA64ZFR0_BF16_WIDTH		4
 #define	ID_AA64ZFR0_BF16_MASK		(UL(0xf) << ID_AA64ZFR0_BF16_SHIFT)
-#define	ID_AA64ZFR0_BF16_VAL(x)		((x) & ID_AA64ZFR0_BF16_MASK
-#define	ID_AA64ZFR0_BF16_NONE		(UL(0x0) << ID_AA64ZFR0_BF16_SHIFT)
-#define	ID_AA64ZFR0_BF16_BASE		(UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
-#define	ID_AA64ZFR0_BF16_EBF		(UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
+#define	ID_AA64ZFR0_BF16_VAL(x)		((x) & ID_AA64ZFR0_BF16_MASK)
+#define	 ID_AA64ZFR0_BF16_NONE		(UL(0x0) << ID_AA64ZFR0_BF16_SHIFT)
+#define	 ID_AA64ZFR0_BF16_BASE		(UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
+#define	 ID_AA64ZFR0_BF16_EBF		(UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
 #define	ID_AA64ZFR0_SHA3_SHIFT		32
 #define	ID_AA64ZFR0_SHA3_WIDTH		4
 #define	ID_AA64ZFR0_SHA3_MASK		(UL(0xf) << ID_AA64ZFR0_SHA3_SHIFT)
-#define	ID_AA64ZFR0_SHA3_VAL(x)		((x) & ID_AA64ZFR0_SHA3_MASK
-#define	ID_AA64ZFR0_SHA3_NONE		(UL(0x0) << ID_AA64ZFR0_SHA3_SHIFT)
-#define	ID_AA64ZFR0_SHA3_IMPL		(UL(0x1) << ID_AA64ZFR0_SHA3_SHIFT)
+#define	ID_AA64ZFR0_SHA3_VAL(x)		((x) & ID_AA64ZFR0_SHA3_MASK)
+#define	 ID_AA64ZFR0_SHA3_NONE		(UL(0x0) << ID_AA64ZFR0_SHA3_SHIFT)
+#define	 ID_AA64ZFR0_SHA3_IMPL		(UL(0x1) << ID_AA64ZFR0_SHA3_SHIFT)
 #define	ID_AA64ZFR0_SM4_SHIFT		40
 #define	ID_AA64ZFR0_SM4_WIDTH		4
 #define	ID_AA64ZFR0_SM4_MASK		(UL(0xf) << ID_AA64ZFR0_SM4_SHIFT)
-#define	ID_AA64ZFR0_SM4_VAL(x)		((x) & ID_AA64ZFR0_SM4_MASK
-#define	ID_AA64ZFR0_SM4_NONE		(UL(0x0) << ID_AA64ZFR0_SM4_SHIFT)
-#define	ID_AA64ZFR0_SM4_IMPL		(UL(0x1) << ID_AA64ZFR0_SM4_SHIFT)
+#define	ID_AA64ZFR0_SM4_VAL(x)		((x) & ID_AA64ZFR0_SM4_MASK)
+#define	 ID_AA64ZFR0_SM4_NONE		(UL(0x0) << ID_AA64ZFR0_SM4_SHIFT)
+#define	 ID_AA64ZFR0_SM4_IMPL		(UL(0x1) << ID_AA64ZFR0_SM4_SHIFT)
 #define	ID_AA64ZFR0_I8MM_SHIFT		44
 #define	ID_AA64ZFR0_I8MM_WIDTH		4
 #define	ID_AA64ZFR0_I8MM_MASK		(UL(0xf) << ID_AA64ZFR0_I8MM_SHIFT)
-#define	ID_AA64ZFR0_I8MM_VAL(x)		((x) & ID_AA64ZFR0_I8MM_MASK
-#define	ID_AA64ZFR0_I8MM_NONE		(UL(0x0) << ID_AA64ZFR0_I8MM_SHIFT)
-#define	ID_AA64ZFR0_I8MM_IMPL		(UL(0x1) << ID_AA64ZFR0_I8MM_SHIFT)
+#define	ID_AA64ZFR0_I8MM_VAL(x)		((x) & ID_AA64ZFR0_I8MM_MASK)
+#define	 ID_AA64ZFR0_I8MM_NONE		(UL(0x0) << ID_AA64ZFR0_I8MM_SHIFT)
+#define	 ID_AA64ZFR0_I8MM_IMPL		(UL(0x1) << ID_AA64ZFR0_I8MM_SHIFT)
 #define	ID_AA64ZFR0_F32MM_SHIFT		52
 #define	ID_AA64ZFR0_F32MM_WIDTH		4
 #define	ID_AA64ZFR0_F32MM_MASK		(UL(0xf) << ID_AA64ZFR0_F32MM_SHIFT)
-#define	ID_AA64ZFR0_F32MM_VAL(x)	((x) & ID_AA64ZFR0_F32MM_MASK
-#define	ID_AA64ZFR0_F32MM_NONE		(UL(0x0) << ID_AA64ZFR0_F32MM_SHIFT)
-#define	ID_AA64ZFR0_F32MM_IMPL		(UL(0x1) << ID_AA64ZFR0_F32MM_SHIFT)
+#define	ID_AA64ZFR0_F32MM_VAL(x)	((x) & ID_AA64ZFR0_F32MM_MASK)
+#define	 ID_AA64ZFR0_F32MM_NONE		(UL(0x0) << ID_AA64ZFR0_F32MM_SHIFT)
+#define	 ID_AA64ZFR0_F32MM_IMPL		(UL(0x1) << ID_AA64ZFR0_F32MM_SHIFT)
 #define	ID_AA64ZFR0_F64MM_SHIFT		56
 #define	ID_AA64ZFR0_F64MM_WIDTH		4
 #define	ID_AA64ZFR0_F64MM_MASK		(UL(0xf) << ID_AA64ZFR0_F64MM_SHIFT)
-#define	ID_AA64ZFR0_F64MM_VAL(x)	((x) & ID_AA64ZFR0_F64MM_MASK
-#define	ID_AA64ZFR0_F64MM_NONE		(UL(0x0) << ID_AA64ZFR0_F64MM_SHIFT)
-#define	ID_AA64ZFR0_F64MM_IMPL		(UL(0x1) << ID_AA64ZFR0_F64MM_SHIFT)
+#define	ID_AA64ZFR0_F64MM_VAL(x)	((x) & ID_AA64ZFR0_F64MM_MASK)
+#define	 ID_AA64ZFR0_F64MM_NONE		(UL(0x0) << ID_AA64ZFR0_F64MM_SHIFT)
+#define	 ID_AA64ZFR0_F64MM_IMPL		(UL(0x1) << ID_AA64ZFR0_F64MM_SHIFT)
 
 /* ID_ISAR5_EL1 */
 #define	ID_ISAR5_EL1			MRS_REG(ID_ISAR5_EL1)
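
One caveat for the new ID_AA64PFR1 fields: most fields in this header are
monotonic, so "at least this level" checks can use _VAL() with >=, but
MTE_frac is inverted (IMPL is 0x0, NONE is 0xf) and needs an exact match.
A sketch, again hypothetical rather than from the commit, with the same
includes and READ_SPECIALREG() assumption as above:

    static bool
    cpu_has_mte_frac(void)
    {
    	uint64_t pfr1;

    	pfr1 = READ_SPECIALREG(id_aa64pfr1_el1);
    	/* Inverted field: 0x0 is "implemented", 0xf is "not implemented". */
    	return (ID_AA64PFR1_MTE_frac_VAL(pfr1) == ID_AA64PFR1_MTE_frac_IMPL);
    }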


