aboutsummaryrefslogtreecommitdiff
path: root/sys/arm64/include
diff options
context:
space:
mode:
Diffstat (limited to 'sys/arm64/include')
-rw-r--r--sys/arm64/include/armreg.h1176
-rw-r--r--sys/arm64/include/asm.h22
-rw-r--r--sys/arm64/include/atomic.h9
-rw-r--r--sys/arm64/include/bus.h1
-rw-r--r--sys/arm64/include/bus_dma.h2
-rw-r--r--sys/arm64/include/cpu.h39
-rw-r--r--sys/arm64/include/cpu_feat.h88
-rw-r--r--sys/arm64/include/cpufunc.h22
-rw-r--r--sys/arm64/include/efi.h4
-rw-r--r--sys/arm64/include/elf.h174
-rw-r--r--sys/arm64/include/hypervisor.h102
-rw-r--r--sys/arm64/include/intr.h16
-rw-r--r--sys/arm64/include/machdep.h1
-rw-r--r--sys/arm64/include/md_var.h4
-rw-r--r--sys/arm64/include/metadata.h11
-rw-r--r--sys/arm64/include/param.h12
-rw-r--r--sys/arm64/include/pcb.h10
-rw-r--r--sys/arm64/include/pmap.h18
-rw-r--r--sys/arm64/include/proc.h6
-rw-r--r--sys/arm64/include/pte.h62
-rw-r--r--sys/arm64/include/reg.h13
-rw-r--r--sys/arm64/include/resource.h2
-rw-r--r--sys/arm64/include/runq.h50
-rw-r--r--sys/arm64/include/sdt_machdep.h12
-rw-r--r--sys/arm64/include/sysarch.h5
-rw-r--r--sys/arm64/include/ucontext.h8
-rw-r--r--sys/arm64/include/undefined.h30
-rw-r--r--sys/arm64/include/vfp.h7
-rw-r--r--sys/arm64/include/vmm.h74
-rw-r--r--sys/arm64/include/vmm_dev.h5
-rw-r--r--sys/arm64/include/vmparam.h45
31 files changed, 1473 insertions, 557 deletions
diff --git a/sys/arm64/include/armreg.h b/sys/arm64/include/armreg.h
index 9391b1e2c1b7..cd770386f852 100644
--- a/sys/arm64/include/armreg.h
+++ b/sys/arm64/include/armreg.h
@@ -36,29 +36,6 @@
#define INSN_SIZE 4
-#define MRS_MASK 0xfff00000
-#define MRS_VALUE 0xd5300000
-#define MRS_SPECIAL(insn) ((insn) & 0x000fffe0)
-#define MRS_REGISTER(insn) ((insn) & 0x0000001f)
-#define MRS_Op0_SHIFT 19
-#define MRS_Op0_MASK 0x00080000
-#define MRS_Op1_SHIFT 16
-#define MRS_Op1_MASK 0x00070000
-#define MRS_CRn_SHIFT 12
-#define MRS_CRn_MASK 0x0000f000
-#define MRS_CRm_SHIFT 8
-#define MRS_CRm_MASK 0x00000f00
-#define MRS_Op2_SHIFT 5
-#define MRS_Op2_MASK 0x000000e0
-#define MRS_Rt_SHIFT 0
-#define MRS_Rt_MASK 0x0000001f
-#define __MRS_REG(op0, op1, crn, crm, op2) \
- (((op0) << MRS_Op0_SHIFT) | ((op1) << MRS_Op1_SHIFT) | \
- ((crn) << MRS_CRn_SHIFT) | ((crm) << MRS_CRm_SHIFT) | \
- ((op2) << MRS_Op2_SHIFT))
-#define MRS_REG(reg) \
- __MRS_REG(reg##_op0, reg##_op1, reg##_CRn, reg##_CRm, reg##_op2)
-
#define __MRS_REG_ALT_NAME(op0, op1, crn, crm, op2) \
S##op0##_##op1##_C##crn##_C##crm##_##op2
#define _MRS_REG_ALT_NAME(op0, op1, crn, crm, op2) \
@@ -77,6 +54,134 @@
#define UL(x) UINT64_C(x)
+/* AFSR0_EL1 - Auxiliary Fault Status Register 0 */
+#define AFSR0_EL1_REG MRS_REG_ALT_NAME(AFSR0_EL1)
+#define AFSR0_EL1_op0 3
+#define AFSR0_EL1_op1 0
+#define AFSR0_EL1_CRn 5
+#define AFSR0_EL1_CRm 1
+#define AFSR0_EL1_op2 0
+
+/* AFSR0_EL12 */
+#define AFSR0_EL12_REG MRS_REG_ALT_NAME(AFSR0_EL12)
+#define AFSR0_EL12_op0 3
+#define AFSR0_EL12_op1 5
+#define AFSR0_EL12_CRn 5
+#define AFSR0_EL12_CRm 1
+#define AFSR0_EL12_op2 0
+
+/* AFSR1_EL1 - Auxiliary Fault Status Register 1 */
+#define AFSR1_EL1_REG MRS_REG_ALT_NAME(AFSR1_EL1)
+#define AFSR1_EL1_op0 3
+#define AFSR1_EL1_op1 0
+#define AFSR1_EL1_CRn 5
+#define AFSR1_EL1_CRm 1
+#define AFSR1_EL1_op2 1
+
+/* AFSR1_EL12 */
+#define AFSR1_EL12_REG MRS_REG_ALT_NAME(AFSR1_EL12)
+#define AFSR1_EL12_op0 3
+#define AFSR1_EL12_op1 5
+#define AFSR1_EL12_CRn 5
+#define AFSR1_EL12_CRm 1
+#define AFSR1_EL12_op2 1
+
+/* AMAIR_EL1 - Auxiliary Memory Attribute Indirection Register */
+#define AMAIR_EL1_REG MRS_REG_ALT_NAME(AMAIR_EL1)
+#define AMAIR_EL1_op0 3
+#define AMAIR_EL1_op1 0
+#define AMAIR_EL1_CRn 10
+#define AMAIR_EL1_CRm 3
+#define AMAIR_EL1_op2 0
+
+/* AMAIR_EL12 */
+#define AMAIR_EL12_REG MRS_REG_ALT_NAME(AMAIR_EL12)
+#define AMAIR_EL12_op0 3
+#define AMAIR_EL12_op1 5
+#define AMAIR_EL12_CRn 10
+#define AMAIR_EL12_CRm 3
+#define AMAIR_EL12_op2 0
+
+/* APDAKeyHi_EL1 */
+#define APDAKeyHi_EL1_REG MRS_REG_ALT_NAME(APDAKeyHi_EL1)
+#define APDAKeyHi_EL1_op0 3
+#define APDAKeyHi_EL1_op1 0
+#define APDAKeyHi_EL1_CRn 2
+#define APDAKeyHi_EL1_CRm 2
+#define APDAKeyHi_EL1_op2 1
+
+/* APDAKeyLo_EL1 */
+#define APDAKeyLo_EL1_REG MRS_REG_ALT_NAME(APDAKeyLo_EL1)
+#define APDAKeyLo_EL1_op0 3
+#define APDAKeyLo_EL1_op1 0
+#define APDAKeyLo_EL1_CRn 2
+#define APDAKeyLo_EL1_CRm 2
+#define APDAKeyLo_EL1_op2 0
+
+/* APDBKeyHi_EL1 */
+#define APDBKeyHi_EL1_REG MRS_REG_ALT_NAME(APDBKeyHi_EL1)
+#define APDBKeyHi_EL1_op0 3
+#define APDBKeyHi_EL1_op1 0
+#define APDBKeyHi_EL1_CRn 2
+#define APDBKeyHi_EL1_CRm 2
+#define APDBKeyHi_EL1_op2 3
+
+/* APDBKeyLo_EL1 */
+#define APDBKeyLo_EL1_REG MRS_REG_ALT_NAME(APDBKeyLo_EL1)
+#define APDBKeyLo_EL1_op0 3
+#define APDBKeyLo_EL1_op1 0
+#define APDBKeyLo_EL1_CRn 2
+#define APDBKeyLo_EL1_CRm 2
+#define APDBKeyLo_EL1_op2 2
+
+/* APGAKeyHi_EL1 */
+#define APGAKeyHi_EL1_REG MRS_REG_ALT_NAME(APGAKeyHi_EL1)
+#define APGAKeyHi_EL1_op0 3
+#define APGAKeyHi_EL1_op1 0
+#define APGAKeyHi_EL1_CRn 2
+#define APGAKeyHi_EL1_CRm 3
+#define APGAKeyHi_EL1_op2 1
+
+/* APGAKeyLo_EL1 */
+#define APGAKeyLo_EL1_REG MRS_REG_ALT_NAME(APGAKeyLo_EL1)
+#define APGAKeyLo_EL1_op0 3
+#define APGAKeyLo_EL1_op1 0
+#define APGAKeyLo_EL1_CRn 2
+#define APGAKeyLo_EL1_CRm 3
+#define APGAKeyLo_EL1_op2 0
+
+/* APIAKeyHi_EL1 */
+#define APIAKeyHi_EL1_REG MRS_REG_ALT_NAME(APIAKeyHi_EL1)
+#define APIAKeyHi_EL1_op0 3
+#define APIAKeyHi_EL1_op1 0
+#define APIAKeyHi_EL1_CRn 2
+#define APIAKeyHi_EL1_CRm 1
+#define APIAKeyHi_EL1_op2 1
+
+/* APIAKeyLo_EL1 */
+#define APIAKeyLo_EL1_REG MRS_REG_ALT_NAME(APIAKeyLo_EL1)
+#define APIAKeyLo_EL1_op0 3
+#define APIAKeyLo_EL1_op1 0
+#define APIAKeyLo_EL1_CRn 2
+#define APIAKeyLo_EL1_CRm 1
+#define APIAKeyLo_EL1_op2 0
+
+/* APIBKeyHi_EL1 */
+#define APIBKeyHi_EL1_REG MRS_REG_ALT_NAME(APIBKeyHi_EL1)
+#define APIBKeyHi_EL1_op0 3
+#define APIBKeyHi_EL1_op1 0
+#define APIBKeyHi_EL1_CRn 2
+#define APIBKeyHi_EL1_CRm 1
+#define APIBKeyHi_EL1_op2 3
+
+/* APIBKeyLo_EL1 */
+#define APIBKeyLo_EL1_REG MRS_REG_ALT_NAME(APIBKeyLo_EL1)
+#define APIBKeyLo_EL1_op0 3
+#define APIBKeyLo_EL1_op1 0
+#define APIBKeyLo_EL1_CRn 2
+#define APIBKeyLo_EL1_CRm 1
+#define APIBKeyLo_EL1_op2 2
+
/* CCSIDR_EL1 - Cache Size ID Register */
#define CCSIDR_NumSets_MASK 0x0FFFE000
#define CCSIDR_NumSets64_MASK 0x00FFFFFF00000000
@@ -103,8 +208,21 @@
#define CLIDR_CTYPE_ID 0x3 /* Split instruction and data */
#define CLIDR_CTYPE_UNIFIED 0x4 /* Unified */
+/* CNTKCTL_EL1 - Counter-timer Kernel Control Register */
+#define CNTKCTL_EL1_op0 3
+#define CNTKCTL_EL1_op1 0
+#define CNTKCTL_EL1_CRn 14
+#define CNTKCTL_EL1_CRm 1
+#define CNTKCTL_EL1_op2 0
+
+/* CNTKCTL_EL12 - Counter-timer Kernel Control Register */
+#define CNTKCTL_EL12_op0 3
+#define CNTKCTL_EL12_op1 5
+#define CNTKCTL_EL12_CRn 14
+#define CNTKCTL_EL12_CRm 1
+#define CNTKCTL_EL12_op2 0
+
/* CNTP_CTL_EL0 - Counter-timer Physical Timer Control register */
-#define CNTP_CTL_EL0 MRS_REG(CNTP_CTL_EL0)
#define CNTP_CTL_EL0_op0 3
#define CNTP_CTL_EL0_op1 3
#define CNTP_CTL_EL0_CRn 14
@@ -115,7 +233,6 @@
#define CNTP_CTL_ISTATUS (1 << 2)
/* CNTP_CVAL_EL0 - Counter-timer Physical Timer CompareValue register */
-#define CNTP_CVAL_EL0 MRS_REG(CNTP_CVAL_EL0)
#define CNTP_CVAL_EL0_op0 3
#define CNTP_CVAL_EL0_op1 3
#define CNTP_CVAL_EL0_CRn 14
@@ -123,7 +240,6 @@
#define CNTP_CVAL_EL0_op2 2
/* CNTP_TVAL_EL0 - Counter-timer Physical Timer TimerValue register */
-#define CNTP_TVAL_EL0 MRS_REG(CNTP_TVAL_EL0)
#define CNTP_TVAL_EL0_op0 3
#define CNTP_TVAL_EL0_op1 3
#define CNTP_TVAL_EL0_CRn 14
@@ -131,14 +247,64 @@
#define CNTP_TVAL_EL0_op2 0
/* CNTPCT_EL0 - Counter-timer Physical Count register */
-#define CNTPCT_EL0 MRS_REG(CNTPCT_EL0)
+#define CNTPCT_EL0_ISS ISS_MSR_REG(CNTPCT_EL0)
#define CNTPCT_EL0_op0 3
#define CNTPCT_EL0_op1 3
#define CNTPCT_EL0_CRn 14
#define CNTPCT_EL0_CRm 0
#define CNTPCT_EL0_op2 1
+/* CNTV_CTL_EL0 - Counter-timer Virtual Timer Control register */
+#define CNTV_CTL_EL0_op0 3
+#define CNTV_CTL_EL0_op1 3
+#define CNTV_CTL_EL0_CRn 14
+#define CNTV_CTL_EL0_CRm 3
+#define CNTV_CTL_EL0_op2 1
+
+/* CNTV_CTL_EL02 - Counter-timer Virtual Timer Control register */
+#define CNTV_CTL_EL02_op0 3
+#define CNTV_CTL_EL02_op1 5
+#define CNTV_CTL_EL02_CRn 14
+#define CNTV_CTL_EL02_CRm 3
+#define CNTV_CTL_EL02_op2 1
+
+/* CNTV_CVAL_EL0 - Counter-timer Virtual Timer CompareValue register */
+#define CNTV_CVAL_EL0_op0 3
+#define CNTV_CVAL_EL0_op1 3
+#define CNTV_CVAL_EL0_CRn 14
+#define CNTV_CVAL_EL0_CRm 3
+#define CNTV_CVAL_EL0_op2 2
+
+/* CNTV_CVAL_EL02 - Counter-timer Virtual Timer CompareValue register */
+#define CNTV_CVAL_EL02_op0 3
+#define CNTV_CVAL_EL02_op1 5
+#define CNTV_CVAL_EL02_CRn 14
+#define CNTV_CVAL_EL02_CRm 3
+#define CNTV_CVAL_EL02_op2 2
+
+/* CONTEXTIDR_EL1 - Context ID register */
+#define CONTEXTIDR_EL1_REG MRS_REG_ALT_NAME(CONTEXTIDR_EL1)
+#define CONTEXTIDR_EL1_op0 3
+#define CONTEXTIDR_EL1_op1 0
+#define CONTEXTIDR_EL1_CRn 13
+#define CONTEXTIDR_EL1_CRm 0
+#define CONTEXTIDR_EL1_op2 1
+
+/* CONTEXTIDR_EL12 */
+#define CONTEXTIDR_EL12_REG MRS_REG_ALT_NAME(CONTEXTIDR_EL12)
+#define CONTEXTIDR_EL12_op0 3
+#define CONTEXTIDR_EL12_op1 5
+#define CONTEXTIDR_EL12_CRn 13
+#define CONTEXTIDR_EL12_CRm 0
+#define CONTEXTIDR_EL12_op2 1
+
/* CPACR_EL1 */
+#define CPACR_EL1_REG MRS_REG_ALT_NAME(CPACR_EL1)
+#define CPACR_EL1_op0 3
+#define CPACR_EL1_op1 0
+#define CPACR_EL1_CRn 1
+#define CPACR_EL1_CRm 0
+#define CPACR_EL1_op2 2
#define CPACR_ZEN_MASK (0x3 << 16)
#define CPACR_ZEN_TRAP_ALL1 (0x0 << 16) /* Traps from EL0 and EL1 */
#define CPACR_ZEN_TRAP_EL0 (0x1 << 16) /* Traps from EL0 */
@@ -151,41 +317,65 @@
#define CPACR_FPEN_TRAP_NONE (0x3 << 20) /* No traps */
#define CPACR_TTA (0x1 << 28)
+/* CPACR_EL12 */
+#define CPACR_EL12_REG MRS_REG_ALT_NAME(CPACR_EL12)
+#define CPACR_EL12_op0 3
+#define CPACR_EL12_op1 5
+#define CPACR_EL12_CRn 1
+#define CPACR_EL12_CRm 0
+#define CPACR_EL12_op2 2
+
/* CSSELR_EL1 - Cache size selection register */
#define CSSELR_Level(i) (i << 1)
#define CSSELR_InD 0x00000001
/* CTR_EL0 - Cache Type Register */
+#define CTR_EL0_REG MRS_REG_ALT_NAME(CTR_EL0)
+#define CTR_EL0_ISS ISS_MSR_REG(CTR_EL0)
+#define CTR_EL0_op0 3
+#define CTR_EL0_op1 3
+#define CTR_EL0_CRn 0
+#define CTR_EL0_CRm 0
+#define CTR_EL0_op2 1
#define CTR_RES1 (1 << 31)
#define CTR_TminLine_SHIFT 32
#define CTR_TminLine_MASK (UL(0x3f) << CTR_TminLine_SHIFT)
#define CTR_TminLine_VAL(reg) ((reg) & CTR_TminLine_MASK)
#define CTR_DIC_SHIFT 29
+#define CTR_DIC_WIDTH 1
#define CTR_DIC_MASK (0x1 << CTR_DIC_SHIFT)
#define CTR_DIC_VAL(reg) ((reg) & CTR_DIC_MASK)
+#define CTR_DIC_NONE (0x0 << CTR_DIC_SHIFT)
+#define CTR_DIC_IMPL (0x1 << CTR_DIC_SHIFT)
#define CTR_IDC_SHIFT 28
+#define CTR_IDC_WIDTH 1
#define CTR_IDC_MASK (0x1 << CTR_IDC_SHIFT)
#define CTR_IDC_VAL(reg) ((reg) & CTR_IDC_MASK)
+#define CTR_IDC_NONE (0x0 << CTR_IDC_SHIFT)
+#define CTR_IDC_IMPL (0x1 << CTR_IDC_SHIFT)
#define CTR_CWG_SHIFT 24
+#define CTR_CWG_WIDTH 4
#define CTR_CWG_MASK (0xf << CTR_CWG_SHIFT)
#define CTR_CWG_VAL(reg) ((reg) & CTR_CWG_MASK)
#define CTR_CWG_SIZE(reg) (4 << (CTR_CWG_VAL(reg) >> CTR_CWG_SHIFT))
#define CTR_ERG_SHIFT 20
+#define CTR_ERG_WIDTH 4
#define CTR_ERG_MASK (0xf << CTR_ERG_SHIFT)
#define CTR_ERG_VAL(reg) ((reg) & CTR_ERG_MASK)
#define CTR_ERG_SIZE(reg) (4 << (CTR_ERG_VAL(reg) >> CTR_ERG_SHIFT))
#define CTR_DLINE_SHIFT 16
+#define CTR_DLINE_WIDTH 4
#define CTR_DLINE_MASK (0xf << CTR_DLINE_SHIFT)
#define CTR_DLINE_VAL(reg) ((reg) & CTR_DLINE_MASK)
#define CTR_DLINE_SIZE(reg) (4 << (CTR_DLINE_VAL(reg) >> CTR_DLINE_SHIFT))
#define CTR_L1IP_SHIFT 14
+#define CTR_L1IP_WIDTH 2
#define CTR_L1IP_MASK (0x3 << CTR_L1IP_SHIFT)
#define CTR_L1IP_VAL(reg) ((reg) & CTR_L1IP_MASK)
-#define CTR_L1IP_VPIPT (0 << CTR_L1IP_SHIFT)
-#define CTR_L1IP_AIVIVT (1 << CTR_L1IP_SHIFT)
#define CTR_L1IP_VIPT (2 << CTR_L1IP_SHIFT)
#define CTR_L1IP_PIPT (3 << CTR_L1IP_SHIFT)
#define CTR_ILINE_SHIFT 0
+#define CTR_ILINE_WIDTH 4
#define CTR_ILINE_MASK (0xf << CTR_ILINE_SHIFT)
#define CTR_ILINE_VAL(reg) ((reg) & CTR_ILINE_MASK)
#define CTR_ILINE_SIZE(reg) (4 << (CTR_ILINE_VAL(reg) >> CTR_ILINE_SHIFT))
@@ -204,7 +394,7 @@
#define DAIF_I (1 << 1)
#define DAIF_F (1 << 0)
#define DAIF_ALL (DAIF_D | DAIF_A | DAIF_I | DAIF_F)
-#define DAIF_INTR (DAIF_I) /* All exceptions that pass */
+#define DAIF_INTR (DAIF_I | DAIF_F) /* All exceptions that pass */
/* through the intr framework */
/* DBGBCR<n>_EL1 - Debug Breakpoint Control Registers */
@@ -276,7 +466,6 @@
#define DCZID_BS_SIZE(reg) (((reg) & DCZID_BS_MASK) >> DCZID_BS_SHIFT)
/* DBGAUTHSTATUS_EL1 */
-#define DBGAUTHSTATUS_EL1 MRS_REG(DBGAUTHSTATUS_EL1)
#define DBGAUTHSTATUS_EL1_op0 2
#define DBGAUTHSTATUS_EL1_op1 0
#define DBGAUTHSTATUS_EL1_CRn 7
@@ -284,7 +473,6 @@
#define DBGAUTHSTATUS_EL1_op2 6
/* DBGCLAIMCLR_EL1 */
-#define DBGCLAIMCLR_EL1 MRS_REG(DBGCLAIMCLR_EL1)
#define DBGCLAIMCLR_EL1_op0 2
#define DBGCLAIMCLR_EL1_op1 0
#define DBGCLAIMCLR_EL1_CRn 7
@@ -292,7 +480,6 @@
#define DBGCLAIMCLR_EL1_op2 6
/* DBGCLAIMSET_EL1 */
-#define DBGCLAIMSET_EL1 MRS_REG(DBGCLAIMSET_EL1)
#define DBGCLAIMSET_EL1_op0 2
#define DBGCLAIMSET_EL1_op1 0
#define DBGCLAIMSET_EL1_CRn 7
@@ -300,13 +487,28 @@
#define DBGCLAIMSET_EL1_op2 6
/* DBGPRCR_EL1 */
-#define DBGPRCR_EL1 MRS_REG(DBGPRCR_EL1)
#define DBGPRCR_EL1_op0 2
#define DBGPRCR_EL1_op1 0
#define DBGPRCR_EL1_CRn 1
#define DBGPRCR_EL1_CRm 4
#define DBGPRCR_EL1_op2 4
+/* ELR_EL1 */
+#define ELR_EL1_REG MRS_REG_ALT_NAME(ELR_EL1)
+#define ELR_EL1_op0 3
+#define ELR_EL1_op1 0
+#define ELR_EL1_CRn 4
+#define ELR_EL1_CRm 0
+#define ELR_EL1_op2 1
+
+/* ELR_EL12 */
+#define ELR_EL12_REG MRS_REG_ALT_NAME(ELR_EL12)
+#define ELR_EL12_op0 3
+#define ELR_EL12_op1 5
+#define ELR_EL12_CRn 4
+#define ELR_EL12_CRm 0
+#define ELR_EL12_op2 1
+
/* ESR_ELx */
#define ESR_ELx_ISS_MASK 0x01ffffff
#define ISS_FP_TFV_SHIFT 23
@@ -365,12 +567,14 @@
#define ISS_MSR_REG_MASK \
(ISS_MSR_OP0_MASK | ISS_MSR_OP2_MASK | ISS_MSR_OP1_MASK | \
ISS_MSR_CRn_MASK | ISS_MSR_CRm_MASK)
+#define __ISS_MSR_REG(op0, op1, crn, crm, op2) \
+ (((op0) << ISS_MSR_OP0_SHIFT) | \
+ ((op1) << ISS_MSR_OP1_SHIFT) | \
+ ((crn) << ISS_MSR_CRn_SHIFT) | \
+ ((crm) << ISS_MSR_CRm_SHIFT) | \
+ ((op2) << ISS_MSR_OP2_SHIFT))
#define ISS_MSR_REG(reg) \
- (((reg ## _op0) << ISS_MSR_OP0_SHIFT) | \
- ((reg ## _op1) << ISS_MSR_OP1_SHIFT) | \
- ((reg ## _CRn) << ISS_MSR_CRn_SHIFT) | \
- ((reg ## _CRm) << ISS_MSR_CRm_SHIFT) | \
- ((reg ## _op2) << ISS_MSR_OP2_SHIFT))
+ __ISS_MSR_REG(reg##_op0, reg##_op1, reg##_CRn, reg##_CRm, reg##_op2)
#define ISS_DATA_ISV_SHIFT 24
#define ISS_DATA_ISV (0x01 << ISS_DATA_ISV_SHIFT)
@@ -447,6 +651,38 @@
#define EXCP_BRKPT_32 0x38 /* 32bits breakpoint */
#define EXCP_BRK 0x3c /* Breakpoint */
+/* ESR_EL1 */
+#define ESR_EL1_REG MRS_REG_ALT_NAME(ESR_EL1)
+#define ESR_EL1_op0 3
+#define ESR_EL1_op1 0
+#define ESR_EL1_CRn 5
+#define ESR_EL1_CRm 2
+#define ESR_EL1_op2 0
+
+/* ESR_EL12 */
+#define ESR_EL12_REG MRS_REG_ALT_NAME(ESR_EL12)
+#define ESR_EL12_op0 3
+#define ESR_EL12_op1 5
+#define ESR_EL12_CRn 5
+#define ESR_EL12_CRm 2
+#define ESR_EL12_op2 0
+
+/* FAR_EL1 */
+#define FAR_EL1_REG MRS_REG_ALT_NAME(FAR_EL1)
+#define FAR_EL1_op0 3
+#define FAR_EL1_op1 0
+#define FAR_EL1_CRn 6
+#define FAR_EL1_CRm 0
+#define FAR_EL1_op2 0
+
+/* FAR_EL12 */
+#define FAR_EL12_REG MRS_REG_ALT_NAME(FAR_EL12)
+#define FAR_EL12_op0 3
+#define FAR_EL12_op1 5
+#define FAR_EL12_CRn 6
+#define FAR_EL12_CRm 0
+#define FAR_EL12_op2 0
+
/* ICC_CTLR_EL1 */
#define ICC_CTLR_EL1_EOIMODE (1U << 1)
@@ -460,7 +696,6 @@
#define ICC_PMR_EL1_PRIO_MASK (0xFFUL)
/* ICC_SGI1R_EL1 */
-#define ICC_SGI1R_EL1 MRS_REG(ICC_SGI1R_EL1)
#define ICC_SGI1R_EL1_op0 3
#define ICC_SGI1R_EL1_op1 0
#define ICC_SGI1R_EL1_CRn 12
@@ -490,7 +725,8 @@
#define ICC_SRE_EL1_SRE (1U << 0)
/* ID_AA64AFR0_EL1 */
-#define ID_AA64AFR0_EL1 MRS_REG(ID_AA64AFR0_EL1)
+#define ID_AA64AFR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64AFR0_EL1)
+#define ID_AA64AFR0_EL1_ISS ISS_MSR_REG(ID_AA64AFR0_EL1)
#define ID_AA64AFR0_EL1_op0 3
#define ID_AA64AFR0_EL1_op1 0
#define ID_AA64AFR0_EL1_CRn 0
@@ -498,7 +734,8 @@
#define ID_AA64AFR0_EL1_op2 4
/* ID_AA64AFR1_EL1 */
-#define ID_AA64AFR1_EL1 MRS_REG(ID_AA64AFR1_EL1)
+#define ID_AA64AFR1_EL1_REG MRS_REG_ALT_NAME(ID_AA64AFR1_EL1)
+#define ID_AA64AFR1_EL1_ISS ISS_MSR_REG(ID_AA64AFR1_EL1)
#define ID_AA64AFR1_EL1_op0 3
#define ID_AA64AFR1_EL1_op1 0
#define ID_AA64AFR1_EL1_CRn 0
@@ -506,13 +743,15 @@
#define ID_AA64AFR1_EL1_op2 5
/* ID_AA64DFR0_EL1 */
-#define ID_AA64DFR0_EL1 MRS_REG(ID_AA64DFR0_EL1)
+#define ID_AA64DFR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64DFR0_EL1)
+#define ID_AA64DFR0_EL1_ISS ISS_MSR_REG(ID_AA64DFR0_EL1)
#define ID_AA64DFR0_EL1_op0 3
#define ID_AA64DFR0_EL1_op1 0
#define ID_AA64DFR0_EL1_CRn 0
#define ID_AA64DFR0_EL1_CRm 5
#define ID_AA64DFR0_EL1_op2 0
#define ID_AA64DFR0_DebugVer_SHIFT 0
+#define ID_AA64DFR0_DebugVer_WIDTH 4
#define ID_AA64DFR0_DebugVer_MASK (UL(0xf) << ID_AA64DFR0_DebugVer_SHIFT)
#define ID_AA64DFR0_DebugVer_VAL(x) ((x) & ID_AA64DFR0_DebugVer_MASK)
#define ID_AA64DFR0_DebugVer_8 (UL(0x6) << ID_AA64DFR0_DebugVer_SHIFT)
@@ -520,12 +759,15 @@
#define ID_AA64DFR0_DebugVer_8_2 (UL(0x8) << ID_AA64DFR0_DebugVer_SHIFT)
#define ID_AA64DFR0_DebugVer_8_4 (UL(0x9) << ID_AA64DFR0_DebugVer_SHIFT)
#define ID_AA64DFR0_DebugVer_8_8 (UL(0xa) << ID_AA64DFR0_DebugVer_SHIFT)
+#define ID_AA64DFR0_DebugVer_8_9 (UL(0xb) << ID_AA64DFR0_DebugVer_SHIFT)
#define ID_AA64DFR0_TraceVer_SHIFT 4
+#define ID_AA64DFR0_TraceVer_WIDTH 4
#define ID_AA64DFR0_TraceVer_MASK (UL(0xf) << ID_AA64DFR0_TraceVer_SHIFT)
#define ID_AA64DFR0_TraceVer_VAL(x) ((x) & ID_AA64DFR0_TraceVer_MASK)
#define ID_AA64DFR0_TraceVer_NONE (UL(0x0) << ID_AA64DFR0_TraceVer_SHIFT)
#define ID_AA64DFR0_TraceVer_IMPL (UL(0x1) << ID_AA64DFR0_TraceVer_SHIFT)
#define ID_AA64DFR0_PMUVer_SHIFT 8
+#define ID_AA64DFR0_PMUVer_WIDTH 4
#define ID_AA64DFR0_PMUVer_MASK (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT)
#define ID_AA64DFR0_PMUVer_VAL(x) ((x) & ID_AA64DFR0_PMUVer_MASK)
#define ID_AA64DFR0_PMUVer_NONE (UL(0x0) << ID_AA64DFR0_PMUVer_SHIFT)
@@ -535,25 +777,31 @@
#define ID_AA64DFR0_PMUVer_3_5 (UL(0x6) << ID_AA64DFR0_PMUVer_SHIFT)
#define ID_AA64DFR0_PMUVer_3_7 (UL(0x7) << ID_AA64DFR0_PMUVer_SHIFT)
#define ID_AA64DFR0_PMUVer_3_8 (UL(0x8) << ID_AA64DFR0_PMUVer_SHIFT)
+#define ID_AA64DFR0_PMUVer_3_9 (UL(0x9) << ID_AA64DFR0_PMUVer_SHIFT)
#define ID_AA64DFR0_PMUVer_IMPL (UL(0xf) << ID_AA64DFR0_PMUVer_SHIFT)
#define ID_AA64DFR0_BRPs_SHIFT 12
+#define ID_AA64DFR0_BRPs_WIDTH 4
#define ID_AA64DFR0_BRPs_MASK (UL(0xf) << ID_AA64DFR0_BRPs_SHIFT)
#define ID_AA64DFR0_BRPs_VAL(x) \
((((x) >> ID_AA64DFR0_BRPs_SHIFT) & 0xf) + 1)
#define ID_AA64DFR0_PMSS_SHIFT 16
+#define ID_AA64DFR0_PMSS_WIDTH 4
#define ID_AA64DFR0_PMSS_MASK (UL(0xf) << ID_AA64DFR0_PMSS_SHIFT)
#define ID_AA64DFR0_PMSS_VAL(x) ((x) & ID_AA64DFR0_PMSS_MASK)
#define ID_AA64DFR0_PMSS_NONE (UL(0x0) << ID_AA64DFR0_PMSS_SHIFT)
#define ID_AA64DFR0_PMSS_IMPL (UL(0x1) << ID_AA64DFR0_PMSS_SHIFT)
#define ID_AA64DFR0_WRPs_SHIFT 20
+#define ID_AA64DFR0_WRPs_WIDTH 4
#define ID_AA64DFR0_WRPs_MASK (UL(0xf) << ID_AA64DFR0_WRPs_SHIFT)
#define ID_AA64DFR0_WRPs_VAL(x) \
((((x) >> ID_AA64DFR0_WRPs_SHIFT) & 0xf) + 1)
#define ID_AA64DFR0_CTX_CMPs_SHIFT 28
+#define ID_AA64DFR0_CTX_CMPs_WIDTH 4
#define ID_AA64DFR0_CTX_CMPs_MASK (UL(0xf) << ID_AA64DFR0_CTX_CMPs_SHIFT)
#define ID_AA64DFR0_CTX_CMPs_VAL(x) \
((((x) >> ID_AA64DFR0_CTX_CMPs_SHIFT) & 0xf) + 1)
#define ID_AA64DFR0_PMSVer_SHIFT 32
+#define ID_AA64DFR0_PMSVer_WIDTH 4
#define ID_AA64DFR0_PMSVer_MASK (UL(0xf) << ID_AA64DFR0_PMSVer_SHIFT)
#define ID_AA64DFR0_PMSVer_VAL(x) ((x) & ID_AA64DFR0_PMSVer_MASK)
#define ID_AA64DFR0_PMSVer_NONE (UL(0x0) << ID_AA64DFR0_PMSVer_SHIFT)
@@ -561,147 +809,192 @@
#define ID_AA64DFR0_PMSVer_SPE_1_1 (UL(0x2) << ID_AA64DFR0_PMSVer_SHIFT)
#define ID_AA64DFR0_PMSVer_SPE_1_2 (UL(0x3) << ID_AA64DFR0_PMSVer_SHIFT)
#define ID_AA64DFR0_PMSVer_SPE_1_3 (UL(0x4) << ID_AA64DFR0_PMSVer_SHIFT)
+#define ID_AA64DFR0_PMSVer_SPE_1_4 (UL(0x5) << ID_AA64DFR0_PMSVer_SHIFT)
#define ID_AA64DFR0_DoubleLock_SHIFT 36
+#define ID_AA64DFR0_DoubleLock_WIDTH 4
#define ID_AA64DFR0_DoubleLock_MASK (UL(0xf) << ID_AA64DFR0_DoubleLock_SHIFT)
#define ID_AA64DFR0_DoubleLock_VAL(x) ((x) & ID_AA64DFR0_DoubleLock_MASK)
#define ID_AA64DFR0_DoubleLock_IMPL (UL(0x0) << ID_AA64DFR0_DoubleLock_SHIFT)
#define ID_AA64DFR0_DoubleLock_NONE (UL(0xf) << ID_AA64DFR0_DoubleLock_SHIFT)
#define ID_AA64DFR0_TraceFilt_SHIFT 40
+#define ID_AA64DFR0_TraceFilt_WIDTH 4
#define ID_AA64DFR0_TraceFilt_MASK (UL(0xf) << ID_AA64DFR0_TraceFilt_SHIFT)
#define ID_AA64DFR0_TraceFilt_VAL(x) ((x) & ID_AA64DFR0_TraceFilt_MASK)
#define ID_AA64DFR0_TraceFilt_NONE (UL(0x0) << ID_AA64DFR0_TraceFilt_SHIFT)
#define ID_AA64DFR0_TraceFilt_8_4 (UL(0x1) << ID_AA64DFR0_TraceFilt_SHIFT)
#define ID_AA64DFR0_TraceBuffer_SHIFT 44
+#define ID_AA64DFR0_TraceBuffer_WIDTH 4
#define ID_AA64DFR0_TraceBuffer_MASK (UL(0xf) << ID_AA64DFR0_TraceBuffer_SHIFT)
#define ID_AA64DFR0_TraceBuffer_VAL(x) ((x) & ID_AA64DFR0_TraceBuffer_MASK)
#define ID_AA64DFR0_TraceBuffer_NONE (UL(0x0) << ID_AA64DFR0_TraceBuffer_SHIFT)
#define ID_AA64DFR0_TraceBuffer_IMPL (UL(0x1) << ID_AA64DFR0_TraceBuffer_SHIFT)
#define ID_AA64DFR0_MTPMU_SHIFT 48
+#define ID_AA64DFR0_MTPMU_WIDTH 4
#define ID_AA64DFR0_MTPMU_MASK (UL(0xf) << ID_AA64DFR0_MTPMU_SHIFT)
#define ID_AA64DFR0_MTPMU_VAL(x) ((x) & ID_AA64DFR0_MTPMU_MASK)
#define ID_AA64DFR0_MTPMU_NONE (UL(0x0) << ID_AA64DFR0_MTPMU_SHIFT)
#define ID_AA64DFR0_MTPMU_IMPL (UL(0x1) << ID_AA64DFR0_MTPMU_SHIFT)
#define ID_AA64DFR0_MTPMU_NONE_MT_RES0 (UL(0xf) << ID_AA64DFR0_MTPMU_SHIFT)
#define ID_AA64DFR0_BRBE_SHIFT 52
+#define ID_AA64DFR0_BRBE_WIDTH 4
#define ID_AA64DFR0_BRBE_MASK (UL(0xf) << ID_AA64DFR0_BRBE_SHIFT)
#define ID_AA64DFR0_BRBE_VAL(x) ((x) & ID_AA64DFR0_BRBE_MASK)
#define ID_AA64DFR0_BRBE_NONE (UL(0x0) << ID_AA64DFR0_BRBE_SHIFT)
#define ID_AA64DFR0_BRBE_IMPL (UL(0x1) << ID_AA64DFR0_BRBE_SHIFT)
#define ID_AA64DFR0_BRBE_EL3 (UL(0x2) << ID_AA64DFR0_BRBE_SHIFT)
#define ID_AA64DFR0_HPMN0_SHIFT 60
+#define ID_AA64DFR0_HPMN0_WIDTH 4
#define ID_AA64DFR0_HPMN0_MASK (UL(0xf) << ID_AA64DFR0_HPMN0_SHIFT)
#define ID_AA64DFR0_HPMN0_VAL(x) ((x) & ID_AA64DFR0_HPMN0_MASK)
#define ID_AA64DFR0_HPMN0_CONSTR (UL(0x0) << ID_AA64DFR0_HPMN0_SHIFT)
#define ID_AA64DFR0_HPMN0_DEFINED (UL(0x1) << ID_AA64DFR0_HPMN0_SHIFT)
/* ID_AA64DFR1_EL1 */
-#define ID_AA64DFR1_EL1 MRS_REG(ID_AA64DFR1_EL1)
+#define ID_AA64DFR1_EL1_REG MRS_REG_ALT_NAME(ID_AA64DFR1_EL1)
+#define ID_AA64DFR1_EL1_ISS ISS_MSR_REG(ID_AA64DFR1_EL1)
#define ID_AA64DFR1_EL1_op0 3
#define ID_AA64DFR1_EL1_op1 0
#define ID_AA64DFR1_EL1_CRn 0
#define ID_AA64DFR1_EL1_CRm 5
#define ID_AA64DFR1_EL1_op2 1
+#define ID_AA64DFR1_SPMU_SHIFT 32
+#define ID_AA64DFR1_SPMU_WIDTH 4
+#define ID_AA64DFR1_SPMU_MASK (UL(0xf) << ID_AA64DFR1_SPMU_SHIFT)
+#define ID_AA64DFR1_SPMU_VAL(x) ((x) & ID_AA64DFR1_SPMU_MASK)
+#define ID_AA64DFR1_SPMU_NONE (UL(0x0) << ID_AA64DFR1_SPMU_SHIFT)
+#define ID_AA64DFR1_SPMU_IMPL (UL(0x1) << ID_AA64DFR1_SPMU_SHIFT)
+#define ID_AA64DFR1_PMICNTR_SHIFT 36
+#define ID_AA64DFR1_PMICNTR_WIDTH 4
+#define ID_AA64DFR1_PMICNTR_MASK (UL(0xf) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define ID_AA64DFR1_PMICNTR_VAL(x) ((x) & ID_AA64DFR1_PMICNTR_MASK)
+#define ID_AA64DFR1_PMICNTR_NONE (UL(0x0) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define ID_AA64DFR1_PMICNTR_IMPL (UL(0x1) << ID_AA64DFR1_PMICNTR_SHIFT)
+#define ID_AA64DFR1_DPFZS_SHIFT 52
+#define ID_AA64DFR1_DPFZS_WIDTH 4
+#define ID_AA64DFR1_DPFZS_MASK (UL(0xf) << ID_AA64DFR1_DPFZS_SHIFT)
+#define ID_AA64DFR1_DPFZS_VAL(x) ((x) & ID_AA64DFR1_DPFZS_MASK)
+#define ID_AA64DFR1_DPFZS_NONE (UL(0x0) << ID_AA64DFR1_DPFZS_SHIFT)
+#define ID_AA64DFR1_DPFZS_IMPL (UL(0x1) << ID_AA64DFR1_DPFZS_SHIFT)
/* ID_AA64ISAR0_EL1 */
-#define ID_AA64ISAR0_EL1 MRS_REG(ID_AA64ISAR0_EL1)
+#define ID_AA64ISAR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64ISAR0_EL1)
+#define ID_AA64ISAR0_EL1_ISS ISS_MSR_REG(ID_AA64ISAR0_EL1)
#define ID_AA64ISAR0_EL1_op0 3
#define ID_AA64ISAR0_EL1_op1 0
#define ID_AA64ISAR0_EL1_CRn 0
#define ID_AA64ISAR0_EL1_CRm 6
#define ID_AA64ISAR0_EL1_op2 0
#define ID_AA64ISAR0_AES_SHIFT 4
+#define ID_AA64ISAR0_AES_WIDTH 4
#define ID_AA64ISAR0_AES_MASK (UL(0xf) << ID_AA64ISAR0_AES_SHIFT)
#define ID_AA64ISAR0_AES_VAL(x) ((x) & ID_AA64ISAR0_AES_MASK)
#define ID_AA64ISAR0_AES_NONE (UL(0x0) << ID_AA64ISAR0_AES_SHIFT)
#define ID_AA64ISAR0_AES_BASE (UL(0x1) << ID_AA64ISAR0_AES_SHIFT)
#define ID_AA64ISAR0_AES_PMULL (UL(0x2) << ID_AA64ISAR0_AES_SHIFT)
#define ID_AA64ISAR0_SHA1_SHIFT 8
+#define ID_AA64ISAR0_SHA1_WIDTH 4
#define ID_AA64ISAR0_SHA1_MASK (UL(0xf) << ID_AA64ISAR0_SHA1_SHIFT)
#define ID_AA64ISAR0_SHA1_VAL(x) ((x) & ID_AA64ISAR0_SHA1_MASK)
#define ID_AA64ISAR0_SHA1_NONE (UL(0x0) << ID_AA64ISAR0_SHA1_SHIFT)
#define ID_AA64ISAR0_SHA1_BASE (UL(0x1) << ID_AA64ISAR0_SHA1_SHIFT)
#define ID_AA64ISAR0_SHA2_SHIFT 12
+#define ID_AA64ISAR0_SHA2_WIDTH 4
#define ID_AA64ISAR0_SHA2_MASK (UL(0xf) << ID_AA64ISAR0_SHA2_SHIFT)
#define ID_AA64ISAR0_SHA2_VAL(x) ((x) & ID_AA64ISAR0_SHA2_MASK)
#define ID_AA64ISAR0_SHA2_NONE (UL(0x0) << ID_AA64ISAR0_SHA2_SHIFT)
#define ID_AA64ISAR0_SHA2_BASE (UL(0x1) << ID_AA64ISAR0_SHA2_SHIFT)
#define ID_AA64ISAR0_SHA2_512 (UL(0x2) << ID_AA64ISAR0_SHA2_SHIFT)
#define ID_AA64ISAR0_CRC32_SHIFT 16
+#define ID_AA64ISAR0_CRC32_WIDTH 4
#define ID_AA64ISAR0_CRC32_MASK (UL(0xf) << ID_AA64ISAR0_CRC32_SHIFT)
#define ID_AA64ISAR0_CRC32_VAL(x) ((x) & ID_AA64ISAR0_CRC32_MASK)
#define ID_AA64ISAR0_CRC32_NONE (UL(0x0) << ID_AA64ISAR0_CRC32_SHIFT)
#define ID_AA64ISAR0_CRC32_BASE (UL(0x1) << ID_AA64ISAR0_CRC32_SHIFT)
#define ID_AA64ISAR0_Atomic_SHIFT 20
+#define ID_AA64ISAR0_Atomic_WIDTH 4
#define ID_AA64ISAR0_Atomic_MASK (UL(0xf) << ID_AA64ISAR0_Atomic_SHIFT)
#define ID_AA64ISAR0_Atomic_VAL(x) ((x) & ID_AA64ISAR0_Atomic_MASK)
#define ID_AA64ISAR0_Atomic_NONE (UL(0x0) << ID_AA64ISAR0_Atomic_SHIFT)
#define ID_AA64ISAR0_Atomic_IMPL (UL(0x2) << ID_AA64ISAR0_Atomic_SHIFT)
#define ID_AA64ISAR0_TME_SHIFT 24
+#define ID_AA64ISAR0_TME_WIDTH 4
#define ID_AA64ISAR0_TME_MASK (UL(0xf) << ID_AA64ISAR0_TME_SHIFT)
#define ID_AA64ISAR0_TME_NONE (UL(0x0) << ID_AA64ISAR0_TME_SHIFT)
#define ID_AA64ISAR0_TME_IMPL (UL(0x1) << ID_AA64ISAR0_TME_SHIFT)
#define ID_AA64ISAR0_RDM_SHIFT 28
+#define ID_AA64ISAR0_RDM_WIDTH 4
#define ID_AA64ISAR0_RDM_MASK (UL(0xf) << ID_AA64ISAR0_RDM_SHIFT)
#define ID_AA64ISAR0_RDM_VAL(x) ((x) & ID_AA64ISAR0_RDM_MASK)
#define ID_AA64ISAR0_RDM_NONE (UL(0x0) << ID_AA64ISAR0_RDM_SHIFT)
#define ID_AA64ISAR0_RDM_IMPL (UL(0x1) << ID_AA64ISAR0_RDM_SHIFT)
#define ID_AA64ISAR0_SHA3_SHIFT 32
+#define ID_AA64ISAR0_SHA3_WIDTH 4
#define ID_AA64ISAR0_SHA3_MASK (UL(0xf) << ID_AA64ISAR0_SHA3_SHIFT)
#define ID_AA64ISAR0_SHA3_VAL(x) ((x) & ID_AA64ISAR0_SHA3_MASK)
#define ID_AA64ISAR0_SHA3_NONE (UL(0x0) << ID_AA64ISAR0_SHA3_SHIFT)
#define ID_AA64ISAR0_SHA3_IMPL (UL(0x1) << ID_AA64ISAR0_SHA3_SHIFT)
#define ID_AA64ISAR0_SM3_SHIFT 36
+#define ID_AA64ISAR0_SM3_WIDTH 4
#define ID_AA64ISAR0_SM3_MASK (UL(0xf) << ID_AA64ISAR0_SM3_SHIFT)
#define ID_AA64ISAR0_SM3_VAL(x) ((x) & ID_AA64ISAR0_SM3_MASK)
#define ID_AA64ISAR0_SM3_NONE (UL(0x0) << ID_AA64ISAR0_SM3_SHIFT)
#define ID_AA64ISAR0_SM3_IMPL (UL(0x1) << ID_AA64ISAR0_SM3_SHIFT)
#define ID_AA64ISAR0_SM4_SHIFT 40
+#define ID_AA64ISAR0_SM4_WIDTH 4
#define ID_AA64ISAR0_SM4_MASK (UL(0xf) << ID_AA64ISAR0_SM4_SHIFT)
#define ID_AA64ISAR0_SM4_VAL(x) ((x) & ID_AA64ISAR0_SM4_MASK)
#define ID_AA64ISAR0_SM4_NONE (UL(0x0) << ID_AA64ISAR0_SM4_SHIFT)
#define ID_AA64ISAR0_SM4_IMPL (UL(0x1) << ID_AA64ISAR0_SM4_SHIFT)
#define ID_AA64ISAR0_DP_SHIFT 44
+#define ID_AA64ISAR0_DP_WIDTH 4
#define ID_AA64ISAR0_DP_MASK (UL(0xf) << ID_AA64ISAR0_DP_SHIFT)
#define ID_AA64ISAR0_DP_VAL(x) ((x) & ID_AA64ISAR0_DP_MASK)
#define ID_AA64ISAR0_DP_NONE (UL(0x0) << ID_AA64ISAR0_DP_SHIFT)
#define ID_AA64ISAR0_DP_IMPL (UL(0x1) << ID_AA64ISAR0_DP_SHIFT)
#define ID_AA64ISAR0_FHM_SHIFT 48
+#define ID_AA64ISAR0_FHM_WIDTH 4
#define ID_AA64ISAR0_FHM_MASK (UL(0xf) << ID_AA64ISAR0_FHM_SHIFT)
#define ID_AA64ISAR0_FHM_VAL(x) ((x) & ID_AA64ISAR0_FHM_MASK)
#define ID_AA64ISAR0_FHM_NONE (UL(0x0) << ID_AA64ISAR0_FHM_SHIFT)
#define ID_AA64ISAR0_FHM_IMPL (UL(0x1) << ID_AA64ISAR0_FHM_SHIFT)
#define ID_AA64ISAR0_TS_SHIFT 52
+#define ID_AA64ISAR0_TS_WIDTH 4
#define ID_AA64ISAR0_TS_MASK (UL(0xf) << ID_AA64ISAR0_TS_SHIFT)
#define ID_AA64ISAR0_TS_VAL(x) ((x) & ID_AA64ISAR0_TS_MASK)
#define ID_AA64ISAR0_TS_NONE (UL(0x0) << ID_AA64ISAR0_TS_SHIFT)
#define ID_AA64ISAR0_TS_CondM_8_4 (UL(0x1) << ID_AA64ISAR0_TS_SHIFT)
#define ID_AA64ISAR0_TS_CondM_8_5 (UL(0x2) << ID_AA64ISAR0_TS_SHIFT)
#define ID_AA64ISAR0_TLB_SHIFT 56
+#define ID_AA64ISAR0_TLB_WIDTH 4
#define ID_AA64ISAR0_TLB_MASK (UL(0xf) << ID_AA64ISAR0_TLB_SHIFT)
#define ID_AA64ISAR0_TLB_VAL(x) ((x) & ID_AA64ISAR0_TLB_MASK)
#define ID_AA64ISAR0_TLB_NONE (UL(0x0) << ID_AA64ISAR0_TLB_SHIFT)
#define ID_AA64ISAR0_TLB_TLBIOS (UL(0x1) << ID_AA64ISAR0_TLB_SHIFT)
#define ID_AA64ISAR0_TLB_TLBIOSR (UL(0x2) << ID_AA64ISAR0_TLB_SHIFT)
#define ID_AA64ISAR0_RNDR_SHIFT 60
+#define ID_AA64ISAR0_RNDR_WIDTH 4
#define ID_AA64ISAR0_RNDR_MASK (UL(0xf) << ID_AA64ISAR0_RNDR_SHIFT)
#define ID_AA64ISAR0_RNDR_VAL(x) ((x) & ID_AA64ISAR0_RNDR_MASK)
#define ID_AA64ISAR0_RNDR_NONE (UL(0x0) << ID_AA64ISAR0_RNDR_SHIFT)
#define ID_AA64ISAR0_RNDR_IMPL (UL(0x1) << ID_AA64ISAR0_RNDR_SHIFT)
/* ID_AA64ISAR1_EL1 */
-#define ID_AA64ISAR1_EL1 MRS_REG(ID_AA64ISAR1_EL1)
+#define ID_AA64ISAR1_EL1_REG MRS_REG_ALT_NAME(ID_AA64ISAR1_EL1)
+#define ID_AA64ISAR1_EL1_ISS ISS_MSR_REG(ID_AA64ISAR1_EL1)
#define ID_AA64ISAR1_EL1_op0 3
#define ID_AA64ISAR1_EL1_op1 0
#define ID_AA64ISAR1_EL1_CRn 0
#define ID_AA64ISAR1_EL1_CRm 6
#define ID_AA64ISAR1_EL1_op2 1
#define ID_AA64ISAR1_DPB_SHIFT 0
+#define ID_AA64ISAR1_DPB_WIDTH 4
#define ID_AA64ISAR1_DPB_MASK (UL(0xf) << ID_AA64ISAR1_DPB_SHIFT)
#define ID_AA64ISAR1_DPB_VAL(x) ((x) & ID_AA64ISAR1_DPB_MASK)
#define ID_AA64ISAR1_DPB_NONE (UL(0x0) << ID_AA64ISAR1_DPB_SHIFT)
#define ID_AA64ISAR1_DPB_DCCVAP (UL(0x1) << ID_AA64ISAR1_DPB_SHIFT)
#define ID_AA64ISAR1_DPB_DCCVADP (UL(0x2) << ID_AA64ISAR1_DPB_SHIFT)
#define ID_AA64ISAR1_APA_SHIFT 4
+#define ID_AA64ISAR1_APA_WIDTH 4
#define ID_AA64ISAR1_APA_MASK (UL(0xf) << ID_AA64ISAR1_APA_SHIFT)
#define ID_AA64ISAR1_APA_VAL(x) ((x) & ID_AA64ISAR1_APA_MASK)
#define ID_AA64ISAR1_APA_NONE (UL(0x0) << ID_AA64ISAR1_APA_SHIFT)
@@ -711,6 +1004,7 @@
#define ID_AA64ISAR1_APA_FPAC (UL(0x4) << ID_AA64ISAR1_APA_SHIFT)
#define ID_AA64ISAR1_APA_FPAC_COMBINED (UL(0x5) << ID_AA64ISAR1_APA_SHIFT)
#define ID_AA64ISAR1_API_SHIFT 8
+#define ID_AA64ISAR1_API_WIDTH 4
#define ID_AA64ISAR1_API_MASK (UL(0xf) << ID_AA64ISAR1_API_SHIFT)
#define ID_AA64ISAR1_API_VAL(x) ((x) & ID_AA64ISAR1_API_MASK)
#define ID_AA64ISAR1_API_NONE (UL(0x0) << ID_AA64ISAR1_API_SHIFT)
@@ -720,68 +1014,82 @@
#define ID_AA64ISAR1_API_FPAC (UL(0x4) << ID_AA64ISAR1_API_SHIFT)
#define ID_AA64ISAR1_API_FPAC_COMBINED (UL(0x5) << ID_AA64ISAR1_API_SHIFT)
#define ID_AA64ISAR1_JSCVT_SHIFT 12
+#define ID_AA64ISAR1_JSCVT_WIDTH 4
#define ID_AA64ISAR1_JSCVT_MASK (UL(0xf) << ID_AA64ISAR1_JSCVT_SHIFT)
#define ID_AA64ISAR1_JSCVT_VAL(x) ((x) & ID_AA64ISAR1_JSCVT_MASK)
#define ID_AA64ISAR1_JSCVT_NONE (UL(0x0) << ID_AA64ISAR1_JSCVT_SHIFT)
#define ID_AA64ISAR1_JSCVT_IMPL (UL(0x1) << ID_AA64ISAR1_JSCVT_SHIFT)
#define ID_AA64ISAR1_FCMA_SHIFT 16
+#define ID_AA64ISAR1_FCMA_WIDTH 4
#define ID_AA64ISAR1_FCMA_MASK (UL(0xf) << ID_AA64ISAR1_FCMA_SHIFT)
#define ID_AA64ISAR1_FCMA_VAL(x) ((x) & ID_AA64ISAR1_FCMA_MASK)
#define ID_AA64ISAR1_FCMA_NONE (UL(0x0) << ID_AA64ISAR1_FCMA_SHIFT)
#define ID_AA64ISAR1_FCMA_IMPL (UL(0x1) << ID_AA64ISAR1_FCMA_SHIFT)
#define ID_AA64ISAR1_LRCPC_SHIFT 20
+#define ID_AA64ISAR1_LRCPC_WIDTH 4
#define ID_AA64ISAR1_LRCPC_MASK (UL(0xf) << ID_AA64ISAR1_LRCPC_SHIFT)
#define ID_AA64ISAR1_LRCPC_VAL(x) ((x) & ID_AA64ISAR1_LRCPC_MASK)
#define ID_AA64ISAR1_LRCPC_NONE (UL(0x0) << ID_AA64ISAR1_LRCPC_SHIFT)
#define ID_AA64ISAR1_LRCPC_RCPC_8_3 (UL(0x1) << ID_AA64ISAR1_LRCPC_SHIFT)
#define ID_AA64ISAR1_LRCPC_RCPC_8_4 (UL(0x2) << ID_AA64ISAR1_LRCPC_SHIFT)
#define ID_AA64ISAR1_GPA_SHIFT 24
+#define ID_AA64ISAR1_GPA_WIDTH 4
#define ID_AA64ISAR1_GPA_MASK (UL(0xf) << ID_AA64ISAR1_GPA_SHIFT)
#define ID_AA64ISAR1_GPA_VAL(x) ((x) & ID_AA64ISAR1_GPA_MASK)
#define ID_AA64ISAR1_GPA_NONE (UL(0x0) << ID_AA64ISAR1_GPA_SHIFT)
#define ID_AA64ISAR1_GPA_IMPL (UL(0x1) << ID_AA64ISAR1_GPA_SHIFT)
#define ID_AA64ISAR1_GPI_SHIFT 28
+#define ID_AA64ISAR1_GPI_WIDTH 4
#define ID_AA64ISAR1_GPI_MASK (UL(0xf) << ID_AA64ISAR1_GPI_SHIFT)
#define ID_AA64ISAR1_GPI_VAL(x) ((x) & ID_AA64ISAR1_GPI_MASK)
#define ID_AA64ISAR1_GPI_NONE (UL(0x0) << ID_AA64ISAR1_GPI_SHIFT)
#define ID_AA64ISAR1_GPI_IMPL (UL(0x1) << ID_AA64ISAR1_GPI_SHIFT)
#define ID_AA64ISAR1_FRINTTS_SHIFT 32
+#define ID_AA64ISAR1_FRINTTS_WIDTH 4
#define ID_AA64ISAR1_FRINTTS_MASK (UL(0xf) << ID_AA64ISAR1_FRINTTS_SHIFT)
#define ID_AA64ISAR1_FRINTTS_VAL(x) ((x) & ID_AA64ISAR1_FRINTTS_MASK)
#define ID_AA64ISAR1_FRINTTS_NONE (UL(0x0) << ID_AA64ISAR1_FRINTTS_SHIFT)
#define ID_AA64ISAR1_FRINTTS_IMPL (UL(0x1) << ID_AA64ISAR1_FRINTTS_SHIFT)
#define ID_AA64ISAR1_SB_SHIFT 36
+#define ID_AA64ISAR1_SB_WIDTH 4
#define ID_AA64ISAR1_SB_MASK (UL(0xf) << ID_AA64ISAR1_SB_SHIFT)
#define ID_AA64ISAR1_SB_VAL(x) ((x) & ID_AA64ISAR1_SB_MASK)
#define ID_AA64ISAR1_SB_NONE (UL(0x0) << ID_AA64ISAR1_SB_SHIFT)
#define ID_AA64ISAR1_SB_IMPL (UL(0x1) << ID_AA64ISAR1_SB_SHIFT)
#define ID_AA64ISAR1_SPECRES_SHIFT 40
+#define ID_AA64ISAR1_SPECRES_WIDTH 4
#define ID_AA64ISAR1_SPECRES_MASK (UL(0xf) << ID_AA64ISAR1_SPECRES_SHIFT)
#define ID_AA64ISAR1_SPECRES_VAL(x) ((x) & ID_AA64ISAR1_SPECRES_MASK)
#define ID_AA64ISAR1_SPECRES_NONE (UL(0x0) << ID_AA64ISAR1_SPECRES_SHIFT)
-#define ID_AA64ISAR1_SPECRES_IMPL (UL(0x1) << ID_AA64ISAR1_SPECRES_SHIFT)
+#define ID_AA64ISAR1_SPECRES_8_5 (UL(0x1) << ID_AA64ISAR1_SPECRES_SHIFT)
+#define ID_AA64ISAR1_SPECRES_8_9 (UL(0x2) << ID_AA64ISAR1_SPECRES_SHIFT)
#define ID_AA64ISAR1_BF16_SHIFT 44
+#define ID_AA64ISAR1_BF16_WIDTH 4
#define ID_AA64ISAR1_BF16_MASK (UL(0xf) << ID_AA64ISAR1_BF16_SHIFT)
#define ID_AA64ISAR1_BF16_VAL(x) ((x) & ID_AA64ISAR1_BF16_MASK)
#define ID_AA64ISAR1_BF16_NONE (UL(0x0) << ID_AA64ISAR1_BF16_SHIFT)
#define ID_AA64ISAR1_BF16_IMPL (UL(0x1) << ID_AA64ISAR1_BF16_SHIFT)
#define ID_AA64ISAR1_BF16_EBF (UL(0x2) << ID_AA64ISAR1_BF16_SHIFT)
#define ID_AA64ISAR1_DGH_SHIFT 48
+#define ID_AA64ISAR1_DGH_WIDTH 4
#define ID_AA64ISAR1_DGH_MASK (UL(0xf) << ID_AA64ISAR1_DGH_SHIFT)
#define ID_AA64ISAR1_DGH_VAL(x) ((x) & ID_AA64ISAR1_DGH_MASK)
#define ID_AA64ISAR1_DGH_NONE (UL(0x0) << ID_AA64ISAR1_DGH_SHIFT)
#define ID_AA64ISAR1_DGH_IMPL (UL(0x1) << ID_AA64ISAR1_DGH_SHIFT)
#define ID_AA64ISAR1_I8MM_SHIFT 52
+#define ID_AA64ISAR1_I8MM_WIDTH 4
#define ID_AA64ISAR1_I8MM_MASK (UL(0xf) << ID_AA64ISAR1_I8MM_SHIFT)
#define ID_AA64ISAR1_I8MM_VAL(x) ((x) & ID_AA64ISAR1_I8MM_MASK)
#define ID_AA64ISAR1_I8MM_NONE (UL(0x0) << ID_AA64ISAR1_I8MM_SHIFT)
#define ID_AA64ISAR1_I8MM_IMPL (UL(0x1) << ID_AA64ISAR1_I8MM_SHIFT)
#define ID_AA64ISAR1_XS_SHIFT 56
+#define ID_AA64ISAR1_XS_WIDTH 4
#define ID_AA64ISAR1_XS_MASK (UL(0xf) << ID_AA64ISAR1_XS_SHIFT)
#define ID_AA64ISAR1_XS_VAL(x) ((x) & ID_AA64ISAR1_XS_MASK)
#define ID_AA64ISAR1_XS_NONE (UL(0x0) << ID_AA64ISAR1_XS_SHIFT)
#define ID_AA64ISAR1_XS_IMPL (UL(0x1) << ID_AA64ISAR1_XS_SHIFT)
#define ID_AA64ISAR1_LS64_SHIFT 60
+#define ID_AA64ISAR1_LS64_WIDTH 4
#define ID_AA64ISAR1_LS64_MASK (UL(0xf) << ID_AA64ISAR1_LS64_SHIFT)
#define ID_AA64ISAR1_LS64_VAL(x) ((x) & ID_AA64ISAR1_LS64_MASK)
#define ID_AA64ISAR1_LS64_NONE (UL(0x0) << ID_AA64ISAR1_LS64_SHIFT)
@@ -790,28 +1098,33 @@
#define ID_AA64ISAR1_LS64_ACCDATA (UL(0x3) << ID_AA64ISAR1_LS64_SHIFT)
/* ID_AA64ISAR2_EL1 */
-#define ID_AA64ISAR2_EL1 MRS_REG(ID_AA64ISAR2_EL1)
+#define ID_AA64ISAR2_EL1_REG MRS_REG_ALT_NAME(ID_AA64ISAR2_EL1)
+#define ID_AA64ISAR2_EL1_ISS ISS_MSR_REG(ID_AA64ISAR2_EL1)
#define ID_AA64ISAR2_EL1_op0 3
#define ID_AA64ISAR2_EL1_op1 0
#define ID_AA64ISAR2_EL1_CRn 0
#define ID_AA64ISAR2_EL1_CRm 6
#define ID_AA64ISAR2_EL1_op2 2
#define ID_AA64ISAR2_WFxT_SHIFT 0
+#define ID_AA64ISAR2_WFxT_WIDTH 4
#define ID_AA64ISAR2_WFxT_MASK (UL(0xf) << ID_AA64ISAR2_WFxT_SHIFT)
#define ID_AA64ISAR2_WFxT_VAL(x) ((x) & ID_AA64ISAR2_WFxT_MASK)
#define ID_AA64ISAR2_WFxT_NONE (UL(0x0) << ID_AA64ISAR2_WFxT_SHIFT)
-#define ID_AA64ISAR2_WFxT_IMPL (UL(0x1) << ID_AA64ISAR2_WFxT_SHIFT)
+#define ID_AA64ISAR2_WFxT_IMPL (UL(0x2) << ID_AA64ISAR2_WFxT_SHIFT)
#define ID_AA64ISAR2_RPRES_SHIFT 4
+#define ID_AA64ISAR2_RPRES_WIDTH 4
#define ID_AA64ISAR2_RPRES_MASK (UL(0xf) << ID_AA64ISAR2_RPRES_SHIFT)
#define ID_AA64ISAR2_RPRES_VAL(x) ((x) & ID_AA64ISAR2_RPRES_MASK)
#define ID_AA64ISAR2_RPRES_NONE (UL(0x0) << ID_AA64ISAR2_RPRES_SHIFT)
#define ID_AA64ISAR2_RPRES_IMPL (UL(0x1) << ID_AA64ISAR2_RPRES_SHIFT)
#define ID_AA64ISAR2_GPA3_SHIFT 8
+#define ID_AA64ISAR2_GPA3_WIDTH 4
#define ID_AA64ISAR2_GPA3_MASK (UL(0xf) << ID_AA64ISAR2_GPA3_SHIFT)
#define ID_AA64ISAR2_GPA3_VAL(x) ((x) & ID_AA64ISAR2_GPA3_MASK)
#define ID_AA64ISAR2_GPA3_NONE (UL(0x0) << ID_AA64ISAR2_GPA3_SHIFT)
#define ID_AA64ISAR2_GPA3_IMPL (UL(0x1) << ID_AA64ISAR2_GPA3_SHIFT)
#define ID_AA64ISAR2_APA3_SHIFT 12
+#define ID_AA64ISAR2_APA3_WIDTH 4
#define ID_AA64ISAR2_APA3_MASK (UL(0xf) << ID_AA64ISAR2_APA3_SHIFT)
#define ID_AA64ISAR2_APA3_VAL(x) ((x) & ID_AA64ISAR2_APA3_MASK)
#define ID_AA64ISAR2_APA3_NONE (UL(0x0) << ID_AA64ISAR2_APA3_SHIFT)
@@ -821,29 +1134,64 @@
#define ID_AA64ISAR2_APA3_FPAC (UL(0x4) << ID_AA64ISAR2_APA3_SHIFT)
#define ID_AA64ISAR2_APA3_FPAC_COMBINED (UL(0x5) << ID_AA64ISAR2_APA3_SHIFT)
#define ID_AA64ISAR2_MOPS_SHIFT 16
+#define ID_AA64ISAR2_MOPS_WIDTH 4
#define ID_AA64ISAR2_MOPS_MASK (UL(0xf) << ID_AA64ISAR2_MOPS_SHIFT)
#define ID_AA64ISAR2_MOPS_VAL(x) ((x) & ID_AA64ISAR2_MOPS_MASK)
#define ID_AA64ISAR2_MOPS_NONE (UL(0x0) << ID_AA64ISAR2_MOPS_SHIFT)
#define ID_AA64ISAR2_MOPS_IMPL (UL(0x1) << ID_AA64ISAR2_MOPS_SHIFT)
#define ID_AA64ISAR2_BC_SHIFT 20
+#define ID_AA64ISAR2_BC_WIDTH 4
#define ID_AA64ISAR2_BC_MASK (UL(0xf) << ID_AA64ISAR2_BC_SHIFT)
#define ID_AA64ISAR2_BC_VAL(x) ((x) & ID_AA64ISAR2_BC_MASK)
#define ID_AA64ISAR2_BC_NONE (UL(0x0) << ID_AA64ISAR2_BC_SHIFT)
#define ID_AA64ISAR2_BC_IMPL (UL(0x1) << ID_AA64ISAR2_BC_SHIFT)
-#define ID_AA64ISAR2_PAC_frac_SHIFT 28
+#define ID_AA64ISAR2_PAC_frac_SHIFT 24
+#define ID_AA64ISAR2_PAC_frac_WIDTH 4
#define ID_AA64ISAR2_PAC_frac_MASK (UL(0xf) << ID_AA64ISAR2_PAC_frac_SHIFT)
#define ID_AA64ISAR2_PAC_frac_VAL(x) ((x) & ID_AA64ISAR2_PAC_frac_MASK)
#define ID_AA64ISAR2_PAC_frac_NONE (UL(0x0) << ID_AA64ISAR2_PAC_frac_SHIFT)
#define ID_AA64ISAR2_PAC_frac_IMPL (UL(0x1) << ID_AA64ISAR2_PAC_frac_SHIFT)
+#define ID_AA64ISAR2_CLRBHB_SHIFT 28
+#define ID_AA64ISAR2_CLRBHB_WIDTH 4
+#define ID_AA64ISAR2_CLRBHB_MASK (UL(0xf) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define ID_AA64ISAR2_CLRBHB_VAL(x) ((x) & ID_AA64ISAR2_CLRBHB_MASK)
+#define ID_AA64ISAR2_CLRBHB_NONE (UL(0x0) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define ID_AA64ISAR2_CLRBHB_IMPL (UL(0x1) << ID_AA64ISAR2_CLRBHB_SHIFT)
+#define ID_AA64ISAR2_PRFMSLC_SHIFT 40
+#define ID_AA64ISAR2_PRFMSLC_WIDTH 4
+#define ID_AA64ISAR2_PRFMSLC_MASK (UL(0xf) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define ID_AA64ISAR2_PRFMSLC_VAL(x) ((x) & ID_AA64ISAR2_PRFMSLC_MASK)
+#define ID_AA64ISAR2_PRFMSLC_NONE (UL(0x0) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define ID_AA64ISAR2_PRFMSLC_IMPL (UL(0x1) << ID_AA64ISAR2_PRFMSLC_SHIFT)
+#define ID_AA64ISAR2_RPRFM_SHIFT 48
+#define ID_AA64ISAR2_RPRFM_WIDTH 4
+#define ID_AA64ISAR2_RPRFM_MASK (UL(0xf) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define ID_AA64ISAR2_RPRFM_VAL(x) ((x) & ID_AA64ISAR2_RPRFM_MASK)
+#define ID_AA64ISAR2_RPRFM_NONE (UL(0x0) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define ID_AA64ISAR2_RPRFM_IMPL (UL(0x1) << ID_AA64ISAR2_RPRFM_SHIFT)
+#define ID_AA64ISAR2_CSSC_SHIFT 52
+#define ID_AA64ISAR2_CSSC_WIDTH 4
+#define ID_AA64ISAR2_CSSC_MASK (UL(0xf) << ID_AA64ISAR2_CSSC_SHIFT)
+#define ID_AA64ISAR2_CSSC_VAL(x) ((x) & ID_AA64ISAR2_CSSC_MASK)
+#define ID_AA64ISAR2_CSSC_NONE (UL(0x0) << ID_AA64ISAR2_CSSC_SHIFT)
+#define ID_AA64ISAR2_CSSC_IMPL (UL(0x1) << ID_AA64ISAR2_CSSC_SHIFT)
+#define ID_AA64ISAR2_ATS1A_SHIFT 60
+#define ID_AA64ISAR2_ATS1A_WIDTH 4
+#define ID_AA64ISAR2_ATS1A_MASK (UL(0xf) << ID_AA64ISAR2_ATS1A_SHIFT)
+#define ID_AA64ISAR2_ATS1A_VAL(x) ((x) & ID_AA64ISAR2_ATS1A_MASK)
+#define ID_AA64ISAR2_ATS1A_NONE (UL(0x0) << ID_AA64ISAR2_ATS1A_SHIFT)
+#define ID_AA64ISAR2_ATS1A_IMPL (UL(0x1) << ID_AA64ISAR2_ATS1A_SHIFT)
/* ID_AA64MMFR0_EL1 */
-#define ID_AA64MMFR0_EL1 MRS_REG(ID_AA64MMFR0_EL1)
+#define ID_AA64MMFR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64MMFR0_EL1)
+#define ID_AA64MMFR0_EL1_ISS ISS_MSR_REG(ID_AA64MMFR0_EL1)
#define ID_AA64MMFR0_EL1_op0 3
#define ID_AA64MMFR0_EL1_op1 0
#define ID_AA64MMFR0_EL1_CRn 0
#define ID_AA64MMFR0_EL1_CRm 7
#define ID_AA64MMFR0_EL1_op2 0
#define ID_AA64MMFR0_PARange_SHIFT 0
+#define ID_AA64MMFR0_PARange_WIDTH 4
#define ID_AA64MMFR0_PARange_MASK (UL(0xf) << ID_AA64MMFR0_PARange_SHIFT)
#define ID_AA64MMFR0_PARange_VAL(x) ((x) & ID_AA64MMFR0_PARange_MASK)
#define ID_AA64MMFR0_PARange_4G (UL(0x0) << ID_AA64MMFR0_PARange_SHIFT)
@@ -854,43 +1202,51 @@
#define ID_AA64MMFR0_PARange_256T (UL(0x5) << ID_AA64MMFR0_PARange_SHIFT)
#define ID_AA64MMFR0_PARange_4P (UL(0x6) << ID_AA64MMFR0_PARange_SHIFT)
#define ID_AA64MMFR0_ASIDBits_SHIFT 4
+#define ID_AA64MMFR0_ASIDBits_WIDTH 4
#define ID_AA64MMFR0_ASIDBits_MASK (UL(0xf) << ID_AA64MMFR0_ASIDBits_SHIFT)
#define ID_AA64MMFR0_ASIDBits_VAL(x) ((x) & ID_AA64MMFR0_ASIDBits_MASK)
#define ID_AA64MMFR0_ASIDBits_8 (UL(0x0) << ID_AA64MMFR0_ASIDBits_SHIFT)
#define ID_AA64MMFR0_ASIDBits_16 (UL(0x2) << ID_AA64MMFR0_ASIDBits_SHIFT)
#define ID_AA64MMFR0_BigEnd_SHIFT 8
+#define ID_AA64MMFR0_BigEnd_WIDTH 4
#define ID_AA64MMFR0_BigEnd_MASK (UL(0xf) << ID_AA64MMFR0_BigEnd_SHIFT)
#define ID_AA64MMFR0_BigEnd_VAL(x) ((x) & ID_AA64MMFR0_BigEnd_MASK)
#define ID_AA64MMFR0_BigEnd_FIXED (UL(0x0) << ID_AA64MMFR0_BigEnd_SHIFT)
#define ID_AA64MMFR0_BigEnd_MIXED (UL(0x1) << ID_AA64MMFR0_BigEnd_SHIFT)
#define ID_AA64MMFR0_SNSMem_SHIFT 12
+#define ID_AA64MMFR0_SNSMem_WIDTH 4
#define ID_AA64MMFR0_SNSMem_MASK (UL(0xf) << ID_AA64MMFR0_SNSMem_SHIFT)
#define ID_AA64MMFR0_SNSMem_VAL(x) ((x) & ID_AA64MMFR0_SNSMem_MASK)
#define ID_AA64MMFR0_SNSMem_NONE (UL(0x0) << ID_AA64MMFR0_SNSMem_SHIFT)
#define ID_AA64MMFR0_SNSMem_DISTINCT (UL(0x1) << ID_AA64MMFR0_SNSMem_SHIFT)
#define ID_AA64MMFR0_BigEndEL0_SHIFT 16
+#define ID_AA64MMFR0_BigEndEL0_WIDTH 4
#define ID_AA64MMFR0_BigEndEL0_MASK (UL(0xf) << ID_AA64MMFR0_BigEndEL0_SHIFT)
#define ID_AA64MMFR0_BigEndEL0_VAL(x) ((x) & ID_AA64MMFR0_BigEndEL0_MASK)
#define ID_AA64MMFR0_BigEndEL0_FIXED (UL(0x0) << ID_AA64MMFR0_BigEndEL0_SHIFT)
#define ID_AA64MMFR0_BigEndEL0_MIXED (UL(0x1) << ID_AA64MMFR0_BigEndEL0_SHIFT)
#define ID_AA64MMFR0_TGran16_SHIFT 20
+#define ID_AA64MMFR0_TGran16_WIDTH 4
#define ID_AA64MMFR0_TGran16_MASK (UL(0xf) << ID_AA64MMFR0_TGran16_SHIFT)
#define ID_AA64MMFR0_TGran16_VAL(x) ((x) & ID_AA64MMFR0_TGran16_MASK)
#define ID_AA64MMFR0_TGran16_NONE (UL(0x0) << ID_AA64MMFR0_TGran16_SHIFT)
#define ID_AA64MMFR0_TGran16_IMPL (UL(0x1) << ID_AA64MMFR0_TGran16_SHIFT)
#define ID_AA64MMFR0_TGran16_LPA2 (UL(0x2) << ID_AA64MMFR0_TGran16_SHIFT)
#define ID_AA64MMFR0_TGran64_SHIFT 24
+#define ID_AA64MMFR0_TGran64_WIDTH 4
#define ID_AA64MMFR0_TGran64_MASK (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT)
#define ID_AA64MMFR0_TGran64_VAL(x) ((x) & ID_AA64MMFR0_TGran64_MASK)
#define ID_AA64MMFR0_TGran64_IMPL (UL(0x0) << ID_AA64MMFR0_TGran64_SHIFT)
#define ID_AA64MMFR0_TGran64_NONE (UL(0xf) << ID_AA64MMFR0_TGran64_SHIFT)
#define ID_AA64MMFR0_TGran4_SHIFT 28
+#define ID_AA64MMFR0_TGran4_WIDTH 4
#define ID_AA64MMFR0_TGran4_MASK (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT)
#define ID_AA64MMFR0_TGran4_VAL(x) ((x) & ID_AA64MMFR0_TGran4_MASK)
#define ID_AA64MMFR0_TGran4_IMPL (UL(0x0) << ID_AA64MMFR0_TGran4_SHIFT)
#define ID_AA64MMFR0_TGran4_LPA2 (UL(0x1) << ID_AA64MMFR0_TGran4_SHIFT)
#define ID_AA64MMFR0_TGran4_NONE (UL(0xf) << ID_AA64MMFR0_TGran4_SHIFT)
#define ID_AA64MMFR0_TGran16_2_SHIFT 32
+#define ID_AA64MMFR0_TGran16_2_WIDTH 4
#define ID_AA64MMFR0_TGran16_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran16_2_SHIFT)
#define ID_AA64MMFR0_TGran16_2_VAL(x) ((x) & ID_AA64MMFR0_TGran16_2_MASK)
#define ID_AA64MMFR0_TGran16_2_TGran16 (UL(0x0) << ID_AA64MMFR0_TGran16_2_SHIFT)
@@ -898,12 +1254,14 @@
#define ID_AA64MMFR0_TGran16_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran16_2_SHIFT)
#define ID_AA64MMFR0_TGran16_2_LPA2 (UL(0x3) << ID_AA64MMFR0_TGran16_2_SHIFT)
#define ID_AA64MMFR0_TGran64_2_SHIFT 36
+#define ID_AA64MMFR0_TGran64_2_WIDTH 4
#define ID_AA64MMFR0_TGran64_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran64_2_SHIFT)
#define ID_AA64MMFR0_TGran64_2_VAL(x) ((x) & ID_AA64MMFR0_TGran64_2_MASK)
#define ID_AA64MMFR0_TGran64_2_TGran64 (UL(0x0) << ID_AA64MMFR0_TGran64_2_SHIFT)
#define ID_AA64MMFR0_TGran64_2_NONE (UL(0x1) << ID_AA64MMFR0_TGran64_2_SHIFT)
#define ID_AA64MMFR0_TGran64_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran64_2_SHIFT)
#define ID_AA64MMFR0_TGran4_2_SHIFT 40
+#define ID_AA64MMFR0_TGran4_2_WIDTH 4
#define ID_AA64MMFR0_TGran4_2_MASK (UL(0xf) << ID_AA64MMFR0_TGran4_2_SHIFT)
#define ID_AA64MMFR0_TGran4_2_VAL(x) ((x) & ID_AA64MMFR0_TGran4_2_MASK)
#define ID_AA64MMFR0_TGran4_2_TGran4 (UL(0x0) << ID_AA64MMFR0_TGran4_2_SHIFT)
@@ -911,16 +1269,20 @@
#define ID_AA64MMFR0_TGran4_2_IMPL (UL(0x2) << ID_AA64MMFR0_TGran4_2_SHIFT)
#define ID_AA64MMFR0_TGran4_2_LPA2 (UL(0x3) << ID_AA64MMFR0_TGran4_2_SHIFT)
#define ID_AA64MMFR0_ExS_SHIFT 44
+#define ID_AA64MMFR0_ExS_WIDTH 4
#define ID_AA64MMFR0_ExS_MASK (UL(0xf) << ID_AA64MMFR0_ExS_SHIFT)
#define ID_AA64MMFR0_ExS_VAL(x) ((x) & ID_AA64MMFR0_ExS_MASK)
#define ID_AA64MMFR0_ExS_ALL (UL(0x0) << ID_AA64MMFR0_ExS_SHIFT)
#define ID_AA64MMFR0_ExS_IMPL (UL(0x1) << ID_AA64MMFR0_ExS_SHIFT)
#define ID_AA64MMFR0_FGT_SHIFT 56
+#define ID_AA64MMFR0_FGT_WIDTH 4
#define ID_AA64MMFR0_FGT_MASK (UL(0xf) << ID_AA64MMFR0_FGT_SHIFT)
#define ID_AA64MMFR0_FGT_VAL(x) ((x) & ID_AA64MMFR0_FGT_MASK)
#define ID_AA64MMFR0_FGT_NONE (UL(0x0) << ID_AA64MMFR0_FGT_SHIFT)
-#define ID_AA64MMFR0_FGT_IMPL (UL(0x1) << ID_AA64MMFR0_FGT_SHIFT)
+#define ID_AA64MMFR0_FGT_8_6 (UL(0x1) << ID_AA64MMFR0_FGT_SHIFT)
+#define ID_AA64MMFR0_FGT_8_9 (UL(0x2) << ID_AA64MMFR0_FGT_SHIFT)
#define ID_AA64MMFR0_ECV_SHIFT 60
+#define ID_AA64MMFR0_ECV_WIDTH 4
#define ID_AA64MMFR0_ECV_MASK (UL(0xf) << ID_AA64MMFR0_ECV_SHIFT)
#define ID_AA64MMFR0_ECV_VAL(x) ((x) & ID_AA64MMFR0_ECV_MASK)
#define ID_AA64MMFR0_ECV_NONE (UL(0x0) << ID_AA64MMFR0_ECV_SHIFT)
@@ -928,208 +1290,307 @@
#define ID_AA64MMFR0_ECV_CNTHCTL (UL(0x2) << ID_AA64MMFR0_ECV_SHIFT)
/* ID_AA64MMFR1_EL1 */
-#define ID_AA64MMFR1_EL1 MRS_REG(ID_AA64MMFR1_EL1)
+#define ID_AA64MMFR1_EL1_REG MRS_REG_ALT_NAME(ID_AA64MMFR1_EL1)
+#define ID_AA64MMFR1_EL1_ISS ISS_MSR_REG(ID_AA64MMFR1_EL1)
#define ID_AA64MMFR1_EL1_op0 3
#define ID_AA64MMFR1_EL1_op1 0
#define ID_AA64MMFR1_EL1_CRn 0
#define ID_AA64MMFR1_EL1_CRm 7
#define ID_AA64MMFR1_EL1_op2 1
#define ID_AA64MMFR1_HAFDBS_SHIFT 0
+#define ID_AA64MMFR1_HAFDBS_WIDTH 4
#define ID_AA64MMFR1_HAFDBS_MASK (UL(0xf) << ID_AA64MMFR1_HAFDBS_SHIFT)
#define ID_AA64MMFR1_HAFDBS_VAL(x) ((x) & ID_AA64MMFR1_HAFDBS_MASK)
#define ID_AA64MMFR1_HAFDBS_NONE (UL(0x0) << ID_AA64MMFR1_HAFDBS_SHIFT)
#define ID_AA64MMFR1_HAFDBS_AF (UL(0x1) << ID_AA64MMFR1_HAFDBS_SHIFT)
#define ID_AA64MMFR1_HAFDBS_AF_DBS (UL(0x2) << ID_AA64MMFR1_HAFDBS_SHIFT)
#define ID_AA64MMFR1_VMIDBits_SHIFT 4
+#define ID_AA64MMFR1_VMIDBits_WIDTH 4
#define ID_AA64MMFR1_VMIDBits_MASK (UL(0xf) << ID_AA64MMFR1_VMIDBits_SHIFT)
#define ID_AA64MMFR1_VMIDBits_VAL(x) ((x) & ID_AA64MMFR1_VMIDBits_MASK)
#define ID_AA64MMFR1_VMIDBits_8 (UL(0x0) << ID_AA64MMFR1_VMIDBits_SHIFT)
#define ID_AA64MMFR1_VMIDBits_16 (UL(0x2) << ID_AA64MMFR1_VMIDBits_SHIFT)
#define ID_AA64MMFR1_VH_SHIFT 8
+#define ID_AA64MMFR1_VH_WIDTH 4
#define ID_AA64MMFR1_VH_MASK (UL(0xf) << ID_AA64MMFR1_VH_SHIFT)
#define ID_AA64MMFR1_VH_VAL(x) ((x) & ID_AA64MMFR1_VH_MASK)
#define ID_AA64MMFR1_VH_NONE (UL(0x0) << ID_AA64MMFR1_VH_SHIFT)
#define ID_AA64MMFR1_VH_IMPL (UL(0x1) << ID_AA64MMFR1_VH_SHIFT)
#define ID_AA64MMFR1_HPDS_SHIFT 12
+#define ID_AA64MMFR1_HPDS_WIDTH 4
#define ID_AA64MMFR1_HPDS_MASK (UL(0xf) << ID_AA64MMFR1_HPDS_SHIFT)
#define ID_AA64MMFR1_HPDS_VAL(x) ((x) & ID_AA64MMFR1_HPDS_MASK)
#define ID_AA64MMFR1_HPDS_NONE (UL(0x0) << ID_AA64MMFR1_HPDS_SHIFT)
#define ID_AA64MMFR1_HPDS_HPD (UL(0x1) << ID_AA64MMFR1_HPDS_SHIFT)
#define ID_AA64MMFR1_HPDS_TTPBHA (UL(0x2) << ID_AA64MMFR1_HPDS_SHIFT)
#define ID_AA64MMFR1_LO_SHIFT 16
+#define ID_AA64MMFR1_LO_WIDTH 4
#define ID_AA64MMFR1_LO_MASK (UL(0xf) << ID_AA64MMFR1_LO_SHIFT)
#define ID_AA64MMFR1_LO_VAL(x) ((x) & ID_AA64MMFR1_LO_MASK)
#define ID_AA64MMFR1_LO_NONE (UL(0x0) << ID_AA64MMFR1_LO_SHIFT)
#define ID_AA64MMFR1_LO_IMPL (UL(0x1) << ID_AA64MMFR1_LO_SHIFT)
#define ID_AA64MMFR1_PAN_SHIFT 20
+#define ID_AA64MMFR1_PAN_WIDTH 4
#define ID_AA64MMFR1_PAN_MASK (UL(0xf) << ID_AA64MMFR1_PAN_SHIFT)
#define ID_AA64MMFR1_PAN_VAL(x) ((x) & ID_AA64MMFR1_PAN_MASK)
#define ID_AA64MMFR1_PAN_NONE (UL(0x0) << ID_AA64MMFR1_PAN_SHIFT)
#define ID_AA64MMFR1_PAN_IMPL (UL(0x1) << ID_AA64MMFR1_PAN_SHIFT)
#define ID_AA64MMFR1_PAN_ATS1E1 (UL(0x2) << ID_AA64MMFR1_PAN_SHIFT)
-#define ID_AA64MMFR1_PAN_EPAN (UL(0x2) << ID_AA64MMFR1_PAN_SHIFT)
+#define ID_AA64MMFR1_PAN_EPAN (UL(0x3) << ID_AA64MMFR1_PAN_SHIFT)
#define ID_AA64MMFR1_SpecSEI_SHIFT 24
+#define ID_AA64MMFR1_SpecSEI_WIDTH 4
#define ID_AA64MMFR1_SpecSEI_MASK (UL(0xf) << ID_AA64MMFR1_SpecSEI_SHIFT)
#define ID_AA64MMFR1_SpecSEI_VAL(x) ((x) & ID_AA64MMFR1_SpecSEI_MASK)
#define ID_AA64MMFR1_SpecSEI_NONE (UL(0x0) << ID_AA64MMFR1_SpecSEI_SHIFT)
#define ID_AA64MMFR1_SpecSEI_IMPL (UL(0x1) << ID_AA64MMFR1_SpecSEI_SHIFT)
#define ID_AA64MMFR1_XNX_SHIFT 28
+#define ID_AA64MMFR1_XNX_WIDTH 4
#define ID_AA64MMFR1_XNX_MASK (UL(0xf) << ID_AA64MMFR1_XNX_SHIFT)
#define ID_AA64MMFR1_XNX_VAL(x) ((x) & ID_AA64MMFR1_XNX_MASK)
#define ID_AA64MMFR1_XNX_NONE (UL(0x0) << ID_AA64MMFR1_XNX_SHIFT)
#define ID_AA64MMFR1_XNX_IMPL (UL(0x1) << ID_AA64MMFR1_XNX_SHIFT)
#define ID_AA64MMFR1_TWED_SHIFT 32
+#define ID_AA64MMFR1_TWED_WIDTH 4
#define ID_AA64MMFR1_TWED_MASK (UL(0xf) << ID_AA64MMFR1_TWED_SHIFT)
#define ID_AA64MMFR1_TWED_VAL(x) ((x) & ID_AA64MMFR1_TWED_MASK)
#define ID_AA64MMFR1_TWED_NONE (UL(0x0) << ID_AA64MMFR1_TWED_SHIFT)
#define ID_AA64MMFR1_TWED_IMPL (UL(0x1) << ID_AA64MMFR1_TWED_SHIFT)
#define ID_AA64MMFR1_ETS_SHIFT 36
+#define ID_AA64MMFR1_ETS_WIDTH 4
#define ID_AA64MMFR1_ETS_MASK (UL(0xf) << ID_AA64MMFR1_ETS_SHIFT)
#define ID_AA64MMFR1_ETS_VAL(x) ((x) & ID_AA64MMFR1_ETS_MASK)
#define ID_AA64MMFR1_ETS_NONE (UL(0x0) << ID_AA64MMFR1_ETS_SHIFT)
-#define ID_AA64MMFR1_ETS_IMPL (UL(0x1) << ID_AA64MMFR1_ETS_SHIFT)
+#define ID_AA64MMFR1_ETS_NONE2 (UL(0x1) << ID_AA64MMFR1_ETS_SHIFT)
+#define ID_AA64MMFR1_ETS_IMPL (UL(0x2) << ID_AA64MMFR1_ETS_SHIFT)
#define ID_AA64MMFR1_HCX_SHIFT 40
+#define ID_AA64MMFR1_HCX_WIDTH 4
#define ID_AA64MMFR1_HCX_MASK (UL(0xf) << ID_AA64MMFR1_HCX_SHIFT)
#define ID_AA64MMFR1_HCX_VAL(x) ((x) & ID_AA64MMFR1_HCX_MASK)
#define ID_AA64MMFR1_HCX_NONE (UL(0x0) << ID_AA64MMFR1_HCX_SHIFT)
#define ID_AA64MMFR1_HCX_IMPL (UL(0x1) << ID_AA64MMFR1_HCX_SHIFT)
#define ID_AA64MMFR1_AFP_SHIFT 44
+#define ID_AA64MMFR1_AFP_WIDTH 4
#define ID_AA64MMFR1_AFP_MASK (UL(0xf) << ID_AA64MMFR1_AFP_SHIFT)
#define ID_AA64MMFR1_AFP_VAL(x) ((x) & ID_AA64MMFR1_AFP_MASK)
#define ID_AA64MMFR1_AFP_NONE (UL(0x0) << ID_AA64MMFR1_AFP_SHIFT)
#define ID_AA64MMFR1_AFP_IMPL (UL(0x1) << ID_AA64MMFR1_AFP_SHIFT)
#define ID_AA64MMFR1_nTLBPA_SHIFT 48
+#define ID_AA64MMFR1_nTLBPA_WIDTH 4
#define ID_AA64MMFR1_nTLBPA_MASK (UL(0xf) << ID_AA64MMFR1_nTLBPA_SHIFT)
#define ID_AA64MMFR1_nTLBPA_VAL(x) ((x) & ID_AA64MMFR1_nTLBPA_MASK)
#define ID_AA64MMFR1_nTLBPA_NONE (UL(0x0) << ID_AA64MMFR1_nTLBPA_SHIFT)
#define ID_AA64MMFR1_nTLBPA_IMPL (UL(0x1) << ID_AA64MMFR1_nTLBPA_SHIFT)
#define ID_AA64MMFR1_TIDCP1_SHIFT 52
+#define ID_AA64MMFR1_TIDCP1_WIDTH 4
#define ID_AA64MMFR1_TIDCP1_MASK (UL(0xf) << ID_AA64MMFR1_TIDCP1_SHIFT)
#define ID_AA64MMFR1_TIDCP1_VAL(x) ((x) & ID_AA64MMFR1_TIDCP1_MASK)
#define ID_AA64MMFR1_TIDCP1_NONE (UL(0x0) << ID_AA64MMFR1_TIDCP1_SHIFT)
#define ID_AA64MMFR1_TIDCP1_IMPL (UL(0x1) << ID_AA64MMFR1_TIDCP1_SHIFT)
#define ID_AA64MMFR1_CMOVW_SHIFT 56
+#define ID_AA64MMFR1_CMOVW_WIDTH 4
#define ID_AA64MMFR1_CMOVW_MASK (UL(0xf) << ID_AA64MMFR1_CMOVW_SHIFT)
#define ID_AA64MMFR1_CMOVW_VAL(x) ((x) & ID_AA64MMFR1_CMOVW_MASK)
#define ID_AA64MMFR1_CMOVW_NONE (UL(0x0) << ID_AA64MMFR1_CMOVW_SHIFT)
#define ID_AA64MMFR1_CMOVW_IMPL (UL(0x1) << ID_AA64MMFR1_CMOVW_SHIFT)
+#define ID_AA64MMFR1_ECBHB_SHIFT 60
+#define ID_AA64MMFR1_ECBHB_WIDTH 4
+#define ID_AA64MMFR1_ECBHB_MASK (UL(0xf) << ID_AA64MMFR1_ECBHB_SHIFT)
+#define ID_AA64MMFR1_ECBHB_VAL(x) ((x) & ID_AA64MMFR1_ECBHB_MASK)
+#define ID_AA64MMFR1_ECBHB_NONE (UL(0x0) << ID_AA64MMFR1_ECBHB_SHIFT)
+#define ID_AA64MMFR1_ECBHB_IMPL (UL(0x1) << ID_AA64MMFR1_ECBHB_SHIFT)
/* ID_AA64MMFR2_EL1 */
-#define ID_AA64MMFR2_EL1 MRS_REG(ID_AA64MMFR2_EL1)
+#define ID_AA64MMFR2_EL1_REG MRS_REG_ALT_NAME(ID_AA64MMFR2_EL1)
+#define ID_AA64MMFR2_EL1_ISS ISS_MSR_REG(ID_AA64MMFR2_EL1)
#define ID_AA64MMFR2_EL1_op0 3
#define ID_AA64MMFR2_EL1_op1 0
#define ID_AA64MMFR2_EL1_CRn 0
#define ID_AA64MMFR2_EL1_CRm 7
#define ID_AA64MMFR2_EL1_op2 2
#define ID_AA64MMFR2_CnP_SHIFT 0
+#define ID_AA64MMFR2_CnP_WIDTH 4
#define ID_AA64MMFR2_CnP_MASK (UL(0xf) << ID_AA64MMFR2_CnP_SHIFT)
#define ID_AA64MMFR2_CnP_VAL(x) ((x) & ID_AA64MMFR2_CnP_MASK)
#define ID_AA64MMFR2_CnP_NONE (UL(0x0) << ID_AA64MMFR2_CnP_SHIFT)
#define ID_AA64MMFR2_CnP_IMPL (UL(0x1) << ID_AA64MMFR2_CnP_SHIFT)
#define ID_AA64MMFR2_UAO_SHIFT 4
+#define ID_AA64MMFR2_UAO_WIDTH 4
#define ID_AA64MMFR2_UAO_MASK (UL(0xf) << ID_AA64MMFR2_UAO_SHIFT)
#define ID_AA64MMFR2_UAO_VAL(x) ((x) & ID_AA64MMFR2_UAO_MASK)
#define ID_AA64MMFR2_UAO_NONE (UL(0x0) << ID_AA64MMFR2_UAO_SHIFT)
#define ID_AA64MMFR2_UAO_IMPL (UL(0x1) << ID_AA64MMFR2_UAO_SHIFT)
#define ID_AA64MMFR2_LSM_SHIFT 8
+#define ID_AA64MMFR2_LSM_WIDTH 4
#define ID_AA64MMFR2_LSM_MASK (UL(0xf) << ID_AA64MMFR2_LSM_SHIFT)
#define ID_AA64MMFR2_LSM_VAL(x) ((x) & ID_AA64MMFR2_LSM_MASK)
#define ID_AA64MMFR2_LSM_NONE (UL(0x0) << ID_AA64MMFR2_LSM_SHIFT)
#define ID_AA64MMFR2_LSM_IMPL (UL(0x1) << ID_AA64MMFR2_LSM_SHIFT)
#define ID_AA64MMFR2_IESB_SHIFT 12
+#define ID_AA64MMFR2_IESB_WIDTH 4
#define ID_AA64MMFR2_IESB_MASK (UL(0xf) << ID_AA64MMFR2_IESB_SHIFT)
#define ID_AA64MMFR2_IESB_VAL(x) ((x) & ID_AA64MMFR2_IESB_MASK)
#define ID_AA64MMFR2_IESB_NONE (UL(0x0) << ID_AA64MMFR2_IESB_SHIFT)
#define ID_AA64MMFR2_IESB_IMPL (UL(0x1) << ID_AA64MMFR2_IESB_SHIFT)
#define ID_AA64MMFR2_VARange_SHIFT 16
+#define ID_AA64MMFR2_VARange_WIDTH 4
#define ID_AA64MMFR2_VARange_MASK (UL(0xf) << ID_AA64MMFR2_VARange_SHIFT)
#define ID_AA64MMFR2_VARange_VAL(x) ((x) & ID_AA64MMFR2_VARange_MASK)
#define ID_AA64MMFR2_VARange_48 (UL(0x0) << ID_AA64MMFR2_VARange_SHIFT)
#define ID_AA64MMFR2_VARange_52 (UL(0x1) << ID_AA64MMFR2_VARange_SHIFT)
#define ID_AA64MMFR2_CCIDX_SHIFT 20
+#define ID_AA64MMFR2_CCIDX_WIDTH 4
#define ID_AA64MMFR2_CCIDX_MASK (UL(0xf) << ID_AA64MMFR2_CCIDX_SHIFT)
#define ID_AA64MMFR2_CCIDX_VAL(x) ((x) & ID_AA64MMFR2_CCIDX_MASK)
#define ID_AA64MMFR2_CCIDX_32 (UL(0x0) << ID_AA64MMFR2_CCIDX_SHIFT)
#define ID_AA64MMFR2_CCIDX_64 (UL(0x1) << ID_AA64MMFR2_CCIDX_SHIFT)
#define ID_AA64MMFR2_NV_SHIFT 24
+#define ID_AA64MMFR2_NV_WIDTH 4
#define ID_AA64MMFR2_NV_MASK (UL(0xf) << ID_AA64MMFR2_NV_SHIFT)
#define ID_AA64MMFR2_NV_VAL(x) ((x) & ID_AA64MMFR2_NV_MASK)
#define ID_AA64MMFR2_NV_NONE (UL(0x0) << ID_AA64MMFR2_NV_SHIFT)
#define ID_AA64MMFR2_NV_8_3 (UL(0x1) << ID_AA64MMFR2_NV_SHIFT)
#define ID_AA64MMFR2_NV_8_4 (UL(0x2) << ID_AA64MMFR2_NV_SHIFT)
#define ID_AA64MMFR2_ST_SHIFT 28
+#define ID_AA64MMFR2_ST_WIDTH 4
#define ID_AA64MMFR2_ST_MASK (UL(0xf) << ID_AA64MMFR2_ST_SHIFT)
#define ID_AA64MMFR2_ST_VAL(x) ((x) & ID_AA64MMFR2_ST_MASK)
#define ID_AA64MMFR2_ST_NONE (UL(0x0) << ID_AA64MMFR2_ST_SHIFT)
#define ID_AA64MMFR2_ST_IMPL (UL(0x1) << ID_AA64MMFR2_ST_SHIFT)
#define ID_AA64MMFR2_AT_SHIFT 32
+#define ID_AA64MMFR2_AT_WIDTH 4
#define ID_AA64MMFR2_AT_MASK (UL(0xf) << ID_AA64MMFR2_AT_SHIFT)
#define ID_AA64MMFR2_AT_VAL(x) ((x) & ID_AA64MMFR2_AT_MASK)
#define ID_AA64MMFR2_AT_NONE (UL(0x0) << ID_AA64MMFR2_AT_SHIFT)
#define ID_AA64MMFR2_AT_IMPL (UL(0x1) << ID_AA64MMFR2_AT_SHIFT)
#define ID_AA64MMFR2_IDS_SHIFT 36
+#define ID_AA64MMFR2_IDS_WIDTH 4
#define ID_AA64MMFR2_IDS_MASK (UL(0xf) << ID_AA64MMFR2_IDS_SHIFT)
#define ID_AA64MMFR2_IDS_VAL(x) ((x) & ID_AA64MMFR2_IDS_MASK)
#define ID_AA64MMFR2_IDS_NONE (UL(0x0) << ID_AA64MMFR2_IDS_SHIFT)
#define ID_AA64MMFR2_IDS_IMPL (UL(0x1) << ID_AA64MMFR2_IDS_SHIFT)
#define ID_AA64MMFR2_FWB_SHIFT 40
+#define ID_AA64MMFR2_FWB_WIDTH 4
#define ID_AA64MMFR2_FWB_MASK (UL(0xf) << ID_AA64MMFR2_FWB_SHIFT)
#define ID_AA64MMFR2_FWB_VAL(x) ((x) & ID_AA64MMFR2_FWB_MASK)
#define ID_AA64MMFR2_FWB_NONE (UL(0x0) << ID_AA64MMFR2_FWB_SHIFT)
#define ID_AA64MMFR2_FWB_IMPL (UL(0x1) << ID_AA64MMFR2_FWB_SHIFT)
#define ID_AA64MMFR2_TTL_SHIFT 48
+#define ID_AA64MMFR2_TTL_WIDTH 4
#define ID_AA64MMFR2_TTL_MASK (UL(0xf) << ID_AA64MMFR2_TTL_SHIFT)
#define ID_AA64MMFR2_TTL_VAL(x) ((x) & ID_AA64MMFR2_TTL_MASK)
#define ID_AA64MMFR2_TTL_NONE (UL(0x0) << ID_AA64MMFR2_TTL_SHIFT)
#define ID_AA64MMFR2_TTL_IMPL (UL(0x1) << ID_AA64MMFR2_TTL_SHIFT)
#define ID_AA64MMFR2_BBM_SHIFT 52
+#define ID_AA64MMFR2_BBM_WIDTH 4
#define ID_AA64MMFR2_BBM_MASK (UL(0xf) << ID_AA64MMFR2_BBM_SHIFT)
#define ID_AA64MMFR2_BBM_VAL(x) ((x) & ID_AA64MMFR2_BBM_MASK)
#define ID_AA64MMFR2_BBM_LEVEL0 (UL(0x0) << ID_AA64MMFR2_BBM_SHIFT)
#define ID_AA64MMFR2_BBM_LEVEL1 (UL(0x1) << ID_AA64MMFR2_BBM_SHIFT)
#define ID_AA64MMFR2_BBM_LEVEL2 (UL(0x2) << ID_AA64MMFR2_BBM_SHIFT)
#define ID_AA64MMFR2_EVT_SHIFT 56
+#define ID_AA64MMFR2_EVT_WIDTH 4
#define ID_AA64MMFR2_EVT_MASK (UL(0xf) << ID_AA64MMFR2_EVT_SHIFT)
#define ID_AA64MMFR2_EVT_VAL(x) ((x) & ID_AA64MMFR2_EVT_MASK)
#define ID_AA64MMFR2_EVT_NONE (UL(0x0) << ID_AA64MMFR2_EVT_SHIFT)
#define ID_AA64MMFR2_EVT_8_2 (UL(0x1) << ID_AA64MMFR2_EVT_SHIFT)
#define ID_AA64MMFR2_EVT_8_5 (UL(0x2) << ID_AA64MMFR2_EVT_SHIFT)
#define ID_AA64MMFR2_E0PD_SHIFT 60
+#define ID_AA64MMFR2_E0PD_WIDTH 4
#define ID_AA64MMFR2_E0PD_MASK (UL(0xf) << ID_AA64MMFR2_E0PD_SHIFT)
#define ID_AA64MMFR2_E0PD_VAL(x) ((x) & ID_AA64MMFR2_E0PD_MASK)
#define ID_AA64MMFR2_E0PD_NONE (UL(0x0) << ID_AA64MMFR2_E0PD_SHIFT)
#define ID_AA64MMFR2_E0PD_IMPL (UL(0x1) << ID_AA64MMFR2_E0PD_SHIFT)
/* ID_AA64MMFR3_EL1 */
-#define ID_AA64MMFR3_EL1 MRS_REG(ID_AA64MMFR3_EL1)
+#define ID_AA64MMFR3_EL1_REG MRS_REG_ALT_NAME(ID_AA64MMFR3_EL1)
+#define ID_AA64MMFR3_EL1_ISS ISS_MSR_REG(ID_AA64MMFR3_EL1)
#define ID_AA64MMFR3_EL1_op0 3
#define ID_AA64MMFR3_EL1_op1 0
#define ID_AA64MMFR3_EL1_CRn 0
#define ID_AA64MMFR3_EL1_CRm 7
#define ID_AA64MMFR3_EL1_op2 3
#define ID_AA64MMFR3_TCRX_SHIFT 0
+#define ID_AA64MMFR3_TCRX_WIDTH 4
#define ID_AA64MMFR3_TCRX_MASK (UL(0xf) << ID_AA64MMFR3_TCRX_SHIFT)
#define ID_AA64MMFR3_TCRX_VAL(x) ((x) & ID_AA64MMFR3_TCRX_MASK)
#define ID_AA64MMFR3_TCRX_NONE (UL(0x0) << ID_AA64MMFR3_TCRX_SHIFT)
#define ID_AA64MMFR3_TCRX_IMPL (UL(0x1) << ID_AA64MMFR3_TCRX_SHIFT)
#define ID_AA64MMFR3_SCTLRX_SHIFT 4
+#define ID_AA64MMFR3_SCTLRX_WIDTH 4
#define ID_AA64MMFR3_SCTLRX_MASK (UL(0xf) << ID_AA64MMFR3_SCTLRX_SHIFT)
#define ID_AA64MMFR3_SCTLRX_VAL(x) ((x) & ID_AA64MMFR3_SCTLRX_MASK)
#define ID_AA64MMFR3_SCTLRX_NONE (UL(0x0) << ID_AA64MMFR3_SCTLRX_SHIFT)
#define ID_AA64MMFR3_SCTLRX_IMPL (UL(0x1) << ID_AA64MMFR3_SCTLRX_SHIFT)
+#define ID_AA64MMFR3_S1PIE_SHIFT 8
+#define ID_AA64MMFR3_S1PIE_WIDTH 4
+#define ID_AA64MMFR3_S1PIE_MASK (UL(0xf) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define ID_AA64MMFR3_S1PIE_VAL(x) ((x) & ID_AA64MMFR3_S1PIE_MASK)
+#define ID_AA64MMFR3_S1PIE_NONE (UL(0x0) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define ID_AA64MMFR3_S1PIE_IMPL (UL(0x1) << ID_AA64MMFR3_S1PIE_SHIFT)
+#define ID_AA64MMFR3_S2PIE_SHIFT 12
+#define ID_AA64MMFR3_S2PIE_WIDTH 4
+#define ID_AA64MMFR3_S2PIE_MASK (UL(0xf) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define ID_AA64MMFR3_S2PIE_VAL(x) ((x) & ID_AA64MMFR3_S2PIE_MASK)
+#define ID_AA64MMFR3_S2PIE_NONE (UL(0x0) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define ID_AA64MMFR3_S2PIE_IMPL (UL(0x1) << ID_AA64MMFR3_S2PIE_SHIFT)
+#define ID_AA64MMFR3_S1POE_SHIFT 16
+#define ID_AA64MMFR3_S1POE_WIDTH 4
+#define ID_AA64MMFR3_S1POE_MASK (UL(0xf) << ID_AA64MMFR3_S1POE_SHIFT)
+#define ID_AA64MMFR3_S1POE_VAL(x) ((x) & ID_AA64MMFR3_S1POE_MASK)
+#define ID_AA64MMFR3_S1POE_NONE (UL(0x0) << ID_AA64MMFR3_S1POE_SHIFT)
+#define ID_AA64MMFR3_S1POE_IMPL (UL(0x1) << ID_AA64MMFR3_S1POE_SHIFT)
+#define ID_AA64MMFR3_S2POE_SHIFT 20
+#define ID_AA64MMFR3_S2POE_WIDTH 4
+#define ID_AA64MMFR3_S2POE_MASK (UL(0xf) << ID_AA64MMFR3_S2POE_SHIFT)
+#define ID_AA64MMFR3_S2POE_VAL(x) ((x) & ID_AA64MMFR3_S2POE_MASK)
+#define ID_AA64MMFR3_S2POE_NONE (UL(0x0) << ID_AA64MMFR3_S2POE_SHIFT)
+#define ID_AA64MMFR3_S2POE_IMPL (UL(0x1) << ID_AA64MMFR3_S2POE_SHIFT)
+#define ID_AA64MMFR3_AIE_SHIFT 24
+#define ID_AA64MMFR3_AIE_WIDTH 4
+#define ID_AA64MMFR3_AIE_MASK (UL(0xf) << ID_AA64MMFR3_AIE_SHIFT)
+#define ID_AA64MMFR3_AIE_VAL(x) ((x) & ID_AA64MMFR3_AIE_MASK)
+#define ID_AA64MMFR3_AIE_NONE (UL(0x0) << ID_AA64MMFR3_AIE_SHIFT)
+#define ID_AA64MMFR3_AIE_IMPL (UL(0x1) << ID_AA64MMFR3_AIE_SHIFT)
#define ID_AA64MMFR3_MEC_SHIFT 28
+#define ID_AA64MMFR3_MEC_WIDTH 4
#define ID_AA64MMFR3_MEC_MASK (UL(0xf) << ID_AA64MMFR3_MEC_SHIFT)
#define ID_AA64MMFR3_MEC_VAL(x) ((x) & ID_AA64MMFR3_MEC_MASK)
#define ID_AA64MMFR3_MEC_NONE (UL(0x0) << ID_AA64MMFR3_MEC_SHIFT)
#define ID_AA64MMFR3_MEC_IMPL (UL(0x1) << ID_AA64MMFR3_MEC_SHIFT)
+#define ID_AA64MMFR3_SNERR_SHIFT 40
+#define ID_AA64MMFR3_SNERR_WIDTH 4
+#define ID_AA64MMFR3_SNERR_MASK (UL(0xf) << ID_AA64MMFR3_SNERR_SHIFT)
+#define ID_AA64MMFR3_SNERR_VAL(x) ((x) & ID_AA64MMFR3_SNERR_MASK)
+#define ID_AA64MMFR3_SNERR_NONE (UL(0x0) << ID_AA64MMFR3_SNERR_SHIFT)
+#define ID_AA64MMFR3_SNERR_ALL (UL(0x1) << ID_AA64MMFR3_SNERR_SHIFT)
+#define ID_AA64MMFR3_ANERR_SHIFT 44
+#define ID_AA64MMFR3_ANERR_WIDTH 4
+#define ID_AA64MMFR3_ANERR_MASK (UL(0xf) << ID_AA64MMFR3_ANERR_SHIFT)
+#define ID_AA64MMFR3_ANERR_VAL(x) ((x) & ID_AA64MMFR3_ANERR_MASK)
+#define ID_AA64MMFR3_ANERR_NONE (UL(0x0) << ID_AA64MMFR3_ANERR_SHIFT)
+#define ID_AA64MMFR3_ANERR_SOME (UL(0x1) << ID_AA64MMFR3_ANERR_SHIFT)
+#define ID_AA64MMFR3_SDERR_SHIFT 52
+#define ID_AA64MMFR3_SDERR_WIDTH 4
+#define ID_AA64MMFR3_SDERR_MASK (UL(0xf) << ID_AA64MMFR3_SDERR_SHIFT)
+#define ID_AA64MMFR3_SDERR_VAL(x) ((x) & ID_AA64MMFR3_SDERR_MASK)
+#define ID_AA64MMFR3_SDERR_NONE (UL(0x0) << ID_AA64MMFR3_SDERR_SHIFT)
+#define ID_AA64MMFR3_SDERR_ALL (UL(0x1) << ID_AA64MMFR3_SDERR_SHIFT)
+#define ID_AA64MMFR3_ADERR_SHIFT 56
+#define ID_AA64MMFR3_ADERR_WIDTH 4
+#define ID_AA64MMFR3_ADERR_MASK (UL(0xf) << ID_AA64MMFR3_ADERR_SHIFT)
+#define ID_AA64MMFR3_ADERR_VAL(x) ((x) & ID_AA64MMFR3_ADERR_MASK)
+#define ID_AA64MMFR3_ADERR_NONE (UL(0x0) << ID_AA64MMFR3_ADERR_SHIFT)
+#define ID_AA64MMFR3_ADERR_SOME (UL(0x1) << ID_AA64MMFR3_ADERR_SHIFT)
#define ID_AA64MMFR3_Spec_FPACC_SHIFT 60
+#define ID_AA64MMFR3_Spec_FPACC_WIDTH 4
#define ID_AA64MMFR3_Spec_FPACC_MASK (UL(0xf) << ID_AA64MMFR3_Spec_FPACC_SHIFT)
#define ID_AA64MMFR3_Spec_FPACC_VAL(x) ((x) & ID_AA64MMFR3_Spec_FPACC_MASK)
#define ID_AA64MMFR3_Spec_FPACC_NONE (UL(0x0) << ID_AA64MMFR3_Spec_FPACC_SHIFT)
#define ID_AA64MMFR3_Spec_FPACC_IMPL (UL(0x1) << ID_AA64MMFR3_Spec_FPACC_SHIFT)
/* ID_AA64MMFR4_EL1 */
-#define ID_AA64MMFR4_EL1 MRS_REG(ID_AA64MMFR4_EL1)
+#define ID_AA64MMFR4_EL1_REG MRS_REG_ALT_NAME(ID_AA64MMFR4_EL1)
+#define ID_AA64MMFR4_EL1_ISS ISS_MSR_REG(ID_AA64MMFR4_EL1)
#define ID_AA64MMFR4_EL1_op0 3
#define ID_AA64MMFR4_EL1_op1 0
#define ID_AA64MMFR4_EL1_CRn 0
@@ -1137,41 +1598,48 @@
#define ID_AA64MMFR4_EL1_op2 4
/* ID_AA64PFR0_EL1 */
-#define ID_AA64PFR0_EL1 MRS_REG(ID_AA64PFR0_EL1)
+#define ID_AA64PFR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64PFR0_EL1)
+#define ID_AA64PFR0_EL1_ISS ISS_MSR_REG(ID_AA64PFR0_EL1)
#define ID_AA64PFR0_EL1_op0 3
#define ID_AA64PFR0_EL1_op1 0
#define ID_AA64PFR0_EL1_CRn 0
#define ID_AA64PFR0_EL1_CRm 4
#define ID_AA64PFR0_EL1_op2 0
#define ID_AA64PFR0_EL0_SHIFT 0
+#define ID_AA64PFR0_EL0_WIDTH 4
#define ID_AA64PFR0_EL0_MASK (UL(0xf) << ID_AA64PFR0_EL0_SHIFT)
#define ID_AA64PFR0_EL0_VAL(x) ((x) & ID_AA64PFR0_EL0_MASK)
#define ID_AA64PFR0_EL0_64 (UL(0x1) << ID_AA64PFR0_EL0_SHIFT)
#define ID_AA64PFR0_EL0_64_32 (UL(0x2) << ID_AA64PFR0_EL0_SHIFT)
#define ID_AA64PFR0_EL1_SHIFT 4
+#define ID_AA64PFR0_EL1_WIDTH 4
#define ID_AA64PFR0_EL1_MASK (UL(0xf) << ID_AA64PFR0_EL1_SHIFT)
#define ID_AA64PFR0_EL1_VAL(x) ((x) & ID_AA64PFR0_EL1_MASK)
#define ID_AA64PFR0_EL1_64 (UL(0x1) << ID_AA64PFR0_EL1_SHIFT)
#define ID_AA64PFR0_EL1_64_32 (UL(0x2) << ID_AA64PFR0_EL1_SHIFT)
#define ID_AA64PFR0_EL2_SHIFT 8
+#define ID_AA64PFR0_EL2_WIDTH 4
#define ID_AA64PFR0_EL2_MASK (UL(0xf) << ID_AA64PFR0_EL2_SHIFT)
#define ID_AA64PFR0_EL2_VAL(x) ((x) & ID_AA64PFR0_EL2_MASK)
#define ID_AA64PFR0_EL2_NONE (UL(0x0) << ID_AA64PFR0_EL2_SHIFT)
#define ID_AA64PFR0_EL2_64 (UL(0x1) << ID_AA64PFR0_EL2_SHIFT)
#define ID_AA64PFR0_EL2_64_32 (UL(0x2) << ID_AA64PFR0_EL2_SHIFT)
#define ID_AA64PFR0_EL3_SHIFT 12
+#define ID_AA64PFR0_EL3_WIDTH 4
#define ID_AA64PFR0_EL3_MASK (UL(0xf) << ID_AA64PFR0_EL3_SHIFT)
#define ID_AA64PFR0_EL3_VAL(x) ((x) & ID_AA64PFR0_EL3_MASK)
#define ID_AA64PFR0_EL3_NONE (UL(0x0) << ID_AA64PFR0_EL3_SHIFT)
#define ID_AA64PFR0_EL3_64 (UL(0x1) << ID_AA64PFR0_EL3_SHIFT)
#define ID_AA64PFR0_EL3_64_32 (UL(0x2) << ID_AA64PFR0_EL3_SHIFT)
#define ID_AA64PFR0_FP_SHIFT 16
+#define ID_AA64PFR0_FP_WIDTH 4
#define ID_AA64PFR0_FP_MASK (UL(0xf) << ID_AA64PFR0_FP_SHIFT)
#define ID_AA64PFR0_FP_VAL(x) ((x) & ID_AA64PFR0_FP_MASK)
#define ID_AA64PFR0_FP_IMPL (UL(0x0) << ID_AA64PFR0_FP_SHIFT)
#define ID_AA64PFR0_FP_HP (UL(0x1) << ID_AA64PFR0_FP_SHIFT)
#define ID_AA64PFR0_FP_NONE (UL(0xf) << ID_AA64PFR0_FP_SHIFT)
#define ID_AA64PFR0_AdvSIMD_SHIFT 20
+#define ID_AA64PFR0_AdvSIMD_WIDTH 4
#define ID_AA64PFR0_AdvSIMD_MASK (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT)
#define ID_AA64PFR0_AdvSIMD_VAL(x) ((x) & ID_AA64PFR0_AdvSIMD_MASK)
#define ID_AA64PFR0_AdvSIMD_IMPL (UL(0x0) << ID_AA64PFR0_AdvSIMD_SHIFT)
@@ -1179,49 +1647,59 @@
#define ID_AA64PFR0_AdvSIMD_NONE (UL(0xf) << ID_AA64PFR0_AdvSIMD_SHIFT)
#define ID_AA64PFR0_GIC_BITS 0x4 /* Number of bits in GIC field */
#define ID_AA64PFR0_GIC_SHIFT 24
+#define ID_AA64PFR0_GIC_WIDTH 4
#define ID_AA64PFR0_GIC_MASK (UL(0xf) << ID_AA64PFR0_GIC_SHIFT)
#define ID_AA64PFR0_GIC_VAL(x) ((x) & ID_AA64PFR0_GIC_MASK)
#define ID_AA64PFR0_GIC_CPUIF_NONE (UL(0x0) << ID_AA64PFR0_GIC_SHIFT)
#define ID_AA64PFR0_GIC_CPUIF_EN (UL(0x1) << ID_AA64PFR0_GIC_SHIFT)
#define ID_AA64PFR0_GIC_CPUIF_4_1 (UL(0x3) << ID_AA64PFR0_GIC_SHIFT)
#define ID_AA64PFR0_RAS_SHIFT 28
+#define ID_AA64PFR0_RAS_WIDTH 4
#define ID_AA64PFR0_RAS_MASK (UL(0xf) << ID_AA64PFR0_RAS_SHIFT)
#define ID_AA64PFR0_RAS_VAL(x) ((x) & ID_AA64PFR0_RAS_MASK)
#define ID_AA64PFR0_RAS_NONE (UL(0x0) << ID_AA64PFR0_RAS_SHIFT)
#define ID_AA64PFR0_RAS_IMPL (UL(0x1) << ID_AA64PFR0_RAS_SHIFT)
#define ID_AA64PFR0_RAS_8_4 (UL(0x2) << ID_AA64PFR0_RAS_SHIFT)
+#define ID_AA64PFR0_RAS_8_9 (UL(0x3) << ID_AA64PFR0_RAS_SHIFT)
#define ID_AA64PFR0_SVE_SHIFT 32
+#define ID_AA64PFR0_SVE_WIDTH 4
#define ID_AA64PFR0_SVE_MASK (UL(0xf) << ID_AA64PFR0_SVE_SHIFT)
#define ID_AA64PFR0_SVE_VAL(x) ((x) & ID_AA64PFR0_SVE_MASK)
#define ID_AA64PFR0_SVE_NONE (UL(0x0) << ID_AA64PFR0_SVE_SHIFT)
#define ID_AA64PFR0_SVE_IMPL (UL(0x1) << ID_AA64PFR0_SVE_SHIFT)
#define ID_AA64PFR0_SEL2_SHIFT 36
+#define ID_AA64PFR0_SEL2_WIDTH 4
#define ID_AA64PFR0_SEL2_MASK (UL(0xf) << ID_AA64PFR0_SEL2_SHIFT)
#define ID_AA64PFR0_SEL2_VAL(x) ((x) & ID_AA64PFR0_SEL2_MASK)
#define ID_AA64PFR0_SEL2_NONE (UL(0x0) << ID_AA64PFR0_SEL2_SHIFT)
#define ID_AA64PFR0_SEL2_IMPL (UL(0x1) << ID_AA64PFR0_SEL2_SHIFT)
#define ID_AA64PFR0_MPAM_SHIFT 40
+#define ID_AA64PFR0_MPAM_WIDTH 4
#define ID_AA64PFR0_MPAM_MASK (UL(0xf) << ID_AA64PFR0_MPAM_SHIFT)
#define ID_AA64PFR0_MPAM_VAL(x) ((x) & ID_AA64PFR0_MPAM_MASK)
#define ID_AA64PFR0_MPAM_NONE (UL(0x0) << ID_AA64PFR0_MPAM_SHIFT)
#define ID_AA64PFR0_MPAM_IMPL (UL(0x1) << ID_AA64PFR0_MPAM_SHIFT)
#define ID_AA64PFR0_AMU_SHIFT 44
+#define ID_AA64PFR0_AMU_WIDTH 4
#define ID_AA64PFR0_AMU_MASK (UL(0xf) << ID_AA64PFR0_AMU_SHIFT)
#define ID_AA64PFR0_AMU_VAL(x) ((x) & ID_AA64PFR0_AMU_MASK)
#define ID_AA64PFR0_AMU_NONE (UL(0x0) << ID_AA64PFR0_AMU_SHIFT)
#define ID_AA64PFR0_AMU_V1 (UL(0x1) << ID_AA64PFR0_AMU_SHIFT)
#define ID_AA64PFR0_AMU_V1_1 (UL(0x2) << ID_AA64PFR0_AMU_SHIFT)
#define ID_AA64PFR0_DIT_SHIFT 48
+#define ID_AA64PFR0_DIT_WIDTH 4
#define ID_AA64PFR0_DIT_MASK (UL(0xf) << ID_AA64PFR0_DIT_SHIFT)
#define ID_AA64PFR0_DIT_VAL(x) ((x) & ID_AA64PFR0_DIT_MASK)
#define ID_AA64PFR0_DIT_NONE (UL(0x0) << ID_AA64PFR0_DIT_SHIFT)
#define ID_AA64PFR0_DIT_PSTATE (UL(0x1) << ID_AA64PFR0_DIT_SHIFT)
#define ID_AA64PFR0_RME_SHIFT 52
+#define ID_AA64PFR0_RME_WIDTH 4
#define ID_AA64PFR0_RME_MASK (UL(0xf) << ID_AA64PFR0_RME_SHIFT)
#define ID_AA64PFR0_RME_VAL(x) ((x) & ID_AA64PFR0_RME_MASK)
#define ID_AA64PFR0_RME_NONE (UL(0x0) << ID_AA64PFR0_RME_SHIFT)
#define ID_AA64PFR0_RME_IMPL (UL(0x1) << ID_AA64PFR0_RME_SHIFT)
#define ID_AA64PFR0_CSV2_SHIFT 56
+#define ID_AA64PFR0_CSV2_WIDTH 4
#define ID_AA64PFR0_CSV2_MASK (UL(0xf) << ID_AA64PFR0_CSV2_SHIFT)
#define ID_AA64PFR0_CSV2_VAL(x) ((x) & ID_AA64PFR0_CSV2_MASK)
#define ID_AA64PFR0_CSV2_NONE (UL(0x0) << ID_AA64PFR0_CSV2_SHIFT)
@@ -1229,30 +1707,35 @@
#define ID_AA64PFR0_CSV2_SCXTNUM (UL(0x2) << ID_AA64PFR0_CSV2_SHIFT)
#define ID_AA64PFR0_CSV2_3 (UL(0x3) << ID_AA64PFR0_CSV2_SHIFT)
#define ID_AA64PFR0_CSV3_SHIFT 60
+#define ID_AA64PFR0_CSV3_WIDTH 4
#define ID_AA64PFR0_CSV3_MASK (UL(0xf) << ID_AA64PFR0_CSV3_SHIFT)
#define ID_AA64PFR0_CSV3_VAL(x) ((x) & ID_AA64PFR0_CSV3_MASK)
#define ID_AA64PFR0_CSV3_NONE (UL(0x0) << ID_AA64PFR0_CSV3_SHIFT)
#define ID_AA64PFR0_CSV3_ISOLATED (UL(0x1) << ID_AA64PFR0_CSV3_SHIFT)
/* ID_AA64PFR1_EL1 */
-#define ID_AA64PFR1_EL1 MRS_REG(ID_AA64PFR1_EL1)
+#define ID_AA64PFR1_EL1_REG MRS_REG_ALT_NAME(ID_AA64PFR1_EL1)
+#define ID_AA64PFR1_EL1_ISS ISS_MSR_REG(ID_AA64PFR1_EL1)
#define ID_AA64PFR1_EL1_op0 3
#define ID_AA64PFR1_EL1_op1 0
#define ID_AA64PFR1_EL1_CRn 0
#define ID_AA64PFR1_EL1_CRm 4
#define ID_AA64PFR1_EL1_op2 1
#define ID_AA64PFR1_BT_SHIFT 0
+#define ID_AA64PFR1_BT_WIDTH 4
#define ID_AA64PFR1_BT_MASK (UL(0xf) << ID_AA64PFR1_BT_SHIFT)
#define ID_AA64PFR1_BT_VAL(x) ((x) & ID_AA64PFR1_BT_MASK)
#define ID_AA64PFR1_BT_NONE (UL(0x0) << ID_AA64PFR1_BT_SHIFT)
#define ID_AA64PFR1_BT_IMPL (UL(0x1) << ID_AA64PFR1_BT_SHIFT)
#define ID_AA64PFR1_SSBS_SHIFT 4
+#define ID_AA64PFR1_SSBS_WIDTH 4
#define ID_AA64PFR1_SSBS_MASK (UL(0xf) << ID_AA64PFR1_SSBS_SHIFT)
#define ID_AA64PFR1_SSBS_VAL(x) ((x) & ID_AA64PFR1_SSBS_MASK)
#define ID_AA64PFR1_SSBS_NONE (UL(0x0) << ID_AA64PFR1_SSBS_SHIFT)
#define ID_AA64PFR1_SSBS_PSTATE (UL(0x1) << ID_AA64PFR1_SSBS_SHIFT)
#define ID_AA64PFR1_SSBS_PSTATE_MSR (UL(0x2) << ID_AA64PFR1_SSBS_SHIFT)
#define ID_AA64PFR1_MTE_SHIFT 8
+#define ID_AA64PFR1_MTE_WIDTH 4
#define ID_AA64PFR1_MTE_MASK (UL(0xf) << ID_AA64PFR1_MTE_SHIFT)
#define ID_AA64PFR1_MTE_VAL(x) ((x) & ID_AA64PFR1_MTE_MASK)
#define ID_AA64PFR1_MTE_NONE (UL(0x0) << ID_AA64PFR1_MTE_SHIFT)
@@ -1260,40 +1743,77 @@
#define ID_AA64PFR1_MTE_MTE2 (UL(0x2) << ID_AA64PFR1_MTE_SHIFT)
#define ID_AA64PFR1_MTE_MTE3 (UL(0x3) << ID_AA64PFR1_MTE_SHIFT)
#define ID_AA64PFR1_RAS_frac_SHIFT 12
+#define ID_AA64PFR1_RAS_frac_WIDTH 4
#define ID_AA64PFR1_RAS_frac_MASK (UL(0xf) << ID_AA64PFR1_RAS_frac_SHIFT)
#define ID_AA64PFR1_RAS_frac_VAL(x) ((x) & ID_AA64PFR1_RAS_frac_MASK)
#define ID_AA64PFR1_RAS_frac_p0 (UL(0x0) << ID_AA64PFR1_RAS_frac_SHIFT)
#define ID_AA64PFR1_RAS_frac_p1 (UL(0x1) << ID_AA64PFR1_RAS_frac_SHIFT)
#define ID_AA64PFR1_MPAM_frac_SHIFT 16
+#define ID_AA64PFR1_MPAM_frac_WIDTH 4
#define ID_AA64PFR1_MPAM_frac_MASK (UL(0xf) << ID_AA64PFR1_MPAM_frac_SHIFT)
#define ID_AA64PFR1_MPAM_frac_VAL(x) ((x) & ID_AA64PFR1_MPAM_frac_MASK)
#define ID_AA64PFR1_MPAM_frac_p0 (UL(0x0) << ID_AA64PFR1_MPAM_frac_SHIFT)
#define ID_AA64PFR1_MPAM_frac_p1 (UL(0x1) << ID_AA64PFR1_MPAM_frac_SHIFT)
#define ID_AA64PFR1_SME_SHIFT 24
+#define ID_AA64PFR1_SME_WIDTH 4
#define ID_AA64PFR1_SME_MASK (UL(0xf) << ID_AA64PFR1_SME_SHIFT)
#define ID_AA64PFR1_SME_VAL(x) ((x) & ID_AA64PFR1_SME_MASK)
#define ID_AA64PFR1_SME_NONE (UL(0x0) << ID_AA64PFR1_SME_SHIFT)
#define ID_AA64PFR1_SME_SME (UL(0x1) << ID_AA64PFR1_SME_SHIFT)
#define ID_AA64PFR1_SME_SME2 (UL(0x2) << ID_AA64PFR1_SME_SHIFT)
#define ID_AA64PFR1_RNDR_trap_SHIFT 28
+#define ID_AA64PFR1_RNDR_trap_WIDTH 4
#define ID_AA64PFR1_RNDR_trap_MASK (UL(0xf) << ID_AA64PFR1_RNDR_trap_SHIFT)
#define ID_AA64PFR1_RNDR_trap_VAL(x) ((x) & ID_AA64PFR1_RNDR_trap_MASK)
#define ID_AA64PFR1_RNDR_trap_NONE (UL(0x0) << ID_AA64PFR1_RNDR_trap_SHIFT)
#define ID_AA64PFR1_RNDR_trap_IMPL (UL(0x1) << ID_AA64PFR1_RNDR_trap_SHIFT)
#define ID_AA64PFR1_CSV2_frac_SHIFT 32
+#define ID_AA64PFR1_CSV2_frac_WIDTH 4
#define ID_AA64PFR1_CSV2_frac_MASK (UL(0xf) << ID_AA64PFR1_CSV2_frac_SHIFT)
#define ID_AA64PFR1_CSV2_frac_VAL(x) ((x) & ID_AA64PFR1_CSV2_frac_MASK)
#define ID_AA64PFR1_CSV2_frac_p0 (UL(0x0) << ID_AA64PFR1_CSV2_frac_SHIFT)
#define ID_AA64PFR1_CSV2_frac_p1 (UL(0x1) << ID_AA64PFR1_CSV2_frac_SHIFT)
#define ID_AA64PFR1_CSV2_frac_p2 (UL(0x2) << ID_AA64PFR1_CSV2_frac_SHIFT)
#define ID_AA64PFR1_NMI_SHIFT 36
+#define ID_AA64PFR1_NMI_WIDTH 4
#define ID_AA64PFR1_NMI_MASK (UL(0xf) << ID_AA64PFR1_NMI_SHIFT)
#define ID_AA64PFR1_NMI_VAL(x) ((x) & ID_AA64PFR1_NMI_MASK)
#define ID_AA64PFR1_NMI_NONE (UL(0x0) << ID_AA64PFR1_NMI_SHIFT)
#define ID_AA64PFR1_NMI_IMPL (UL(0x1) << ID_AA64PFR1_NMI_SHIFT)
+#define ID_AA64PFR1_MTE_frac_SHIFT 40
+#define ID_AA64PFR1_MTE_frac_WIDTH 4
+#define ID_AA64PFR1_MTE_frac_MASK (UL(0xf) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define ID_AA64PFR1_MTE_frac_VAL(x) ((x) & ID_AA64PFR1_MTE_frac_MASK)
+#define ID_AA64PFR1_MTE_frac_IMPL (UL(0x0) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define ID_AA64PFR1_MTE_frac_NONE (UL(0xf) << ID_AA64PFR1_MTE_frac_SHIFT)
+#define ID_AA64PFR1_THE_SHIFT 48
+#define ID_AA64PFR1_THE_WIDTH 4
+#define ID_AA64PFR1_THE_MASK (UL(0xf) << ID_AA64PFR1_THE_SHIFT)
+#define ID_AA64PFR1_THE_VAL(x) ((x) & ID_AA64PFR1_THE_MASK)
+#define ID_AA64PFR1_THE_NONE (UL(0x0) << ID_AA64PFR1_THE_SHIFT)
+#define ID_AA64PFR1_THE_IMPL (UL(0x1) << ID_AA64PFR1_THE_SHIFT)
+#define ID_AA64PFR1_MTEX_SHIFT 52
+#define ID_AA64PFR1_MTEX_WIDTH 4
+#define ID_AA64PFR1_MTEX_MASK (UL(0xf) << ID_AA64PFR1_MTEX_SHIFT)
+#define ID_AA64PFR1_MTEX_VAL(x) ((x) & ID_AA64PFR1_MTEX_MASK)
+#define ID_AA64PFR1_MTEX_NONE (UL(0x0) << ID_AA64PFR1_MTEX_SHIFT)
+#define ID_AA64PFR1_MTEX_IMPL (UL(0x1) << ID_AA64PFR1_MTEX_SHIFT)
+#define ID_AA64PFR1_DF2_SHIFT 56
+#define ID_AA64PFR1_DF2_WIDTH 4
+#define ID_AA64PFR1_DF2_MASK (UL(0xf) << ID_AA64PFR1_DF2_SHIFT)
+#define ID_AA64PFR1_DF2_VAL(x) ((x) & ID_AA64PFR1_DF2_MASK)
+#define ID_AA64PFR1_DF2_NONE (UL(0x0) << ID_AA64PFR1_DF2_SHIFT)
+#define ID_AA64PFR1_DF2_IMPL (UL(0x1) << ID_AA64PFR1_DF2_SHIFT)
+#define ID_AA64PFR1_PFAR_SHIFT 60
+#define ID_AA64PFR1_PFAR_WIDTH 4
+#define ID_AA64PFR1_PFAR_MASK (UL(0xf) << ID_AA64PFR1_PFAR_SHIFT)
+#define ID_AA64PFR1_PFAR_VAL(x) ((x) & ID_AA64PFR1_PFAR_MASK)
+#define ID_AA64PFR1_PFAR_NONE (UL(0x0) << ID_AA64PFR1_PFAR_SHIFT)
+#define ID_AA64PFR1_PFAR_IMPL (UL(0x1) << ID_AA64PFR1_PFAR_SHIFT)
/* ID_AA64PFR2_EL1 */
-#define ID_AA64PFR2_EL1 MRS_REG(ID_AA64PFR2_EL1)
+#define ID_AA64PFR2_EL1_REG MRS_REG_ALT_NAME(ID_AA64PFR2_EL1)
+#define ID_AA64PFR2_EL1_ISS ISS_MSR_REG(ID_AA64PFR2_EL1)
#define ID_AA64PFR2_EL1_op0 3
#define ID_AA64PFR2_EL1_op1 0
#define ID_AA64PFR2_EL1_CRn 0
@@ -1301,116 +1821,146 @@
#define ID_AA64PFR2_EL1_op2 2
/* ID_AA64ZFR0_EL1 */
-#define ID_AA64ZFR0_EL1 MRS_REG(ID_AA64ZFR0_EL1)
#define ID_AA64ZFR0_EL1_REG MRS_REG_ALT_NAME(ID_AA64ZFR0_EL1)
+#define ID_AA64ZFR0_EL1_ISS ISS_MSR_REG(ID_AA64ZFR0_EL1)
#define ID_AA64ZFR0_EL1_op0 3
#define ID_AA64ZFR0_EL1_op1 0
#define ID_AA64ZFR0_EL1_CRn 0
#define ID_AA64ZFR0_EL1_CRm 4
#define ID_AA64ZFR0_EL1_op2 4
#define ID_AA64ZFR0_SVEver_SHIFT 0
+#define ID_AA64ZFR0_SVEver_WIDTH 4
#define ID_AA64ZFR0_SVEver_MASK (UL(0xf) << ID_AA64ZFR0_SVEver_SHIFT)
-#define ID_AA64ZFR0_SVEver_VAL(x) ((x) & ID_AA64ZFR0_SVEver_MASK
-#define ID_AA64ZFR0_SVEver_SVE1 (UL(0x0) << ID_AA64ZFR0_SVEver_SHIFT)
-#define ID_AA64ZFR0_SVEver_SVE2 (UL(0x1) << ID_AA64ZFR0_SVEver_SHIFT)
+#define ID_AA64ZFR0_SVEver_VAL(x) ((x) & ID_AA64ZFR0_SVEver_MASK)
+#define ID_AA64ZFR0_SVEver_SVE1 (UL(0x0) << ID_AA64ZFR0_SVEver_SHIFT)
+#define ID_AA64ZFR0_SVEver_SVE2 (UL(0x1) << ID_AA64ZFR0_SVEver_SHIFT)
+#define ID_AA64ZFR0_SVEver_SVE2P1 (UL(0x2) << ID_AA64ZFR0_SVEver_SHIFT)
#define ID_AA64ZFR0_AES_SHIFT 4
+#define ID_AA64ZFR0_AES_WIDTH 4
#define ID_AA64ZFR0_AES_MASK (UL(0xf) << ID_AA64ZFR0_AES_SHIFT)
-#define ID_AA64ZFR0_AES_VAL(x) ((x) & ID_AA64ZFR0_AES_MASK
-#define ID_AA64ZFR0_AES_NONE (UL(0x0) << ID_AA64ZFR0_AES_SHIFT)
-#define ID_AA64ZFR0_AES_BASE (UL(0x1) << ID_AA64ZFR0_AES_SHIFT)
-#define ID_AA64ZFR0_AES_PMULL (UL(0x2) << ID_AA64ZFR0_AES_SHIFT)
+#define ID_AA64ZFR0_AES_VAL(x) ((x) & ID_AA64ZFR0_AES_MASK)
+#define ID_AA64ZFR0_AES_NONE (UL(0x0) << ID_AA64ZFR0_AES_SHIFT)
+#define ID_AA64ZFR0_AES_BASE (UL(0x1) << ID_AA64ZFR0_AES_SHIFT)
+#define ID_AA64ZFR0_AES_PMULL (UL(0x2) << ID_AA64ZFR0_AES_SHIFT)
#define ID_AA64ZFR0_BitPerm_SHIFT 16
+#define ID_AA64ZFR0_BitPerm_WIDTH 4
#define ID_AA64ZFR0_BitPerm_MASK (UL(0xf) << ID_AA64ZFR0_BitPerm_SHIFT)
-#define ID_AA64ZFR0_BitPerm_VAL(x) ((x) & ID_AA64ZFR0_BitPerm_MASK
-#define ID_AA64ZFR0_BitPerm_NONE (UL(0x0) << ID_AA64ZFR0_BitPerm_SHIFT)
-#define ID_AA64ZFR0_BitPerm_IMPL (UL(0x1) << ID_AA64ZFR0_BitPerm_SHIFT)
+#define ID_AA64ZFR0_BitPerm_VAL(x) ((x) & ID_AA64ZFR0_BitPerm_MASK)
+#define ID_AA64ZFR0_BitPerm_NONE (UL(0x0) << ID_AA64ZFR0_BitPerm_SHIFT)
+#define ID_AA64ZFR0_BitPerm_IMPL (UL(0x1) << ID_AA64ZFR0_BitPerm_SHIFT)
#define ID_AA64ZFR0_BF16_SHIFT 20
+#define ID_AA64ZFR0_BF16_WIDTH 4
#define ID_AA64ZFR0_BF16_MASK (UL(0xf) << ID_AA64ZFR0_BF16_SHIFT)
-#define ID_AA64ZFR0_BF16_VAL(x) ((x) & ID_AA64ZFR0_BF16_MASK
-#define ID_AA64ZFR0_BF16_NONE (UL(0x0) << ID_AA64ZFR0_BF16_SHIFT)
-#define ID_AA64ZFR0_BF16_BASE (UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
-#define ID_AA64ZFR0_BF16_EBF (UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
+#define ID_AA64ZFR0_BF16_VAL(x) ((x) & ID_AA64ZFR0_BF16_MASK)
+#define ID_AA64ZFR0_BF16_NONE (UL(0x0) << ID_AA64ZFR0_BF16_SHIFT)
+#define ID_AA64ZFR0_BF16_BASE (UL(0x1) << ID_AA64ZFR0_BF16_SHIFT)
+#define	ID_AA64ZFR0_BF16_EBF		(UL(0x2) << ID_AA64ZFR0_BF16_SHIFT)
#define ID_AA64ZFR0_SHA3_SHIFT 32
+#define ID_AA64ZFR0_SHA3_WIDTH 4
#define ID_AA64ZFR0_SHA3_MASK (UL(0xf) << ID_AA64ZFR0_SHA3_SHIFT)
-#define ID_AA64ZFR0_SHA3_VAL(x) ((x) & ID_AA64ZFR0_SHA3_MASK
-#define ID_AA64ZFR0_SHA3_NONE (UL(0x0) << ID_AA64ZFR0_SHA3_SHIFT)
-#define ID_AA64ZFR0_SHA3_IMPL (UL(0x1) << ID_AA64ZFR0_SHA3_SHIFT)
+#define ID_AA64ZFR0_SHA3_VAL(x) ((x) & ID_AA64ZFR0_SHA3_MASK)
+#define ID_AA64ZFR0_SHA3_NONE (UL(0x0) << ID_AA64ZFR0_SHA3_SHIFT)
+#define ID_AA64ZFR0_SHA3_IMPL (UL(0x1) << ID_AA64ZFR0_SHA3_SHIFT)
#define ID_AA64ZFR0_SM4_SHIFT 40
+#define ID_AA64ZFR0_SM4_WIDTH 4
#define ID_AA64ZFR0_SM4_MASK (UL(0xf) << ID_AA64ZFR0_SM4_SHIFT)
-#define ID_AA64ZFR0_SM4_VAL(x) ((x) & ID_AA64ZFR0_SM4_MASK
-#define ID_AA64ZFR0_SM4_NONE (UL(0x0) << ID_AA64ZFR0_SM4_SHIFT)
-#define ID_AA64ZFR0_SM4_IMPL (UL(0x1) << ID_AA64ZFR0_SM4_SHIFT)
+#define ID_AA64ZFR0_SM4_VAL(x) ((x) & ID_AA64ZFR0_SM4_MASK)
+#define ID_AA64ZFR0_SM4_NONE (UL(0x0) << ID_AA64ZFR0_SM4_SHIFT)
+#define ID_AA64ZFR0_SM4_IMPL (UL(0x1) << ID_AA64ZFR0_SM4_SHIFT)
#define ID_AA64ZFR0_I8MM_SHIFT 44
+#define ID_AA64ZFR0_I8MM_WIDTH 4
#define ID_AA64ZFR0_I8MM_MASK (UL(0xf) << ID_AA64ZFR0_I8MM_SHIFT)
-#define ID_AA64ZFR0_I8MM_VAL(x) ((x) & ID_AA64ZFR0_I8MM_MASK
-#define ID_AA64ZFR0_I8MM_NONE (UL(0x0) << ID_AA64ZFR0_I8MM_SHIFT)
-#define ID_AA64ZFR0_I8MM_IMPL (UL(0x1) << ID_AA64ZFR0_I8MM_SHIFT)
+#define ID_AA64ZFR0_I8MM_VAL(x) ((x) & ID_AA64ZFR0_I8MM_MASK)
+#define ID_AA64ZFR0_I8MM_NONE (UL(0x0) << ID_AA64ZFR0_I8MM_SHIFT)
+#define ID_AA64ZFR0_I8MM_IMPL (UL(0x1) << ID_AA64ZFR0_I8MM_SHIFT)
#define ID_AA64ZFR0_F32MM_SHIFT 52
+#define ID_AA64ZFR0_F32MM_WIDTH 4
#define ID_AA64ZFR0_F32MM_MASK (UL(0xf) << ID_AA64ZFR0_F32MM_SHIFT)
-#define ID_AA64ZFR0_F32MM_VAL(x) ((x) & ID_AA64ZFR0_F32MM_MASK
-#define ID_AA64ZFR0_F32MM_NONE (UL(0x0) << ID_AA64ZFR0_F32MM_SHIFT)
-#define ID_AA64ZFR0_F32MM_IMPL (UL(0x1) << ID_AA64ZFR0_F32MM_SHIFT)
+#define ID_AA64ZFR0_F32MM_VAL(x) ((x) & ID_AA64ZFR0_F32MM_MASK)
+#define ID_AA64ZFR0_F32MM_NONE (UL(0x0) << ID_AA64ZFR0_F32MM_SHIFT)
+#define ID_AA64ZFR0_F32MM_IMPL (UL(0x1) << ID_AA64ZFR0_F32MM_SHIFT)
#define ID_AA64ZFR0_F64MM_SHIFT 56
+#define ID_AA64ZFR0_F64MM_WIDTH 4
#define ID_AA64ZFR0_F64MM_MASK (UL(0xf) << ID_AA64ZFR0_F64MM_SHIFT)
-#define ID_AA64ZFR0_F64MM_VAL(x) ((x) & ID_AA64ZFR0_F64MM_MASK
-#define ID_AA64ZFR0_F64MM_NONE (UL(0x0) << ID_AA64ZFR0_F64MM_SHIFT)
-#define ID_AA64ZFR0_F64MM_IMPL (UL(0x1) << ID_AA64ZFR0_F64MM_SHIFT)
+#define ID_AA64ZFR0_F64MM_VAL(x) ((x) & ID_AA64ZFR0_F64MM_MASK)
+#define ID_AA64ZFR0_F64MM_NONE (UL(0x0) << ID_AA64ZFR0_F64MM_SHIFT)
+#define ID_AA64ZFR0_F64MM_IMPL (UL(0x1) << ID_AA64ZFR0_F64MM_SHIFT)
/* ID_ISAR5_EL1 */
-#define ID_ISAR5_EL1 MRS_REG(ID_ISAR5_EL1)
+#define ID_ISAR5_EL1_ISS ISS_MSR_REG(ID_ISAR5_EL1)
#define ID_ISAR5_EL1_op0 0x3
#define ID_ISAR5_EL1_op1 0x0
#define ID_ISAR5_EL1_CRn 0x0
#define ID_ISAR5_EL1_CRm 0x2
#define ID_ISAR5_EL1_op2 0x5
#define ID_ISAR5_SEVL_SHIFT 0
+#define ID_ISAR5_SEVL_WIDTH 4
#define ID_ISAR5_SEVL_MASK (UL(0xf) << ID_ISAR5_SEVL_SHIFT)
#define ID_ISAR5_SEVL_VAL(x) ((x) & ID_ISAR5_SEVL_MASK)
#define ID_ISAR5_SEVL_NOP (UL(0x0) << ID_ISAR5_SEVL_SHIFT)
#define ID_ISAR5_SEVL_IMPL (UL(0x1) << ID_ISAR5_SEVL_SHIFT)
#define ID_ISAR5_AES_SHIFT 4
+#define ID_ISAR5_AES_WIDTH 4
#define ID_ISAR5_AES_MASK (UL(0xf) << ID_ISAR5_AES_SHIFT)
#define ID_ISAR5_AES_VAL(x) ((x) & ID_ISAR5_AES_MASK)
#define ID_ISAR5_AES_NONE (UL(0x0) << ID_ISAR5_AES_SHIFT)
#define ID_ISAR5_AES_BASE (UL(0x1) << ID_ISAR5_AES_SHIFT)
#define ID_ISAR5_AES_VMULL (UL(0x2) << ID_ISAR5_AES_SHIFT)
#define ID_ISAR5_SHA1_SHIFT 8
+#define ID_ISAR5_SHA1_WIDTH 4
#define ID_ISAR5_SHA1_MASK (UL(0xf) << ID_ISAR5_SHA1_SHIFT)
#define ID_ISAR5_SHA1_VAL(x) ((x) & ID_ISAR5_SHA1_MASK)
#define ID_ISAR5_SHA1_NONE (UL(0x0) << ID_ISAR5_SHA1_SHIFT)
#define ID_ISAR5_SHA1_IMPL (UL(0x1) << ID_ISAR5_SHA1_SHIFT)
#define ID_ISAR5_SHA2_SHIFT 12
+#define ID_ISAR5_SHA2_WIDTH 4
#define ID_ISAR5_SHA2_MASK (UL(0xf) << ID_ISAR5_SHA2_SHIFT)
#define ID_ISAR5_SHA2_VAL(x) ((x) & ID_ISAR5_SHA2_MASK)
#define ID_ISAR5_SHA2_NONE (UL(0x0) << ID_ISAR5_SHA2_SHIFT)
#define ID_ISAR5_SHA2_IMPL (UL(0x1) << ID_ISAR5_SHA2_SHIFT)
#define ID_ISAR5_CRC32_SHIFT 16
+#define ID_ISAR5_CRC32_WIDTH 4
#define ID_ISAR5_CRC32_MASK (UL(0xf) << ID_ISAR5_CRC32_SHIFT)
#define ID_ISAR5_CRC32_VAL(x) ((x) & ID_ISAR5_CRC32_MASK)
#define ID_ISAR5_CRC32_NONE (UL(0x0) << ID_ISAR5_CRC32_SHIFT)
#define ID_ISAR5_CRC32_IMPL (UL(0x1) << ID_ISAR5_CRC32_SHIFT)
#define ID_ISAR5_RDM_SHIFT 24
+#define ID_ISAR5_RDM_WIDTH 4
#define ID_ISAR5_RDM_MASK (UL(0xf) << ID_ISAR5_RDM_SHIFT)
#define ID_ISAR5_RDM_VAL(x) ((x) & ID_ISAR5_RDM_MASK)
#define ID_ISAR5_RDM_NONE (UL(0x0) << ID_ISAR5_RDM_SHIFT)
#define ID_ISAR5_RDM_IMPL (UL(0x1) << ID_ISAR5_RDM_SHIFT)
#define ID_ISAR5_VCMA_SHIFT 28
+#define ID_ISAR5_VCMA_WIDTH 4
#define ID_ISAR5_VCMA_MASK (UL(0xf) << ID_ISAR5_VCMA_SHIFT)
#define ID_ISAR5_VCMA_VAL(x) ((x) & ID_ISAR5_VCMA_MASK)
#define ID_ISAR5_VCMA_NONE (UL(0x0) << ID_ISAR5_VCMA_SHIFT)
#define ID_ISAR5_VCMA_IMPL (UL(0x1) << ID_ISAR5_VCMA_SHIFT)
/* MAIR_EL1 - Memory Attribute Indirection Register */
-#define MAIR_ATTR_MASK(idx) (UL(0xff) << ((n)* 8))
-#define MAIR_ATTR(attr, idx) ((attr) << ((idx) * 8))
-#define MAIR_DEVICE_nGnRnE UL(0x00)
-#define MAIR_DEVICE_nGnRE UL(0x04)
-#define MAIR_NORMAL_NC UL(0x44)
-#define MAIR_NORMAL_WT UL(0xbb)
-#define MAIR_NORMAL_WB UL(0xff)
+#define MAIR_EL1_REG MRS_REG_ALT_NAME(MAIR_EL1)
+#define MAIR_EL1_op0 3
+#define MAIR_EL1_op1 0
+#define MAIR_EL1_CRn 10
+#define MAIR_EL1_CRm 2
+#define MAIR_EL1_op2 0
+#define	MAIR_ATTR_MASK(idx)	(UL(0xff) << ((idx) * 8))
+#define MAIR_ATTR(attr, idx) ((attr) << ((idx) * 8))
+#define MAIR_DEVICE_nGnRnE UL(0x00)
+#define MAIR_DEVICE_nGnRE UL(0x04)
+#define MAIR_NORMAL_NC UL(0x44)
+#define MAIR_NORMAL_WT UL(0xbb)
+#define MAIR_NORMAL_WB UL(0xff)
+
+/* MAIR_EL12 */
+#define MAIR_EL12_REG MRS_REG_ALT_NAME(MAIR_EL12)
+#define MAIR_EL12_op0 3
+#define MAIR_EL12_op1 5
+#define MAIR_EL12_CRn 10
+#define MAIR_EL12_CRm 2
+#define MAIR_EL12_op2 0
/* MDCCINT_EL1 */
-#define MDCCINT_EL1 MRS_REG(MDCCINT_EL1)
#define MDCCINT_EL1_op0 2
#define MDCCINT_EL1_op1 0
#define MDCCINT_EL1_CRn 0
@@ -1418,7 +1968,6 @@
#define MDCCINT_EL1_op2 0
/* MDCCSR_EL0 */
-#define MDCCSR_EL0 MRS_REG(MDCCSR_EL0)
#define MDCCSR_EL0_op0 2
#define MDCCSR_EL0_op1 3
#define MDCCSR_EL0_CRn 0
@@ -1426,7 +1975,6 @@
#define MDCCSR_EL0_op2 0
/* MDSCR_EL1 - Monitor Debug System Control Register */
-#define MDSCR_EL1 MRS_REG(MDSCR_EL1)
#define MDSCR_EL1_op0 2
#define MDSCR_EL1_op1 0
#define MDSCR_EL1_CRn 0
@@ -1440,7 +1988,6 @@
#define MDSCR_MDE (UL(0x1) << MDSCR_MDE_SHIFT)
/* MIDR_EL1 - Main ID Register */
-#define MIDR_EL1 MRS_REG(MIDR_EL1)
#define MIDR_EL1_op0 3
#define MIDR_EL1_op1 0
#define MIDR_EL1_CRn 0
@@ -1448,7 +1995,6 @@
#define MIDR_EL1_op2 0
/* MPIDR_EL1 - Multiprocessor Affinity Register */
-#define MPIDR_EL1 MRS_REG(MPIDR_EL1)
#define MPIDR_EL1_op0 3
#define MPIDR_EL1_op1 0
#define MPIDR_EL1_CRn 0
@@ -1472,95 +2018,110 @@
#define MPIDR_AFF3_VAL(x) ((x) & MPIDR_AFF3_MASK)
/* MVFR0_EL1 */
-#define MVFR0_EL1 MRS_REG(MVFR0_EL1)
+#define MVFR0_EL1_ISS ISS_MSR_REG(MVFR0_EL1)
#define MVFR0_EL1_op0 0x3
#define MVFR0_EL1_op1 0x0
#define MVFR0_EL1_CRn 0x0
#define MVFR0_EL1_CRm 0x3
#define MVFR0_EL1_op2 0x0
#define MVFR0_SIMDReg_SHIFT 0
+#define MVFR0_SIMDReg_WIDTH 4
#define MVFR0_SIMDReg_MASK (UL(0xf) << MVFR0_SIMDReg_SHIFT)
#define MVFR0_SIMDReg_VAL(x) ((x) & MVFR0_SIMDReg_MASK)
#define MVFR0_SIMDReg_NONE (UL(0x0) << MVFR0_SIMDReg_SHIFT)
#define MVFR0_SIMDReg_FP (UL(0x1) << MVFR0_SIMDReg_SHIFT)
#define MVFR0_SIMDReg_AdvSIMD (UL(0x2) << MVFR0_SIMDReg_SHIFT)
#define MVFR0_FPSP_SHIFT 4
+#define MVFR0_FPSP_WIDTH 4
#define MVFR0_FPSP_MASK (UL(0xf) << MVFR0_FPSP_SHIFT)
#define MVFR0_FPSP_VAL(x) ((x) & MVFR0_FPSP_MASK)
#define MVFR0_FPSP_NONE (UL(0x0) << MVFR0_FPSP_SHIFT)
#define MVFR0_FPSP_VFP_v2 (UL(0x1) << MVFR0_FPSP_SHIFT)
#define MVFR0_FPSP_VFP_v3_v4 (UL(0x2) << MVFR0_FPSP_SHIFT)
#define MVFR0_FPDP_SHIFT 8
+#define MVFR0_FPDP_WIDTH 4
#define MVFR0_FPDP_MASK (UL(0xf) << MVFR0_FPDP_SHIFT)
#define MVFR0_FPDP_VAL(x) ((x) & MVFR0_FPDP_MASK)
#define MVFR0_FPDP_NONE (UL(0x0) << MVFR0_FPDP_SHIFT)
#define MVFR0_FPDP_VFP_v2 (UL(0x1) << MVFR0_FPDP_SHIFT)
#define MVFR0_FPDP_VFP_v3_v4 (UL(0x2) << MVFR0_FPDP_SHIFT)
#define MVFR0_FPTrap_SHIFT 12
+#define MVFR0_FPTrap_WIDTH 4
#define MVFR0_FPTrap_MASK (UL(0xf) << MVFR0_FPTrap_SHIFT)
#define MVFR0_FPTrap_VAL(x) ((x) & MVFR0_FPTrap_MASK)
#define MVFR0_FPTrap_NONE (UL(0x0) << MVFR0_FPTrap_SHIFT)
#define MVFR0_FPTrap_IMPL (UL(0x1) << MVFR0_FPTrap_SHIFT)
#define MVFR0_FPDivide_SHIFT 16
+#define MVFR0_FPDivide_WIDTH 4
#define MVFR0_FPDivide_MASK (UL(0xf) << MVFR0_FPDivide_SHIFT)
#define MVFR0_FPDivide_VAL(x) ((x) & MVFR0_FPDivide_MASK)
#define MVFR0_FPDivide_NONE (UL(0x0) << MVFR0_FPDivide_SHIFT)
#define MVFR0_FPDivide_IMPL (UL(0x1) << MVFR0_FPDivide_SHIFT)
#define MVFR0_FPSqrt_SHIFT 20
+#define MVFR0_FPSqrt_WIDTH 4
#define MVFR0_FPSqrt_MASK (UL(0xf) << MVFR0_FPSqrt_SHIFT)
#define MVFR0_FPSqrt_VAL(x) ((x) & MVFR0_FPSqrt_MASK)
#define MVFR0_FPSqrt_NONE (UL(0x0) << MVFR0_FPSqrt_SHIFT)
#define MVFR0_FPSqrt_IMPL (UL(0x1) << MVFR0_FPSqrt_SHIFT)
#define MVFR0_FPShVec_SHIFT 24
+#define MVFR0_FPShVec_WIDTH 4
#define MVFR0_FPShVec_MASK (UL(0xf) << MVFR0_FPShVec_SHIFT)
#define MVFR0_FPShVec_VAL(x) ((x) & MVFR0_FPShVec_MASK)
#define MVFR0_FPShVec_NONE (UL(0x0) << MVFR0_FPShVec_SHIFT)
#define MVFR0_FPShVec_IMPL (UL(0x1) << MVFR0_FPShVec_SHIFT)
#define MVFR0_FPRound_SHIFT 28
+#define MVFR0_FPRound_WIDTH 4
#define MVFR0_FPRound_MASK (UL(0xf) << MVFR0_FPRound_SHIFT)
#define MVFR0_FPRound_VAL(x) ((x) & MVFR0_FPRound_MASK)
#define MVFR0_FPRound_NONE (UL(0x0) << MVFR0_FPRound_SHIFT)
#define MVFR0_FPRound_IMPL (UL(0x1) << MVFR0_FPRound_SHIFT)
/* MVFR1_EL1 */
-#define MVFR1_EL1 MRS_REG(MVFR1_EL1)
+#define MVFR1_EL1_ISS ISS_MSR_REG(MVFR1_EL1)
#define MVFR1_EL1_op0 0x3
#define MVFR1_EL1_op1 0x0
#define MVFR1_EL1_CRn 0x0
#define MVFR1_EL1_CRm 0x3
#define MVFR1_EL1_op2 0x1
#define MVFR1_FPFtZ_SHIFT 0
+#define MVFR1_FPFtZ_WIDTH 4
#define MVFR1_FPFtZ_MASK (UL(0xf) << MVFR1_FPFtZ_SHIFT)
#define MVFR1_FPFtZ_VAL(x) ((x) & MVFR1_FPFtZ_MASK)
#define MVFR1_FPFtZ_NONE (UL(0x0) << MVFR1_FPFtZ_SHIFT)
#define MVFR1_FPFtZ_IMPL (UL(0x1) << MVFR1_FPFtZ_SHIFT)
#define MVFR1_FPDNaN_SHIFT 4
+#define MVFR1_FPDNaN_WIDTH 4
#define MVFR1_FPDNaN_MASK (UL(0xf) << MVFR1_FPDNaN_SHIFT)
#define MVFR1_FPDNaN_VAL(x) ((x) & MVFR1_FPDNaN_MASK)
#define MVFR1_FPDNaN_NONE (UL(0x0) << MVFR1_FPDNaN_SHIFT)
#define MVFR1_FPDNaN_IMPL (UL(0x1) << MVFR1_FPDNaN_SHIFT)
#define MVFR1_SIMDLS_SHIFT 8
+#define MVFR1_SIMDLS_WIDTH 4
#define MVFR1_SIMDLS_MASK (UL(0xf) << MVFR1_SIMDLS_SHIFT)
#define MVFR1_SIMDLS_VAL(x) ((x) & MVFR1_SIMDLS_MASK)
#define MVFR1_SIMDLS_NONE (UL(0x0) << MVFR1_SIMDLS_SHIFT)
#define MVFR1_SIMDLS_IMPL (UL(0x1) << MVFR1_SIMDLS_SHIFT)
#define MVFR1_SIMDInt_SHIFT 12
+#define MVFR1_SIMDInt_WIDTH 4
#define MVFR1_SIMDInt_MASK (UL(0xf) << MVFR1_SIMDInt_SHIFT)
#define MVFR1_SIMDInt_VAL(x) ((x) & MVFR1_SIMDInt_MASK)
#define MVFR1_SIMDInt_NONE (UL(0x0) << MVFR1_SIMDInt_SHIFT)
#define MVFR1_SIMDInt_IMPL (UL(0x1) << MVFR1_SIMDInt_SHIFT)
#define MVFR1_SIMDSP_SHIFT 16
+#define MVFR1_SIMDSP_WIDTH 4
#define MVFR1_SIMDSP_MASK (UL(0xf) << MVFR1_SIMDSP_SHIFT)
#define MVFR1_SIMDSP_VAL(x) ((x) & MVFR1_SIMDSP_MASK)
#define MVFR1_SIMDSP_NONE (UL(0x0) << MVFR1_SIMDSP_SHIFT)
#define MVFR1_SIMDSP_IMPL (UL(0x1) << MVFR1_SIMDSP_SHIFT)
#define MVFR1_SIMDHP_SHIFT 20
+#define MVFR1_SIMDHP_WIDTH 4
#define MVFR1_SIMDHP_MASK (UL(0xf) << MVFR1_SIMDHP_SHIFT)
#define MVFR1_SIMDHP_VAL(x) ((x) & MVFR1_SIMDHP_MASK)
#define MVFR1_SIMDHP_NONE (UL(0x0) << MVFR1_SIMDHP_SHIFT)
#define MVFR1_SIMDHP_CONV_SP (UL(0x1) << MVFR1_SIMDHP_SHIFT)
#define MVFR1_SIMDHP_ARITH (UL(0x2) << MVFR1_SIMDHP_SHIFT)
#define MVFR1_FPHP_SHIFT 24
+#define MVFR1_FPHP_WIDTH 4
#define MVFR1_FPHP_MASK (UL(0xf) << MVFR1_FPHP_SHIFT)
#define MVFR1_FPHP_VAL(x) ((x) & MVFR1_FPHP_MASK)
#define MVFR1_FPHP_NONE (UL(0x0) << MVFR1_FPHP_SHIFT)
@@ -1568,13 +2129,13 @@
#define MVFR1_FPHP_CONV_DP (UL(0x2) << MVFR1_FPHP_SHIFT)
#define MVFR1_FPHP_ARITH (UL(0x3) << MVFR1_FPHP_SHIFT)
#define MVFR1_SIMDFMAC_SHIFT 28
+#define MVFR1_SIMDFMAC_WIDTH 4
#define MVFR1_SIMDFMAC_MASK (UL(0xf) << MVFR1_SIMDFMAC_SHIFT)
#define MVFR1_SIMDFMAC_VAL(x) ((x) & MVFR1_SIMDFMAC_MASK)
#define MVFR1_SIMDFMAC_NONE (UL(0x0) << MVFR1_SIMDFMAC_SHIFT)
#define MVFR1_SIMDFMAC_IMPL (UL(0x1) << MVFR1_SIMDFMAC_SHIFT)
/* OSDLR_EL1 */
-#define OSDLR_EL1 MRS_REG(OSDLR_EL1)
#define OSDLR_EL1_op0 2
#define OSDLR_EL1_op1 0
#define OSDLR_EL1_CRn 1
@@ -1582,7 +2143,6 @@
#define OSDLR_EL1_op2 4
/* OSLAR_EL1 */
-#define OSLAR_EL1 MRS_REG(OSLAR_EL1)
#define OSLAR_EL1_op0 2
#define OSLAR_EL1_op1 0
#define OSLAR_EL1_CRn 1
@@ -1590,7 +2150,6 @@
#define OSLAR_EL1_op2 4
/* OSLSR_EL1 */
-#define OSLSR_EL1 MRS_REG(OSLSR_EL1)
#define OSLSR_EL1_op0 2
#define OSLSR_EL1_op1 0
#define OSLSR_EL1_CRn 1
@@ -1608,7 +2167,7 @@
#define PAR_NS_SHIFT 9
#define PAR_NS_MASK (0x3 << PAR_NS_SHIFT)
#define PAR_PA_SHIFT 12
-#define PAR_PA_MASK 0x0000fffffffff000
+#define PAR_PA_MASK 0x000ffffffffff000
#define PAR_ATTR_SHIFT 56
#define PAR_ATTR_MASK (0xff << PAR_ATTR_SHIFT)
/* When PAR_F == 1 (aborted) */
@@ -1620,12 +2179,12 @@
#define PAR_S_MASK (0x1 << PAR_S_SHIFT)
/* PMBIDR_EL1 */
-#define PMBIDR_EL1 MRS_REG(PMBIDR_EL1)
-#define PMBIDR_EL1_op0 0x3
-#define PMBIDR_EL1_op1 0x0
-#define PMBIDR_EL1_CRn 0x9
-#define PMBIDR_EL1_CRm 0xa
-#define PMBIDR_EL1_op2 0x7
+#define PMBIDR_EL1_REG MRS_REG_ALT_NAME(PMBIDR_EL1)
+#define PMBIDR_EL1_op0 3
+#define PMBIDR_EL1_op1 0
+#define PMBIDR_EL1_CRn 9
+#define PMBIDR_EL1_CRm 10
+#define PMBIDR_EL1_op2 7
#define PMBIDR_Align_SHIFT 0
#define PMBIDR_Align_MASK (UL(0xf) << PMBIDR_Align_SHIFT)
#define PMBIDR_P_SHIFT 4
@@ -1634,12 +2193,12 @@
#define PMBIDR_F (UL(0x1) << PMBIDR_F_SHIFT)
/* PMBLIMITR_EL1 */
-#define PMBLIMITR_EL1 MRS_REG(PMBLIMITR_EL1)
-#define PMBLIMITR_EL1_op0 0x3
-#define PMBLIMITR_EL1_op1 0x0
-#define PMBLIMITR_EL1_CRn 0x9
-#define PMBLIMITR_EL1_CRm 0xa
-#define PMBLIMITR_EL1_op2 0x0
+#define PMBLIMITR_EL1_REG MRS_REG_ALT_NAME(PMBLIMITR_EL1)
+#define PMBLIMITR_EL1_op0 3
+#define PMBLIMITR_EL1_op1 0
+#define PMBLIMITR_EL1_CRn 9
+#define PMBLIMITR_EL1_CRm 10
+#define PMBLIMITR_EL1_op2 0
#define PMBLIMITR_E_SHIFT 0
#define PMBLIMITR_E (UL(0x1) << PMBLIMITR_E_SHIFT)
#define PMBLIMITR_FM_SHIFT 1
@@ -1651,25 +2210,27 @@
(UL(0xfffffffffffff) << PMBLIMITR_LIMIT_SHIFT)
/* PMBPTR_EL1 */
-#define PMBPTR_EL1 MRS_REG(PMBPTR_EL1)
-#define PMBPTR_EL1_op0 0x3
-#define PMBPTR_EL1_op1 0x0
-#define PMBPTR_EL1_CRn 0x9
-#define PMBPTR_EL1_CRm 0xa
-#define PMBPTR_EL1_op2 0x1
+#define PMBPTR_EL1_REG MRS_REG_ALT_NAME(PMBPTR_EL1)
+#define PMBPTR_EL1_op0 3
+#define PMBPTR_EL1_op1 0
+#define PMBPTR_EL1_CRn 9
+#define PMBPTR_EL1_CRm 10
+#define PMBPTR_EL1_op2 1
#define PMBPTR_PTR_SHIFT 0
#define PMBPTR_PTR_MASK \
(UL(0xffffffffffffffff) << PMBPTR_PTR_SHIFT)
/* PMBSR_EL1 */
-#define PMBSR_EL1 MRS_REG(PMBSR_EL1)
-#define PMBSR_EL1_op0 0x3
-#define PMBSR_EL1_op1 0x0
-#define PMBSR_EL1_CRn 0x9
-#define PMBSR_EL1_CRm 0xa
-#define PMBSR_EL1_op2 0x3
+#define PMBSR_EL1_REG MRS_REG_ALT_NAME(PMBSR_EL1)
+#define PMBSR_EL1_op0 3
+#define PMBSR_EL1_op1 0
+#define PMBSR_EL1_CRn 9
+#define PMBSR_EL1_CRm 10
+#define PMBSR_EL1_op2 3
#define PMBSR_MSS_SHIFT 0
#define PMBSR_MSS_MASK (UL(0xffff) << PMBSR_MSS_SHIFT)
+#define PMBSR_MSS_BSC_MASK (UL(0x3f) << PMBSR_MSS_SHIFT)
+#define PMBSR_MSS_FSC_MASK (UL(0x3f) << PMBSR_MSS_SHIFT)
#define PMBSR_COLL_SHIFT 16
#define PMBSR_COLL (UL(0x1) << PMBSR_COLL_SHIFT)
#define PMBSR_S_SHIFT 17
@@ -1682,7 +2243,6 @@
#define PMBSR_EC_MASK (UL(0x3f) << PMBSR_EC_SHIFT)
/* PMCCFILTR_EL0 */
-#define PMCCFILTR_EL0 MRS_REG(PMCCFILTR_EL0)
#define PMCCFILTR_EL0_op0 3
#define PMCCFILTR_EL0_op1 3
#define PMCCFILTR_EL0_CRn 14
@@ -1690,7 +2250,6 @@
#define PMCCFILTR_EL0_op2 7
/* PMCCNTR_EL0 */
-#define PMCCNTR_EL0 MRS_REG(PMCCNTR_EL0)
#define PMCCNTR_EL0_op0 3
#define PMCCNTR_EL0_op1 3
#define PMCCNTR_EL0_CRn 9
@@ -1698,7 +2257,6 @@
#define PMCCNTR_EL0_op2 0
/* PMCEID0_EL0 */
-#define PMCEID0_EL0 MRS_REG(PMCEID0_EL0)
#define PMCEID0_EL0_op0 3
#define PMCEID0_EL0_op1 3
#define PMCEID0_EL0_CRn 9
@@ -1706,7 +2264,6 @@
#define PMCEID0_EL0_op2 6
/* PMCEID1_EL0 */
-#define PMCEID1_EL0 MRS_REG(PMCEID1_EL0)
#define PMCEID1_EL0_op0 3
#define PMCEID1_EL0_op1 3
#define PMCEID1_EL0_CRn 9
@@ -1714,7 +2271,6 @@
#define PMCEID1_EL0_op2 7
/* PMCNTENCLR_EL0 */
-#define PMCNTENCLR_EL0 MRS_REG(PMCNTENCLR_EL0)
#define PMCNTENCLR_EL0_op0 3
#define PMCNTENCLR_EL0_op1 3
#define PMCNTENCLR_EL0_CRn 9
@@ -1722,7 +2278,6 @@
#define PMCNTENCLR_EL0_op2 2
/* PMCNTENSET_EL0 */
-#define PMCNTENSET_EL0 MRS_REG(PMCNTENSET_EL0)
#define PMCNTENSET_EL0_op0 3
#define PMCNTENSET_EL0_op1 3
#define PMCNTENSET_EL0_CRn 9
@@ -1730,24 +2285,24 @@
#define PMCNTENSET_EL0_op2 1
/* PMCR_EL0 - Perfomance Monitoring Counters */
-#define PMCR_EL0 MRS_REG(PMCR_EL0)
#define PMCR_EL0_op0 3
#define PMCR_EL0_op1 3
#define PMCR_EL0_CRn 9
#define PMCR_EL0_CRm 12
#define PMCR_EL0_op2 0
-#define PMCR_E (1 << 0) /* Enable all counters */
-#define PMCR_P (1 << 1) /* Reset all counters */
-#define PMCR_C (1 << 2) /* Clock counter reset */
-#define PMCR_D (1 << 3) /* CNTR counts every 64 clk cycles */
-#define PMCR_X (1 << 4) /* Export to ext. monitoring (ETM) */
-#define PMCR_DP (1 << 5) /* Disable CCNT if non-invasive debug*/
-#define PMCR_LC (1 << 6) /* Long cycle count enable */
-#define PMCR_IMP_SHIFT 24 /* Implementer code */
-#define PMCR_IMP_MASK (0xff << PMCR_IMP_SHIFT)
-#define PMCR_IMP_ARM 0x41
+#define PMCR_E (1ul << 0) /* Enable all counters */
+#define PMCR_P (1ul << 1) /* Reset all counters */
+#define PMCR_C (1ul << 2) /* Clock counter reset */
+#define PMCR_D (1ul << 3) /* CNTR counts every 64 clk cycles */
+#define PMCR_X (1ul << 4) /* Export to ext. monitoring (ETM) */
+#define PMCR_DP (1ul << 5) /* Disable CCNT if non-invasive debug*/
+#define PMCR_LC (1ul << 6) /* Long cycle count enable */
+#define PMCR_LP (1ul << 7) /* Long event count enable */
+#define PMCR_FZO (1ul << 9) /* Freeze-on-overflow */
+#define PMCR_N_SHIFT 11 /* Number of counters implemented */
+#define PMCR_N_MASK (0x1ful << PMCR_N_SHIFT)
#define PMCR_IDCODE_SHIFT 16 /* Identification code */
-#define PMCR_IDCODE_MASK (0xff << PMCR_IDCODE_SHIFT)
+#define PMCR_IDCODE_MASK (0xfful << PMCR_IDCODE_SHIFT)
#define PMCR_IDCODE_CORTEX_A57 0x01
#define PMCR_IDCODE_CORTEX_A72 0x02
#define PMCR_IDCODE_CORTEX_A53 0x03
@@ -1759,8 +2314,10 @@
#define PMCR_IDCODE_CORTEX_A55 0x45
#define PMCR_IDCODE_NEOVERSE_E1 0x46
#define PMCR_IDCODE_CORTEX_A75 0x4a
-#define PMCR_N_SHIFT 11 /* Number of counters implemented */
-#define PMCR_N_MASK (0x1f << PMCR_N_SHIFT)
+#define PMCR_IMP_SHIFT 24 /* Implementer code */
+#define PMCR_IMP_MASK (0xfful << PMCR_IMP_SHIFT)
+#define PMCR_IMP_ARM 0x41
+#define PMCR_FZS (1ul << 32) /* Freeze-on-SPE event */
/* PMEVCNTR<n>_EL0 */
#define PMEVCNTR_EL0_op0 3
@@ -1792,7 +2349,6 @@
#define PMEVTYPER_P (1 << 31) /* Privileged filtering */
/* PMINTENCLR_EL1 */
-#define PMINTENCLR_EL1 MRS_REG(PMINTENCLR_EL1)
#define PMINTENCLR_EL1_op0 3
#define PMINTENCLR_EL1_op1 0
#define PMINTENCLR_EL1_CRn 9
@@ -1800,7 +2356,6 @@
#define PMINTENCLR_EL1_op2 2
/* PMINTENSET_EL1 */
-#define PMINTENSET_EL1 MRS_REG(PMINTENSET_EL1)
#define PMINTENSET_EL1_op0 3
#define PMINTENSET_EL1_op1 0
#define PMINTENSET_EL1_CRn 9
@@ -1808,7 +2363,6 @@
#define PMINTENSET_EL1_op2 1
/* PMMIR_EL1 */
-#define PMMIR_EL1 MRS_REG(PMMIR_EL1)
#define PMMIR_EL1_op0 3
#define PMMIR_EL1_op1 0
#define PMMIR_EL1_CRn 9
@@ -1816,7 +2370,6 @@
#define PMMIR_EL1_op2 6
/* PMOVSCLR_EL0 */
-#define PMOVSCLR_EL0 MRS_REG(PMOVSCLR_EL0)
#define PMOVSCLR_EL0_op0 3
#define PMOVSCLR_EL0_op1 3
#define PMOVSCLR_EL0_CRn 9
@@ -1824,7 +2377,6 @@
#define PMOVSCLR_EL0_op2 3
/* PMOVSSET_EL0 */
-#define PMOVSSET_EL0 MRS_REG(PMOVSSET_EL0)
#define PMOVSSET_EL0_op0 3
#define PMOVSSET_EL0_op1 3
#define PMOVSSET_EL0_CRn 9
@@ -1832,12 +2384,12 @@
#define PMOVSSET_EL0_op2 3
/* PMSCR_EL1 */
-#define PMSCR_EL1 MRS_REG(PMSCR_EL1)
-#define PMSCR_EL1_op0 0x3
-#define PMSCR_EL1_op1 0x0
-#define PMSCR_EL1_CRn 0x9
-#define PMSCR_EL1_CRm 0x9
-#define PMSCR_EL1_op2 0x0
+#define PMSCR_EL1_REG MRS_REG_ALT_NAME(PMSCR_EL1)
+#define PMSCR_EL1_op0 3
+#define PMSCR_EL1_op1 0
+#define PMSCR_EL1_CRn 9
+#define PMSCR_EL1_CRm 9
+#define PMSCR_EL1_op2 0
#define PMSCR_E0SPE_SHIFT 0
#define PMSCR_E0SPE (UL(0x1) << PMSCR_E0SPE_SHIFT)
#define PMSCR_E1SPE_SHIFT 1
@@ -1852,7 +2404,6 @@
#define PMSCR_PCT_MASK (UL(0x3) << PMSCR_PCT_SHIFT)
/* PMSELR_EL0 */
-#define PMSELR_EL0 MRS_REG(PMSELR_EL0)
#define PMSELR_EL0_op0 3
#define PMSELR_EL0_op1 3
#define PMSELR_EL0_CRn 9
@@ -1861,20 +2412,20 @@
#define PMSELR_SEL_MASK 0x1f
/* PMSEVFR_EL1 */
-#define PMSEVFR_EL1 MRS_REG(PMSEVFR_EL1)
-#define PMSEVFR_EL1_op0 0x3
-#define PMSEVFR_EL1_op1 0x0
-#define PMSEVFR_EL1_CRn 0x9
-#define PMSEVFR_EL1_CRm 0x9
-#define PMSEVFR_EL1_op2 0x5
+#define PMSEVFR_EL1_REG MRS_REG_ALT_NAME(PMSEVFR_EL1)
+#define PMSEVFR_EL1_op0 3
+#define PMSEVFR_EL1_op1 0
+#define PMSEVFR_EL1_CRn 9
+#define PMSEVFR_EL1_CRm 9
+#define PMSEVFR_EL1_op2 5
/* PMSFCR_EL1 */
-#define PMSFCR_EL1 MRS_REG(PMSFCR_EL1)
-#define PMSFCR_EL1_op0 0x3
-#define PMSFCR_EL1_op1 0x0
-#define PMSFCR_EL1_CRn 0x9
-#define PMSFCR_EL1_CRm 0x9
-#define PMSFCR_EL1_op2 0x4
+#define PMSFCR_EL1_REG MRS_REG_ALT_NAME(PMSFCR_EL1)
+#define PMSFCR_EL1_op0 3
+#define PMSFCR_EL1_op1 0
+#define PMSFCR_EL1_CRn 9
+#define PMSFCR_EL1_CRm 9
+#define PMSFCR_EL1_op2 4
#define PMSFCR_FE_SHIFT 0
#define PMSFCR_FE (UL(0x1) << PMSFCR_FE_SHIFT)
#define PMSFCR_FT_SHIFT 1
@@ -1891,24 +2442,24 @@
#define PMSFCR_ST (UL(0x1) << PMSFCR_ST_SHIFT)
/* PMSICR_EL1 */
-#define PMSICR_EL1 MRS_REG(PMSICR_EL1)
-#define PMSICR_EL1_op0 0x3
-#define PMSICR_EL1_op1 0x0
-#define PMSICR_EL1_CRn 0x9
-#define PMSICR_EL1_CRm 0x9
-#define PMSICR_EL1_op2 0x2
+#define PMSICR_EL1_REG MRS_REG_ALT_NAME(PMSICR_EL1)
+#define PMSICR_EL1_op0 3
+#define PMSICR_EL1_op1 0
+#define PMSICR_EL1_CRn 9
+#define PMSICR_EL1_CRm 9
+#define PMSICR_EL1_op2 2
#define PMSICR_COUNT_SHIFT 0
#define PMSICR_COUNT_MASK (UL(0xffffffff) << PMSICR_COUNT_SHIFT)
#define PMSICR_ECOUNT_SHIFT 56
#define PMSICR_ECOUNT_MASK (UL(0xff) << PMSICR_ECOUNT_SHIFT)
/* PMSIDR_EL1 */
-#define PMSIDR_EL1 MRS_REG(PMSIDR_EL1)
-#define PMSIDR_EL1_op0 0x3
-#define PMSIDR_EL1_op1 0x0
-#define PMSIDR_EL1_CRn 0x9
-#define PMSIDR_EL1_CRm 0x9
-#define PMSIDR_EL1_op2 0x7
+#define PMSIDR_EL1_REG MRS_REG_ALT_NAME(PMSIDR_EL1)
+#define PMSIDR_EL1_op0 3
+#define PMSIDR_EL1_op1 0
+#define PMSIDR_EL1_CRn 9
+#define PMSIDR_EL1_CRm 9
+#define PMSIDR_EL1_op2 7
#define PMSIDR_FE_SHIFT 0
#define PMSIDR_FE (UL(0x1) << PMSIDR_FE_SHIFT)
#define PMSIDR_FT_SHIFT 1
@@ -1935,37 +2486,36 @@
#define PMSIDR_PBT (UL(0x1) << PMSIDR_PBT_SHIFT)
/* PMSIRR_EL1 */
-#define PMSIRR_EL1 MRS_REG(PMSIRR_EL1)
-#define PMSIRR_EL1_op0 0x3
-#define PMSIRR_EL1_op1 0x0
-#define PMSIRR_EL1_CRn 0x9
-#define PMSIRR_EL1_CRm 0x9
-#define PMSIRR_EL1_op2 0x3
+#define PMSIRR_EL1_REG MRS_REG_ALT_NAME(PMSIRR_EL1)
+#define PMSIRR_EL1_op0 3
+#define PMSIRR_EL1_op1 0
+#define PMSIRR_EL1_CRn 9
+#define PMSIRR_EL1_CRm 9
+#define PMSIRR_EL1_op2 3
#define PMSIRR_RND_SHIFT 0
#define PMSIRR_RND (UL(0x1) << PMSIRR_RND_SHIFT)
#define PMSIRR_INTERVAL_SHIFT 8
#define PMSIRR_INTERVAL_MASK (UL(0xffffff) << PMSIRR_INTERVAL_SHIFT)
/* PMSLATFR_EL1 */
-#define PMSLATFR_EL1 MRS_REG(PMSLATFR_EL1)
-#define PMSLATFR_EL1_op0 0x3
-#define PMSLATFR_EL1_op1 0x0
-#define PMSLATFR_EL1_CRn 0x9
-#define PMSLATFR_EL1_CRm 0x9
-#define PMSLATFR_EL1_op2 0x6
+#define PMSLATFR_EL1_REG MRS_REG_ALT_NAME(PMSLATFR_EL1)
+#define PMSLATFR_EL1_op0 3
+#define PMSLATFR_EL1_op1 0
+#define PMSLATFR_EL1_CRn 9
+#define PMSLATFR_EL1_CRm 9
+#define PMSLATFR_EL1_op2 6
#define PMSLATFR_MINLAT_SHIFT 0
#define PMSLATFR_MINLAT_MASK (UL(0xfff) << PMSLATFR_MINLAT_SHIFT)
/* PMSNEVFR_EL1 */
-#define PMSNEVFR_EL1 MRS_REG(PMSNEVFR_EL1)
-#define PMSNEVFR_EL1_op0 0x3
-#define PMSNEVFR_EL1_op1 0x0
-#define PMSNEVFR_EL1_CRn 0x9
-#define PMSNEVFR_EL1_CRm 0x9
-#define PMSNEVFR_EL1_op2 0x1
+#define PMSNEVFR_EL1_REG MRS_REG_ALT_NAME(PMSNEVFR_EL1)
+#define PMSNEVFR_EL1_op0 3
+#define PMSNEVFR_EL1_op1 0
+#define PMSNEVFR_EL1_CRn 9
+#define PMSNEVFR_EL1_CRm 9
+#define PMSNEVFR_EL1_op2 1
/* PMSWINC_EL0 */
-#define PMSWINC_EL0 MRS_REG(PMSWINC_EL0)
#define PMSWINC_EL0_op0 3
#define PMSWINC_EL0_op1 3
#define PMSWINC_EL0_CRn 9
@@ -1973,7 +2523,6 @@
#define PMSWINC_EL0_op2 4
/* PMUSERENR_EL0 */
-#define PMUSERENR_EL0 MRS_REG(PMUSERENR_EL0)
#define PMUSERENR_EL0_op0 3
#define PMUSERENR_EL0_op1 3
#define PMUSERENR_EL0_CRn 9
@@ -1981,7 +2530,6 @@
#define PMUSERENR_EL0_op2 0
/* PMXEVCNTR_EL0 */
-#define PMXEVCNTR_EL0 MRS_REG(PMXEVCNTR_EL0)
#define PMXEVCNTR_EL0_op0 3
#define PMXEVCNTR_EL0_op1 3
#define PMXEVCNTR_EL0_CRn 9
@@ -1989,7 +2537,6 @@
#define PMXEVCNTR_EL0_op2 2
/* PMXEVTYPER_EL0 */
-#define PMXEVTYPER_EL0 MRS_REG(PMXEVTYPER_EL0)
#define PMXEVTYPER_EL0_op0 3
#define PMXEVTYPER_EL0_op1 3
#define PMXEVTYPER_EL0_CRn 9
@@ -1997,7 +2544,6 @@
#define PMXEVTYPER_EL0_op2 1
/* RNDRRS */
-#define RNDRRS MRS_REG(RNDRRS)
#define RNDRRS_REG MRS_REG_ALT_NAME(RNDRRS)
#define RNDRRS_op0 3
#define RNDRRS_op1 3
@@ -2006,6 +2552,12 @@
#define RNDRRS_op2 1
/* SCTLR_EL1 - System Control Register */
+#define SCTLR_EL1_REG MRS_REG_ALT_NAME(SCTLR_EL1)
+#define SCTLR_EL1_op0 3
+#define SCTLR_EL1_op1 0
+#define SCTLR_EL1_CRn 1
+#define SCTLR_EL1_CRm 0
+#define SCTLR_EL1_op2 0
#define SCTLR_RES1 0x30d00800 /* Reserved ARMv8.0, write 1 */
#define SCTLR_M (UL(0x1) << 0)
#define SCTLR_A (UL(0x1) << 1)
@@ -2056,7 +2608,21 @@
#define SCTLR_EnALS (UL(0x1) << 56)
#define SCTLR_EPAN (UL(0x1) << 57)
+/* SCTLR_EL12 */
+#define SCTLR_EL12_REG MRS_REG_ALT_NAME(SCTLR_EL12)
+#define SCTLR_EL12_op0 3
+#define SCTLR_EL12_op1 5
+#define SCTLR_EL12_CRn 1
+#define SCTLR_EL12_CRm 0
+#define SCTLR_EL12_op2 0
+
/* SPSR_EL1 */
+#define SPSR_EL1_REG MRS_REG_ALT_NAME(SPSR_EL1)
+#define SPSR_EL1_op0 3
+#define SPSR_EL1_op1 0
+#define SPSR_EL1_CRn 4
+#define SPSR_EL1_CRm 0
+#define SPSR_EL1_op2 0
/*
* When the exception is taken in AArch64:
* M[3:2] is the exception level
@@ -2083,7 +2649,8 @@
#define PSR_D 0x00000200UL
#define PSR_DAIF (PSR_D | PSR_A | PSR_I | PSR_F)
/* The default DAIF mask. These bits are valid in spsr_el1 and daif */
-#define PSR_DAIF_DEFAULT (PSR_F)
+#define PSR_DAIF_DEFAULT (0)
+#define PSR_DAIF_INTR (PSR_I | PSR_F)
#define PSR_BTYPE 0x00000c00UL
#define PSR_SSBS 0x00001000UL
#define PSR_ALLINT 0x00002000UL
@@ -2102,8 +2669,15 @@
#define PSR_SETTABLE_32 PSR_FLAGS
#define PSR_SETTABLE_64 (PSR_FLAGS | PSR_SS)
+/* SPSR_EL12 */
+#define SPSR_EL12_REG MRS_REG_ALT_NAME(SPSR_EL12)
+#define SPSR_EL12_op0 3
+#define SPSR_EL12_op1 5
+#define SPSR_EL12_CRn 4
+#define SPSR_EL12_CRm 0
+#define SPSR_EL12_op2 0
+
/* REVIDR_EL1 - Revision ID Register */
-#define REVIDR_EL1 MRS_REG(REVIDR_EL1)
#define REVIDR_EL1_op0 3
#define REVIDR_EL1_op1 0
#define REVIDR_EL1_CRn 0
@@ -2111,111 +2685,119 @@
#define REVIDR_EL1_op2 6
/* TCR_EL1 - Translation Control Register */
+#define TCR_EL1_REG MRS_REG_ALT_NAME(TCR_EL1)
+#define TCR_EL1_op0 3
+#define TCR_EL1_op1 0
+#define TCR_EL1_CRn 2
+#define TCR_EL1_CRm 0
+#define TCR_EL1_op2 2
/* Bits 63:59 are reserved */
+#define TCR_DS_SHIFT 59
+#define TCR_DS (UL(1) << TCR_DS_SHIFT)
#define TCR_TCMA1_SHIFT 58
-#define TCR_TCMA1 (1UL << TCR_TCMA1_SHIFT)
+#define TCR_TCMA1 (UL(1) << TCR_TCMA1_SHIFT)
#define TCR_TCMA0_SHIFT 57
-#define TCR_TCMA0 (1UL << TCR_TCMA0_SHIFT)
+#define TCR_TCMA0 (UL(1) << TCR_TCMA0_SHIFT)
#define TCR_E0PD1_SHIFT 56
-#define TCR_E0PD1 (1UL << TCR_E0PD1_SHIFT)
+#define TCR_E0PD1 (UL(1) << TCR_E0PD1_SHIFT)
#define TCR_E0PD0_SHIFT 55
-#define TCR_E0PD0 (1UL << TCR_E0PD0_SHIFT)
+#define TCR_E0PD0 (UL(1) << TCR_E0PD0_SHIFT)
#define TCR_NFD1_SHIFT 54
-#define TCR_NFD1 (1UL << TCR_NFD1_SHIFT)
+#define TCR_NFD1 (UL(1) << TCR_NFD1_SHIFT)
#define TCR_NFD0_SHIFT 53
-#define TCR_NFD0 (1UL << TCR_NFD0_SHIFT)
+#define TCR_NFD0 (UL(1) << TCR_NFD0_SHIFT)
#define TCR_TBID1_SHIFT 52
-#define TCR_TBID1 (1UL << TCR_TBID1_SHIFT)
+#define TCR_TBID1 (UL(1) << TCR_TBID1_SHIFT)
#define TCR_TBID0_SHIFT 51
-#define TCR_TBID0 (1UL << TCR_TBID0_SHIFT)
+#define TCR_TBID0 (UL(1) << TCR_TBID0_SHIFT)
#define TCR_HWU162_SHIFT 50
-#define TCR_HWU162 (1UL << TCR_HWU162_SHIFT)
+#define TCR_HWU162 (UL(1) << TCR_HWU162_SHIFT)
#define TCR_HWU161_SHIFT 49
-#define TCR_HWU161 (1UL << TCR_HWU161_SHIFT)
+#define TCR_HWU161 (UL(1) << TCR_HWU161_SHIFT)
#define TCR_HWU160_SHIFT 48
-#define TCR_HWU160 (1UL << TCR_HWU160_SHIFT)
+#define TCR_HWU160 (UL(1) << TCR_HWU160_SHIFT)
#define TCR_HWU159_SHIFT 47
-#define TCR_HWU159 (1UL << TCR_HWU159_SHIFT)
+#define TCR_HWU159 (UL(1) << TCR_HWU159_SHIFT)
#define TCR_HWU1 \
(TCR_HWU159 | TCR_HWU160 | TCR_HWU161 | TCR_HWU162)
#define TCR_HWU062_SHIFT 46
-#define TCR_HWU062 (1UL << TCR_HWU062_SHIFT)
+#define TCR_HWU062 (UL(1) << TCR_HWU062_SHIFT)
#define TCR_HWU061_SHIFT 45
-#define TCR_HWU061 (1UL << TCR_HWU061_SHIFT)
+#define TCR_HWU061 (UL(1) << TCR_HWU061_SHIFT)
#define TCR_HWU060_SHIFT 44
-#define TCR_HWU060 (1UL << TCR_HWU060_SHIFT)
+#define TCR_HWU060 (UL(1) << TCR_HWU060_SHIFT)
#define TCR_HWU059_SHIFT 43
-#define TCR_HWU059 (1UL << TCR_HWU059_SHIFT)
+#define TCR_HWU059 (UL(1) << TCR_HWU059_SHIFT)
#define TCR_HWU0 \
(TCR_HWU059 | TCR_HWU060 | TCR_HWU061 | TCR_HWU062)
#define TCR_HPD1_SHIFT 42
-#define TCR_HPD1 (1UL << TCR_HPD1_SHIFT)
+#define TCR_HPD1 (UL(1) << TCR_HPD1_SHIFT)
#define TCR_HPD0_SHIFT 41
-#define TCR_HPD0 (1UL << TCR_HPD0_SHIFT)
+#define TCR_HPD0 (UL(1) << TCR_HPD0_SHIFT)
#define TCR_HD_SHIFT 40
-#define TCR_HD (1UL << TCR_HD_SHIFT)
+#define TCR_HD (UL(1) << TCR_HD_SHIFT)
#define TCR_HA_SHIFT 39
-#define TCR_HA (1UL << TCR_HA_SHIFT)
+#define TCR_HA (UL(1) << TCR_HA_SHIFT)
#define TCR_TBI1_SHIFT 38
-#define TCR_TBI1 (1UL << TCR_TBI1_SHIFT)
+#define TCR_TBI1 (UL(1) << TCR_TBI1_SHIFT)
#define TCR_TBI0_SHIFT 37
-#define TCR_TBI0 (1UL << TCR_TBI0_SHIFT)
+#define TCR_TBI0 (UL(1) << TCR_TBI0_SHIFT)
#define TCR_ASID_SHIFT 36
#define TCR_ASID_WIDTH 1
-#define TCR_ASID_16 (1UL << TCR_ASID_SHIFT)
+#define TCR_ASID_16 (UL(1) << TCR_ASID_SHIFT)
/* Bit 35 is reserved */
#define TCR_IPS_SHIFT 32
#define TCR_IPS_WIDTH 3
-#define TCR_IPS_32BIT (0UL << TCR_IPS_SHIFT)
-#define TCR_IPS_36BIT (1UL << TCR_IPS_SHIFT)
-#define TCR_IPS_40BIT (2UL << TCR_IPS_SHIFT)
-#define TCR_IPS_42BIT (3UL << TCR_IPS_SHIFT)
-#define TCR_IPS_44BIT (4UL << TCR_IPS_SHIFT)
-#define TCR_IPS_48BIT (5UL << TCR_IPS_SHIFT)
+#define TCR_IPS_32BIT (UL(0) << TCR_IPS_SHIFT)
+#define TCR_IPS_36BIT (UL(1) << TCR_IPS_SHIFT)
+#define TCR_IPS_40BIT (UL(2) << TCR_IPS_SHIFT)
+#define TCR_IPS_42BIT (UL(3) << TCR_IPS_SHIFT)
+#define TCR_IPS_44BIT (UL(4) << TCR_IPS_SHIFT)
+#define TCR_IPS_48BIT (UL(5) << TCR_IPS_SHIFT)
#define TCR_TG1_SHIFT 30
-#define TCR_TG1_MASK (3UL << TCR_TG1_SHIFT)
-#define TCR_TG1_16K (1UL << TCR_TG1_SHIFT)
-#define TCR_TG1_4K (2UL << TCR_TG1_SHIFT)
-#define TCR_TG1_64K (3UL << TCR_TG1_SHIFT)
+#define TCR_TG1_MASK (UL(3) << TCR_TG1_SHIFT)
+#define TCR_TG1_16K (UL(1) << TCR_TG1_SHIFT)
+#define TCR_TG1_4K (UL(2) << TCR_TG1_SHIFT)
+#define TCR_TG1_64K (UL(3) << TCR_TG1_SHIFT)
#define TCR_SH1_SHIFT 28
-#define TCR_SH1_IS (3UL << TCR_SH1_SHIFT)
+#define TCR_SH1_IS (UL(3) << TCR_SH1_SHIFT)
#define TCR_ORGN1_SHIFT 26
-#define TCR_ORGN1_WBWA (1UL << TCR_ORGN1_SHIFT)
+#define TCR_ORGN1_WBWA (UL(1) << TCR_ORGN1_SHIFT)
#define TCR_IRGN1_SHIFT 24
-#define TCR_IRGN1_WBWA (1UL << TCR_IRGN1_SHIFT)
+#define TCR_IRGN1_WBWA (UL(1) << TCR_IRGN1_SHIFT)
#define TCR_EPD1_SHIFT 23
-#define TCR_EPD1 (1UL << TCR_EPD1_SHIFT)
+#define TCR_EPD1 (UL(1) << TCR_EPD1_SHIFT)
#define TCR_A1_SHIFT 22
-#define TCR_A1 (0x1UL << TCR_A1_SHIFT)
+#define TCR_A1 (UL(1) << TCR_A1_SHIFT)
#define TCR_T1SZ_SHIFT 16
-#define TCR_T1SZ_MASK (0x3fUL << TCR_T1SZ_SHIFT)
+#define TCR_T1SZ_MASK (UL(0x3f) << TCR_T1SZ_SHIFT)
#define TCR_T1SZ(x) ((x) << TCR_T1SZ_SHIFT)
#define TCR_TG0_SHIFT 14
-#define TCR_TG0_MASK (3UL << TCR_TG0_SHIFT)
-#define TCR_TG0_4K (0UL << TCR_TG0_SHIFT)
-#define TCR_TG0_64K (1UL << TCR_TG0_SHIFT)
-#define TCR_TG0_16K (2UL << TCR_TG0_SHIFT)
+#define TCR_TG0_MASK (UL(3) << TCR_TG0_SHIFT)
+#define TCR_TG0_4K (UL(0) << TCR_TG0_SHIFT)
+#define TCR_TG0_64K (UL(1) << TCR_TG0_SHIFT)
+#define TCR_TG0_16K (UL(2) << TCR_TG0_SHIFT)
#define TCR_SH0_SHIFT 12
-#define TCR_SH0_IS (3UL << TCR_SH0_SHIFT)
+#define TCR_SH0_IS (UL(3) << TCR_SH0_SHIFT)
#define TCR_ORGN0_SHIFT 10
-#define TCR_ORGN0_WBWA (1UL << TCR_ORGN0_SHIFT)
+#define TCR_ORGN0_WBWA (UL(1) << TCR_ORGN0_SHIFT)
#define TCR_IRGN0_SHIFT 8
-#define TCR_IRGN0_WBWA (1UL << TCR_IRGN0_SHIFT)
+#define TCR_IRGN0_WBWA (UL(1) << TCR_IRGN0_SHIFT)
#define TCR_EPD0_SHIFT 7
-#define TCR_EPD0 (1UL << TCR_EPD0_SHIFT)
+#define TCR_EPD0 (UL(1) << TCR_EPD0_SHIFT)
/* Bit 6 is reserved */
#define TCR_T0SZ_SHIFT 0
-#define TCR_T0SZ_MASK (0x3fUL << TCR_T0SZ_SHIFT)
+#define TCR_T0SZ_MASK (UL(0x3f) << TCR_T0SZ_SHIFT)
#define TCR_T0SZ(x) ((x) << TCR_T0SZ_SHIFT)
#define TCR_TxSZ(x) (TCR_T1SZ(x) | TCR_T0SZ(x))
-#define TCR_CACHE_ATTRS ((TCR_IRGN0_WBWA | TCR_IRGN1_WBWA) |\
- (TCR_ORGN0_WBWA | TCR_ORGN1_WBWA))
-#ifdef SMP
-#define TCR_SMP_ATTRS (TCR_SH0_IS | TCR_SH1_IS)
-#else
-#define TCR_SMP_ATTRS 0
-#endif
+/* TCR_EL12 */
+#define TCR_EL12_REG MRS_REG_ALT_NAME(TCR_EL12)
+#define TCR_EL12_op0 3
+#define TCR_EL12_op1 5
+#define TCR_EL12_CRn 2
+#define TCR_EL12_CRm 0
+#define TCR_EL12_op2 2
/* TTBR0_EL1 & TTBR1_EL1 - Translation Table Base Register 0 & 1 */
#define TTBR_ASID_SHIFT 48
@@ -2224,7 +2806,61 @@
#define TTBR_CnP_SHIFT 0
#define TTBR_CnP (1ul << TTBR_CnP_SHIFT)
+/* TTBR0_EL1 */
+#define TTBR0_EL1_REG MRS_REG_ALT_NAME(TTBR0_EL1)
+#define TTBR0_EL1_op0 3
+#define TTBR0_EL1_op1 0
+#define TTBR0_EL1_CRn 2
+#define TTBR0_EL1_CRm 0
+#define TTBR0_EL1_op2 0
+
+/* TTBR0_EL12 */
+#define TTBR0_EL12_REG MRS_REG_ALT_NAME(TTBR0_EL12)
+#define TTBR0_EL12_op0 3
+#define TTBR0_EL12_op1 5
+#define TTBR0_EL12_CRn 2
+#define TTBR0_EL12_CRm 0
+#define TTBR0_EL12_op2 0
+
+/* TTBR1_EL1 */
+#define TTBR1_EL1_REG MRS_REG_ALT_NAME(TTBR1_EL1)
+#define TTBR1_EL1_op0 3
+#define TTBR1_EL1_op1 0
+#define TTBR1_EL1_CRn 2
+#define TTBR1_EL1_CRm 0
+#define TTBR1_EL1_op2 1
+
+/* TTBR1_EL12 */
+#define TTBR1_EL12_REG MRS_REG_ALT_NAME(TTBR1_EL12)
+#define TTBR1_EL12_op0 3
+#define TTBR1_EL12_op1 5
+#define TTBR1_EL12_CRn 2
+#define TTBR1_EL12_CRm 0
+#define TTBR1_EL12_op2 1
+
+/* VBAR_EL1 */
+#define VBAR_EL1_REG MRS_REG_ALT_NAME(VBAR_EL1)
+#define VBAR_EL1_op0 3
+#define VBAR_EL1_op1 0
+#define VBAR_EL1_CRn 12
+#define VBAR_EL1_CRm 0
+#define VBAR_EL1_op2 0
+
+/* VBAR_EL12 */
+#define VBAR_EL12_REG MRS_REG_ALT_NAME(VBAR_EL12)
+#define VBAR_EL12_op0 3
+#define VBAR_EL12_op1 5
+#define VBAR_EL12_CRn 12
+#define VBAR_EL12_CRm 0
+#define VBAR_EL12_op2 0
+
/* ZCR_EL1 - SVE Control Register */
+#define ZCR_EL1_REG MRS_REG_ALT_NAME(ZCR_EL1)
+#define ZCR_EL1_op0 3
+#define ZCR_EL1_op1 0
+#define ZCR_EL1_CRn 1
+#define ZCR_EL1_CRm 2
+#define ZCR_EL1_op2 0
#define ZCR_LEN_SHIFT 0
#define ZCR_LEN_MASK (0xf << ZCR_LEN_SHIFT)
#define ZCR_LEN_BYTES(x) ((((x) & ZCR_LEN_MASK) + 1) * 16)
diff --git a/sys/arm64/include/asm.h b/sys/arm64/include/asm.h
index 16be39b3eae4..4f373dc4b7e1 100644
--- a/sys/arm64/include/asm.h
+++ b/sys/arm64/include/asm.h
@@ -73,6 +73,16 @@
#define lr x30
/*
+ * Check whether a given cpu feature is present, in the case it is not we jump
+ * to the given label. The tmp register should be a register able to hold the
+ * temporary data.
+ */
+#define CHECK_CPU_FEAT(tmp, feat_reg, feat, label) \
+ mrs tmp, ##feat_reg##_el1; \
+ ubfx tmp, tmp, ##feat_reg##_##feat##_SHIFT, ##feat_reg##_##feat##_WIDTH; \
+ cbz tmp, label
+
+/*
* Sets the trap fault handler. The exception handler will return to the
* address in the handler register on a data abort or the xzr register to
* clear the handler. The tmp parameter should be a register able to hold
@@ -87,19 +97,25 @@
ldr tmp, =has_pan; /* Get the addr of has_pan */ \
ldr reg, [tmp]; /* Read it */ \
cbz reg, 997f; /* If no PAN skip */ \
- .inst 0xd500409f | (0 << 8); /* Clear PAN */ \
+ .arch_extension pan; \
+ msr pan, #0; /* Disable PAN checks */ \
+ .arch_extension nopan; \
997:
#define EXIT_USER_ACCESS(reg) \
cbz reg, 998f; /* If no PAN skip */ \
- .inst 0xd500409f | (1 << 8); /* Set PAN */ \
+ .arch_extension pan; \
+ msr pan, #1; /* Enable PAN checks */ \
+ .arch_extension nopan; \
998:
#define EXIT_USER_ACCESS_CHECK(reg, tmp) \
ldr tmp, =has_pan; /* Get the addr of has_pan */ \
ldr reg, [tmp]; /* Read it */ \
cbz reg, 999f; /* If no PAN skip */ \
- .inst 0xd500409f | (1 << 8); /* Set PAN */ \
+ .arch_extension pan; \
+ msr pan, #1; /* Enable PAN checks */ \
+ .arch_extension nopan; \
999:
/*
diff --git a/sys/arm64/include/atomic.h b/sys/arm64/include/atomic.h
index 76ca951678d4..998a49c02e60 100644
--- a/sys/arm64/include/atomic.h
+++ b/sys/arm64/include/atomic.h
@@ -65,8 +65,9 @@ extern _Bool lse_supported;
#include <sys/atomic_common.h>
-#ifdef _KERNEL
-
+#if defined(__ARM_FEATURE_ATOMICS)
+#define _ATOMIC_LSE_SUPPORTED 1
+#elif defined(_KERNEL)
#ifdef LSE_ATOMICS
#define _ATOMIC_LSE_SUPPORTED 1
#else
@@ -464,7 +465,7 @@ _ATOMIC_TEST_OP(set, orr, set)
#define _ATOMIC_LOAD_ACQ_IMPL(t, w, s) \
static __inline uint##t##_t \
-atomic_load_acq_##t(volatile uint##t##_t *p) \
+atomic_load_acq_##t(const volatile uint##t##_t *p) \
{ \
uint##t##_t ret; \
\
@@ -608,6 +609,8 @@ _ATOMIC_STORE_REL_IMPL(64, , )
#define atomic_set_ptr atomic_set_64
#define atomic_swap_ptr atomic_swap_64
#define atomic_subtract_ptr atomic_subtract_64
+#define atomic_testandclear_ptr atomic_testandclear_64
+#define atomic_testandset_ptr atomic_testandset_64
#define atomic_add_acq_long atomic_add_acq_64
#define atomic_fcmpset_acq_long atomic_fcmpset_acq_64
diff --git a/sys/arm64/include/bus.h b/sys/arm64/include/bus.h
index 196916de92e7..2e2ef2f6d008 100644
--- a/sys/arm64/include/bus.h
+++ b/sys/arm64/include/bus.h
@@ -76,6 +76,7 @@
#define BUS_SPACE_MAXADDR_24BIT 0xFFFFFFUL
#define BUS_SPACE_MAXADDR_32BIT 0xFFFFFFFFUL
+#define BUS_SPACE_MAXADDR_36BIT 0xFFFFFFFFFUL
#define BUS_SPACE_MAXADDR_40BIT 0xFFFFFFFFFFUL
#define BUS_SPACE_MAXSIZE_24BIT 0xFFFFFFUL
#define BUS_SPACE_MAXSIZE_32BIT 0xFFFFFFFFUL
diff --git a/sys/arm64/include/bus_dma.h b/sys/arm64/include/bus_dma.h
index fb494be0bdc4..d9c37eb2641b 100644
--- a/sys/arm64/include/bus_dma.h
+++ b/sys/arm64/include/bus_dma.h
@@ -62,7 +62,7 @@ bus_dmamem_alloc(bus_dma_tag_t dmat, void** vaddr, int flags,
/*
* Free a piece of memory and it's allociated dmamap, that was allocated
- * via bus_dmamem_alloc. Make the same choice for free/contigfree.
+ * via bus_dmamem_alloc.
*/
static inline void
bus_dmamem_free(bus_dma_tag_t dmat, void *vaddr, bus_dmamap_t map)
diff --git a/sys/arm64/include/cpu.h b/sys/arm64/include/cpu.h
index 8f5a9e3dbd3c..935e3754bf25 100644
--- a/sys/arm64/include/cpu.h
+++ b/sys/arm64/include/cpu.h
@@ -42,8 +42,10 @@
#ifndef _MACHINE_CPU_H_
#define _MACHINE_CPU_H_
+#if !defined(__ASSEMBLER__)
#include <machine/atomic.h>
#include <machine/frame.h>
+#endif
#include <machine/armreg.h>
#define TRAPF_PC(tfp) ((tfp)->tf_elr)
@@ -73,6 +75,7 @@
#define CPU_IMPL_CAVIUM 0x43
#define CPU_IMPL_DEC 0x44
#define CPU_IMPL_FUJITSU 0x46
+#define CPU_IMPL_HISILICON 0x48
#define CPU_IMPL_INFINEON 0x49
#define CPU_IMPL_FREESCALE 0x4D
#define CPU_IMPL_NVIDIA 0x4E
@@ -82,6 +85,7 @@
#define CPU_IMPL_APPLE 0x61
#define CPU_IMPL_INTEL 0x69
#define CPU_IMPL_AMPERE 0xC0
+#define CPU_IMPL_MICROSOFT 0x6D
/* ARM Part numbers */
#define CPU_PART_FOUNDATION 0xD00
@@ -101,6 +105,7 @@
#define CPU_PART_AEM_V8 0xD0F
#define CPU_PART_NEOVERSE_V1 0xD40
#define CPU_PART_CORTEX_A78 0xD41
+#define CPU_PART_CORTEX_A78AE 0xD42
#define CPU_PART_CORTEX_A65AE 0xD43
#define CPU_PART_CORTEX_X1 0xD44
#define CPU_PART_CORTEX_A510 0xD46
@@ -113,6 +118,14 @@
#define CPU_PART_CORTEX_A715 0xD4D
#define CPU_PART_CORTEX_X3 0xD4E
#define CPU_PART_NEOVERSE_V2 0xD4F
+#define CPU_PART_CORTEX_A520 0xD80
+#define CPU_PART_CORTEX_A720 0xD81
+#define CPU_PART_CORTEX_X4 0xD82
+#define CPU_PART_NEOVERSE_V3AE 0xD83
+#define CPU_PART_NEOVERSE_V3 0xD84
+#define CPU_PART_CORTEX_X925 0xD85
+#define CPU_PART_CORTEX_A725 0xD87
+#define CPU_PART_NEOVERSE_N3 0xD8E
/* Cavium Part numbers */
#define CPU_PART_THUNDERX 0x0A1
@@ -125,9 +138,16 @@
#define CPU_REV_THUNDERX2_0 0x00
-/* APM / Ampere Part Number */
+/* APM (now Ampere) Part number */
#define CPU_PART_EMAG8180 0x000
+/* Ampere Part numbers */
+#define CPU_PART_AMPERE1 0xAC3
+#define CPU_PART_AMPERE1A 0xAC4
+
+/* Microsoft Part numbers */
+#define CPU_PART_AZURE_COBALT_100 0xD49
+
/* Qualcomm */
#define CPU_PART_KRYO400_GOLD 0x804
#define CPU_PART_KRYO400_SILVER 0x805
@@ -198,6 +218,7 @@
#define CPU_MATCH_ERRATA_CAVIUM_THUNDERX_1_1 0
#endif
+#if !defined(__ASSEMBLER__)
extern char btext[];
extern char etext[];
@@ -229,9 +250,18 @@ void ptrauth_mp_start(uint64_t);
/* Functions to read the sanitised view of the special registers */
void update_special_regs(u_int);
-bool extract_user_id_field(u_int, u_int, uint8_t *);
-bool get_kernel_reg(u_int, uint64_t *);
-bool get_kernel_reg_masked(u_int, uint64_t *, uint64_t);
+void update_special_reg_iss(u_int, uint64_t, uint64_t);
+#define update_special_reg(reg, clear, set) \
+ update_special_reg_iss(reg ## _ISS, clear, set)
+bool get_kernel_reg_iss(u_int, uint64_t *);
+#define get_kernel_reg(reg, valp) \
+ get_kernel_reg_iss(reg ## _ISS, valp)
+bool get_kernel_reg_iss_masked(u_int, uint64_t *, uint64_t);
+#define get_kernel_reg_masked(reg, valp, mask) \
+ get_kernel_reg_iss_masked(reg ## _ISS, valp, mask)
+bool get_user_reg_iss(u_int, uint64_t *, bool);
+#define get_user_reg(reg, valp, fbsd) \
+ get_user_reg_iss(reg ## _ISS, valp, fbsd)
void cpu_desc_init(void);
@@ -268,6 +298,7 @@ ADDRESS_TRANSLATE_FUNC(s1e0w)
ADDRESS_TRANSLATE_FUNC(s1e1r)
ADDRESS_TRANSLATE_FUNC(s1e1w)
+#endif /* !__ASSEMBLER__ */
#endif
#endif /* !_MACHINE_CPU_H_ */
diff --git a/sys/arm64/include/cpu_feat.h b/sys/arm64/include/cpu_feat.h
new file mode 100644
index 000000000000..9fe6a9dd95d9
--- /dev/null
+++ b/sys/arm64/include/cpu_feat.h
@@ -0,0 +1,88 @@
+/*-
+ * SPDX-License-Identifier: BSD-2-Clause
+ *
+ * Copyright (c) 2024 Arm Ltd
+ *
+ * Redistribution and use in source and binary forms, with or without
+ * modification, are permitted provided that the following conditions
+ * are met:
+ * 1. Redistributions of source code must retain the above copyright
+ * notice, this list of conditions and the following disclaimer.
+ * 2. Redistributions in binary form must reproduce the above copyright
+ * notice, this list of conditions and the following disclaimer in the
+ * documentation and/or other materials provided with the distribution.
+ *
+ * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
+ * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
+ * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
+ * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
+ * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
+ * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
+ * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
+ * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
+ * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
+ * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
+ * SUCH DAMAGE.
+ */
+
+#ifndef _MACHINE_CPU_FEAT_H_
+#define _MACHINE_CPU_FEAT_H_
+
+#include <sys/linker_set.h>
+
+typedef enum {
+ ERRATA_UNKNOWN, /* Unknown erratum */
+ ERRATA_NONE, /* No errata for this feature on this system. */
+ ERRATA_AFFECTED, /* There is errata on this system. */
+ ERRATA_FW_MITIGAION, /* There is errata, and a firmware */
+ /* mitigation. The mitigation may need a */
+ /* kernel component. */
+} cpu_feat_errata;
+
+#define CPU_FEAT_STAGE_MASK 0x00000001
+#define CPU_FEAT_EARLY_BOOT 0x00000000
+#define CPU_FEAT_AFTER_DEV 0x00000001
+
+#define CPU_FEAT_SCOPE_MASK 0x00000010
+#define CPU_FEAT_PER_CPU 0x00000000
+#define CPU_FEAT_SYSTEM 0x00000010
+
+struct cpu_feat;
+
+typedef bool (cpu_feat_check)(const struct cpu_feat *, u_int);
+typedef bool (cpu_feat_has_errata)(const struct cpu_feat *, u_int,
+ u_int **, u_int *);
+typedef void (cpu_feat_enable)(const struct cpu_feat *, cpu_feat_errata,
+ u_int *, u_int);
+
+struct cpu_feat {
+ const char *feat_name;
+ cpu_feat_check *feat_check;
+ cpu_feat_has_errata *feat_has_errata;
+ cpu_feat_enable *feat_enable;
+ uint32_t feat_flags;
+};
+SET_DECLARE(cpu_feat_set, struct cpu_feat);
+
+/*
+ * Allow drivers to mark an erratum as worked around, e.g. the Errata
+ * Management ABI may know the workaround isn't needed on a given system.
+ */
+typedef cpu_feat_errata (*cpu_feat_errata_check_fn)(const struct cpu_feat *,
+ u_int);
+void cpu_feat_register_errata_check(cpu_feat_errata_check_fn);
+
+void enable_cpu_feat(uint32_t);
+
+/* Check if an erratum is in the list of errata */
+static inline bool
+cpu_feat_has_erratum(u_int *errata_list, u_int errata_count, u_int erratum)
+{
+ for (u_int i = 0; i < errata_count; i++)
+		if (errata_list[i] == erratum)
+ return (true);
+
+ return (false);
+}
+
+#endif /* _MACHINE_CPU_FEAT_H_ */
diff --git a/sys/arm64/include/cpufunc.h b/sys/arm64/include/cpufunc.h
index ba712f48b262..e6e1f682794e 100644
--- a/sys/arm64/include/cpufunc.h
+++ b/sys/arm64/include/cpufunc.h
@@ -41,8 +41,6 @@ breakpoint(void)
#ifdef _KERNEL
#include <machine/armreg.h>
-void pan_enable(void);
-
static __inline register_t
dbg_disable(void)
{
@@ -160,6 +158,26 @@ invalidate_local_icache(void)
"isb \n");
}
+static __inline void
+wfet(uint64_t val)
+{
+ __asm __volatile(
+ "msr s0_3_c1_c0_0, %0\n"
+ :
+ : "r" ((val))
+ : "memory");
+}
+
+static __inline void
+wfit(uint64_t val)
+{
+ __asm __volatile(
+ "msr s0_3_c1_c0_1, %0\n"
+ :
+ : "r" ((val))
+ : "memory");
+}
+
extern bool icache_aliasing;
extern bool icache_vmid;
diff --git a/sys/arm64/include/efi.h b/sys/arm64/include/efi.h
index ed9e8d86b7bc..bfce872296a2 100644
--- a/sys/arm64/include/efi.h
+++ b/sys/arm64/include/efi.h
@@ -35,6 +35,8 @@
#ifndef __ARM64_INCLUDE_EFI_H_
#define __ARM64_INCLUDE_EFI_H_
+#include <sys/types.h>
+
#define EFIABI_ATTR
#ifdef _KERNEL
@@ -44,7 +46,7 @@
#define EFI_TIME_UNLOCK()
#define EFI_TIME_OWNED()
-#define EFI_RT_HANDLE_FAULTS_DEFAULT 0
+#define EFI_RT_HANDLE_FAULTS_DEFAULT 1
#endif
struct efirt_callinfo {
diff --git a/sys/arm64/include/elf.h b/sys/arm64/include/elf.h
index 9f9cd44ac9f7..d6328c143585 100644
--- a/sys/arm64/include/elf.h
+++ b/sys/arm64/include/elf.h
@@ -94,90 +94,110 @@ __ElfType(Auxinfo);
#endif
/* HWCAP */
-#define HWCAP_FP 0x00000001
-#define HWCAP_ASIMD 0x00000002
-#define HWCAP_EVTSTRM 0x00000004
-#define HWCAP_AES 0x00000008
-#define HWCAP_PMULL 0x00000010
-#define HWCAP_SHA1 0x00000020
-#define HWCAP_SHA2 0x00000040
-#define HWCAP_CRC32 0x00000080
-#define HWCAP_ATOMICS 0x00000100
-#define HWCAP_FPHP 0x00000200
-#define HWCAP_ASIMDHP 0x00000400
+#define HWCAP_FP (1 << 0)
+#define HWCAP_ASIMD (1 << 1)
+#define HWCAP_EVTSTRM (1 << 2)
+#define HWCAP_AES (1 << 3)
+#define HWCAP_PMULL (1 << 4)
+#define HWCAP_SHA1 (1 << 5)
+#define HWCAP_SHA2 (1 << 6)
+#define HWCAP_CRC32 (1 << 7)
+#define HWCAP_ATOMICS (1 << 8)
+#define HWCAP_FPHP (1 << 9)
+#define HWCAP_ASIMDHP (1 << 10)
/*
* XXX: The following bits (from CPUID to FLAGM) were originally incorrect,
* but later changed to match the Linux definitions. No compatibility code is
* provided, as the fix was expected to result in near-zero fallout.
*/
-#define HWCAP_CPUID 0x00000800
-#define HWCAP_ASIMDRDM 0x00001000
-#define HWCAP_JSCVT 0x00002000
-#define HWCAP_FCMA 0x00004000
-#define HWCAP_LRCPC 0x00008000
-#define HWCAP_DCPOP 0x00010000
-#define HWCAP_SHA3 0x00020000
-#define HWCAP_SM3 0x00040000
-#define HWCAP_SM4 0x00080000
-#define HWCAP_ASIMDDP 0x00100000
-#define HWCAP_SHA512 0x00200000
-#define HWCAP_SVE 0x00400000
-#define HWCAP_ASIMDFHM 0x00800000
-#define HWCAP_DIT 0x01000000
-#define HWCAP_USCAT 0x02000000
-#define HWCAP_ILRCPC 0x04000000
-#define HWCAP_FLAGM 0x08000000
-#define HWCAP_SSBS 0x10000000
-#define HWCAP_SB 0x20000000
-#define HWCAP_PACA 0x40000000
-#define HWCAP_PACG 0x80000000
+#define HWCAP_CPUID (1 << 11)
+#define HWCAP_ASIMDRDM (1 << 12)
+#define HWCAP_JSCVT (1 << 13)
+#define HWCAP_FCMA (1 << 14)
+#define HWCAP_LRCPC (1 << 15)
+#define HWCAP_DCPOP (1 << 16)
+#define HWCAP_SHA3 (1 << 17)
+#define HWCAP_SM3 (1 << 18)
+#define HWCAP_SM4 (1 << 19)
+#define HWCAP_ASIMDDP (1 << 20)
+#define HWCAP_SHA512 (1 << 21)
+#define HWCAP_SVE (1 << 22)
+#define HWCAP_ASIMDFHM (1 << 23)
+#define HWCAP_DIT (1 << 24)
+#define HWCAP_USCAT (1 << 25)
+#define HWCAP_ILRCPC (1 << 26)
+#define HWCAP_FLAGM (1 << 27)
+#define HWCAP_SSBS (1 << 28)
+#define HWCAP_SB (1 << 29)
+#define HWCAP_PACA (1 << 30)
+#define HWCAP_PACG (1UL << 31)
+#define HWCAP_GCS (1UL << 32)
/* HWCAP2 */
-#define HWCAP2_DCPODP 0x0000000000000001ul
-#define HWCAP2_SVE2 0x0000000000000002ul
-#define HWCAP2_SVEAES 0x0000000000000004ul
-#define HWCAP2_SVEPMULL 0x0000000000000008ul
-#define HWCAP2_SVEBITPERM 0x0000000000000010ul
-#define HWCAP2_SVESHA3 0x0000000000000020ul
-#define HWCAP2_SVESM4 0x0000000000000040ul
-#define HWCAP2_FLAGM2 0x0000000000000080ul
-#define HWCAP2_FRINT 0x0000000000000100ul
-#define HWCAP2_SVEI8MM 0x0000000000000200ul
-#define HWCAP2_SVEF32MM 0x0000000000000400ul
-#define HWCAP2_SVEF64MM 0x0000000000000800ul
-#define HWCAP2_SVEBF16 0x0000000000001000ul
-#define HWCAP2_I8MM 0x0000000000002000ul
-#define HWCAP2_BF16 0x0000000000004000ul
-#define HWCAP2_DGH 0x0000000000008000ul
-#define HWCAP2_RNG 0x0000000000010000ul
-#define HWCAP2_BTI 0x0000000000020000ul
-#define HWCAP2_MTE 0x0000000000040000ul
-#define HWCAP2_ECV 0x0000000000080000ul
-#define HWCAP2_AFP 0x0000000000100000ul
-#define HWCAP2_RPRES 0x0000000000200000ul
-#define HWCAP2_MTE3 0x0000000000400000ul
-#define HWCAP2_SME 0x0000000000800000ul
-#define HWCAP2_SME_I16I64 0x0000000001000000ul
-#define HWCAP2_SME_F64F64 0x0000000002000000ul
-#define HWCAP2_SME_I8I32 0x0000000004000000ul
-#define HWCAP2_SME_F16F32 0x0000000008000000ul
-#define HWCAP2_SME_B16F32 0x0000000010000000ul
-#define HWCAP2_SME_F32F32 0x0000000020000000ul
-#define HWCAP2_SME_FA64 0x0000000040000000ul
-#define HWCAP2_WFXT 0x0000000080000000ul
-#define HWCAP2_EBF16 0x0000000100000000ul
-#define HWCAP2_SVE_EBF16 0x0000000200000000ul
-#define HWCAP2_CSSC 0x0000000400000000ul
-#define HWCAP2_RPRFM 0x0000000800000000ul
-#define HWCAP2_SVE2P1 0x0000001000000000ul
-#define HWCAP2_SME2 0x0000002000000000ul
-#define HWCAP2_SME2P1 0x0000004000000000ul
-#define HWCAP2_SME_I16I32 0x0000008000000000ul
-#define HWCAP2_SME_BI32I32 0x0000010000000000ul
-#define HWCAP2_SME_B16B16 0x0000020000000000ul
-#define HWCAP2_SME_F16F16 0x0000040000000000ul
-#define HWCAP2_MOPS 0x0000080000000000ul
-#define HWCAP2_HBC 0x0000100000000000ul
+#define HWCAP2_DCPODP (1 << 0)
+#define HWCAP2_SVE2 (1 << 1)
+#define HWCAP2_SVEAES (1 << 2)
+#define HWCAP2_SVEPMULL (1 << 3)
+#define HWCAP2_SVEBITPERM (1 << 4)
+#define HWCAP2_SVESHA3 (1 << 5)
+#define HWCAP2_SVESM4 (1 << 6)
+#define HWCAP2_FLAGM2 (1 << 7)
+#define HWCAP2_FRINT (1 << 8)
+#define HWCAP2_SVEI8MM (1 << 9)
+#define HWCAP2_SVEF32MM (1 << 10)
+#define HWCAP2_SVEF64MM (1 << 11)
+#define HWCAP2_SVEBF16 (1 << 12)
+#define HWCAP2_I8MM (1 << 13)
+#define HWCAP2_BF16 (1 << 14)
+#define HWCAP2_DGH (1 << 15)
+#define HWCAP2_RNG (1 << 16)
+#define HWCAP2_BTI (1 << 17)
+#define HWCAP2_MTE (1 << 18)
+#define HWCAP2_ECV (1 << 19)
+#define HWCAP2_AFP (1 << 20)
+#define HWCAP2_RPRES (1 << 21)
+#define HWCAP2_MTE3 (1 << 22)
+#define HWCAP2_SME (1 << 23)
+#define HWCAP2_SME_I16I64 (1 << 24)
+#define HWCAP2_SME_F64F64 (1 << 25)
+#define HWCAP2_SME_I8I32 (1 << 26)
+#define HWCAP2_SME_F16F32 (1 << 27)
+#define HWCAP2_SME_B16F32 (1 << 28)
+#define HWCAP2_SME_F32F32 (1 << 29)
+#define HWCAP2_SME_FA64 (1 << 30)
+#define HWCAP2_WFXT (1UL << 31)
+#define HWCAP2_EBF16 (1UL << 32)
+#define HWCAP2_SVE_EBF16 (1UL << 33)
+#define HWCAP2_CSSC (1UL << 34)
+#define HWCAP2_RPRFM (1UL << 35)
+#define HWCAP2_SVE2P1 (1UL << 36)
+#define HWCAP2_SME2 (1UL << 37)
+#define HWCAP2_SME2P1 (1UL << 38)
+#define HWCAP2_SME_I16I32 (1UL << 39)
+#define HWCAP2_SME_BI32I32 (1UL << 40)
+#define HWCAP2_SME_B16B16 (1UL << 41)
+#define HWCAP2_SME_F16F16 (1UL << 42)
+#define HWCAP2_MOPS (1UL << 43)
+#define HWCAP2_HBC (1UL << 44)
+#define HWCAP2_SVE_B16B16 (1UL << 45)
+#define HWCAP2_LRCPC3 (1UL << 46)
+#define HWCAP2_LSE128 (1UL << 47)
+#define HWCAP2_FPMR (1UL << 48)
+#define HWCAP2_LUT (1UL << 49)
+#define HWCAP2_FAMINMAX (1UL << 50)
+#define HWCAP2_F8CVT (1UL << 51)
+#define HWCAP2_F8FMA (1UL << 52)
+#define HWCAP2_F8DP4 (1UL << 53)
+#define HWCAP2_F8DP2 (1UL << 54)
+#define HWCAP2_F8E4M3 (1UL << 55)
+#define HWCAP2_F8E5M2 (1UL << 56)
+#define HWCAP2_SME_LUTV2 (1UL << 57)
+#define HWCAP2_SME_F8F16 (1UL << 58)
+#define HWCAP2_SME_F8F32 (1UL << 59)
+#define HWCAP2_SME_SF8FMA (1UL << 60)
+#define HWCAP2_SME_SF8DP4 (1UL << 61)
+#define HWCAP2_SME_SF8DP2 (1UL << 62)
+#define HWCAP2_POE (1UL << 63)
#ifdef COMPAT_FREEBSD32
/* ARM HWCAP */
diff --git a/sys/arm64/include/hypervisor.h b/sys/arm64/include/hypervisor.h
index 85ac5cda7037..a32e1000d911 100644
--- a/sys/arm64/include/hypervisor.h
+++ b/sys/arm64/include/hypervisor.h
@@ -37,21 +37,35 @@
/* CNTHCTL_EL2 - Counter-timer Hypervisor Control register */
#define CNTHCTL_EVNTI_MASK (0xf << 4) /* Bit to trigger event stream */
+/* Valid if HCR_EL2.E2H == 0 */
+#define CNTHCTL_EL1PCTEN (1 << 0) /* Allow physical counter access */
+#define CNTHCTL_EL1PCEN (1 << 1) /* Allow physical timer access */
+/* Valid if HCR_EL2.E2H == 1 */
+#define CNTHCTL_E2H_EL0PCTEN (1 << 0) /* Allow EL0 physical counter access */
+#define CNTHCTL_E2H_EL0VCTEN (1 << 1) /* Allow EL0 virtual counter access */
+#define CNTHCTL_E2H_EL0VTEN (1 << 8)
+#define CNTHCTL_E2H_EL0PTEN (1 << 9)
+#define CNTHCTL_E2H_EL1PCTEN (1 << 10) /* Allow physical counter access */
+#define CNTHCTL_E2H_EL1PTEN (1 << 11) /* Allow physical timer access */
+/* Unconditionally valid */
#define CNTHCTL_EVNTDIR (1 << 3) /* Control transition trigger bit */
#define CNTHCTL_EVNTEN (1 << 2) /* Enable event stream */
-#define CNTHCTL_EL1PCEN (1 << 1) /* Allow EL0/1 physical timer access */
-#define CNTHCTL_EL1PCTEN (1 << 0) /*Allow EL0/1 physical counter access*/
/* CPTR_EL2 - Architecture feature trap register */
/* Valid if HCR_EL2.E2H == 0 */
-#define CPTR_RES0 0x7fefc800
-#define CPTR_RES1 0x000033ff
-#define CPTR_TFP 0x00000400
+#define CPTR_TRAP_ALL 0xc01037ff /* Enable all traps */
+#define CPTR_RES0 0x7fefc800
+#define CPTR_RES1 0x000032ff
+#define CPTR_TZ 0x00000100
+#define CPTR_TFP 0x00000400
+#define CPTR_TTA 0x00100000
/* Valid if HCR_EL2.E2H == 1 */
-#define CPTR_FPEN 0x00300000
+#define CPTR_E2H_TRAP_ALL 0xd0000000
+#define CPTR_E2H_ZPEN 0x00030000
+#define CPTR_E2H_FPEN 0x00300000
+#define CPTR_E2H_TTA 0x10000000
/* Unconditionally valid */
-#define CPTR_TTA 0x00100000
-#define CPTR_TCPAC 0x80000000
+#define CPTR_TCPAC 0x80000000
/* HCR_EL2 - Hypervisor Config Register */
#define HCR_VM (UL(0x1) << 0)
@@ -118,6 +132,41 @@
#define HCR_TWEDEn (UL(0x1) << 59)
#define HCR_TWEDEL_MASK (UL(0xf) << 60)
+/* HCRX_EL2 - Extended Hypervisor Configuration Register */
+#define HCRX_EL2_REG MRS_REG_ALT_NAME(HCRX_EL2)
+#define HCRX_EL2_op0 3
+#define HCRX_EL2_op1 4
+#define HCRX_EL2_CRn 1
+#define HCRX_EL2_CRm 2
+#define HCRX_EL2_op2 2
+
+#define HCRX_EnAS0 (UL(0x1) << 0)
+#define HCRX_EnALS (UL(0x1) << 1)
+#define HCRX_EnASR (UL(0x1) << 2)
+#define HCRX_FnXS (UL(0x1) << 3)
+#define HCRX_FGTnXS (UL(0x1) << 4)
+#define HCRX_SMPME (UL(0x1) << 5)
+#define HCRX_TALLINT (UL(0x1) << 6)
+#define HCRX_VINMI (UL(0x1) << 7)
+#define HCRX_VFNMI (UL(0x1) << 8)
+#define HCRX_CMOW (UL(0x1) << 9)
+#define HCRX_MCE2 (UL(0x1) << 10)
+#define HCRX_MSCEn (UL(0x1) << 11)
+/* Bits 12 & 13 are reserved */
+#define HCRX_TCR2En (UL(0x1) << 14)
+#define HCRX_SCTLR2En (UL(0x1) << 15)
+#define HCRX_PTTWI (UL(0x1) << 16)
+#define HCRX_D128En (UL(0x1) << 17)
+#define HCRX_EnSNERR (UL(0x1) << 18)
+#define HCRX_TMEA (UL(0x1) << 19)
+#define HCRX_EnSDERR (UL(0x1) << 20)
+#define HCRX_EnIDCP128 (UL(0x1) << 21)
+#define HCRX_GCSEn (UL(0x1) << 22)
+#define HCRX_EnFPM (UL(0x1) << 23)
+#define HCRX_PACMEn (UL(0x1) << 24)
+/* Bit 25 is reserved */
+#define HCRX_SRMASKEn (UL(0x1) << 26)
+
/* HPFAR_EL2 - Hypervisor IPA Fault Address Register */
#define HPFAR_EL2_FIPA_SHIFT 4
#define HPFAR_EL2_FIPA_MASK 0xfffffffff0
@@ -143,10 +192,14 @@
#define SCTLR_EL2_C (0x1UL << SCTLR_EL2_C_SHIFT)
#define SCTLR_EL2_SA_SHIFT 3
#define SCTLR_EL2_SA (0x1UL << SCTLR_EL2_SA_SHIFT)
+#define SCTLR_EL2_EOS_SHIFT 11
+#define SCTLR_EL2_EOS (0x1UL << SCTLR_EL2_EOS_SHIFT)
#define SCTLR_EL2_I_SHIFT 12
#define SCTLR_EL2_I (0x1UL << SCTLR_EL2_I_SHIFT)
#define SCTLR_EL2_WXN_SHIFT 19
#define SCTLR_EL2_WXN (0x1UL << SCTLR_EL2_WXN_SHIFT)
+#define SCTLR_EL2_EIS_SHIFT 22
+#define SCTLR_EL2_EIS (0x1UL << SCTLR_EL2_EIS_SHIFT)
#define SCTLR_EL2_EE_SHIFT 25
#define SCTLR_EL2_EE (0x1UL << SCTLR_EL2_EE_SHIFT)
@@ -228,6 +281,9 @@
#define VTCR_EL2_PS_42BIT (0x3UL << VTCR_EL2_PS_SHIFT)
#define VTCR_EL2_PS_44BIT (0x4UL << VTCR_EL2_PS_SHIFT)
#define VTCR_EL2_PS_48BIT (0x5UL << VTCR_EL2_PS_SHIFT)
+#define VTCR_EL2_PS_52BIT (0x6UL << VTCR_EL2_PS_SHIFT)
+#define VTCR_EL2_DS_SHIFT 32
+#define VTCR_EL2_DS (0x1UL << VTCR_EL2_DS_SHIFT)
/* VTTBR_EL2 - Virtualization Translation Table Base Register */
#define VTTBR_VMID_MASK 0xffff000000000000
@@ -252,5 +308,35 @@
#define MDCR_EL2_TDOSA (0x1UL << MDCR_EL2_TDOSA_SHIFT)
#define MDCR_EL2_TDRA_SHIFT 11
#define MDCR_EL2_TDRA (0x1UL << MDCR_EL2_TDRA_SHIFT)
+#define MDCR_E2PB_SHIFT 12
+#define MDCR_E2PB_MASK (0x3UL << MDCR_E2PB_SHIFT)
+#define MDCR_TPMS_SHIFT 14
+#define MDCR_TPMS (0x1UL << MDCR_TPMS_SHIFT)
+#define MDCR_EnSPM_SHIFT 15
+#define MDCR_EnSPM (0x1UL << MDCR_EnSPM_SHIFT)
+#define MDCR_HPMD_SHIFT 17
+#define MDCR_HPMD (0x1UL << MDCR_HPMD_SHIFT)
+#define MDCR_TTRF_SHIFT 19
+#define MDCR_TTRF (0x1UL << MDCR_TTRF_SHIFT)
+#define MDCR_HCCD_SHIFT 23
+#define MDCR_HCCD (0x1UL << MDCR_HCCD_SHIFT)
+#define MDCR_E2TB_SHIFT 24
+#define MDCR_E2TB_MASK (0x3UL << MDCR_E2TB_SHIFT)
+#define MDCR_HLP_SHIFT 26
+#define MDCR_HLP (0x1UL << MDCR_HLP_SHIFT)
+#define MDCR_TDCC_SHIFT 27
+#define MDCR_TDCC (0x1UL << MDCR_TDCC_SHIFT)
+#define MDCR_MTPME_SHIFT 28
+#define MDCR_MTPME (0x1UL << MDCR_MTPME_SHIFT)
+#define MDCR_HPMFZO_SHIFT 29
+#define MDCR_HPMFZO (0x1UL << MDCR_HPMFZO_SHIFT)
+#define MDCR_PMSSE_SHIFT 30
+#define MDCR_PMSSE_MASK (0x3UL << MDCR_PMSSE_SHIFT)
+#define MDCR_HPMFZS_SHIFT 36
+#define MDCR_HPMFZS (0x1UL << MDCR_HPMFZS_SHIFT)
+#define MDCR_PMEE_SHIFT 40
+#define MDCR_PMEE_MASK (0x3UL << MDCR_PMEE_SHIFT)
+#define MDCR_EBWE_SHIFT 43
+#define MDCR_EBWE (0x1UL << MDCR_EBWE_SHIFT)
#endif /* !_MACHINE_HYPERVISOR_H_ */
diff --git a/sys/arm64/include/intr.h b/sys/arm64/include/intr.h
index 3cdbc83ff109..ef7fe56e3a13 100644
--- a/sys/arm64/include/intr.h
+++ b/sys/arm64/include/intr.h
@@ -27,20 +27,20 @@
#ifndef _MACHINE_INTR_H_
#define _MACHINE_INTR_H_
+#ifndef LOCORE
#ifdef FDT
#include <dev/ofw/openfirm.h>
#endif
-#include <sys/intr.h>
-
-#ifndef NIRQ
-#define NIRQ 16384 /* XXX - It should be an option. */
-#endif
-
static inline void
arm_irq_memory_barrier(uintptr_t irq)
{
}
+#endif /* !LOCORE */
+
+#ifndef NIRQ
+#define NIRQ 16384 /* XXX - It should be an option. */
+#endif
#ifdef DEV_ACPI
#define ACPI_INTR_XREF 1
@@ -48,4 +48,8 @@ arm_irq_memory_barrier(uintptr_t irq)
#define ACPI_GPIO_XREF 3
#endif
+#define INTR_ROOT_IRQ 0
+#define INTR_ROOT_FIQ 1
+#define INTR_ROOT_COUNT 2
+
#endif /* _MACHINE_INTR_H */
diff --git a/sys/arm64/include/machdep.h b/sys/arm64/include/machdep.h
index 2f2960ae39f2..4fa80219da42 100644
--- a/sys/arm64/include/machdep.h
+++ b/sys/arm64/include/machdep.h
@@ -33,7 +33,6 @@ struct arm64_bootparams {
vm_offset_t modulep;
vm_offset_t kern_stack;
vm_paddr_t kern_ttbr0;
- uint64_t hcr_el2;
int boot_el; /* EL the kernel booted from */
int pad;
};
diff --git a/sys/arm64/include/md_var.h b/sys/arm64/include/md_var.h
index f9aaaeba7306..da136ff091db 100644
--- a/sys/arm64/include/md_var.h
+++ b/sys/arm64/include/md_var.h
@@ -37,8 +37,12 @@ extern char sigcode[];
extern int szsigcode;
extern u_long elf_hwcap;
extern u_long elf_hwcap2;
+extern u_long elf_hwcap3;
+extern u_long elf_hwcap4;
extern u_long linux_elf_hwcap;
extern u_long linux_elf_hwcap2;
+extern u_long linux_elf_hwcap3;
+extern u_long linux_elf_hwcap4;
#ifdef COMPAT_FREEBSD32
extern u_long elf32_hwcap;
extern u_long elf32_hwcap2;
diff --git a/sys/arm64/include/metadata.h b/sys/arm64/include/metadata.h
index 7459aa90a6e2..30ec5115e670 100644
--- a/sys/arm64/include/metadata.h
+++ b/sys/arm64/include/metadata.h
@@ -31,10 +31,15 @@
#define MODINFOMD_DTBP 0x1002
#define MODINFOMD_EFI_FB 0x1003
+/*
+ * This is not the same as the UEFI standard EFI_MEMORY_ATTRIBUTES_TABLE, though
+ * memory_size / descriptor_size entries of EFI_MEMORY_DESCRIPTORS follow this table
+ * starting at a 16-byte alignment.
+ */
struct efi_map_header {
- size_t memory_size;
- size_t descriptor_size;
- uint32_t descriptor_version;
+	size_t		memory_size;	/* Number of bytes that follow */
+ size_t descriptor_size; /* Size of each EFI_MEMORY_DESCRIPTOR */
+ uint32_t descriptor_version; /* Currently '1' */
};
struct efi_fb {
diff --git a/sys/arm64/include/param.h b/sys/arm64/include/param.h
index ca3fae11c515..753035b7775e 100644
--- a/sys/arm64/include/param.h
+++ b/sys/arm64/include/param.h
@@ -43,8 +43,6 @@
#define STACKALIGNBYTES (16 - 1)
#define STACKALIGN(p) ((uint64_t)(p) & ~STACKALIGNBYTES)
-#define __PCI_REROUTE_INTERRUPT
-
#ifndef MACHINE
#define MACHINE "arm64"
#endif
@@ -97,7 +95,7 @@
#define PAGE_SIZE (1 << PAGE_SHIFT)
#define PAGE_MASK (PAGE_SIZE - 1)
-#define MAXPAGESIZES 3 /* maximum number of supported page sizes */
+#define MAXPAGESIZES 4 /* maximum number of supported page sizes */
#ifndef KSTACK_PAGES
#if defined(KASAN) || defined(KMSAN)
@@ -119,17 +117,9 @@
/*
* Mach derived conversion macros
*/
-#define round_page(x) (((unsigned long)(x) + PAGE_MASK) & ~PAGE_MASK)
-#define trunc_page(x) ((unsigned long)(x) & ~PAGE_MASK)
-
-#define atop(x) ((unsigned long)(x) >> PAGE_SHIFT)
-#define ptoa(x) ((unsigned long)(x) << PAGE_SHIFT)
-
#define arm64_btop(x) ((unsigned long)(x) >> PAGE_SHIFT)
#define arm64_ptob(x) ((unsigned long)(x) << PAGE_SHIFT)
-#define pgtok(x) ((unsigned long)(x) * (PAGE_SIZE / 1024))
-
#endif /* !_MACHINE_PARAM_H_ */
#endif /* !__arm__ */
diff --git a/sys/arm64/include/pcb.h b/sys/arm64/include/pcb.h
index d7392d5f2032..c0feb1149cf5 100644
--- a/sys/arm64/include/pcb.h
+++ b/sys/arm64/include/pcb.h
@@ -59,17 +59,19 @@ struct pcb {
u_int pcb_flags;
#define PCB_SINGLE_STEP_SHIFT 0
#define PCB_SINGLE_STEP (1 << PCB_SINGLE_STEP_SHIFT)
- uint32_t pcb_pad1;
+ u_int pcb_sve_len; /* The SVE vector length */
struct vfpstate *pcb_fpusaved;
int pcb_fpflags;
#define PCB_FP_STARTED 0x00000001
+#define PCB_FP_SVEVALID 0x00000002
#define PCB_FP_KERN 0x40000000
#define PCB_FP_NOSAVE 0x80000000
/* The bits passed to userspace in get_fpcontext */
-#define PCB_FP_USERMASK (PCB_FP_STARTED)
+#define PCB_FP_USERMASK (PCB_FP_STARTED | PCB_FP_SVEVALID)
u_int pcb_vfpcpu; /* Last cpu this thread ran VFP code */
- uint64_t pcb_reserved[5];
+ void *pcb_svesaved;
+ uint64_t pcb_reserved[4];
/*
* The userspace VFP state. The pcb_fpusaved pointer will point to
@@ -83,7 +85,7 @@ struct pcb {
#ifdef _KERNEL
void makectx(struct trapframe *tf, struct pcb *pcb);
-int savectx(struct pcb *pcb) __returns_twice;
+void savectx(struct pcb *pcb) __returns_twice;
#endif
#endif /* !LOCORE */
diff --git a/sys/arm64/include/pmap.h b/sys/arm64/include/pmap.h
index d69924080610..0f23f200f0f6 100644
--- a/sys/arm64/include/pmap.h
+++ b/sys/arm64/include/pmap.h
@@ -71,16 +71,6 @@ struct md_page {
vm_memattr_t pv_memattr;
};
-/*
- * This structure is used to hold a virtual<->physical address
- * association and is used mostly by bootstrap code
- */
-struct pv_addr {
- SLIST_ENTRY(pv_addr) pv_list;
- vm_offset_t pv_va;
- vm_paddr_t pv_pa;
-};
-
enum pmap_stage {
PM_INVALID,
PM_STAGE1,
@@ -111,6 +101,8 @@ extern struct pmap kernel_pmap_store;
#define kernel_pmap (&kernel_pmap_store)
#define pmap_kernel() kernel_pmap
+extern bool pmap_lpa_enabled;
+
#define PMAP_ASSERT_LOCKED(pmap) \
mtx_assert(&(pmap)->pm_mtx, MA_OWNED)
#define PMAP_LOCK(pmap) mtx_lock(&(pmap)->pm_mtx)
@@ -137,6 +129,8 @@ extern struct pmap kernel_pmap_store;
extern vm_offset_t virtual_avail;
extern vm_offset_t virtual_end;
+extern pt_entry_t pmap_sh_attr;
+
/*
* Macros to test if a mapping is mappable with an L1 Section mapping
* or an L2 Large Page mapping.
@@ -147,7 +141,8 @@ extern vm_offset_t virtual_end;
#define pmap_vm_page_alloc_check(m)
void pmap_activate_vm(pmap_t);
-void pmap_bootstrap(vm_size_t);
+void pmap_bootstrap_dmap(vm_size_t);
+void pmap_bootstrap(void);
int pmap_change_attr(vm_offset_t va, vm_size_t size, int mode);
int pmap_change_prot(vm_offset_t va, vm_size_t size, vm_prot_t prot);
void pmap_kenter(vm_offset_t sva, vm_size_t size, vm_paddr_t pa, int mode);
@@ -180,7 +175,6 @@ int pmap_fault(pmap_t, uint64_t, uint64_t);
struct pcb *pmap_switch(struct thread *);
extern void (*pmap_clean_stage2_tlbi)(void);
-extern void (*pmap_invalidate_vpipt_icache)(void);
extern void (*pmap_stage2_invalidate_range)(uint64_t, vm_offset_t, vm_offset_t,
bool);
extern void (*pmap_stage2_invalidate_all)(uint64_t);
diff --git a/sys/arm64/include/proc.h b/sys/arm64/include/proc.h
index dfd39faadd46..dc2fa2df654d 100644
--- a/sys/arm64/include/proc.h
+++ b/sys/arm64/include/proc.h
@@ -65,7 +65,11 @@ struct mdthread {
struct ptrauth_key apia;
} md_ptrauth_kern;
- uint64_t md_reserved[4];
+ uint64_t md_efirt_tmp;
+ int md_efirt_dis_pf;
+
+ int md_reserved0;
+ uint64_t md_reserved[2];
};
struct mdproc {
diff --git a/sys/arm64/include/pte.h b/sys/arm64/include/pte.h
index 56eede01d776..464d8c941c56 100644
--- a/sys/arm64/include/pte.h
+++ b/sys/arm64/include/pte.h
@@ -54,13 +54,6 @@ typedef uint64_t pt_entry_t; /* page table entry */
#define ATTR_MASK_L UINT64_C(0x0000000000000fff)
#define ATTR_MASK (ATTR_MASK_H | ATTR_MASK_L)
-#define BASE_MASK ~ATTR_MASK
-#define BASE_ADDR(x) ((x) & BASE_MASK)
-
-#define PTE_TO_PHYS(pte) BASE_ADDR(pte)
-/* Convert a phys addr to the output address field of a PTE */
-#define PHYS_TO_PTE(pa) (pa)
-
/* Bits 58:55 are reserved for software */
#define ATTR_SW_UNUSED1 (1UL << 58)
#define ATTR_SW_NO_PROMOTE (1UL << 57)
@@ -80,14 +73,37 @@ typedef uint64_t pt_entry_t; /* page table entry */
#define ATTR_CONTIGUOUS (1UL << 52)
#define ATTR_DBM (1UL << 51)
-#define ATTR_S1_GP (1UL << 50)
+#define ATTR_S1_GP_SHIFT 50
+#define ATTR_S1_GP (1UL << ATTR_S1_GP_SHIFT)
+
+/*
+ * Largest possible output address field for a level 3 page. Block
+ * entries will use fewer low address bits, but these are res0 so
+ * should be safe to include.
+ *
+ * This is also safe to use for the next-level table address for
+ * table entries as they encode a physical address in the same way.
+ */
+#if PAGE_SIZE == PAGE_SIZE_4K
+#define ATTR_ADDR UINT64_C(0x0003fffffffff000)
+#elif PAGE_SIZE == PAGE_SIZE_16K
+#define ATTR_ADDR UINT64_C(0x0003ffffffffc000)
+#else
+#error Unsupported page size
+#endif
+
#define ATTR_S1_nG (1 << 11)
#define ATTR_AF (1 << 10)
+/* When TCR_EL1.DS == 0 */
#define ATTR_SH(x) ((x) << 8)
#define ATTR_SH_MASK ATTR_SH(3)
#define ATTR_SH_NS 0 /* Non-shareable */
#define ATTR_SH_OS 2 /* Outer-shareable */
#define ATTR_SH_IS 3 /* Inner-shareable */
+/* When TCR_EL1.DS == 1 */
+#define ATTR_OA_51_50_SHIFT 8
+#define ATTR_OA_51_50_MASK (3 << ATTR_OA_51_50_SHIFT)
+#define ATTR_OA_51_50_DELTA (50 - 8) /* Delta from address to pte */
#define ATTR_S1_AP_RW_BIT (1 << 7)
#define ATTR_S1_AP(x) ((x) << 6)
@@ -111,8 +127,6 @@ typedef uint64_t pt_entry_t; /* page table entry */
#define ATTR_S2_MEMATTR_WT 0xa
#define ATTR_S2_MEMATTR_WB 0xf
-#define ATTR_DEFAULT (ATTR_AF | ATTR_SH(ATTR_SH_IS))
-
#define ATTR_DESCR_MASK 3
#define ATTR_DESCR_VALID 1
#define ATTR_DESCR_TYPE_MASK 2
@@ -126,6 +140,29 @@ typedef uint64_t pt_entry_t; /* page table entry */
*/
#define ATTR_PROMOTE (ATTR_MASK & ~(ATTR_CONTIGUOUS | ATTR_AF))
+/* Read the output address or next-level table address from a PTE */
+#define PTE_TO_PHYS(x) ({ \
+ pt_entry_t _pte = (x); \
+ vm_paddr_t _pa; \
+ _pa = _pte & ATTR_ADDR; \
+ if (pmap_lpa_enabled) \
+ _pa |= (_pte & ATTR_OA_51_50_MASK) << ATTR_OA_51_50_DELTA; \
+ _pa; \
+})
+
+/*
+ * Convert a physical address to an output address or next-level
+ * table address in a PTE
+ */
+#define PHYS_TO_PTE(x) ({ \
+ vm_paddr_t _pa = (x); \
+ pt_entry_t _pte; \
+ _pte = _pa & ATTR_ADDR; \
+ if (pmap_lpa_enabled) \
+ _pte |= (_pa >> ATTR_OA_51_50_DELTA) & ATTR_OA_51_50_MASK; \
+ _pte; \
+})
+
#if PAGE_SIZE == PAGE_SIZE_4K
#define L0_SHIFT 39
#define L1_SHIFT 30
@@ -198,13 +235,18 @@ typedef uint64_t pt_entry_t; /* page table entry */
* can be coalesced into a single TLB entry
*/
#if PAGE_SIZE == PAGE_SIZE_4K
+#define L2C_ENTRIES 16
#define L3C_ENTRIES 16
#elif PAGE_SIZE == PAGE_SIZE_16K
+#define L2C_ENTRIES 32
#define L3C_ENTRIES 128
#else
#error Unsupported page size
#endif
+#define L2C_SIZE (L2C_ENTRIES * L2_SIZE)
+#define L2C_OFFSET (L2C_SIZE - 1)
+
#define L3C_SIZE (L3C_ENTRIES * L3_SIZE)
#define L3C_OFFSET (L3C_SIZE - 1)
diff --git a/sys/arm64/include/reg.h b/sys/arm64/include/reg.h
index c699752197a8..4226385480e8 100644
--- a/sys/arm64/include/reg.h
+++ b/sys/arm64/include/reg.h
@@ -63,6 +63,19 @@ struct fpreg32 {
int dummy;
};
+#define SVEREG_FLAG_REGS_MASK 0x0001
+#define SVEREG_FLAG_FP 0x0000
+#define SVEREG_FLAG_SVE 0x0001
+
+struct svereg_header {
+ __uint32_t sve_size;
+ __uint32_t sve_maxsize;
+ __uint16_t sve_vec_len;
+ __uint16_t sve_max_vec_len;
+ __uint16_t sve_flags;
+ __uint16_t sve_reserved;
+};
+
struct dbreg {
__uint8_t db_debug_ver;
__uint8_t db_nbkpts;
diff --git a/sys/arm64/include/resource.h b/sys/arm64/include/resource.h
index d4cffb1ae854..336fc11a435a 100644
--- a/sys/arm64/include/resource.h
+++ b/sys/arm64/include/resource.h
@@ -44,9 +44,7 @@
#define SYS_RES_MEMORY 3 /* i/o memory */
#define SYS_RES_IOPORT 4 /* i/o ports */
#define SYS_RES_GPIO 5 /* general purpose i/o */
-#ifdef NEW_PCIB
#define PCI_RES_BUS 6 /* PCI bus numbers */
-#endif
#endif /* !_MACHINE_RESOURCE_H_ */
diff --git a/sys/arm64/include/runq.h b/sys/arm64/include/runq.h
deleted file mode 100644
index 5076bd9169df..000000000000
--- a/sys/arm64/include/runq.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*-
- * Copyright (c) 2001 Jake Burkholder <jake@FreeBSD.org>
- * All rights reserved.
- *
- * Redistribution and use in source and binary forms, with or without
- * modification, are permitted provided that the following conditions
- * are met:
- * 1. Redistributions of source code must retain the above copyright
- * notice, this list of conditions and the following disclaimer.
- * 2. Redistributions in binary form must reproduce the above copyright
- * notice, this list of conditions and the following disclaimer in the
- * documentation and/or other materials provided with the distribution.
- *
- * THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
- * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
- * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
- * ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
- * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
- * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
- * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
- * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
- * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
- * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
- * SUCH DAMAGE.
- */
-
-#ifdef __arm__
-#include <arm/runq.h>
-#else /* !__arm__ */
-
-#ifndef _MACHINE_RUNQ_H_
-#define _MACHINE_RUNQ_H_
-
-#define RQB_LEN (1) /* Number of priority status words. */
-#define RQB_L2BPW (6) /* Log2(sizeof(rqb_word_t) * NBBY)). */
-#define RQB_BPW (1<<RQB_L2BPW) /* Bits in an rqb_word_t. */
-
-#define RQB_BIT(pri) (1ul << ((pri) & (RQB_BPW - 1)))
-#define RQB_WORD(pri) ((pri) >> RQB_L2BPW)
-
-#define RQB_FFS(word) (ffsl(word) - 1)
-
-/*
- * Type of run queue status word.
- */
-typedef unsigned long rqb_word_t;
-
-#endif
-
-#endif /* !__arm__ */
diff --git a/sys/arm64/include/sdt_machdep.h b/sys/arm64/include/sdt_machdep.h
new file mode 100644
index 000000000000..738d246832a2
--- /dev/null
+++ b/sys/arm64/include/sdt_machdep.h
@@ -0,0 +1,12 @@
+/*-
+ * SPDX-License-Identifier: BSD-2-Clause
+ *
+ * Copyright (c) 2024 Mark Johnston <markj@FreeBSD.org>
+ */
+
+#ifndef _SYS_SDT_MACHDEP_H_
+#define _SYS_SDT_MACHDEP_H_
+
+#define _SDT_ASM_PATCH_INSTR "nop"
+
+#endif /* _SYS_SDT_MACHDEP_H_ */
diff --git a/sys/arm64/include/sysarch.h b/sys/arm64/include/sysarch.h
index 83094943423a..498e26f6d47e 100644
--- a/sys/arm64/include/sysarch.h
+++ b/sys/arm64/include/sysarch.h
@@ -39,6 +39,8 @@
#ifndef _MACHINE_SYSARCH_H_
#define _MACHINE_SYSARCH_H_
+#include <sys/cdefs.h>
+
#define ARM64_GUARD_PAGE 0x100
struct arm64_guard_page_args {
@@ -46,6 +48,9 @@ struct arm64_guard_page_args {
__size_t len;
};
+#define ARM64_GET_SVE_VL 0x200
+/* Reserved ARM64_SET_SVE_VL 0x201 */
+
#ifndef _KERNEL
__BEGIN_DECLS
diff --git a/sys/arm64/include/ucontext.h b/sys/arm64/include/ucontext.h
index dedbd061ec6b..a4f0ee243b3a 100644
--- a/sys/arm64/include/ucontext.h
+++ b/sys/arm64/include/ucontext.h
@@ -62,6 +62,14 @@ struct arm64_reg_context {
};
#define ARM64_CTX_END 0xa5a5a5a5
+#define ARM64_CTX_SVE 0x00657673
+
+struct sve_context {
+ struct arm64_reg_context sve_ctx;
+ __uint16_t sve_vector_len;
+ __uint16_t sve_flags;
+ __uint16_t sve_reserved[2];
+};
struct __mcontext {
struct gpregs mc_gpregs;
diff --git a/sys/arm64/include/undefined.h b/sys/arm64/include/undefined.h
index db5d0523e711..71b2eed22a84 100644
--- a/sys/arm64/include/undefined.h
+++ b/sys/arm64/include/undefined.h
@@ -35,31 +35,17 @@
typedef int (*undef_handler_t)(vm_offset_t, uint32_t, struct trapframe *,
uint32_t);
-
-static inline int
-mrs_Op0(uint32_t insn)
-{
-
- /* op0 is encoded without the top bit in a mrs instruction */
- return (2 | ((insn & MRS_Op0_MASK) >> MRS_Op0_SHIFT));
-}
-
-#define MRS_GET(op) \
-static inline int \
-mrs_##op(uint32_t insn) \
-{ \
- \
- return ((insn & MRS_##op##_MASK) >> MRS_##op##_SHIFT); \
-}
-MRS_GET(Op1)
-MRS_GET(CRn)
-MRS_GET(CRm)
-MRS_GET(Op2)
+typedef bool (*undef_sys_handler_t)(uint64_t, struct trapframe *);
void undef_init(void);
-void *install_undef_handler(bool, undef_handler_t);
+void install_sys_handler(undef_sys_handler_t);
+void *install_undef_handler(undef_handler_t);
+#ifdef COMPAT_FREEBSD32
+void *install_undef32_handler(undef_handler_t);
+#endif
void remove_undef_handler(void *);
-int undef_insn(u_int, struct trapframe *);
+bool undef_sys(uint64_t, struct trapframe *);
+int undef_insn(struct trapframe *);
#endif /* _KERNEL */
diff --git a/sys/arm64/include/vfp.h b/sys/arm64/include/vfp.h
index 7f4c86e7737d..fc93908add0b 100644
--- a/sys/arm64/include/vfp.h
+++ b/sys/arm64/include/vfp.h
@@ -79,6 +79,13 @@ void vfp_reset_state(struct thread *, struct pcb *);
void vfp_restore_state(void);
void vfp_save_state(struct thread *, struct pcb *);
void vfp_save_state_savectx(struct pcb *);
+void vfp_save_state_switch(struct thread *);
+void vfp_to_sve_sync(struct thread *);
+void sve_to_vfp_sync(struct thread *);
+
+size_t sve_max_buf_size(void);
+size_t sve_buf_size(struct thread *);
+bool sve_restore_state(struct thread *);
struct fpu_kern_ctx;
diff --git a/sys/arm64/include/vmm.h b/sys/arm64/include/vmm.h
index 8e2c9c868635..1d783cdacb0d 100644
--- a/sys/arm64/include/vmm.h
+++ b/sys/arm64/include/vmm.h
@@ -102,14 +102,30 @@ enum vm_reg_name {
#define VM_INTINFO_HWEXCEPTION (3 << 8)
#define VM_INTINFO_SWINTR (4 << 8)
-#define VM_MAX_SUFFIXLEN 15
-
#define VM_GUEST_BASE_IPA 0x80000000UL /* Guest kernel start ipa */
-#ifdef _KERNEL
-
-#define VM_MAX_NAMELEN 32
+/*
+ * The VM name has to fit into the pathname length constraints of devfs,
+ * governed primarily by SPECNAMELEN. The length is the total number of
+ * characters in the full path, relative to the mount point and not
+ * including any leading '/' characters.
+ * A prefix and a suffix are added to the name specified by the user.
+ * The prefix is usually "vmm/" or "vmm.io/", but can be a few characters
+ * longer for future use.
+ * The suffix is a string that identifies a bootrom image or some similar
+ * image that is attached to the VM. A separator character gets added to
+ * the suffix automatically when generating the full path, so it must be
+ * accounted for, reducing the effective length by 1.
+ * The effective length of a VM name is 229 bytes for FreeBSD 13 and 37
+ * bytes for FreeBSD 12. A minimum length is set for safety and supports
+ * a SPECNAMELEN as small as 32 on old systems.
+ */
+#define VM_MAX_PREFIXLEN 10
+#define VM_MAX_SUFFIXLEN 15
+#define VM_MAX_NAMELEN \
+ (SPECNAMELEN - VM_MAX_PREFIXLEN - VM_MAX_SUFFIXLEN - 1)
+#ifdef _KERNEL
struct vm;
struct vm_exception;
struct vm_exit;
@@ -127,44 +143,13 @@ struct vm_eventinfo {
int vm_create(const char *name, struct vm **retvm);
struct vcpu *vm_alloc_vcpu(struct vm *vm, int vcpuid);
+void vm_disable_vcpu_creation(struct vm *vm);
void vm_slock_vcpus(struct vm *vm);
void vm_unlock_vcpus(struct vm *vm);
void vm_destroy(struct vm *vm);
int vm_reinit(struct vm *vm);
const char *vm_name(struct vm *vm);
-/*
- * APIs that modify the guest memory map require all vcpus to be frozen.
- */
-void vm_slock_memsegs(struct vm *vm);
-void vm_xlock_memsegs(struct vm *vm);
-void vm_unlock_memsegs(struct vm *vm);
-int vm_mmap_memseg(struct vm *vm, vm_paddr_t gpa, int segid, vm_ooffset_t off,
- size_t len, int prot, int flags);
-int vm_munmap_memseg(struct vm *vm, vm_paddr_t gpa, size_t len);
-int vm_alloc_memseg(struct vm *vm, int ident, size_t len, bool sysmem);
-void vm_free_memseg(struct vm *vm, int ident);
-
-/*
- * APIs that inspect the guest memory map require only a *single* vcpu to
- * be frozen. This acts like a read lock on the guest memory map since any
- * modification requires *all* vcpus to be frozen.
- */
-int vm_mmap_getnext(struct vm *vm, vm_paddr_t *gpa, int *segid,
- vm_ooffset_t *segoff, size_t *len, int *prot, int *flags);
-int vm_get_memseg(struct vm *vm, int ident, size_t *len, bool *sysmem,
- struct vm_object **objptr);
-vm_paddr_t vmm_sysmem_maxaddr(struct vm *vm);
-void *vm_gpa_hold(struct vcpu *vcpu, vm_paddr_t gpa, size_t len,
- int prot, void **cookie);
-void *vm_gpa_hold_global(struct vm *vm, vm_paddr_t gpa, size_t len,
- int prot, void **cookie);
-void vm_gpa_release(void *cookie);
-bool vm_mem_allocated(struct vcpu *vcpu, vm_paddr_t gpa);
-
-int vm_gla2gpa_nofault(struct vcpu *vcpu, struct vm_guest_paging *paging,
- uint64_t gla, int prot, uint64_t *gpa, int *is_fault);
-
uint16_t vm_get_maxcpus(struct vm *vm);
void vm_get_topology(struct vm *vm, uint16_t *sockets, uint16_t *cores,
uint16_t *threads, uint16_t *maxcpus);
@@ -200,13 +185,6 @@ cpuset_t vm_active_cpus(struct vm *vm);
cpuset_t vm_debug_cpus(struct vm *vm);
cpuset_t vm_suspended_cpus(struct vm *vm);
-static __inline bool
-virt_enabled(void)
-{
-
- return (has_hyp());
-}
-
static __inline int
vcpu_rendezvous_pending(struct vm_eventinfo *info)
{
@@ -252,6 +230,8 @@ vcpu_should_yield(struct vcpu *vcpu)
void *vcpu_stats(struct vcpu *vcpu);
void vcpu_notify_event(struct vcpu *vcpu);
+struct vmspace *vm_vmspace(struct vm *vm);
+struct vm_mem *vm_mem(struct vm *vm);
enum vm_reg_name vm_segment_name(int seg_encoding);
@@ -295,9 +275,11 @@ struct vre {
*/
enum vm_cap_type {
VM_CAP_HALT_EXIT,
- VM_CAP_MTRAP_EXIT,
VM_CAP_PAUSE_EXIT,
VM_CAP_UNRESTRICTED_GUEST,
+ VM_CAP_BRK_EXIT,
+ VM_CAP_SS_EXIT,
+ VM_CAP_MASK_HWINTR,
VM_CAP_MAX
};
@@ -312,6 +294,8 @@ enum vm_exitcode {
VM_EXITCODE_PAGING,
VM_EXITCODE_SMCCC,
VM_EXITCODE_DEBUG,
+ VM_EXITCODE_BRK,
+ VM_EXITCODE_SS,
VM_EXITCODE_MAX
};
diff --git a/sys/arm64/include/vmm_dev.h b/sys/arm64/include/vmm_dev.h
index 9e229665a71e..938bea47c7f8 100644
--- a/sys/arm64/include/vmm_dev.h
+++ b/sys/arm64/include/vmm_dev.h
@@ -27,10 +27,7 @@
#ifndef _VMM_DEV_H_
#define _VMM_DEV_H_
-#ifdef _KERNEL
-void vmmdev_init(void);
-int vmmdev_cleanup(void);
-#endif
+#include <machine/vmm.h>
struct vm_memmap {
vm_paddr_t gpa;
diff --git a/sys/arm64/include/vmparam.h b/sys/arm64/include/vmparam.h
index 83c55913f56e..349849845e73 100644
--- a/sys/arm64/include/vmparam.h
+++ b/sys/arm64/include/vmparam.h
@@ -73,14 +73,16 @@
#define VM_PHYSSEG_MAX 64
/*
- * Create two free page pools: VM_FREEPOOL_DEFAULT is the default pool
- * from which physical pages are allocated and VM_FREEPOOL_DIRECT is
- * the pool from which physical pages for small UMA objects are
- * allocated.
+ * Create three free page pools: VM_FREEPOOL_DEFAULT is the default pool from
+ * which physical pages are allocated and VM_FREEPOOL_DIRECT is the pool from
+ * which physical pages for page tables and small UMA objects are allocated.
+ * VM_FREEPOOL_LAZYINIT is a special-purpose pool that is populated only during
+ * boot and is used to implement deferred initialization of page structures.
*/
-#define VM_NFREEPOOL 2
-#define VM_FREEPOOL_DEFAULT 0
-#define VM_FREEPOOL_DIRECT 1
+#define VM_NFREEPOOL 3
+#define VM_FREEPOOL_LAZYINIT 0
+#define VM_FREEPOOL_DEFAULT 1
+#define VM_FREEPOOL_DIRECT 2
/*
* Create two free page lists: VM_FREELIST_DMA32 is for physical pages that have
@@ -112,25 +114,34 @@
#endif
/*
- * Enable superpage reservations: 1 level.
+ * Enable superpage reservations: 2 levels.
*/
#ifndef VM_NRESERVLEVEL
-#define VM_NRESERVLEVEL 1
+#define VM_NRESERVLEVEL 2
#endif
/*
- * Level 0 reservations consist of 512 pages when PAGE_SIZE is 4KB, and
- * 2048 pages when PAGE_SIZE is 16KB.
+ * Level 0 reservations consist of 16 pages when PAGE_SIZE is 4KB, and 128
+ * pages when PAGE_SIZE is 16KB. Level 1 reservations consist of 32 64KB
+ * pages when PAGE_SIZE is 4KB, and 16 2M pages when PAGE_SIZE is 16KB.
*/
-#ifndef VM_LEVEL_0_ORDER
#if PAGE_SIZE == PAGE_SIZE_4K
-#define VM_LEVEL_0_ORDER 9
+#ifndef VM_LEVEL_0_ORDER
+#define VM_LEVEL_0_ORDER 4
+#endif
+#ifndef VM_LEVEL_1_ORDER
+#define VM_LEVEL_1_ORDER 5
+#endif
#elif PAGE_SIZE == PAGE_SIZE_16K
-#define VM_LEVEL_0_ORDER 11
+#ifndef VM_LEVEL_0_ORDER
+#define VM_LEVEL_0_ORDER 7
+#endif
+#ifndef VM_LEVEL_1_ORDER
+#define VM_LEVEL_1_ORDER 4
+#endif
#else
#error Unsupported page size
#endif
-#endif
/**
* Address space layout.
@@ -293,7 +304,7 @@
#endif
#if !defined(KASAN) && !defined(KMSAN)
-#define UMA_MD_SMALL_ALLOC
+#define UMA_USE_DMAP
#endif
#ifndef LOCORE
@@ -301,7 +312,6 @@
extern vm_paddr_t dmap_phys_base;
extern vm_paddr_t dmap_phys_max;
extern vm_offset_t dmap_max_addr;
-extern vm_offset_t vm_max_kernel_address;
#endif
@@ -318,6 +328,7 @@ extern vm_offset_t vm_max_kernel_address;
* Need a page dump array for minidump.
*/
#define MINIDUMP_PAGE_TRACKING 1
+#define MINIDUMP_STARTUP_PAGE_TRACKING 1
#endif /* !_MACHINE_VMPARAM_H_ */