Merge branch kvm-arm64/tlbi-range into kvmarm-master/next
authorMarc Zyngier <maz@kernel.org>
Mon, 28 Aug 2023 08:29:02 +0000 (09:29 +0100)
committerMarc Zyngier <maz@kernel.org>
Mon, 28 Aug 2023 08:29:02 +0000 (09:29 +0100)
* kvm-arm64/tlbi-range:
  : .
  : FEAT_TLBIRANGE support, courtesy of Raghavendra Rao Ananta.
  : From the cover letter:
  :
  : "In certain code paths, KVM/ARM currently invalidates the entire VM's
  : page-tables instead of just invalidating a necessary range. For example,
  : when collapsing a table PTE to a block PTE, instead of iterating over
  : each PTE and flushing them, KVM uses 'vmalls12e1is' TLBI operation to
  : flush all the entries. This is inefficient since the guest would have
  : to refill the TLBs again, even for the addresses that aren't covered
  : by the table entry. The performance impact would scale poorly if many
  : addresses in the VM are going through this remapping.
  :
  : For architectures that implement FEAT_TLBIRANGE, KVM can replace such
  : inefficient paths by performing the invalidations only on the range of
  : addresses that are in scope. This series tries to achieve the same in
  : the areas of stage-2 map, unmap and write-protecting the pages."
  : .
  KVM: arm64: Use TLBI range-based instructions for unmap
  KVM: arm64: Invalidate the table entries upon a range
  KVM: arm64: Flush only the memslot after write-protect
  KVM: arm64: Implement kvm_arch_flush_remote_tlbs_range()
  KVM: arm64: Define kvm_tlb_flush_vmid_range()
  KVM: arm64: Implement __kvm_tlb_flush_vmid_range()
  arm64: tlb: Implement __flush_s2_tlb_range_op()
  arm64: tlb: Refactor the core flush algorithm of __flush_tlb_range
  KVM: Move kvm_arch_flush_remote_tlbs_memslot() to common code
  KVM: Allow range-based TLB invalidation from common code
  KVM: Remove CONFIG_HAVE_KVM_ARCH_TLB_FLUSH_ALL
  KVM: arm64: Use kvm_arch_flush_remote_tlbs()
  KVM: Declare kvm_arch_flush_remote_tlbs() globally
  KVM: Rename kvm_arch_flush_remote_tlb() to kvm_arch_flush_remote_tlbs()

Signed-off-by: Marc Zyngier <maz@kernel.org>
18 files changed:
arch/arm64/include/asm/kvm_arm.h
arch/arm64/include/asm/kvm_host.h
arch/arm64/include/asm/kvm_nested.h
arch/arm64/include/asm/sysreg.h
arch/arm64/kernel/cpufeature.c
arch/arm64/kvm/Kconfig
arch/arm64/kvm/arm.c
arch/arm64/kvm/emulate-nested.c
arch/arm64/kvm/guest.c
arch/arm64/kvm/handle_exit.c
arch/arm64/kvm/hyp/include/hyp/switch.h
arch/arm64/kvm/hyp/nvhe/switch.c
arch/arm64/kvm/nested.c
arch/arm64/kvm/reset.c
arch/arm64/kvm/sys_regs.c
arch/arm64/kvm/trace_arm.h
arch/arm64/tools/cpucaps
arch/arm64/tools/sysreg

index 58e5eb2..137f732 100644 (file)
 #define HCR_DCT                (UL(1) << 57)
 #define HCR_ATA_SHIFT  56
 #define HCR_ATA                (UL(1) << HCR_ATA_SHIFT)
+#define HCR_TTLBOS     (UL(1) << 55)
+#define HCR_TTLBIS     (UL(1) << 54)
+#define HCR_ENSCXT     (UL(1) << 53)
+#define HCR_TOCU       (UL(1) << 52)
 #define HCR_AMVOFFEN   (UL(1) << 51)
+#define HCR_TICAB      (UL(1) << 50)
 #define HCR_TID4       (UL(1) << 49)
 #define HCR_FIEN       (UL(1) << 47)
 #define HCR_FWB                (UL(1) << 46)
+#define HCR_NV2                (UL(1) << 45)
+#define HCR_AT         (UL(1) << 44)
+#define HCR_NV1                (UL(1) << 43)
+#define HCR_NV         (UL(1) << 42)
 #define HCR_API                (UL(1) << 41)
 #define HCR_APK                (UL(1) << 40)
 #define HCR_TEA                (UL(1) << 37)
                                 BIT(18) |              \
                                 GENMASK(16, 15))
 
+/*
+ * FGT register definitions
+ *
+ * RES0 and polarity masks as of DDI0487J.a, to be updated as needed.
+ * We're not using the generated masks as they are usually ahead of
+ * the published ARM ARM, which we use as a reference.
+ *
+ * Once we get to a point where the two describe the same thing, we'll
+ * merge the definitions. One day.
+ */
+#define __HFGRTR_EL2_RES0      (GENMASK(63, 56) | GENMASK(53, 51))
+#define __HFGRTR_EL2_MASK      GENMASK(49, 0)
+#define __HFGRTR_EL2_nMASK     (GENMASK(55, 54) | BIT(50))
+
+#define __HFGWTR_EL2_RES0      (GENMASK(63, 56) | GENMASK(53, 51) |    \
+                                BIT(46) | BIT(42) | BIT(40) | BIT(28) | \
+                                GENMASK(26, 25) | BIT(21) | BIT(18) |  \
+                                GENMASK(15, 14) | GENMASK(10, 9) | BIT(2))
+#define __HFGWTR_EL2_MASK      GENMASK(49, 0)
+#define __HFGWTR_EL2_nMASK     (GENMASK(55, 54) | BIT(50))
+
+#define __HFGITR_EL2_RES0      GENMASK(63, 57)
+#define __HFGITR_EL2_MASK      GENMASK(54, 0)
+#define __HFGITR_EL2_nMASK     GENMASK(56, 55)
+
+#define __HDFGRTR_EL2_RES0     (BIT(49) | BIT(42) | GENMASK(39, 38) |  \
+                                GENMASK(21, 20) | BIT(8))
+#define __HDFGRTR_EL2_MASK     ~__HDFGRTR_EL2_nMASK
+#define __HDFGRTR_EL2_nMASK    GENMASK(62, 59)
+
+#define __HDFGWTR_EL2_RES0     (BIT(63) | GENMASK(59, 58) | BIT(51) | BIT(47) | \
+                                BIT(43) | GENMASK(40, 38) | BIT(34) | BIT(30) | \
+                                BIT(22) | BIT(9) | BIT(6))
+#define __HDFGWTR_EL2_MASK     ~__HDFGWTR_EL2_nMASK
+#define __HDFGWTR_EL2_nMASK    GENMASK(62, 60)
+
+/* Similar definitions for HCRX_EL2 */
+#define __HCRX_EL2_RES0                (GENMASK(63, 16) | GENMASK(13, 12))
+#define __HCRX_EL2_MASK                (0)
+#define __HCRX_EL2_nMASK       (GENMASK(15, 14) | GENMASK(4, 0))
+
 /* Hyp Prefetch Fault Address Register (HPFAR/HDFAR) */
 #define HPFAR_MASK     (~UL(0xf))
 /*
index f623b98..967ee7e 100644 (file)
@@ -380,6 +380,7 @@ enum vcpu_sysreg {
        CPTR_EL2,       /* Architectural Feature Trap Register (EL2) */
        HSTR_EL2,       /* Hypervisor System Trap Register */
        HACR_EL2,       /* Hypervisor Auxiliary Control Register */
+       HCRX_EL2,       /* Extended Hypervisor Configuration Register */
        TTBR0_EL2,      /* Translation Table Base Register 0 (EL2) */
        TTBR1_EL2,      /* Translation Table Base Register 1 (EL2) */
        TCR_EL2,        /* Translation Control Register (EL2) */
@@ -400,6 +401,11 @@ enum vcpu_sysreg {
        TPIDR_EL2,      /* EL2 Software Thread ID Register */
        CNTHCTL_EL2,    /* Counter-timer Hypervisor Control register */
        SP_EL2,         /* EL2 Stack Pointer */
+       HFGRTR_EL2,
+       HFGWTR_EL2,
+       HFGITR_EL2,
+       HDFGRTR_EL2,
+       HDFGWTR_EL2,
        CNTHP_CTL_EL2,
        CNTHP_CVAL_EL2,
        CNTHV_CTL_EL2,
@@ -567,8 +573,7 @@ struct kvm_vcpu_arch {
        /* Cache some mmu pages needed inside spinlock regions */
        struct kvm_mmu_memory_cache mmu_page_cache;
 
-       /* Target CPU and feature flags */
-       int target;
+       /* feature flags */
        DECLARE_BITMAP(features, KVM_VCPU_MAX_FEATURES);
 
        /* Virtual SError ESR to restore when HCR_EL2.VSE is set */
@@ -669,6 +674,8 @@ struct kvm_vcpu_arch {
 #define VCPU_SVE_FINALIZED     __vcpu_single_flag(cflags, BIT(1))
 /* PTRAUTH exposed to guest */
 #define GUEST_HAS_PTRAUTH      __vcpu_single_flag(cflags, BIT(2))
+/* KVM_ARM_VCPU_INIT completed */
+#define VCPU_INITIALIZED       __vcpu_single_flag(cflags, BIT(3))
 
 /* Exception pending */
 #define PENDING_EXCEPTION      __vcpu_single_flag(iflags, BIT(0))
@@ -899,7 +906,6 @@ struct kvm_vcpu_stat {
        u64 exits;
 };
 
-void kvm_vcpu_preferred_target(struct kvm_vcpu_init *init);
 unsigned long kvm_arm_num_regs(struct kvm_vcpu *vcpu);
 int kvm_arm_copy_reg_indices(struct kvm_vcpu *vcpu, u64 __user *indices);
 int kvm_arm_get_reg(struct kvm_vcpu *vcpu, const struct kvm_one_reg *reg);
@@ -983,6 +989,7 @@ int kvm_handle_cp10_id(struct kvm_vcpu *vcpu);
 void kvm_reset_sys_regs(struct kvm_vcpu *vcpu);
 
 int __init kvm_sys_reg_table_init(void);
+int __init populate_nv_trap_config(void);
 
 bool lock_all_vcpus(struct kvm *kvm);
 void unlock_all_vcpus(struct kvm *kvm);
index 8fb67f0..fa23cc9 100644 (file)
@@ -11,6 +11,8 @@ static inline bool vcpu_has_nv(const struct kvm_vcpu *vcpu)
                test_bit(KVM_ARM_VCPU_HAS_EL2, vcpu->arch.features));
 }
 
+extern bool __check_nv_sr_forward(struct kvm_vcpu *vcpu);
+
 struct sys_reg_params;
 struct sys_reg_desc;
 
index b481935..818c111 100644 (file)
 #define SYS_DC_CIGSW                   sys_insn(1, 0, 7, 14, 4)
 #define SYS_DC_CIGDSW                  sys_insn(1, 0, 7, 14, 6)
 
+#define SYS_IC_IALLUIS                 sys_insn(1, 0, 7, 1, 0)
+#define SYS_IC_IALLU                   sys_insn(1, 0, 7, 5, 0)
+#define SYS_IC_IVAU                    sys_insn(1, 3, 7, 5, 1)
+
+#define SYS_DC_IVAC                    sys_insn(1, 0, 7, 6, 1)
+#define SYS_DC_IGVAC                   sys_insn(1, 0, 7, 6, 3)
+#define SYS_DC_IGDVAC                  sys_insn(1, 0, 7, 6, 5)
+
+#define SYS_DC_CVAC                    sys_insn(1, 3, 7, 10, 1)
+#define SYS_DC_CGVAC                   sys_insn(1, 3, 7, 10, 3)
+#define SYS_DC_CGDVAC                  sys_insn(1, 3, 7, 10, 5)
+
+#define SYS_DC_CVAU                    sys_insn(1, 3, 7, 11, 1)
+
+#define SYS_DC_CVAP                    sys_insn(1, 3, 7, 12, 1)
+#define SYS_DC_CGVAP                   sys_insn(1, 3, 7, 12, 3)
+#define SYS_DC_CGDVAP                  sys_insn(1, 3, 7, 12, 5)
+
+#define SYS_DC_CVADP                   sys_insn(1, 3, 7, 13, 1)
+#define SYS_DC_CGVADP                  sys_insn(1, 3, 7, 13, 3)
+#define SYS_DC_CGDVADP                 sys_insn(1, 3, 7, 13, 5)
+
+#define SYS_DC_CIVAC                   sys_insn(1, 3, 7, 14, 1)
+#define SYS_DC_CIGVAC                  sys_insn(1, 3, 7, 14, 3)
+#define SYS_DC_CIGDVAC                 sys_insn(1, 3, 7, 14, 5)
+
+/* Data cache zero operations */
+#define SYS_DC_ZVA                     sys_insn(1, 3, 7, 4, 1)
+#define SYS_DC_GVA                     sys_insn(1, 3, 7, 4, 3)
+#define SYS_DC_GZVA                    sys_insn(1, 3, 7, 4, 4)
+
 /*
  * Automatically generated definitions for system registers, the
  * manual encodings below are in the process of being converted to
 #define SYS_DBGDTRTX_EL0               sys_reg(2, 3, 0, 5, 0)
 #define SYS_DBGVCR32_EL2               sys_reg(2, 4, 0, 7, 0)
 
+#define SYS_BRBINF_EL1(n)              sys_reg(2, 1, 8, (n & 15), (((n & 16) >> 2) | 0))
+#define SYS_BRBINFINJ_EL1              sys_reg(2, 1, 9, 1, 0)
+#define SYS_BRBSRC_EL1(n)              sys_reg(2, 1, 8, (n & 15), (((n & 16) >> 2) | 1))
+#define SYS_BRBSRCINJ_EL1              sys_reg(2, 1, 9, 1, 1)
+#define SYS_BRBTGT_EL1(n)              sys_reg(2, 1, 8, (n & 15), (((n & 16) >> 2) | 2))
+#define SYS_BRBTGTINJ_EL1              sys_reg(2, 1, 9, 1, 2)
+#define SYS_BRBTS_EL1                  sys_reg(2, 1, 9, 0, 2)
+
+#define SYS_BRBCR_EL1                  sys_reg(2, 1, 9, 0, 0)
+#define SYS_BRBFCR_EL1                 sys_reg(2, 1, 9, 0, 1)
+#define SYS_BRBIDR0_EL1                        sys_reg(2, 1, 9, 2, 0)
+
+#define SYS_TRCITECR_EL1               sys_reg(3, 0, 1, 2, 3)
+#define SYS_TRCACATR(m)                        sys_reg(2, 1, 2, ((m & 7) << 1), (2 | (m >> 3)))
+#define SYS_TRCACVR(m)                 sys_reg(2, 1, 2, ((m & 7) << 1), (0 | (m >> 3)))
+#define SYS_TRCAUTHSTATUS              sys_reg(2, 1, 7, 14, 6)
+#define SYS_TRCAUXCTLR                 sys_reg(2, 1, 0, 6, 0)
+#define SYS_TRCBBCTLR                  sys_reg(2, 1, 0, 15, 0)
+#define SYS_TRCCCCTLR                  sys_reg(2, 1, 0, 14, 0)
+#define SYS_TRCCIDCCTLR0               sys_reg(2, 1, 3, 0, 2)
+#define SYS_TRCCIDCCTLR1               sys_reg(2, 1, 3, 1, 2)
+#define SYS_TRCCIDCVR(m)               sys_reg(2, 1, 3, ((m & 7) << 1), 0)
+#define SYS_TRCCLAIMCLR                        sys_reg(2, 1, 7, 9, 6)
+#define SYS_TRCCLAIMSET                        sys_reg(2, 1, 7, 8, 6)
+#define SYS_TRCCNTCTLR(m)              sys_reg(2, 1, 0, (4 | (m & 3)), 5)
+#define SYS_TRCCNTRLDVR(m)             sys_reg(2, 1, 0, (0 | (m & 3)), 5)
+#define SYS_TRCCNTVR(m)                        sys_reg(2, 1, 0, (8 | (m & 3)), 5)
+#define SYS_TRCCONFIGR                 sys_reg(2, 1, 0, 4, 0)
+#define SYS_TRCDEVARCH                 sys_reg(2, 1, 7, 15, 6)
+#define SYS_TRCDEVID                   sys_reg(2, 1, 7, 2, 7)
+#define SYS_TRCEVENTCTL0R              sys_reg(2, 1, 0, 8, 0)
+#define SYS_TRCEVENTCTL1R              sys_reg(2, 1, 0, 9, 0)
+#define SYS_TRCEXTINSELR(m)            sys_reg(2, 1, 0, (8 | (m & 3)), 4)
+#define SYS_TRCIDR0                    sys_reg(2, 1, 0, 8, 7)
+#define SYS_TRCIDR10                   sys_reg(2, 1, 0, 2, 6)
+#define SYS_TRCIDR11                   sys_reg(2, 1, 0, 3, 6)
+#define SYS_TRCIDR12                   sys_reg(2, 1, 0, 4, 6)
+#define SYS_TRCIDR13                   sys_reg(2, 1, 0, 5, 6)
+#define SYS_TRCIDR1                    sys_reg(2, 1, 0, 9, 7)
+#define SYS_TRCIDR2                    sys_reg(2, 1, 0, 10, 7)
+#define SYS_TRCIDR3                    sys_reg(2, 1, 0, 11, 7)
+#define SYS_TRCIDR4                    sys_reg(2, 1, 0, 12, 7)
+#define SYS_TRCIDR5                    sys_reg(2, 1, 0, 13, 7)
+#define SYS_TRCIDR6                    sys_reg(2, 1, 0, 14, 7)
+#define SYS_TRCIDR7                    sys_reg(2, 1, 0, 15, 7)
+#define SYS_TRCIDR8                    sys_reg(2, 1, 0, 0, 6)
+#define SYS_TRCIDR9                    sys_reg(2, 1, 0, 1, 6)
+#define SYS_TRCIMSPEC(m)               sys_reg(2, 1, 0, (m & 7), 7)
+#define SYS_TRCITEEDCR                 sys_reg(2, 1, 0, 2, 1)
+#define SYS_TRCOSLSR                   sys_reg(2, 1, 1, 1, 4)
+#define SYS_TRCPRGCTLR                 sys_reg(2, 1, 0, 1, 0)
+#define SYS_TRCQCTLR                   sys_reg(2, 1, 0, 1, 1)
+#define SYS_TRCRSCTLR(m)               sys_reg(2, 1, 1, (m & 15), (0 | (m >> 4)))
+#define SYS_TRCRSR                     sys_reg(2, 1, 0, 10, 0)
+#define SYS_TRCSEQEVR(m)               sys_reg(2, 1, 0, (m & 3), 4)
+#define SYS_TRCSEQRSTEVR               sys_reg(2, 1, 0, 6, 4)
+#define SYS_TRCSEQSTR                  sys_reg(2, 1, 0, 7, 4)
+#define SYS_TRCSSCCR(m)                        sys_reg(2, 1, 1, (m & 7), 2)
+#define SYS_TRCSSCSR(m)                        sys_reg(2, 1, 1, (8 | (m & 7)), 2)
+#define SYS_TRCSSPCICR(m)              sys_reg(2, 1, 1, (m & 7), 3)
+#define SYS_TRCSTALLCTLR               sys_reg(2, 1, 0, 11, 0)
+#define SYS_TRCSTATR                   sys_reg(2, 1, 0, 3, 0)
+#define SYS_TRCSYNCPR                  sys_reg(2, 1, 0, 13, 0)
+#define SYS_TRCTRACEIDR                        sys_reg(2, 1, 0, 0, 1)
+#define SYS_TRCTSCTLR                  sys_reg(2, 1, 0, 12, 0)
+#define SYS_TRCVICTLR                  sys_reg(2, 1, 0, 0, 2)
+#define SYS_TRCVIIECTLR                        sys_reg(2, 1, 0, 1, 2)
+#define SYS_TRCVIPCSSCTLR              sys_reg(2, 1, 0, 3, 2)
+#define SYS_TRCVISSCTLR                        sys_reg(2, 1, 0, 2, 2)
+#define SYS_TRCVMIDCCTLR0              sys_reg(2, 1, 3, 2, 2)
+#define SYS_TRCVMIDCCTLR1              sys_reg(2, 1, 3, 3, 2)
+#define SYS_TRCVMIDCVR(m)              sys_reg(2, 1, 3, ((m & 7) << 1), 1)
+
+/* ETM */
+#define SYS_TRCOSLAR                   sys_reg(2, 1, 1, 0, 4)
+
 #define SYS_MIDR_EL1                   sys_reg(3, 0, 0, 0, 0)
 #define SYS_MPIDR_EL1                  sys_reg(3, 0, 0, 0, 5)
 #define SYS_REVIDR_EL1                 sys_reg(3, 0, 0, 0, 6)
 #define SYS_ERXCTLR_EL1                        sys_reg(3, 0, 5, 4, 1)
 #define SYS_ERXSTATUS_EL1              sys_reg(3, 0, 5, 4, 2)
 #define SYS_ERXADDR_EL1                        sys_reg(3, 0, 5, 4, 3)
+#define SYS_ERXPFGF_EL1                        sys_reg(3, 0, 5, 4, 4)
+#define SYS_ERXPFGCTL_EL1              sys_reg(3, 0, 5, 4, 5)
+#define SYS_ERXPFGCDN_EL1              sys_reg(3, 0, 5, 4, 6)
 #define SYS_ERXMISC0_EL1               sys_reg(3, 0, 5, 5, 0)
 #define SYS_ERXMISC1_EL1               sys_reg(3, 0, 5, 5, 1)
+#define SYS_ERXMISC2_EL1               sys_reg(3, 0, 5, 5, 2)
+#define SYS_ERXMISC3_EL1               sys_reg(3, 0, 5, 5, 3)
 #define SYS_TFSR_EL1                   sys_reg(3, 0, 5, 6, 0)
 #define SYS_TFSRE0_EL1                 sys_reg(3, 0, 5, 6, 1)
 
 #define SYS_ICC_IGRPEN0_EL1            sys_reg(3, 0, 12, 12, 6)
 #define SYS_ICC_IGRPEN1_EL1            sys_reg(3, 0, 12, 12, 7)
 
+#define SYS_ACCDATA_EL1                        sys_reg(3, 0, 13, 0, 5)
+
 #define SYS_CNTKCTL_EL1                        sys_reg(3, 0, 14, 1, 0)
 
 #define SYS_AIDR_EL1                   sys_reg(3, 1, 0, 0, 7)
 #define SYS_VTCR_EL2                   sys_reg(3, 4, 2, 1, 2)
 
 #define SYS_TRFCR_EL2                  sys_reg(3, 4, 1, 2, 1)
-#define SYS_HDFGRTR_EL2                        sys_reg(3, 4, 3, 1, 4)
-#define SYS_HDFGWTR_EL2                        sys_reg(3, 4, 3, 1, 5)
 #define SYS_HAFGRTR_EL2                        sys_reg(3, 4, 3, 1, 6)
 #define SYS_SPSR_EL2                   sys_reg(3, 4, 4, 0, 0)
 #define SYS_ELR_EL2                    sys_reg(3, 4, 4, 0, 1)
 
 #define SYS_SP_EL2                     sys_reg(3, 6,  4, 1, 0)
 
+/* AT instructions */
+#define AT_Op0 1
+#define AT_CRn 7
+
+#define OP_AT_S1E1R    sys_insn(AT_Op0, 0, AT_CRn, 8, 0)
+#define OP_AT_S1E1W    sys_insn(AT_Op0, 0, AT_CRn, 8, 1)
+#define OP_AT_S1E0R    sys_insn(AT_Op0, 0, AT_CRn, 8, 2)
+#define OP_AT_S1E0W    sys_insn(AT_Op0, 0, AT_CRn, 8, 3)
+#define OP_AT_S1E1RP   sys_insn(AT_Op0, 0, AT_CRn, 9, 0)
+#define OP_AT_S1E1WP   sys_insn(AT_Op0, 0, AT_CRn, 9, 1)
+#define OP_AT_S1E2R    sys_insn(AT_Op0, 4, AT_CRn, 8, 0)
+#define OP_AT_S1E2W    sys_insn(AT_Op0, 4, AT_CRn, 8, 1)
+#define OP_AT_S12E1R   sys_insn(AT_Op0, 4, AT_CRn, 8, 4)
+#define OP_AT_S12E1W   sys_insn(AT_Op0, 4, AT_CRn, 8, 5)
+#define OP_AT_S12E0R   sys_insn(AT_Op0, 4, AT_CRn, 8, 6)
+#define OP_AT_S12E0W   sys_insn(AT_Op0, 4, AT_CRn, 8, 7)
+
+/* TLBI instructions */
+#define OP_TLBI_VMALLE1OS              sys_insn(1, 0, 8, 1, 0)
+#define OP_TLBI_VAE1OS                 sys_insn(1, 0, 8, 1, 1)
+#define OP_TLBI_ASIDE1OS               sys_insn(1, 0, 8, 1, 2)
+#define OP_TLBI_VAAE1OS                        sys_insn(1, 0, 8, 1, 3)
+#define OP_TLBI_VALE1OS                        sys_insn(1, 0, 8, 1, 5)
+#define OP_TLBI_VAALE1OS               sys_insn(1, 0, 8, 1, 7)
+#define OP_TLBI_RVAE1IS                        sys_insn(1, 0, 8, 2, 1)
+#define OP_TLBI_RVAAE1IS               sys_insn(1, 0, 8, 2, 3)
+#define OP_TLBI_RVALE1IS               sys_insn(1, 0, 8, 2, 5)
+#define OP_TLBI_RVAALE1IS              sys_insn(1, 0, 8, 2, 7)
+#define OP_TLBI_VMALLE1IS              sys_insn(1, 0, 8, 3, 0)
+#define OP_TLBI_VAE1IS                 sys_insn(1, 0, 8, 3, 1)
+#define OP_TLBI_ASIDE1IS               sys_insn(1, 0, 8, 3, 2)
+#define OP_TLBI_VAAE1IS                        sys_insn(1, 0, 8, 3, 3)
+#define OP_TLBI_VALE1IS                        sys_insn(1, 0, 8, 3, 5)
+#define OP_TLBI_VAALE1IS               sys_insn(1, 0, 8, 3, 7)
+#define OP_TLBI_RVAE1OS                        sys_insn(1, 0, 8, 5, 1)
+#define OP_TLBI_RVAAE1OS               sys_insn(1, 0, 8, 5, 3)
+#define OP_TLBI_RVALE1OS               sys_insn(1, 0, 8, 5, 5)
+#define OP_TLBI_RVAALE1OS              sys_insn(1, 0, 8, 5, 7)
+#define OP_TLBI_RVAE1                  sys_insn(1, 0, 8, 6, 1)
+#define OP_TLBI_RVAAE1                 sys_insn(1, 0, 8, 6, 3)
+#define OP_TLBI_RVALE1                 sys_insn(1, 0, 8, 6, 5)
+#define OP_TLBI_RVAALE1                        sys_insn(1, 0, 8, 6, 7)
+#define OP_TLBI_VMALLE1                        sys_insn(1, 0, 8, 7, 0)
+#define OP_TLBI_VAE1                   sys_insn(1, 0, 8, 7, 1)
+#define OP_TLBI_ASIDE1                 sys_insn(1, 0, 8, 7, 2)
+#define OP_TLBI_VAAE1                  sys_insn(1, 0, 8, 7, 3)
+#define OP_TLBI_VALE1                  sys_insn(1, 0, 8, 7, 5)
+#define OP_TLBI_VAALE1                 sys_insn(1, 0, 8, 7, 7)
+#define OP_TLBI_VMALLE1OSNXS           sys_insn(1, 0, 9, 1, 0)
+#define OP_TLBI_VAE1OSNXS              sys_insn(1, 0, 9, 1, 1)
+#define OP_TLBI_ASIDE1OSNXS            sys_insn(1, 0, 9, 1, 2)
+#define OP_TLBI_VAAE1OSNXS             sys_insn(1, 0, 9, 1, 3)
+#define OP_TLBI_VALE1OSNXS             sys_insn(1, 0, 9, 1, 5)
+#define OP_TLBI_VAALE1OSNXS            sys_insn(1, 0, 9, 1, 7)
+#define OP_TLBI_RVAE1ISNXS             sys_insn(1, 0, 9, 2, 1)
+#define OP_TLBI_RVAAE1ISNXS            sys_insn(1, 0, 9, 2, 3)
+#define OP_TLBI_RVALE1ISNXS            sys_insn(1, 0, 9, 2, 5)
+#define OP_TLBI_RVAALE1ISNXS           sys_insn(1, 0, 9, 2, 7)
+#define OP_TLBI_VMALLE1ISNXS           sys_insn(1, 0, 9, 3, 0)
+#define OP_TLBI_VAE1ISNXS              sys_insn(1, 0, 9, 3, 1)
+#define OP_TLBI_ASIDE1ISNXS            sys_insn(1, 0, 9, 3, 2)
+#define OP_TLBI_VAAE1ISNXS             sys_insn(1, 0, 9, 3, 3)
+#define OP_TLBI_VALE1ISNXS             sys_insn(1, 0, 9, 3, 5)
+#define OP_TLBI_VAALE1ISNXS            sys_insn(1, 0, 9, 3, 7)
+#define OP_TLBI_RVAE1OSNXS             sys_insn(1, 0, 9, 5, 1)
+#define OP_TLBI_RVAAE1OSNXS            sys_insn(1, 0, 9, 5, 3)
+#define OP_TLBI_RVALE1OSNXS            sys_insn(1, 0, 9, 5, 5)
+#define OP_TLBI_RVAALE1OSNXS           sys_insn(1, 0, 9, 5, 7)
+#define OP_TLBI_RVAE1NXS               sys_insn(1, 0, 9, 6, 1)
+#define OP_TLBI_RVAAE1NXS              sys_insn(1, 0, 9, 6, 3)
+#define OP_TLBI_RVALE1NXS              sys_insn(1, 0, 9, 6, 5)
+#define OP_TLBI_RVAALE1NXS             sys_insn(1, 0, 9, 6, 7)
+#define OP_TLBI_VMALLE1NXS             sys_insn(1, 0, 9, 7, 0)
+#define OP_TLBI_VAE1NXS                        sys_insn(1, 0, 9, 7, 1)
+#define OP_TLBI_ASIDE1NXS              sys_insn(1, 0, 9, 7, 2)
+#define OP_TLBI_VAAE1NXS               sys_insn(1, 0, 9, 7, 3)
+#define OP_TLBI_VALE1NXS               sys_insn(1, 0, 9, 7, 5)
+#define OP_TLBI_VAALE1NXS              sys_insn(1, 0, 9, 7, 7)
+#define OP_TLBI_IPAS2E1IS              sys_insn(1, 4, 8, 0, 1)
+#define OP_TLBI_RIPAS2E1IS             sys_insn(1, 4, 8, 0, 2)
+#define OP_TLBI_IPAS2LE1IS             sys_insn(1, 4, 8, 0, 5)
+#define OP_TLBI_RIPAS2LE1IS            sys_insn(1, 4, 8, 0, 6)
+#define OP_TLBI_ALLE2OS                        sys_insn(1, 4, 8, 1, 0)
+#define OP_TLBI_VAE2OS                 sys_insn(1, 4, 8, 1, 1)
+#define OP_TLBI_ALLE1OS                        sys_insn(1, 4, 8, 1, 4)
+#define OP_TLBI_VALE2OS                        sys_insn(1, 4, 8, 1, 5)
+#define OP_TLBI_VMALLS12E1OS           sys_insn(1, 4, 8, 1, 6)
+#define OP_TLBI_RVAE2IS                        sys_insn(1, 4, 8, 2, 1)
+#define OP_TLBI_RVALE2IS               sys_insn(1, 4, 8, 2, 5)
+#define OP_TLBI_ALLE2IS                        sys_insn(1, 4, 8, 3, 0)
+#define OP_TLBI_VAE2IS                 sys_insn(1, 4, 8, 3, 1)
+#define OP_TLBI_ALLE1IS                        sys_insn(1, 4, 8, 3, 4)
+#define OP_TLBI_VALE2IS                        sys_insn(1, 4, 8, 3, 5)
+#define OP_TLBI_VMALLS12E1IS           sys_insn(1, 4, 8, 3, 6)
+#define OP_TLBI_IPAS2E1OS              sys_insn(1, 4, 8, 4, 0)
+#define OP_TLBI_IPAS2E1                        sys_insn(1, 4, 8, 4, 1)
+#define OP_TLBI_RIPAS2E1               sys_insn(1, 4, 8, 4, 2)
+#define OP_TLBI_RIPAS2E1OS             sys_insn(1, 4, 8, 4, 3)
+#define OP_TLBI_IPAS2LE1OS             sys_insn(1, 4, 8, 4, 4)
+#define OP_TLBI_IPAS2LE1               sys_insn(1, 4, 8, 4, 5)
+#define OP_TLBI_RIPAS2LE1              sys_insn(1, 4, 8, 4, 6)
+#define OP_TLBI_RIPAS2LE1OS            sys_insn(1, 4, 8, 4, 7)
+#define OP_TLBI_RVAE2OS                        sys_insn(1, 4, 8, 5, 1)
+#define OP_TLBI_RVALE2OS               sys_insn(1, 4, 8, 5, 5)
+#define OP_TLBI_RVAE2                  sys_insn(1, 4, 8, 6, 1)
+#define OP_TLBI_RVALE2                 sys_insn(1, 4, 8, 6, 5)
+#define OP_TLBI_ALLE2                  sys_insn(1, 4, 8, 7, 0)
+#define OP_TLBI_VAE2                   sys_insn(1, 4, 8, 7, 1)
+#define OP_TLBI_ALLE1                  sys_insn(1, 4, 8, 7, 4)
+#define OP_TLBI_VALE2                  sys_insn(1, 4, 8, 7, 5)
+#define OP_TLBI_VMALLS12E1             sys_insn(1, 4, 8, 7, 6)
+#define OP_TLBI_IPAS2E1ISNXS           sys_insn(1, 4, 9, 0, 1)
+#define OP_TLBI_RIPAS2E1ISNXS          sys_insn(1, 4, 9, 0, 2)
+#define OP_TLBI_IPAS2LE1ISNXS          sys_insn(1, 4, 9, 0, 5)
+#define OP_TLBI_RIPAS2LE1ISNXS         sys_insn(1, 4, 9, 0, 6)
+#define OP_TLBI_ALLE2OSNXS             sys_insn(1, 4, 9, 1, 0)
+#define OP_TLBI_VAE2OSNXS              sys_insn(1, 4, 9, 1, 1)
+#define OP_TLBI_ALLE1OSNXS             sys_insn(1, 4, 9, 1, 4)
+#define OP_TLBI_VALE2OSNXS             sys_insn(1, 4, 9, 1, 5)
+#define OP_TLBI_VMALLS12E1OSNXS                sys_insn(1, 4, 9, 1, 6)
+#define OP_TLBI_RVAE2ISNXS             sys_insn(1, 4, 9, 2, 1)
+#define OP_TLBI_RVALE2ISNXS            sys_insn(1, 4, 9, 2, 5)
+#define OP_TLBI_ALLE2ISNXS             sys_insn(1, 4, 9, 3, 0)
+#define OP_TLBI_VAE2ISNXS              sys_insn(1, 4, 9, 3, 1)
+#define OP_TLBI_ALLE1ISNXS             sys_insn(1, 4, 9, 3, 4)
+#define OP_TLBI_VALE2ISNXS             sys_insn(1, 4, 9, 3, 5)
+#define OP_TLBI_VMALLS12E1ISNXS                sys_insn(1, 4, 9, 3, 6)
+#define OP_TLBI_IPAS2E1OSNXS           sys_insn(1, 4, 9, 4, 0)
+#define OP_TLBI_IPAS2E1NXS             sys_insn(1, 4, 9, 4, 1)
+#define OP_TLBI_RIPAS2E1NXS            sys_insn(1, 4, 9, 4, 2)
+#define OP_TLBI_RIPAS2E1OSNXS          sys_insn(1, 4, 9, 4, 3)
+#define OP_TLBI_IPAS2LE1OSNXS          sys_insn(1, 4, 9, 4, 4)
+#define OP_TLBI_IPAS2LE1NXS            sys_insn(1, 4, 9, 4, 5)
+#define OP_TLBI_RIPAS2LE1NXS           sys_insn(1, 4, 9, 4, 6)
+#define OP_TLBI_RIPAS2LE1OSNXS         sys_insn(1, 4, 9, 4, 7)
+#define OP_TLBI_RVAE2OSNXS             sys_insn(1, 4, 9, 5, 1)
+#define OP_TLBI_RVALE2OSNXS            sys_insn(1, 4, 9, 5, 5)
+#define OP_TLBI_RVAE2NXS               sys_insn(1, 4, 9, 6, 1)
+#define OP_TLBI_RVALE2NXS              sys_insn(1, 4, 9, 6, 5)
+#define OP_TLBI_ALLE2NXS               sys_insn(1, 4, 9, 7, 0)
+#define OP_TLBI_VAE2NXS                        sys_insn(1, 4, 9, 7, 1)
+#define OP_TLBI_ALLE1NXS               sys_insn(1, 4, 9, 7, 4)
+#define OP_TLBI_VALE2NXS               sys_insn(1, 4, 9, 7, 5)
+#define OP_TLBI_VMALLS12E1NXS          sys_insn(1, 4, 9, 7, 6)
+
+/* Misc instructions */
+#define OP_BRB_IALL                    sys_insn(1, 1, 7, 2, 4)
+#define OP_BRB_INJ                     sys_insn(1, 1, 7, 2, 5)
+#define OP_CFP_RCTX                    sys_insn(1, 3, 7, 3, 4)
+#define OP_DVP_RCTX                    sys_insn(1, 3, 7, 3, 5)
+#define OP_CPP_RCTX                    sys_insn(1, 3, 7, 3, 7)
+
 /* Common SCTLR_ELx flags. */
 #define SCTLR_ELx_ENTP2        (BIT(60))
 #define SCTLR_ELx_DSSBS        (BIT(44))
index f9d456f..668e287 100644 (file)
@@ -2627,6 +2627,13 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .matches = has_cpuid_feature,
                ARM64_CPUID_FIELDS(ID_AA64ISAR1_EL1, LRCPC, IMP)
        },
+       {
+               .desc = "Fine Grained Traps",
+               .type = ARM64_CPUCAP_SYSTEM_FEATURE,
+               .capability = ARM64_HAS_FGT,
+               .matches = has_cpuid_feature,
+               ARM64_CPUID_FIELDS(ID_AA64MMFR0_EL1, FGT, IMP)
+       },
 #ifdef CONFIG_ARM64_SME
        {
                .desc = "Scalable Matrix Extension",
index 6b730fc..83c1e09 100644 (file)
@@ -42,6 +42,7 @@ menuconfig KVM
        select SCHED_INFO
        select GUEST_PERF_EVENTS if PERF_EVENTS
        select INTERVAL_TREE
+       select XARRAY_MULTI
        help
          Support hosting virtualized guest machines.
 
index fd2af63..1cad736 100644 (file)
@@ -36,6 +36,7 @@
 #include <asm/kvm_arm.h>
 #include <asm/kvm_asm.h>
 #include <asm/kvm_mmu.h>
+#include <asm/kvm_nested.h>
 #include <asm/kvm_pkvm.h>
 #include <asm/kvm_emulate.h>
 #include <asm/sections.h>
@@ -365,7 +366,7 @@ int kvm_arch_vcpu_create(struct kvm_vcpu *vcpu)
 #endif
 
        /* Force users to call KVM_ARM_VCPU_INIT */
-       vcpu->arch.target = -1;
+       vcpu_clear_flag(vcpu, VCPU_INITIALIZED);
        bitmap_zero(vcpu->arch.features, KVM_VCPU_MAX_FEATURES);
 
        vcpu->arch.mmu_page_cache.gfp_zero = __GFP_ZERO;
@@ -574,7 +575,7 @@ unsigned long kvm_arch_vcpu_get_ip(struct kvm_vcpu *vcpu)
 
 static int kvm_vcpu_initialized(struct kvm_vcpu *vcpu)
 {
-       return vcpu->arch.target >= 0;
+       return vcpu_get_flag(vcpu, VCPU_INITIALIZED);
 }
 
 /*
@@ -818,6 +819,9 @@ static bool vcpu_mode_is_bad_32bit(struct kvm_vcpu *vcpu)
        if (likely(!vcpu_mode_is_32bit(vcpu)))
                return false;
 
+       if (vcpu_has_nv(vcpu))
+               return true;
+
        return !kvm_supports_32bit_el0();
 }
 
@@ -1058,7 +1062,7 @@ int kvm_arch_vcpu_ioctl_run(struct kvm_vcpu *vcpu)
                         * invalid. The VMM can try and fix it by issuing  a
                         * KVM_ARM_VCPU_INIT if it really wants to.
                         */
-                       vcpu->arch.target = -1;
+                       vcpu_clear_flag(vcpu, VCPU_INITIALIZED);
                        ret = ARM_EXCEPTION_IL;
                }
 
@@ -1219,8 +1223,7 @@ static bool kvm_vcpu_init_changed(struct kvm_vcpu *vcpu,
 {
        unsigned long features = init->features[0];
 
-       return !bitmap_equal(vcpu->arch.features, &features, KVM_VCPU_MAX_FEATURES) ||
-                       vcpu->arch.target != init->target;
+       return !bitmap_equal(vcpu->arch.features, &features, KVM_VCPU_MAX_FEATURES);
 }
 
 static int __kvm_vcpu_set_target(struct kvm_vcpu *vcpu,
@@ -1236,20 +1239,18 @@ static int __kvm_vcpu_set_target(struct kvm_vcpu *vcpu,
            !bitmap_equal(kvm->arch.vcpu_features, &features, KVM_VCPU_MAX_FEATURES))
                goto out_unlock;
 
-       vcpu->arch.target = init->target;
        bitmap_copy(vcpu->arch.features, &features, KVM_VCPU_MAX_FEATURES);
 
        /* Now we know what it is, we can reset it. */
        ret = kvm_reset_vcpu(vcpu);
        if (ret) {
-               vcpu->arch.target = -1;
                bitmap_zero(vcpu->arch.features, KVM_VCPU_MAX_FEATURES);
                goto out_unlock;
        }
 
        bitmap_copy(kvm->arch.vcpu_features, &features, KVM_VCPU_MAX_FEATURES);
        set_bit(KVM_ARCH_FLAG_VCPU_FEATURES_CONFIGURED, &kvm->arch.flags);
-
+       vcpu_set_flag(vcpu, VCPU_INITIALIZED);
 out_unlock:
        mutex_unlock(&kvm->arch.config_lock);
        return ret;
@@ -1260,14 +1261,15 @@ static int kvm_vcpu_set_target(struct kvm_vcpu *vcpu,
 {
        int ret;
 
-       if (init->target != kvm_target_cpu())
+       if (init->target != KVM_ARM_TARGET_GENERIC_V8 &&
+           init->target != kvm_target_cpu())
                return -EINVAL;
 
        ret = kvm_vcpu_init_check_features(vcpu, init);
        if (ret)
                return ret;
 
-       if (vcpu->arch.target == -1)
+       if (!kvm_vcpu_initialized(vcpu))
                return __kvm_vcpu_set_target(vcpu, init);
 
        if (kvm_vcpu_init_changed(vcpu, init))
@@ -1589,9 +1591,9 @@ int kvm_arch_vm_ioctl(struct file *filp, unsigned int ioctl, unsigned long arg)
                return kvm_vm_ioctl_set_device_addr(kvm, &dev_addr);
        }
        case KVM_ARM_PREFERRED_TARGET: {
-               struct kvm_vcpu_init init;
-
-               kvm_vcpu_preferred_target(&init);
+               struct kvm_vcpu_init init = {
+                       .target = KVM_ARM_TARGET_GENERIC_V8,
+               };
 
                if (copy_to_user(argp, &init, sizeof(init)))
                        return -EFAULT;
index b966620..9ced1bf 100644 (file)
 
 #include "trace.h"
 
+/*
+ * How a trapped access is to be treated: emulated locally, or
+ * forwarded to the (virtual) nested hypervisor on read, write, or both.
+ */
+enum trap_behaviour {
+       BEHAVE_HANDLE_LOCALLY   = 0,
+       BEHAVE_FORWARD_READ     = BIT(0),
+       BEHAVE_FORWARD_WRITE    = BIT(1),
+       BEHAVE_FORWARD_ANY      = BEHAVE_FORWARD_READ | BEHAVE_FORWARD_WRITE,
+};
+
+/*
+ * One coarse-grained trap control: 'behaviour' applies when the guest's
+ * view of the sysreg selected by 'index', masked with 'mask', equals
+ * 'value'.
+ */
+struct trap_bits {
+       const enum vcpu_sysreg          index;
+       const enum trap_behaviour       behaviour;
+       const u64                       value;
+       const u64                       mask;
+};
+
+/* Coarse Grained Trap definitions */
+enum cgt_group_id {
+       /* Indicates no coarse trap control */
+       __RESERVED__,
+
+       /*
+        * The first batch of IDs denote coarse trapping that are used
+        * on their own instead of being part of a combination of
+        * trap controls. Each of these ids directly indexes the
+        * coarse_trap_bits[] array.
+        */
+       CGT_HCR_TID1,
+       CGT_HCR_TID2,
+       CGT_HCR_TID3,
+       CGT_HCR_IMO,
+       CGT_HCR_FMO,
+       CGT_HCR_TIDCP,
+       CGT_HCR_TACR,
+       CGT_HCR_TSW,
+       CGT_HCR_TPC,
+       CGT_HCR_TPU,
+       CGT_HCR_TTLB,
+       CGT_HCR_TVM,
+       CGT_HCR_TDZ,
+       CGT_HCR_TRVM,
+       CGT_HCR_TLOR,
+       CGT_HCR_TERR,
+       CGT_HCR_APK,
+       CGT_HCR_NV,
+       CGT_HCR_NV_nNV2,
+       CGT_HCR_NV1_nNV2,
+       CGT_HCR_AT,
+       CGT_HCR_nFIEN,
+       CGT_HCR_TID4,
+       CGT_HCR_TICAB,
+       CGT_HCR_TOCU,
+       CGT_HCR_ENSCXT,
+       CGT_HCR_TTLBIS,
+       CGT_HCR_TTLBOS,
+
+       CGT_MDCR_TPMCR,
+       CGT_MDCR_TPM,
+       CGT_MDCR_TDE,
+       CGT_MDCR_TDA,
+       CGT_MDCR_TDOSA,
+       CGT_MDCR_TDRA,
+       CGT_MDCR_E2PB,
+       CGT_MDCR_TPMS,
+       CGT_MDCR_TTRF,
+       CGT_MDCR_E2TB,
+       CGT_MDCR_TDCC,
+
+       /*
+        * Anything after this point is a combination of coarse trap
+        * controls, which must all be evaluated to decide what to do.
+        * These ids index coarse_control_combo[], offset by
+        * __MULTIPLE_CONTROL_BITS__ (see the MCB() macro).
+        */
+       __MULTIPLE_CONTROL_BITS__,
+       CGT_HCR_IMO_FMO = __MULTIPLE_CONTROL_BITS__,
+       CGT_HCR_TID2_TID4,
+       CGT_HCR_TTLB_TTLBIS,
+       CGT_HCR_TTLB_TTLBOS,
+       CGT_HCR_TVM_TRVM,
+       CGT_HCR_TPU_TICAB,
+       CGT_HCR_TPU_TOCU,
+       CGT_HCR_NV1_nNV2_ENSCXT,
+       CGT_MDCR_TPM_TPMCR,
+       CGT_MDCR_TDE_TDA,
+       CGT_MDCR_TDE_TDOSA,
+       CGT_MDCR_TDE_TDRA,
+       CGT_MDCR_TDCC_TDE_TDA,
+
+       /*
+        * Anything after this point requires a callback evaluating a
+        * complex trap condition. Ugly stuff. These ids index ccc[],
+        * offset by __COMPLEX_CONDITIONS__ (see the CCC() macro).
+        */
+       __COMPLEX_CONDITIONS__,
+       CGT_CNTHCTL_EL1PCTEN = __COMPLEX_CONDITIONS__,
+       CGT_CNTHCTL_EL1PTEN,
+
+       /* Must be last */
+       __NR_CGT_GROUP_IDS__
+};
+
+/*
+ * Trap bit descriptions for the simple (single-control) CGT ids.
+ * Entries whose .value is 0 while .mask is non-zero describe
+ * negative-polarity controls: the access is forwarded when the
+ * control bit is *clear*.
+ */
+static const struct trap_bits coarse_trap_bits[] = {
+       [CGT_HCR_TID1] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TID1,
+               .mask           = HCR_TID1,
+               .behaviour      = BEHAVE_FORWARD_READ,
+       },
+       [CGT_HCR_TID2] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TID2,
+               .mask           = HCR_TID2,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TID3] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TID3,
+               .mask           = HCR_TID3,
+               .behaviour      = BEHAVE_FORWARD_READ,
+       },
+       [CGT_HCR_IMO] = {
+               .index          = HCR_EL2,
+               .value          = HCR_IMO,
+               .mask           = HCR_IMO,
+               .behaviour      = BEHAVE_FORWARD_WRITE,
+       },
+       [CGT_HCR_FMO] = {
+               .index          = HCR_EL2,
+               .value          = HCR_FMO,
+               .mask           = HCR_FMO,
+               .behaviour      = BEHAVE_FORWARD_WRITE,
+       },
+       [CGT_HCR_TIDCP] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TIDCP,
+               .mask           = HCR_TIDCP,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TACR] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TACR,
+               .mask           = HCR_TACR,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TSW] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TSW,
+               .mask           = HCR_TSW,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TPC] = { /* Also called TCPC when FEAT_DPB is implemented */
+               .index          = HCR_EL2,
+               .value          = HCR_TPC,
+               .mask           = HCR_TPC,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TPU] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TPU,
+               .mask           = HCR_TPU,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TTLB] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TTLB,
+               .mask           = HCR_TTLB,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TVM] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TVM,
+               .mask           = HCR_TVM,
+               .behaviour      = BEHAVE_FORWARD_WRITE,
+       },
+       [CGT_HCR_TDZ] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TDZ,
+               .mask           = HCR_TDZ,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TRVM] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TRVM,
+               .mask           = HCR_TRVM,
+               .behaviour      = BEHAVE_FORWARD_READ,
+       },
+       [CGT_HCR_TLOR] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TLOR,
+               .mask           = HCR_TLOR,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TERR] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TERR,
+               .mask           = HCR_TERR,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_APK] = {
+               .index          = HCR_EL2,
+               .value          = 0, /* negative polarity: forward when APK is clear */
+               .mask           = HCR_APK,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_NV] = {
+               .index          = HCR_EL2,
+               .value          = HCR_NV,
+               .mask           = HCR_NV,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_NV_nNV2] = {
+               .index          = HCR_EL2,
+               .value          = HCR_NV,
+               .mask           = HCR_NV | HCR_NV2,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_NV1_nNV2] = {
+               .index          = HCR_EL2,
+               .value          = HCR_NV | HCR_NV1,
+               .mask           = HCR_NV | HCR_NV1 | HCR_NV2,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_AT] = {
+               .index          = HCR_EL2,
+               .value          = HCR_AT,
+               .mask           = HCR_AT,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_nFIEN] = {
+               .index          = HCR_EL2,
+               .value          = 0, /* negative polarity: forward when FIEN is clear */
+               .mask           = HCR_FIEN,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TID4] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TID4,
+               .mask           = HCR_TID4,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TICAB] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TICAB,
+               .mask           = HCR_TICAB,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TOCU] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TOCU,
+               .mask           = HCR_TOCU,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_ENSCXT] = {
+               .index          = HCR_EL2,
+               .value          = 0, /* negative polarity: forward when ENSCXT is clear */
+               .mask           = HCR_ENSCXT,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TTLBIS] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TTLBIS,
+               .mask           = HCR_TTLBIS,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_HCR_TTLBOS] = {
+               .index          = HCR_EL2,
+               .value          = HCR_TTLBOS,
+               .mask           = HCR_TTLBOS,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TPMCR] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TPMCR,
+               .mask           = MDCR_EL2_TPMCR,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TPM] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TPM,
+               .mask           = MDCR_EL2_TPM,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TDE] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TDE,
+               .mask           = MDCR_EL2_TDE,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TDA] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TDA,
+               .mask           = MDCR_EL2_TDA,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TDOSA] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TDOSA,
+               .mask           = MDCR_EL2_TDOSA,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TDRA] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TDRA,
+               .mask           = MDCR_EL2_TDRA,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_E2PB] = {
+               .index          = MDCR_EL2,
+               .value          = 0, /* negative polarity: forward when E2PB is clear */
+               .mask           = BIT(MDCR_EL2_E2PB_SHIFT),
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TPMS] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TPMS,
+               .mask           = MDCR_EL2_TPMS,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TTRF] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TTRF,
+               .mask           = MDCR_EL2_TTRF,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_E2TB] = {
+               .index          = MDCR_EL2,
+               .value          = 0, /* negative polarity: forward when E2TB is clear */
+               .mask           = BIT(MDCR_EL2_E2TB_SHIFT),
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+       [CGT_MDCR_TDCC] = {
+               .index          = MDCR_EL2,
+               .value          = MDCR_EL2_TDCC,
+               .mask           = MDCR_EL2_TDCC,
+               .behaviour      = BEHAVE_FORWARD_ANY,
+       },
+};
+
+/*
+ * Multiple Control Bits: expands a combined CGT id (offset past
+ * __MULTIPLE_CONTROL_BITS__) into a __RESERVED__-terminated list of
+ * simple CGT ids, all of which must be evaluated for the trap decision.
+ */
+#define MCB(id, ...)                                           \
+       [id - __MULTIPLE_CONTROL_BITS__]        =               \
+               (const enum cgt_group_id[]){                    \
+               __VA_ARGS__, __RESERVED__                       \
+               }
+
+/* Combined-control table, indexed by (id - __MULTIPLE_CONTROL_BITS__) */
+static const enum cgt_group_id *coarse_control_combo[] = {
+       MCB(CGT_HCR_IMO_FMO,            CGT_HCR_IMO, CGT_HCR_FMO),
+       MCB(CGT_HCR_TID2_TID4,          CGT_HCR_TID2, CGT_HCR_TID4),
+       MCB(CGT_HCR_TTLB_TTLBIS,        CGT_HCR_TTLB, CGT_HCR_TTLBIS),
+       MCB(CGT_HCR_TTLB_TTLBOS,        CGT_HCR_TTLB, CGT_HCR_TTLBOS),
+       MCB(CGT_HCR_TVM_TRVM,           CGT_HCR_TVM, CGT_HCR_TRVM),
+       MCB(CGT_HCR_TPU_TICAB,          CGT_HCR_TPU, CGT_HCR_TICAB),
+       MCB(CGT_HCR_TPU_TOCU,           CGT_HCR_TPU, CGT_HCR_TOCU),
+       MCB(CGT_HCR_NV1_nNV2_ENSCXT,    CGT_HCR_NV1_nNV2, CGT_HCR_ENSCXT),
+       MCB(CGT_MDCR_TPM_TPMCR,         CGT_MDCR_TPM, CGT_MDCR_TPMCR),
+       MCB(CGT_MDCR_TDE_TDA,           CGT_MDCR_TDE, CGT_MDCR_TDA),
+       MCB(CGT_MDCR_TDE_TDOSA,         CGT_MDCR_TDE, CGT_MDCR_TDOSA),
+       MCB(CGT_MDCR_TDE_TDRA,          CGT_MDCR_TDE, CGT_MDCR_TDRA),
+       MCB(CGT_MDCR_TDCC_TDE_TDA,      CGT_MDCR_TDCC, CGT_MDCR_TDE, CGT_MDCR_TDA),
+};
+
+/* Runtime callback deciding the behaviour of a complex trap condition */
+typedef enum trap_behaviour (*complex_condition_check)(struct kvm_vcpu *);
+
+/*
+ * Warning, maximum confusion ahead.
+ *
+ * When E2H=0, CNTHCTL_EL2[1:0] are defined as EL1PCEN:EL1PCTEN
+ * When E2H=1, CNTHCTL_EL2[11:10] are defined as EL1PTEN:EL1PCTEN
+ *
+ * Note the single letter difference? Yet, the bits have the same
+ * function despite a different layout and a different name.
+ *
+ * We don't try to reconcile this mess. We just use the E2H=0 bits
+ * to generate something that is in the E2H=1 format, and live with
+ * it. You're welcome.
+ */
+static u64 get_sanitized_cnthctl(struct kvm_vcpu *vcpu)
+{
+       u64 val = __vcpu_sys_reg(vcpu, CNTHCTL_EL2);
+
+       /* E2H=0: move EL1PCEN:EL1PCTEN from bits [1:0] to the E2H=1 layout [11:10] */
+       if (!vcpu_el2_e2h_is_set(vcpu))
+               val = (val & (CNTHCTL_EL1PCEN | CNTHCTL_EL1PCTEN)) << 10;
+
+       /* Only the (normalized) EL1 physical enable bits are returned */
+       return val & ((CNTHCTL_EL1PCEN | CNTHCTL_EL1PCTEN) << 10);
+}
+
+/* Trapped EL1 physical counter access: handle locally if EL1PCTEN is set */
+static enum trap_behaviour check_cnthctl_el1pcten(struct kvm_vcpu *vcpu)
+{
+       if (get_sanitized_cnthctl(vcpu) & (CNTHCTL_EL1PCTEN << 10))
+               return BEHAVE_HANDLE_LOCALLY;
+
+       return BEHAVE_FORWARD_ANY;
+}
+
+/*
+ * Trapped EL1 physical timer access: handle locally if the enable bit
+ * is set. CNTHCTL_EL1PCEN is the E2H=0 name for the bit known as
+ * EL1PTEN in the E2H=1 layout (see the comment above
+ * get_sanitized_cnthctl(), which normalizes to the E2H=1 positions).
+ */
+static enum trap_behaviour check_cnthctl_el1pten(struct kvm_vcpu *vcpu)
+{
+       if (get_sanitized_cnthctl(vcpu) & (CNTHCTL_EL1PCEN << 10))
+               return BEHAVE_HANDLE_LOCALLY;
+
+       return BEHAVE_FORWARD_ANY;
+}
+
+/* Map a complex-condition CGT id to its check callback */
+#define CCC(id, fn)                            \
+       [id - __COMPLEX_CONDITIONS__] = fn
+
+static const complex_condition_check ccc[] = {
+       CCC(CGT_CNTHCTL_EL1PCTEN, check_cnthctl_el1pcten),
+       CCC(CGT_CNTHCTL_EL1PTEN, check_cnthctl_el1pten),
+};
+
+/*
+ * Bit assignment for the trap controls. We use a 64bit word with the
+ * following layout for each trapped sysreg:
+ *
+ * [9:0]       enum cgt_group_id (10 bits)
+ * [13:10]     enum fgt_group_id (4 bits)
+ * [19:14]     bit number in the FGT register (6 bits)
+ * [20]                trap polarity (1 bit)
+ * [25:21]     FG filter (5 bits)
+ * [62:26]     Unused (37 bits)
+ * [63]                RES0 - Must be zero, as lost on insertion in the xarray
+ */
+#define TC_CGT_BITS    10
+#define TC_FGT_BITS    4
+#define TC_FGF_BITS    5
+
+/* Bitfield widths must mirror the layout documented above (64 bits total) */
+union trap_config {
+       u64     val;
+       struct {
+               unsigned long   cgt:TC_CGT_BITS; /* Coarse Grained Trap id */
+               unsigned long   fgt:TC_FGT_BITS; /* Fine Grained Trap id */
+               unsigned long   bit:6;           /* Bit number */
+               unsigned long   pol:1;           /* Polarity */
+               unsigned long   fgf:TC_FGF_BITS; /* Fine Grained Filter */
+               unsigned long   unused:37;       /* Unused, should be zero */
+               unsigned long   mbz:1;           /* Must Be Zero */
+       };
+};
+
+/* Binds a sysreg encoding (or the range [encoding, end]) to its trap config */
+struct encoding_to_trap_config {
+       const u32                       encoding;
+       const u32                       end;
+       const union trap_config         tc;
+       /* NOTE(review): presumably the __LINE__ of the table entry, for diagnostics — confirm */
+       const unsigned int              line;
+};
+
+/*
+ * Describe a coarse-grained trap on the sysreg encodings
+ * sr_start..sr_end (inclusive), recording the defining source line.
+ */
+#define SR_RANGE_TRAP(sr_start, sr_end, trap_id)                       \
+       {                                                               \
+               .encoding       = sr_start,                             \
+               .end            = sr_end,                               \
+               .tc             = {                                     \
+                       .cgt            = trap_id,                      \
+               },                                                      \
+               .line = __LINE__,                                       \
+       }
+
+/* Single-encoding variant of SR_RANGE_TRAP() */
+#define SR_TRAP(sr, trap_id)           SR_RANGE_TRAP(sr, sr, trap_id)
+
+/*
+ * Map encoding to trap bits for exception reported with EC=0x18.
+ * These must only be evaluated when running a nested hypervisor,
+ * while the current context is not a hypervisor context. When the
+ * trapped access matches one of the trap controls, the exception is
+ * re-injected in the nested hypervisor.
+ */
+static const struct encoding_to_trap_config encoding_to_cgt[] __initconst = {
+       SR_TRAP(SYS_REVIDR_EL1,         CGT_HCR_TID1),
+       SR_TRAP(SYS_AIDR_EL1,           CGT_HCR_TID1),
+       SR_TRAP(SYS_SMIDR_EL1,          CGT_HCR_TID1),
+       SR_TRAP(SYS_CTR_EL0,            CGT_HCR_TID2),
+       SR_TRAP(SYS_CCSIDR_EL1,         CGT_HCR_TID2_TID4),
+       SR_TRAP(SYS_CCSIDR2_EL1,        CGT_HCR_TID2_TID4),
+       SR_TRAP(SYS_CLIDR_EL1,          CGT_HCR_TID2_TID4),
+       SR_TRAP(SYS_CSSELR_EL1,         CGT_HCR_TID2_TID4),
+       SR_RANGE_TRAP(SYS_ID_PFR0_EL1,
+                     sys_reg(3, 0, 0, 7, 7), CGT_HCR_TID3),
+       SR_TRAP(SYS_ICC_SGI0R_EL1,      CGT_HCR_IMO_FMO),
+       SR_TRAP(SYS_ICC_ASGI1R_EL1,     CGT_HCR_IMO_FMO),
+       SR_TRAP(SYS_ICC_SGI1R_EL1,      CGT_HCR_IMO_FMO),
+       SR_RANGE_TRAP(sys_reg(3, 0, 11, 0, 0),
+                     sys_reg(3, 0, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 1, 11, 0, 0),
+                     sys_reg(3, 1, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 2, 11, 0, 0),
+                     sys_reg(3, 2, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 3, 11, 0, 0),
+                     sys_reg(3, 3, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 4, 11, 0, 0),
+                     sys_reg(3, 4, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 5, 11, 0, 0),
+                     sys_reg(3, 5, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 6, 11, 0, 0),
+                     sys_reg(3, 6, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 7, 11, 0, 0),
+                     sys_reg(3, 7, 11, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 0, 15, 0, 0),
+                     sys_reg(3, 0, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 1, 15, 0, 0),
+                     sys_reg(3, 1, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 2, 15, 0, 0),
+                     sys_reg(3, 2, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 3, 15, 0, 0),
+                     sys_reg(3, 3, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 4, 15, 0, 0),
+                     sys_reg(3, 4, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 5, 15, 0, 0),
+                     sys_reg(3, 5, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 6, 15, 0, 0),
+                     sys_reg(3, 6, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_RANGE_TRAP(sys_reg(3, 7, 15, 0, 0),
+                     sys_reg(3, 7, 15, 15, 7), CGT_HCR_TIDCP),
+       SR_TRAP(SYS_ACTLR_EL1,          CGT_HCR_TACR),
+       SR_TRAP(SYS_DC_ISW,             CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CSW,             CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CISW,            CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_IGSW,            CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_IGDSW,           CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CGSW,            CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CGDSW,           CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CIGSW,           CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CIGDSW,          CGT_HCR_TSW),
+       SR_TRAP(SYS_DC_CIVAC,           CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CVAC,            CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CVAP,            CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CVADP,           CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_IVAC,            CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CIGVAC,          CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CIGDVAC,         CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_IGVAC,           CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_IGDVAC,          CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGVAC,           CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGDVAC,          CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGVAP,           CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGDVAP,          CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGVADP,          CGT_HCR_TPC),
+       SR_TRAP(SYS_DC_CGDVADP,         CGT_HCR_TPC),
+       SR_TRAP(SYS_IC_IVAU,            CGT_HCR_TPU_TOCU),
+       SR_TRAP(SYS_IC_IALLU,           CGT_HCR_TPU_TOCU),
+       SR_TRAP(SYS_IC_IALLUIS,         CGT_HCR_TPU_TICAB),
+       SR_TRAP(SYS_DC_CVAU,            CGT_HCR_TPU_TOCU),
+       SR_TRAP(OP_TLBI_RVAE1,          CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAAE1,         CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVALE1,         CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAALE1,        CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VMALLE1,        CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAE1,           CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_ASIDE1,         CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAAE1,          CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VALE1,          CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAALE1,         CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAE1NXS,       CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAAE1NXS,      CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVALE1NXS,      CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAALE1NXS,     CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VMALLE1NXS,     CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAE1NXS,        CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_ASIDE1NXS,      CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAAE1NXS,       CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VALE1NXS,       CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_VAALE1NXS,      CGT_HCR_TTLB),
+       SR_TRAP(OP_TLBI_RVAE1IS,        CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVAAE1IS,       CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVALE1IS,       CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVAALE1IS,      CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VMALLE1IS,      CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAE1IS,         CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_ASIDE1IS,       CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAAE1IS,        CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VALE1IS,        CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAALE1IS,       CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVAE1ISNXS,     CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVAAE1ISNXS,    CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVALE1ISNXS,    CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_RVAALE1ISNXS,   CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VMALLE1ISNXS,   CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAE1ISNXS,      CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_ASIDE1ISNXS,    CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAAE1ISNXS,     CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VALE1ISNXS,     CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VAALE1ISNXS,    CGT_HCR_TTLB_TTLBIS),
+       SR_TRAP(OP_TLBI_VMALLE1OS,      CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAE1OS,         CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_ASIDE1OS,       CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAAE1OS,        CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VALE1OS,        CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAALE1OS,       CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAE1OS,        CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAAE1OS,       CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVALE1OS,       CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAALE1OS,      CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VMALLE1OSNXS,   CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAE1OSNXS,      CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_ASIDE1OSNXS,    CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAAE1OSNXS,     CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VALE1OSNXS,     CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_VAALE1OSNXS,    CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAE1OSNXS,     CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAAE1OSNXS,    CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVALE1OSNXS,    CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(OP_TLBI_RVAALE1OSNXS,   CGT_HCR_TTLB_TTLBOS),
+       SR_TRAP(SYS_SCTLR_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_TTBR0_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_TTBR1_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_TCR_EL1,            CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_ESR_EL1,            CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_FAR_EL1,            CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_AFSR0_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_AFSR1_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_MAIR_EL1,           CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_AMAIR_EL1,          CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_CONTEXTIDR_EL1,     CGT_HCR_TVM_TRVM),
+       SR_TRAP(SYS_DC_ZVA,             CGT_HCR_TDZ),
+       SR_TRAP(SYS_DC_GVA,             CGT_HCR_TDZ),
+       SR_TRAP(SYS_DC_GZVA,            CGT_HCR_TDZ),
+       SR_TRAP(SYS_LORSA_EL1,          CGT_HCR_TLOR),
+       SR_TRAP(SYS_LOREA_EL1,          CGT_HCR_TLOR),
+       SR_TRAP(SYS_LORN_EL1,           CGT_HCR_TLOR),
+       SR_TRAP(SYS_LORC_EL1,           CGT_HCR_TLOR),
+       SR_TRAP(SYS_LORID_EL1,          CGT_HCR_TLOR),
+       SR_TRAP(SYS_ERRIDR_EL1,         CGT_HCR_TERR),
+       SR_TRAP(SYS_ERRSELR_EL1,        CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXADDR_EL1,        CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXCTLR_EL1,        CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXFR_EL1,          CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXMISC0_EL1,       CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXMISC1_EL1,       CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXMISC2_EL1,       CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXMISC3_EL1,       CGT_HCR_TERR),
+       SR_TRAP(SYS_ERXSTATUS_EL1,      CGT_HCR_TERR),
+       SR_TRAP(SYS_APIAKEYLO_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APIAKEYHI_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APIBKEYLO_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APIBKEYHI_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APDAKEYLO_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APDAKEYHI_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APDBKEYLO_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APDBKEYHI_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APGAKEYLO_EL1,      CGT_HCR_APK),
+       SR_TRAP(SYS_APGAKEYHI_EL1,      CGT_HCR_APK),
+       /* All _EL2 registers */
+       SR_RANGE_TRAP(sys_reg(3, 4, 0, 0, 0),
+                     sys_reg(3, 4, 3, 15, 7), CGT_HCR_NV),
+       /* Skip the SP_EL1 encoding... */
+       SR_TRAP(SYS_SPSR_EL2,           CGT_HCR_NV),
+       SR_TRAP(SYS_ELR_EL2,            CGT_HCR_NV),
+       SR_RANGE_TRAP(sys_reg(3, 4, 4, 1, 1),
+                     sys_reg(3, 4, 10, 15, 7), CGT_HCR_NV),
+       SR_RANGE_TRAP(sys_reg(3, 4, 12, 0, 0),
+                     sys_reg(3, 4, 14, 15, 7), CGT_HCR_NV),
+       /* All _EL02, _EL12 registers */
+       SR_RANGE_TRAP(sys_reg(3, 5, 0, 0, 0),
+                     sys_reg(3, 5, 10, 15, 7), CGT_HCR_NV),
+       SR_RANGE_TRAP(sys_reg(3, 5, 12, 0, 0),
+                     sys_reg(3, 5, 14, 15, 7), CGT_HCR_NV),
+       SR_TRAP(OP_AT_S1E2R,            CGT_HCR_NV),
+       SR_TRAP(OP_AT_S1E2W,            CGT_HCR_NV),
+       SR_TRAP(OP_AT_S12E1R,           CGT_HCR_NV),
+       SR_TRAP(OP_AT_S12E1W,           CGT_HCR_NV),
+       SR_TRAP(OP_AT_S12E0R,           CGT_HCR_NV),
+       SR_TRAP(OP_AT_S12E0W,           CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2,          CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2,         CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2,          CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2,           CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1,          CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2,          CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1NXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1NXS,    CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1NXS,    CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1NXS,   CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2NXS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2NXS,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2NXS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2NXS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1NXS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2NXS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1NXS,  CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1IS,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1IS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1IS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1IS,    CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2IS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2IS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2IS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2IS,         CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1IS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2IS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1IS,   CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1ISNXS,   CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1ISNXS,  CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1ISNXS,  CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1ISNXS, CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2ISNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2ISNXS,    CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2ISNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2ISNXS,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1ISNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2ISNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1ISNXS,CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2OS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2OS,         CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1OS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2OS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1OS,   CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1OS,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1OS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1OS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1OS,    CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2OS,        CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2OS,       CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE2OSNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VAE2OSNXS,      CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_ALLE1OSNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VALE2OSNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_VMALLS12E1OSNXS,CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2E1OSNXS,   CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2E1OSNXS,  CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_IPAS2LE1OSNXS,  CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RIPAS2LE1OSNXS, CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVAE2OSNXS,     CGT_HCR_NV),
+       SR_TRAP(OP_TLBI_RVALE2OSNXS,    CGT_HCR_NV),
+       SR_TRAP(OP_CPP_RCTX,            CGT_HCR_NV),
+       SR_TRAP(OP_DVP_RCTX,            CGT_HCR_NV),
+       SR_TRAP(OP_CFP_RCTX,            CGT_HCR_NV),
+       SR_TRAP(SYS_SP_EL1,             CGT_HCR_NV_nNV2),
+       SR_TRAP(SYS_VBAR_EL1,           CGT_HCR_NV1_nNV2),
+       SR_TRAP(SYS_ELR_EL1,            CGT_HCR_NV1_nNV2),
+       SR_TRAP(SYS_SPSR_EL1,           CGT_HCR_NV1_nNV2),
+       SR_TRAP(SYS_SCXTNUM_EL1,        CGT_HCR_NV1_nNV2_ENSCXT),
+       SR_TRAP(SYS_SCXTNUM_EL0,        CGT_HCR_ENSCXT),
+       SR_TRAP(OP_AT_S1E1R,            CGT_HCR_AT),
+       SR_TRAP(OP_AT_S1E1W,            CGT_HCR_AT),
+       SR_TRAP(OP_AT_S1E0R,            CGT_HCR_AT),
+       SR_TRAP(OP_AT_S1E0W,            CGT_HCR_AT),
+       SR_TRAP(OP_AT_S1E1RP,           CGT_HCR_AT),
+       SR_TRAP(OP_AT_S1E1WP,           CGT_HCR_AT),
+       SR_TRAP(SYS_ERXPFGF_EL1,        CGT_HCR_nFIEN),
+       SR_TRAP(SYS_ERXPFGCTL_EL1,      CGT_HCR_nFIEN),
+       SR_TRAP(SYS_ERXPFGCDN_EL1,      CGT_HCR_nFIEN),
+       SR_TRAP(SYS_PMCR_EL0,           CGT_MDCR_TPM_TPMCR),
+       SR_TRAP(SYS_PMCNTENSET_EL0,     CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMCNTENCLR_EL0,     CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMOVSSET_EL0,       CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMOVSCLR_EL0,       CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMCEID0_EL0,        CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMCEID1_EL0,        CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMXEVTYPER_EL0,     CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMSWINC_EL0,        CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMSELR_EL0,         CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMXEVCNTR_EL0,      CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMCCNTR_EL0,        CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMUSERENR_EL0,      CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMINTENSET_EL1,     CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMINTENCLR_EL1,     CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMMIR_EL1,          CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(0),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(1),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(2),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(3),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(4),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(5),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(6),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(7),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(8),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(9),   CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(10),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(11),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(12),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(13),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(14),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(15),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(16),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(17),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(18),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(19),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(20),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(21),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(22),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(23),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(24),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(25),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(26),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(27),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(28),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(29),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVCNTRn_EL0(30),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(0),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(1),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(2),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(3),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(4),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(5),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(6),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(7),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(8),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(9),  CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(10), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(11), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(12), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(13), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(14), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(15), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(16), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(17), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(18), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(19), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(20), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(21), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(22), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(23), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(24), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(25), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(26), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(27), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(28), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(29), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMEVTYPERn_EL0(30), CGT_MDCR_TPM),
+       SR_TRAP(SYS_PMCCFILTR_EL0,      CGT_MDCR_TPM),
+       SR_TRAP(SYS_MDCCSR_EL0,         CGT_MDCR_TDCC_TDE_TDA),
+       SR_TRAP(SYS_MDCCINT_EL1,        CGT_MDCR_TDCC_TDE_TDA),
+       SR_TRAP(SYS_OSDTRRX_EL1,        CGT_MDCR_TDCC_TDE_TDA),
+       SR_TRAP(SYS_OSDTRTX_EL1,        CGT_MDCR_TDCC_TDE_TDA),
+       SR_TRAP(SYS_DBGDTR_EL0,         CGT_MDCR_TDCC_TDE_TDA),
+       /*
+        * Also covers DBGDTRRX_EL0, which has the same encoding as
+        * SYS_DBGDTRTX_EL0...
+        */
+       SR_TRAP(SYS_DBGDTRTX_EL0,       CGT_MDCR_TDCC_TDE_TDA),
+       SR_TRAP(SYS_MDSCR_EL1,          CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_OSECCR_EL1,         CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(0),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(1),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(2),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(3),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(4),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(5),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(6),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(7),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(8),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(9),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(10),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(11),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(12),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(13),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(14),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBVRn_EL1(15),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(0),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(1),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(2),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(3),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(4),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(5),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(6),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(7),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(8),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(9),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(10),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(11),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(12),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(13),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(14),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGBCRn_EL1(15),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(0),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(1),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(2),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(3),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(4),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(5),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(6),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(7),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(8),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(9),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(10),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(11),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(12),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(13),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(14),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWVRn_EL1(15),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(0),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(1),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(2),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(3),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(4),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(5),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(6),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(7),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(8),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(9),     CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(10),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(11),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(12),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(13),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGWCRn_EL1(14),    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGCLAIMSET_EL1,    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGCLAIMCLR_EL1,    CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_DBGAUTHSTATUS_EL1,  CGT_MDCR_TDE_TDA),
+       SR_TRAP(SYS_OSLAR_EL1,          CGT_MDCR_TDE_TDOSA),
+       SR_TRAP(SYS_OSLSR_EL1,          CGT_MDCR_TDE_TDOSA),
+       SR_TRAP(SYS_OSDLR_EL1,          CGT_MDCR_TDE_TDOSA),
+       SR_TRAP(SYS_DBGPRCR_EL1,        CGT_MDCR_TDE_TDOSA),
+       SR_TRAP(SYS_MDRAR_EL1,          CGT_MDCR_TDE_TDRA),
+       SR_TRAP(SYS_PMBLIMITR_EL1,      CGT_MDCR_E2PB),
+       SR_TRAP(SYS_PMBPTR_EL1,         CGT_MDCR_E2PB),
+       SR_TRAP(SYS_PMBSR_EL1,          CGT_MDCR_E2PB),
+       SR_TRAP(SYS_PMSCR_EL1,          CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSEVFR_EL1,        CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSFCR_EL1,         CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSICR_EL1,         CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSIDR_EL1,         CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSIRR_EL1,         CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSLATFR_EL1,       CGT_MDCR_TPMS),
+       SR_TRAP(SYS_PMSNEVFR_EL1,       CGT_MDCR_TPMS),
+       SR_TRAP(SYS_TRFCR_EL1,          CGT_MDCR_TTRF),
+       SR_TRAP(SYS_TRBBASER_EL1,       CGT_MDCR_E2TB),
+       SR_TRAP(SYS_TRBLIMITR_EL1,      CGT_MDCR_E2TB),
+       SR_TRAP(SYS_TRBMAR_EL1,         CGT_MDCR_E2TB),
+       SR_TRAP(SYS_TRBPTR_EL1,         CGT_MDCR_E2TB),
+       SR_TRAP(SYS_TRBSR_EL1,          CGT_MDCR_E2TB),
+       SR_TRAP(SYS_TRBTRG_EL1,         CGT_MDCR_E2TB),
+       SR_TRAP(SYS_CNTP_TVAL_EL0,      CGT_CNTHCTL_EL1PTEN),
+       SR_TRAP(SYS_CNTP_CVAL_EL0,      CGT_CNTHCTL_EL1PTEN),
+       SR_TRAP(SYS_CNTP_CTL_EL0,       CGT_CNTHCTL_EL1PTEN),
+       SR_TRAP(SYS_CNTPCT_EL0,         CGT_CNTHCTL_EL1PCTEN),
+       SR_TRAP(SYS_CNTPCTSS_EL0,       CGT_CNTHCTL_EL1PCTEN),
+};
+
+       /*
+        * Encoding -> trap-configuration lookup. Presumably populated at
+        * init time from the encoding_to_* tables in this file, keyed by
+        * the sysreg/sysinsn encoding -- TODO(review): confirm against the
+        * init code that fills it.
+        */
+       static DEFINE_XARRAY(sr_forward_xa);
+
+       /*
+        * Identifies which Fine-Grained Trap (FGT) register a trap bit
+        * lives in. Consumed via the .fgt field filled in by the
+        * SR_FGF()/SR_FGT() macros below.
+        */
+       enum fgt_group_id {
+       __NO_FGT_GROUP__,       /* encoding has no FGT control */
+       HFGxTR_GROUP,           /* HFGRTR_EL2/HFGWTR_EL2 (shared bit layout) */
+       HDFGRTR_GROUP,          /* HDFGRTR_EL2 (debug, reads) */
+       HDFGWTR_GROUP,          /* HDFGWTR_EL2 (debug, writes) */
+       HFGITR_GROUP,           /* HFGITR_EL2 (instructions) */
+
+       /* Must be last */
+       __NR_FGT_GROUP_IDS__
+       };
+
+       /*
+        * Extra filter applied on top of an FGT bit, selected by the .fgf
+        * field of SR_FGF().
+        */
+       enum fg_filter_id {
+       __NO_FGF__,             /* no additional filtering */
+       HCRX_FGTnXS,            /* trap also gated by HCRX_EL2.FGTnXS
+                                * (used for the TLBI nXS variants) */
+
+       /* Must be last */
+       __NR_FG_FILTER_IDS__
+       };
+
+       /*
+        * Build an encoding_to_trap_config entry for a Fine-Grained Trap.
+        *
+        * @sr: sysreg/sysinsn encoding (single encoding: .end == .encoding)
+        * @g:  fgt_group_id prefix (e.g. HFGITR expands to HFGITR_GROUP and
+        *      is also used to form the <g>_EL2_<b>_SHIFT bit name)
+        * @b:  bit name within that FGT register
+        * @p:  trap polarity -- 1 when a set bit traps; 0 appears to be used
+        *      for the negative-polarity "n"-prefixed bits (trap when clear)
+        * @f:  fg_filter_id applied in addition to the FGT bit
+        *
+        * .line records the table line for duplicate-entry diagnostics.
+        */
+#define SR_FGF(sr, g, b, p, f)                                 \
+       {                                                       \
+               .encoding       = sr,                           \
+               .end            = sr,                           \
+               .tc             = {                             \
+                       .fgt = g ## _GROUP,                     \
+                       .bit = g ## _EL2_ ## b ## _SHIFT,       \
+                       .pol = p,                               \
+                       .fgf = f,                               \
+               },                                              \
+               .line = __LINE__,                               \
+       }
+
+/* Same as SR_FGF(), for the common case of no additional filter. */
+#define SR_FGT(sr, g, b, p)    SR_FGF(sr, g, b, p, __NO_FGF__)
+
+static const struct encoding_to_trap_config encoding_to_fgt[] __initconst = {
+       /* HFGRTR_EL2, HFGWTR_EL2 */
+       SR_FGT(SYS_TPIDR2_EL0,          HFGxTR, nTPIDR2_EL0, 0),
+       SR_FGT(SYS_SMPRI_EL1,           HFGxTR, nSMPRI_EL1, 0),
+       SR_FGT(SYS_ACCDATA_EL1,         HFGxTR, nACCDATA_EL1, 0),
+       SR_FGT(SYS_ERXADDR_EL1,         HFGxTR, ERXADDR_EL1, 1),
+       SR_FGT(SYS_ERXPFGCDN_EL1,       HFGxTR, ERXPFGCDN_EL1, 1),
+       SR_FGT(SYS_ERXPFGCTL_EL1,       HFGxTR, ERXPFGCTL_EL1, 1),
+       SR_FGT(SYS_ERXPFGF_EL1,         HFGxTR, ERXPFGF_EL1, 1),
+       SR_FGT(SYS_ERXMISC0_EL1,        HFGxTR, ERXMISCn_EL1, 1),
+       SR_FGT(SYS_ERXMISC1_EL1,        HFGxTR, ERXMISCn_EL1, 1),
+       SR_FGT(SYS_ERXMISC2_EL1,        HFGxTR, ERXMISCn_EL1, 1),
+       SR_FGT(SYS_ERXMISC3_EL1,        HFGxTR, ERXMISCn_EL1, 1),
+       SR_FGT(SYS_ERXSTATUS_EL1,       HFGxTR, ERXSTATUS_EL1, 1),
+       SR_FGT(SYS_ERXCTLR_EL1,         HFGxTR, ERXCTLR_EL1, 1),
+       SR_FGT(SYS_ERXFR_EL1,           HFGxTR, ERXFR_EL1, 1),
+       SR_FGT(SYS_ERRSELR_EL1,         HFGxTR, ERRSELR_EL1, 1),
+       SR_FGT(SYS_ERRIDR_EL1,          HFGxTR, ERRIDR_EL1, 1),
+       SR_FGT(SYS_ICC_IGRPEN0_EL1,     HFGxTR, ICC_IGRPENn_EL1, 1),
+       SR_FGT(SYS_ICC_IGRPEN1_EL1,     HFGxTR, ICC_IGRPENn_EL1, 1),
+       SR_FGT(SYS_VBAR_EL1,            HFGxTR, VBAR_EL1, 1),
+       SR_FGT(SYS_TTBR1_EL1,           HFGxTR, TTBR1_EL1, 1),
+       SR_FGT(SYS_TTBR0_EL1,           HFGxTR, TTBR0_EL1, 1),
+       SR_FGT(SYS_TPIDR_EL0,           HFGxTR, TPIDR_EL0, 1),
+       SR_FGT(SYS_TPIDRRO_EL0,         HFGxTR, TPIDRRO_EL0, 1),
+       SR_FGT(SYS_TPIDR_EL1,           HFGxTR, TPIDR_EL1, 1),
+       SR_FGT(SYS_TCR_EL1,             HFGxTR, TCR_EL1, 1),
+       SR_FGT(SYS_SCXTNUM_EL0,         HFGxTR, SCXTNUM_EL0, 1),
+       SR_FGT(SYS_SCXTNUM_EL1,         HFGxTR, SCXTNUM_EL1, 1),
+       SR_FGT(SYS_SCTLR_EL1,           HFGxTR, SCTLR_EL1, 1),
+       SR_FGT(SYS_REVIDR_EL1,          HFGxTR, REVIDR_EL1, 1),
+       SR_FGT(SYS_PAR_EL1,             HFGxTR, PAR_EL1, 1),
+       SR_FGT(SYS_MPIDR_EL1,           HFGxTR, MPIDR_EL1, 1),
+       SR_FGT(SYS_MIDR_EL1,            HFGxTR, MIDR_EL1, 1),
+       SR_FGT(SYS_MAIR_EL1,            HFGxTR, MAIR_EL1, 1),
+       SR_FGT(SYS_LORSA_EL1,           HFGxTR, LORSA_EL1, 1),
+       SR_FGT(SYS_LORN_EL1,            HFGxTR, LORN_EL1, 1),
+       SR_FGT(SYS_LORID_EL1,           HFGxTR, LORID_EL1, 1),
+       SR_FGT(SYS_LOREA_EL1,           HFGxTR, LOREA_EL1, 1),
+       SR_FGT(SYS_LORC_EL1,            HFGxTR, LORC_EL1, 1),
+       SR_FGT(SYS_ISR_EL1,             HFGxTR, ISR_EL1, 1),
+       SR_FGT(SYS_FAR_EL1,             HFGxTR, FAR_EL1, 1),
+       SR_FGT(SYS_ESR_EL1,             HFGxTR, ESR_EL1, 1),
+       SR_FGT(SYS_DCZID_EL0,           HFGxTR, DCZID_EL0, 1),
+       SR_FGT(SYS_CTR_EL0,             HFGxTR, CTR_EL0, 1),
+       SR_FGT(SYS_CSSELR_EL1,          HFGxTR, CSSELR_EL1, 1),
+       SR_FGT(SYS_CPACR_EL1,           HFGxTR, CPACR_EL1, 1),
+       SR_FGT(SYS_CONTEXTIDR_EL1,      HFGxTR, CONTEXTIDR_EL1, 1),
+       SR_FGT(SYS_CLIDR_EL1,           HFGxTR, CLIDR_EL1, 1),
+       SR_FGT(SYS_CCSIDR_EL1,          HFGxTR, CCSIDR_EL1, 1),
+       SR_FGT(SYS_APIBKEYLO_EL1,       HFGxTR, APIBKey, 1),
+       SR_FGT(SYS_APIBKEYHI_EL1,       HFGxTR, APIBKey, 1),
+       SR_FGT(SYS_APIAKEYLO_EL1,       HFGxTR, APIAKey, 1),
+       SR_FGT(SYS_APIAKEYHI_EL1,       HFGxTR, APIAKey, 1),
+       SR_FGT(SYS_APGAKEYLO_EL1,       HFGxTR, APGAKey, 1),
+       SR_FGT(SYS_APGAKEYHI_EL1,       HFGxTR, APGAKey, 1),
+       SR_FGT(SYS_APDBKEYLO_EL1,       HFGxTR, APDBKey, 1),
+       SR_FGT(SYS_APDBKEYHI_EL1,       HFGxTR, APDBKey, 1),
+       SR_FGT(SYS_APDAKEYLO_EL1,       HFGxTR, APDAKey, 1),
+       SR_FGT(SYS_APDAKEYHI_EL1,       HFGxTR, APDAKey, 1),
+       SR_FGT(SYS_AMAIR_EL1,           HFGxTR, AMAIR_EL1, 1),
+       SR_FGT(SYS_AIDR_EL1,            HFGxTR, AIDR_EL1, 1),
+       SR_FGT(SYS_AFSR1_EL1,           HFGxTR, AFSR1_EL1, 1),
+       SR_FGT(SYS_AFSR0_EL1,           HFGxTR, AFSR0_EL1, 1),
+       /* HFGITR_EL2 */
+       SR_FGT(OP_BRB_IALL,             HFGITR, nBRBIALL, 0),
+       SR_FGT(OP_BRB_INJ,              HFGITR, nBRBINJ, 0),
+       SR_FGT(SYS_DC_CVAC,             HFGITR, DCCVAC, 1),
+       SR_FGT(SYS_DC_CGVAC,            HFGITR, DCCVAC, 1),
+       SR_FGT(SYS_DC_CGDVAC,           HFGITR, DCCVAC, 1),
+       SR_FGT(OP_CPP_RCTX,             HFGITR, CPPRCTX, 1),
+       SR_FGT(OP_DVP_RCTX,             HFGITR, DVPRCTX, 1),
+       SR_FGT(OP_CFP_RCTX,             HFGITR, CFPRCTX, 1),
+       SR_FGT(OP_TLBI_VAALE1,          HFGITR, TLBIVAALE1, 1),
+       SR_FGT(OP_TLBI_VALE1,           HFGITR, TLBIVALE1, 1),
+       SR_FGT(OP_TLBI_VAAE1,           HFGITR, TLBIVAAE1, 1),
+       SR_FGT(OP_TLBI_ASIDE1,          HFGITR, TLBIASIDE1, 1),
+       SR_FGT(OP_TLBI_VAE1,            HFGITR, TLBIVAE1, 1),
+       SR_FGT(OP_TLBI_VMALLE1,         HFGITR, TLBIVMALLE1, 1),
+       SR_FGT(OP_TLBI_RVAALE1,         HFGITR, TLBIRVAALE1, 1),
+       SR_FGT(OP_TLBI_RVALE1,          HFGITR, TLBIRVALE1, 1),
+       SR_FGT(OP_TLBI_RVAAE1,          HFGITR, TLBIRVAAE1, 1),
+       SR_FGT(OP_TLBI_RVAE1,           HFGITR, TLBIRVAE1, 1),
+       SR_FGT(OP_TLBI_RVAALE1IS,       HFGITR, TLBIRVAALE1IS, 1),
+       SR_FGT(OP_TLBI_RVALE1IS,        HFGITR, TLBIRVALE1IS, 1),
+       SR_FGT(OP_TLBI_RVAAE1IS,        HFGITR, TLBIRVAAE1IS, 1),
+       SR_FGT(OP_TLBI_RVAE1IS,         HFGITR, TLBIRVAE1IS, 1),
+       SR_FGT(OP_TLBI_VAALE1IS,        HFGITR, TLBIVAALE1IS, 1),
+       SR_FGT(OP_TLBI_VALE1IS,         HFGITR, TLBIVALE1IS, 1),
+       SR_FGT(OP_TLBI_VAAE1IS,         HFGITR, TLBIVAAE1IS, 1),
+       SR_FGT(OP_TLBI_ASIDE1IS,        HFGITR, TLBIASIDE1IS, 1),
+       SR_FGT(OP_TLBI_VAE1IS,          HFGITR, TLBIVAE1IS, 1),
+       SR_FGT(OP_TLBI_VMALLE1IS,       HFGITR, TLBIVMALLE1IS, 1),
+       SR_FGT(OP_TLBI_RVAALE1OS,       HFGITR, TLBIRVAALE1OS, 1),
+       SR_FGT(OP_TLBI_RVALE1OS,        HFGITR, TLBIRVALE1OS, 1),
+       SR_FGT(OP_TLBI_RVAAE1OS,        HFGITR, TLBIRVAAE1OS, 1),
+       SR_FGT(OP_TLBI_RVAE1OS,         HFGITR, TLBIRVAE1OS, 1),
+       SR_FGT(OP_TLBI_VAALE1OS,        HFGITR, TLBIVAALE1OS, 1),
+       SR_FGT(OP_TLBI_VALE1OS,         HFGITR, TLBIVALE1OS, 1),
+       SR_FGT(OP_TLBI_VAAE1OS,         HFGITR, TLBIVAAE1OS, 1),
+       SR_FGT(OP_TLBI_ASIDE1OS,        HFGITR, TLBIASIDE1OS, 1),
+       SR_FGT(OP_TLBI_VAE1OS,          HFGITR, TLBIVAE1OS, 1),
+       SR_FGT(OP_TLBI_VMALLE1OS,       HFGITR, TLBIVMALLE1OS, 1),
+       /* nXS variants must be checked against HCRX_EL2.FGTnXS */
+       SR_FGF(OP_TLBI_VAALE1NXS,       HFGITR, TLBIVAALE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VALE1NXS,        HFGITR, TLBIVALE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAAE1NXS,        HFGITR, TLBIVAAE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_ASIDE1NXS,       HFGITR, TLBIASIDE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAE1NXS,         HFGITR, TLBIVAE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VMALLE1NXS,      HFGITR, TLBIVMALLE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAALE1NXS,      HFGITR, TLBIRVAALE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVALE1NXS,       HFGITR, TLBIRVALE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAAE1NXS,       HFGITR, TLBIRVAAE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAE1NXS,        HFGITR, TLBIRVAE1, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAALE1ISNXS,    HFGITR, TLBIRVAALE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVALE1ISNXS,     HFGITR, TLBIRVALE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAAE1ISNXS,     HFGITR, TLBIRVAAE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAE1ISNXS,      HFGITR, TLBIRVAE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAALE1ISNXS,     HFGITR, TLBIVAALE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VALE1ISNXS,      HFGITR, TLBIVALE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAAE1ISNXS,      HFGITR, TLBIVAAE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_ASIDE1ISNXS,     HFGITR, TLBIASIDE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAE1ISNXS,       HFGITR, TLBIVAE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VMALLE1ISNXS,    HFGITR, TLBIVMALLE1IS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAALE1OSNXS,    HFGITR, TLBIRVAALE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVALE1OSNXS,     HFGITR, TLBIRVALE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAAE1OSNXS,     HFGITR, TLBIRVAAE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_RVAE1OSNXS,      HFGITR, TLBIRVAE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAALE1OSNXS,     HFGITR, TLBIVAALE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VALE1OSNXS,      HFGITR, TLBIVALE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAAE1OSNXS,      HFGITR, TLBIVAAE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_ASIDE1OSNXS,     HFGITR, TLBIASIDE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VAE1OSNXS,       HFGITR, TLBIVAE1OS, 1, HCRX_FGTnXS),
+       SR_FGF(OP_TLBI_VMALLE1OSNXS,    HFGITR, TLBIVMALLE1OS, 1, HCRX_FGTnXS),
+       SR_FGT(OP_AT_S1E1WP,            HFGITR, ATS1E1WP, 1),
+       SR_FGT(OP_AT_S1E1RP,            HFGITR, ATS1E1RP, 1),
+       SR_FGT(OP_AT_S1E0W,             HFGITR, ATS1E0W, 1),
+       SR_FGT(OP_AT_S1E0R,             HFGITR, ATS1E0R, 1),
+       SR_FGT(OP_AT_S1E1W,             HFGITR, ATS1E1W, 1),
+       SR_FGT(OP_AT_S1E1R,             HFGITR, ATS1E1R, 1),
+       SR_FGT(SYS_DC_ZVA,              HFGITR, DCZVA, 1),
+       SR_FGT(SYS_DC_GVA,              HFGITR, DCZVA, 1),
+       SR_FGT(SYS_DC_GZVA,             HFGITR, DCZVA, 1),
+       SR_FGT(SYS_DC_CIVAC,            HFGITR, DCCIVAC, 1),
+       SR_FGT(SYS_DC_CIGVAC,           HFGITR, DCCIVAC, 1),
+       SR_FGT(SYS_DC_CIGDVAC,          HFGITR, DCCIVAC, 1),
+       SR_FGT(SYS_DC_CVADP,            HFGITR, DCCVADP, 1),
+       SR_FGT(SYS_DC_CGVADP,           HFGITR, DCCVADP, 1),
+       SR_FGT(SYS_DC_CGDVADP,          HFGITR, DCCVADP, 1),
+       SR_FGT(SYS_DC_CVAP,             HFGITR, DCCVAP, 1),
+       SR_FGT(SYS_DC_CGVAP,            HFGITR, DCCVAP, 1),
+       SR_FGT(SYS_DC_CGDVAP,           HFGITR, DCCVAP, 1),
+       SR_FGT(SYS_DC_CVAU,             HFGITR, DCCVAU, 1),
+       SR_FGT(SYS_DC_CISW,             HFGITR, DCCISW, 1),
+       SR_FGT(SYS_DC_CIGSW,            HFGITR, DCCISW, 1),
+       SR_FGT(SYS_DC_CIGDSW,           HFGITR, DCCISW, 1),
+       SR_FGT(SYS_DC_CSW,              HFGITR, DCCSW, 1),
+       SR_FGT(SYS_DC_CGSW,             HFGITR, DCCSW, 1),
+       SR_FGT(SYS_DC_CGDSW,            HFGITR, DCCSW, 1),
+       SR_FGT(SYS_DC_ISW,              HFGITR, DCISW, 1),
+       SR_FGT(SYS_DC_IGSW,             HFGITR, DCISW, 1),
+       SR_FGT(SYS_DC_IGDSW,            HFGITR, DCISW, 1),
+       SR_FGT(SYS_DC_IVAC,             HFGITR, DCIVAC, 1),
+       SR_FGT(SYS_DC_IGVAC,            HFGITR, DCIVAC, 1),
+       SR_FGT(SYS_DC_IGDVAC,           HFGITR, DCIVAC, 1),
+       SR_FGT(SYS_IC_IVAU,             HFGITR, ICIVAU, 1),
+       SR_FGT(SYS_IC_IALLU,            HFGITR, ICIALLU, 1),
+       SR_FGT(SYS_IC_IALLUIS,          HFGITR, ICIALLUIS, 1),
+       /* HDFGRTR_EL2 */
+       SR_FGT(SYS_PMBIDR_EL1,          HDFGRTR, PMBIDR_EL1, 1),
+       SR_FGT(SYS_PMSNEVFR_EL1,        HDFGRTR, nPMSNEVFR_EL1, 0),
+       SR_FGT(SYS_BRBINF_EL1(0),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(1),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(2),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(3),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(4),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(5),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(6),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(7),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(8),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(9),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(10),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(11),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(12),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(13),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(14),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(15),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(16),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(17),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(18),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(19),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(20),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(21),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(22),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(23),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(24),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(25),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(26),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(27),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(28),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(29),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(30),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINF_EL1(31),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBINFINJ_EL1,       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(0),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(1),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(2),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(3),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(4),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(5),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(6),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(7),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(8),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(9),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(10),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(11),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(12),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(13),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(14),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(15),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(16),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(17),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(18),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(19),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(20),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(21),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(22),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(23),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(24),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(25),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(26),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(27),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(28),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(29),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(30),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRC_EL1(31),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBSRCINJ_EL1,       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(0),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(1),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(2),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(3),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(4),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(5),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(6),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(7),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(8),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(9),       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(10),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(11),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(12),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(13),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(14),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(15),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(16),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(17),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(18),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(19),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(20),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(21),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(22),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(23),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(24),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(25),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(26),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(27),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(28),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(29),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(30),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGT_EL1(31),      HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTGTINJ_EL1,       HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBTS_EL1,           HDFGRTR, nBRBDATA, 0),
+       SR_FGT(SYS_BRBCR_EL1,           HDFGRTR, nBRBCTL, 0),
+       SR_FGT(SYS_BRBFCR_EL1,          HDFGRTR, nBRBCTL, 0),
+       SR_FGT(SYS_BRBIDR0_EL1,         HDFGRTR, nBRBIDR, 0),
+       SR_FGT(SYS_PMCEID0_EL0,         HDFGRTR, PMCEIDn_EL0, 1),
+       SR_FGT(SYS_PMCEID1_EL0,         HDFGRTR, PMCEIDn_EL0, 1),
+       SR_FGT(SYS_PMUSERENR_EL0,       HDFGRTR, PMUSERENR_EL0, 1),
+       SR_FGT(SYS_TRBTRG_EL1,          HDFGRTR, TRBTRG_EL1, 1),
+       SR_FGT(SYS_TRBSR_EL1,           HDFGRTR, TRBSR_EL1, 1),
+       SR_FGT(SYS_TRBPTR_EL1,          HDFGRTR, TRBPTR_EL1, 1),
+       SR_FGT(SYS_TRBMAR_EL1,          HDFGRTR, TRBMAR_EL1, 1),
+       SR_FGT(SYS_TRBLIMITR_EL1,       HDFGRTR, TRBLIMITR_EL1, 1),
+       SR_FGT(SYS_TRBIDR_EL1,          HDFGRTR, TRBIDR_EL1, 1),
+       SR_FGT(SYS_TRBBASER_EL1,        HDFGRTR, TRBBASER_EL1, 1),
+       SR_FGT(SYS_TRCVICTLR,           HDFGRTR, TRCVICTLR, 1),
+       SR_FGT(SYS_TRCSTATR,            HDFGRTR, TRCSTATR, 1),
+       SR_FGT(SYS_TRCSSCSR(0),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(1),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(2),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(3),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(4),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(5),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(6),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSSCSR(7),         HDFGRTR, TRCSSCSRn, 1),
+       SR_FGT(SYS_TRCSEQSTR,           HDFGRTR, TRCSEQSTR, 1),
+       SR_FGT(SYS_TRCPRGCTLR,          HDFGRTR, TRCPRGCTLR, 1),
+       SR_FGT(SYS_TRCOSLSR,            HDFGRTR, TRCOSLSR, 1),
+       SR_FGT(SYS_TRCIMSPEC(0),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(1),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(2),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(3),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(4),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(5),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(6),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCIMSPEC(7),        HDFGRTR, TRCIMSPECn, 1),
+       SR_FGT(SYS_TRCDEVARCH,          HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCDEVID,            HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR0,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR1,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR2,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR3,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR4,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR5,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR6,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR7,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR8,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR9,             HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR10,            HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR11,            HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR12,            HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCIDR13,            HDFGRTR, TRCID, 1),
+       SR_FGT(SYS_TRCCNTVR(0),         HDFGRTR, TRCCNTVRn, 1),
+       SR_FGT(SYS_TRCCNTVR(1),         HDFGRTR, TRCCNTVRn, 1),
+       SR_FGT(SYS_TRCCNTVR(2),         HDFGRTR, TRCCNTVRn, 1),
+       SR_FGT(SYS_TRCCNTVR(3),         HDFGRTR, TRCCNTVRn, 1),
+       SR_FGT(SYS_TRCCLAIMCLR,         HDFGRTR, TRCCLAIM, 1),
+       SR_FGT(SYS_TRCCLAIMSET,         HDFGRTR, TRCCLAIM, 1),
+       SR_FGT(SYS_TRCAUXCTLR,          HDFGRTR, TRCAUXCTLR, 1),
+       SR_FGT(SYS_TRCAUTHSTATUS,       HDFGRTR, TRCAUTHSTATUS, 1),
+       SR_FGT(SYS_TRCACATR(0),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(1),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(2),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(3),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(4),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(5),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(6),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(7),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(8),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(9),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(10),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(11),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(12),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(13),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(14),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACATR(15),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(0),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(1),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(2),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(3),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(4),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(5),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(6),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(7),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(8),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(9),          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(10),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(11),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(12),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(13),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(14),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCACVR(15),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCBBCTLR,           HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCCCTLR,           HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCCTLR0,        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCCTLR1,        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(0),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(1),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(2),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(3),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(4),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(5),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(6),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCIDCVR(7),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTCTLR(0),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTCTLR(1),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTCTLR(2),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTCTLR(3),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTRLDVR(0),      HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTRLDVR(1),      HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTRLDVR(2),      HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCNTRLDVR(3),      HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCCONFIGR,          HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEVENTCTL0R,       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEVENTCTL1R,       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEXTINSELR(0),     HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEXTINSELR(1),     HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEXTINSELR(2),     HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCEXTINSELR(3),     HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCQCTLR,            HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(2),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(3),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(4),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(5),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(6),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(7),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(8),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(9),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(10),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(11),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(12),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(13),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(14),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(15),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(16),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(17),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(18),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(19),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(20),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(21),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(22),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(23),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(24),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(25),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(26),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(27),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(28),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(29),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(30),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSCTLR(31),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCRSR,              HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSEQEVR(0),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSEQEVR(1),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSEQEVR(2),        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSEQRSTEVR,        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(0),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(1),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(2),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(3),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(4),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(5),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(6),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSCCR(7),         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(0),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(1),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(2),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(3),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(4),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(5),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(6),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSSPCICR(7),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSTALLCTLR,        HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCSYNCPR,           HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCTRACEIDR,         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCTSCTLR,           HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVIIECTLR,         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVIPCSSCTLR,       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVISSCTLR,         HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCCTLR0,       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCCTLR1,       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(0),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(1),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(2),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(3),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(4),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(5),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(6),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_TRCVMIDCVR(7),       HDFGRTR, TRC, 1),
+       SR_FGT(SYS_PMSLATFR_EL1,        HDFGRTR, PMSLATFR_EL1, 1),
+       SR_FGT(SYS_PMSIRR_EL1,          HDFGRTR, PMSIRR_EL1, 1),
+       SR_FGT(SYS_PMSIDR_EL1,          HDFGRTR, PMSIDR_EL1, 1),
+       SR_FGT(SYS_PMSICR_EL1,          HDFGRTR, PMSICR_EL1, 1),
+       SR_FGT(SYS_PMSFCR_EL1,          HDFGRTR, PMSFCR_EL1, 1),
+       SR_FGT(SYS_PMSEVFR_EL1,         HDFGRTR, PMSEVFR_EL1, 1),
+       SR_FGT(SYS_PMSCR_EL1,           HDFGRTR, PMSCR_EL1, 1),
+       SR_FGT(SYS_PMBSR_EL1,           HDFGRTR, PMBSR_EL1, 1),
+       SR_FGT(SYS_PMBPTR_EL1,          HDFGRTR, PMBPTR_EL1, 1),
+       SR_FGT(SYS_PMBLIMITR_EL1,       HDFGRTR, PMBLIMITR_EL1, 1),
+       SR_FGT(SYS_PMMIR_EL1,           HDFGRTR, PMMIR_EL1, 1),
+       SR_FGT(SYS_PMSELR_EL0,          HDFGRTR, PMSELR_EL0, 1),
+       SR_FGT(SYS_PMOVSCLR_EL0,        HDFGRTR, PMOVS, 1),
+       SR_FGT(SYS_PMOVSSET_EL0,        HDFGRTR, PMOVS, 1),
+       SR_FGT(SYS_PMINTENCLR_EL1,      HDFGRTR, PMINTEN, 1),
+       SR_FGT(SYS_PMINTENSET_EL1,      HDFGRTR, PMINTEN, 1),
+       SR_FGT(SYS_PMCNTENCLR_EL0,      HDFGRTR, PMCNTEN, 1),
+       SR_FGT(SYS_PMCNTENSET_EL0,      HDFGRTR, PMCNTEN, 1),
+       SR_FGT(SYS_PMCCNTR_EL0,         HDFGRTR, PMCCNTR_EL0, 1),
+       SR_FGT(SYS_PMCCFILTR_EL0,       HDFGRTR, PMCCFILTR_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(0),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(1),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(2),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(3),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(4),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(5),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(6),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(7),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(8),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(9),   HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(10),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(11),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(12),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(13),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(14),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(15),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(16),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(17),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(18),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(19),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(20),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(21),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(22),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(23),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(24),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(25),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(26),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(27),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(28),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(29),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVTYPERn_EL0(30),  HDFGRTR, PMEVTYPERn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(0),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(1),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(2),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(3),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(4),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(5),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(6),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(7),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(8),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(9),    HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(10),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(11),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(12),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(13),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(14),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(15),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(16),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(17),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(18),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(19),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(20),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(21),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(22),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(23),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(24),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(25),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(26),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(27),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(28),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(29),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_PMEVCNTRn_EL0(30),   HDFGRTR, PMEVCNTRn_EL0, 1),
+       SR_FGT(SYS_OSDLR_EL1,           HDFGRTR, OSDLR_EL1, 1),
+       SR_FGT(SYS_OSECCR_EL1,          HDFGRTR, OSECCR_EL1, 1),
+       SR_FGT(SYS_OSLSR_EL1,           HDFGRTR, OSLSR_EL1, 1),
+       SR_FGT(SYS_DBGPRCR_EL1,         HDFGRTR, DBGPRCR_EL1, 1),
+       SR_FGT(SYS_DBGAUTHSTATUS_EL1,   HDFGRTR, DBGAUTHSTATUS_EL1, 1),
+       SR_FGT(SYS_DBGCLAIMSET_EL1,     HDFGRTR, DBGCLAIM, 1),
+       SR_FGT(SYS_DBGCLAIMCLR_EL1,     HDFGRTR, DBGCLAIM, 1),
+       SR_FGT(SYS_MDSCR_EL1,           HDFGRTR, MDSCR_EL1, 1),
+       /*
+        * The trap bits capture *64* debug registers per bit, but the
+        * ARM ARM only describes the encoding for the first 16, and
+        * we don't really support more than that anyway.
+        */
+       SR_FGT(SYS_DBGWVRn_EL1(0),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(1),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(2),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(3),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(4),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(5),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(6),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(7),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(8),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(9),      HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(10),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(11),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(12),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(13),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(14),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWVRn_EL1(15),     HDFGRTR, DBGWVRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(0),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(1),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(2),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(3),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(4),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(5),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(6),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(7),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(8),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(9),      HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(10),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(11),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(12),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(13),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(14),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGWCRn_EL1(15),     HDFGRTR, DBGWCRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(0),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(1),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(2),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(3),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(4),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(5),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(6),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(7),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(8),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(9),      HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(10),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(11),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(12),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(13),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(14),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBVRn_EL1(15),     HDFGRTR, DBGBVRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(0),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(1),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(2),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(3),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(4),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(5),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(6),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(7),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(8),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(9),      HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(10),     HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(11),     HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(12),     HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(13),     HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(14),     HDFGRTR, DBGBCRn_EL1, 1),
+       SR_FGT(SYS_DBGBCRn_EL1(15),     HDFGRTR, DBGBCRn_EL1, 1),
+       /*
+        * HDFGWTR_EL2
+        *
+        * Although HDFGRTR_EL2 and HDFGWTR_EL2 registers largely
+        * overlap in their bit assignment, there are a number of bits
+        * that are RES0 on one side, and an actual trap bit on the
+        * other.  The policy chosen here is to describe all the
+        * read-side mappings, and only the write-side mappings that
+        * differ from the read side, and the trap handler will pick
+        * the correct shadow register based on the access type.
+        */
+       SR_FGT(SYS_TRFCR_EL1,           HDFGWTR, TRFCR_EL1, 1),
+       SR_FGT(SYS_TRCOSLAR,            HDFGWTR, TRCOSLAR, 1),
+       SR_FGT(SYS_PMCR_EL0,            HDFGWTR, PMCR_EL0, 1),
+       SR_FGT(SYS_PMSWINC_EL0,         HDFGWTR, PMSWINC_EL0, 1),
+       SR_FGT(SYS_OSLAR_EL1,           HDFGWTR, OSLAR_EL1, 1),
+};
+
+/*
+ * Look up the trap configuration associated with a sysreg encoding in
+ * the sr_forward_xa xarray. An all-zero .val means no entry exists for
+ * this encoding (callers such as __check_nv_sr_forward() rely on this).
+ */
+static union trap_config get_trap_config(u32 sysreg)
+{
+       return (union trap_config) {
+               .val = xa_to_value(xa_load(&sr_forward_xa, sysreg)),
+       };
+}
+
+/*
+ * Report a malformed or conflicting entry in one of the static trap
+ * description tables: dump the table line number, both ends of the
+ * encoding range in (Op0, Op1, CRn, CRm, Op2) form, and the error code.
+ */
+static __init void print_nv_trap_error(const struct encoding_to_trap_config *tc,
+                                      const char *type, int err)
+{
+       kvm_err("%s line %d encoding range "
+               "(%d, %d, %d, %d, %d) - (%d, %d, %d, %d, %d) (err=%d)\n",
+               type, tc->line,
+               sys_reg_Op0(tc->encoding), sys_reg_Op1(tc->encoding),
+               sys_reg_CRn(tc->encoding), sys_reg_CRm(tc->encoding),
+               sys_reg_Op2(tc->encoding),
+               sys_reg_Op0(tc->end), sys_reg_Op1(tc->end),
+               sys_reg_CRn(tc->end), sys_reg_CRm(tc->end),
+               sys_reg_Op2(tc->end),
+               err);
+}
+
+/*
+ * Populate the sr_forward_xa xarray with the coarse grained
+ * (encoding_to_cgt) and fine grained (encoding_to_fgt) trap
+ * descriptors, and sanity-check the static tables.
+ *
+ * Returns 0 on success, a negative error code otherwise. On failure,
+ * the xarray is torn down before returning.
+ */
+int __init populate_nv_trap_config(void)
+{
+       int ret = 0;
+
+       /*
+        * trap_config values are stored directly as xarray value
+        * entries, so they must fit in a pointer and the group ids
+        * must fit in their respective bitfields.
+        */
+       BUILD_BUG_ON(sizeof(union trap_config) != sizeof(void *));
+       BUILD_BUG_ON(__NR_CGT_GROUP_IDS__ > BIT(TC_CGT_BITS));
+       BUILD_BUG_ON(__NR_FGT_GROUP_IDS__ > BIT(TC_FGT_BITS));
+       BUILD_BUG_ON(__NR_FG_FILTER_IDS__ > BIT(TC_FGF_BITS));
+
+       for (int i = 0; i < ARRAY_SIZE(encoding_to_cgt); i++) {
+               const struct encoding_to_trap_config *cgt = &encoding_to_cgt[i];
+               void *prev;
+
+               /* BIT(63) cannot be represented as an xarray value entry */
+               if (cgt->tc.val & BIT(63)) {
+                       kvm_err("CGT[%d] has MBZ bit set\n", i);
+                       ret = -EINVAL;
+               }
+
+               /*
+                * Note that duplicate detection is only performed for
+                * single-encoding entries, not for ranges.
+                */
+               if (cgt->encoding != cgt->end) {
+                       prev = xa_store_range(&sr_forward_xa,
+                                             cgt->encoding, cgt->end,
+                                             xa_mk_value(cgt->tc.val),
+                                             GFP_KERNEL);
+               } else {
+                       prev = xa_store(&sr_forward_xa, cgt->encoding,
+                                       xa_mk_value(cgt->tc.val), GFP_KERNEL);
+                       if (prev && !xa_is_err(prev)) {
+                               ret = -EINVAL;
+                               print_nv_trap_error(cgt, "Duplicate CGT", ret);
+                       }
+               }
+
+               if (xa_is_err(prev)) {
+                       ret = xa_err(prev);
+                       print_nv_trap_error(cgt, "Failed CGT insertion", ret);
+               }
+       }
+
+       kvm_info("nv: %ld coarse grained trap handlers\n",
+                ARRAY_SIZE(encoding_to_cgt));
+
+       /* The FGT table is only relevant if the HW implements FEAT_FGT */
+       if (!cpus_have_final_cap(ARM64_HAS_FGT))
+               goto check_mcb;
+
+       for (int i = 0; i < ARRAY_SIZE(encoding_to_fgt); i++) {
+               const struct encoding_to_trap_config *fgt = &encoding_to_fgt[i];
+               union trap_config tc;
+
+               if (fgt->tc.fgt >= __NR_FGT_GROUP_IDS__) {
+                       ret = -EINVAL;
+                       print_nv_trap_error(fgt, "Invalid FGT", ret);
+               }
+
+               tc = get_trap_config(fgt->encoding);
+
+               /* A given encoding can have at most one FGT descriptor */
+               if (tc.fgt) {
+                       ret = -EINVAL;
+                       print_nv_trap_error(fgt, "Duplicate FGT", ret);
+               }
+
+               /* Merge the FGT bits into any pre-existing CGT entry */
+               tc.val |= fgt->tc.val;
+               xa_store(&sr_forward_xa, fgt->encoding,
+                        xa_mk_value(tc.val), GFP_KERNEL);
+       }
+
+       kvm_info("nv: %ld fine grained trap handlers\n",
+                ARRAY_SIZE(encoding_to_fgt));
+
+check_mcb:
+       /*
+        * Multi-bit control combos must only reference plain control
+        * bits, keeping __compute_trap_behaviour()'s recursion bounded.
+        */
+       for (int id = __MULTIPLE_CONTROL_BITS__; id < __COMPLEX_CONDITIONS__; id++) {
+               const enum cgt_group_id *cgids;
+
+               cgids = coarse_control_combo[id - __MULTIPLE_CONTROL_BITS__];
+
+               for (int i = 0; cgids[i] != __RESERVED__; i++) {
+                       if (cgids[i] >= __MULTIPLE_CONTROL_BITS__) {
+                               kvm_err("Recursive MCB %d/%d\n", id, cgids[i]);
+                               ret = -EINVAL;
+                       }
+               }
+       }
+
+       if (ret)
+               xa_destroy(&sr_forward_xa);
+
+       return ret;
+}
+
+/*
+ * Evaluate a single coarse grained trap description: if the masked
+ * value of the vcpu's shadow register matches the expected value, the
+ * behaviour bits from @tb are added to the (otherwise handle-locally)
+ * result.
+ */
+static enum trap_behaviour get_behaviour(struct kvm_vcpu *vcpu,
+                                        const struct trap_bits *tb)
+{
+       enum trap_behaviour b = BEHAVE_HANDLE_LOCALLY;
+       u64 val;
+
+       val = __vcpu_sys_reg(vcpu, tb->index);
+       if ((val & tb->mask) == tb->value)
+               b |= tb->behaviour;
+
+       return b;
+}
+
+/*
+ * Accumulate the trap behaviour for CGT group @id into @b:
+ *
+ * - a plain control bit is evaluated via coarse_trap_bits[],
+ * - a multiple-control combo recurses over its component ids
+ *   (populate_nv_trap_config() rejects combos referencing anything
+ *   but plain bits, bounding the recursion depth),
+ * - anything above __COMPLEX_CONDITIONS__ is evaluated by a callback
+ *   from the ccc[] array.
+ */
+static enum trap_behaviour __compute_trap_behaviour(struct kvm_vcpu *vcpu,
+                                                   const enum cgt_group_id id,
+                                                   enum trap_behaviour b)
+{
+       switch (id) {
+               const enum cgt_group_id *cgids;
+
+       case __RESERVED__ ... __MULTIPLE_CONTROL_BITS__ - 1:
+               /* __RESERVED__ itself carries no behaviour */
+               if (likely(id != __RESERVED__))
+                       b |= get_behaviour(vcpu, &coarse_trap_bits[id]);
+               break;
+       case __MULTIPLE_CONTROL_BITS__ ... __COMPLEX_CONDITIONS__ - 1:
+               /* Yes, this is recursive. Don't do anything stupid. */
+               cgids = coarse_control_combo[id - __MULTIPLE_CONTROL_BITS__];
+               for (int i = 0; cgids[i] != __RESERVED__; i++)
+                       b |= __compute_trap_behaviour(vcpu, cgids[i], b);
+               break;
+       default:
+               /* ccc[] may be empty; only index it when it has entries */
+               if (ARRAY_SIZE(ccc))
+                       b |= ccc[id -  __COMPLEX_CONDITIONS__](vcpu);
+               break;
+       }
+
+       return b;
+}
+
+/*
+ * Compute the full trap behaviour for the CGT group encoded in @tc,
+ * starting from a handle-locally baseline.
+ */
+static enum trap_behaviour compute_trap_behaviour(struct kvm_vcpu *vcpu,
+                                                 const union trap_config tc)
+{
+       enum trap_behaviour b = BEHAVE_HANDLE_LOCALLY;
+
+       return __compute_trap_behaviour(vcpu, tc.cgt, b);
+}
+
+/*
+ * True if the FGT bit at position tc.bit in @val matches its trapping
+ * polarity (tc.pol), i.e. the fine grained trap is enabled.
+ */
+static bool check_fgt_bit(u64 val, const union trap_config tc)
+{
+       return ((val >> tc.bit) & 1) == tc.pol;
+}
+
+/*
+ * Read the vcpu's shadow copy of an EL2 register and clear the bits
+ * listed in the corresponding __<reg>_RES0 constant, so that RES0
+ * bits never appear set in subsequent trap-bit checks.
+ */
+#define sanitised_sys_reg(vcpu, reg)                   \
+       ({                                              \
+               u64 __val;                              \
+               __val = __vcpu_sys_reg(vcpu, reg);      \
+               __val &= ~__ ## reg ## _RES0;           \
+               (__val);                                \
+       })
+
+/*
+ * Decide whether a trapped sysreg access must be forwarded to the L1
+ * (guest) hypervisor rather than handled by KVM: first check the
+ * applicable fine grained trap register (if any), then fall back to
+ * the coarse grained trap behaviour.
+ *
+ * Returns true if the exception was re-injected into the nested
+ * hypervisor, false if KVM should handle the access itself.
+ */
+bool __check_nv_sr_forward(struct kvm_vcpu *vcpu)
+{
+       union trap_config tc;
+       enum trap_behaviour b;
+       bool is_read;
+       u32 sysreg;
+       u64 esr, val;
+
+       /* Nothing to forward without NV, or when the guest runs at vEL2 */
+       if (!vcpu_has_nv(vcpu) || is_hyp_ctxt(vcpu))
+               return false;
+
+       esr = kvm_vcpu_get_esr(vcpu);
+       sysreg = esr_sys64_to_sysreg(esr);
+       is_read = (esr & ESR_ELx_SYS64_ISS_DIR_MASK) == ESR_ELx_SYS64_ISS_DIR_READ;
+
+       tc = get_trap_config(sysreg);
+
+       /*
+        * A value of 0 for the whole entry means that we know nothing
+        * for this sysreg, and that it cannot be re-injected into the
+        * nested hypervisor. In this situation, let's cut it short.
+        *
+        * Note that ultimately, we could also make use of the xarray
+        * to store the index of the sysreg in the local descriptor
+        * array, avoiding another search... Hint, hint...
+        */
+       if (!tc.val)
+               return false;
+
+       /* Pick the FGT shadow register matching the group and direction */
+       switch ((enum fgt_group_id)tc.fgt) {
+       case __NO_FGT_GROUP__:
+               break;
+
+       case HFGxTR_GROUP:
+               if (is_read)
+                       val = sanitised_sys_reg(vcpu, HFGRTR_EL2);
+               else
+                       val = sanitised_sys_reg(vcpu, HFGWTR_EL2);
+               break;
+
+       case HDFGRTR_GROUP:
+       case HDFGWTR_GROUP:
+               if (is_read)
+                       val = sanitised_sys_reg(vcpu, HDFGRTR_EL2);
+               else
+                       val = sanitised_sys_reg(vcpu, HDFGWTR_EL2);
+               break;
+
+       case HFGITR_GROUP:
+               val = sanitised_sys_reg(vcpu, HFGITR_EL2);
+               /* Apply any additional filter attached to this encoding */
+               switch (tc.fgf) {
+                       u64 tmp;
+
+               case __NO_FGF__:
+                       break;
+
+               case HCRX_FGTnXS:
+                       /* HCRX_EL2.FGTnXS set: skip the FGT check entirely */
+                       tmp = sanitised_sys_reg(vcpu, HCRX_EL2);
+                       if (tmp & HCRX_EL2_FGTnXS)
+                               tc.fgt = __NO_FGT_GROUP__;
+               }
+               break;
+
+       case __NR_FGT_GROUP_IDS__:
+               /* Something is really wrong, bail out */
+               WARN_ONCE(1, "__NR_FGT_GROUP_IDS__");
+               return false;
+       }
+
+       if (tc.fgt != __NO_FGT_GROUP__ && check_fgt_bit(val, tc))
+               goto inject;
+
+       /* No FGT hit: fall back to the coarse grained behaviour */
+       b = compute_trap_behaviour(vcpu, tc);
+
+       if (((b & BEHAVE_FORWARD_READ) && is_read) ||
+           ((b & BEHAVE_FORWARD_WRITE) && !is_read))
+               goto inject;
+
+       return false;
+
+inject:
+       trace_kvm_forward_sysreg_trap(vcpu, sysreg, is_read);
+
+       kvm_inject_nested_sync(vcpu, kvm_vcpu_get_esr(vcpu));
+       return true;
+}
+
 static u64 kvm_check_illegal_exception_return(struct kvm_vcpu *vcpu, u64 spsr)
 {
        u64 mode = spsr & PSR_MODE_MASK;
index 20280a5..95f6945 100644 (file)
@@ -884,21 +884,6 @@ u32 __attribute_const__ kvm_target_cpu(void)
        return KVM_ARM_TARGET_GENERIC_V8;
 }
 
-void kvm_vcpu_preferred_target(struct kvm_vcpu_init *init)
-{
-       u32 target = kvm_target_cpu();
-
-       memset(init, 0, sizeof(*init));
-
-       /*
-        * For now, we don't return any features.
-        * In future, we might use features to return target
-        * specific features available for the preferred
-        * target type.
-        */
-       init->target = (__u32)target;
-}
-
 int kvm_arch_vcpu_ioctl_get_fpu(struct kvm_vcpu *vcpu, struct kvm_fpu *fpu)
 {
        return -EINVAL;
index 6dcd660..617ae6d 100644 (file)
@@ -222,7 +222,33 @@ static int kvm_handle_eret(struct kvm_vcpu *vcpu)
        if (kvm_vcpu_get_esr(vcpu) & ESR_ELx_ERET_ISS_ERET)
                return kvm_handle_ptrauth(vcpu);
 
-       kvm_emulate_nested_eret(vcpu);
+       /*
+        * If we got here, two possibilities:
+        *
+        * - the guest is in EL2, and we need to fully emulate ERET
+        *
+        * - the guest is in EL1, and we need to reinject the
+        *   exception into the L1 hypervisor.
+        *
+        * If KVM ever traps ERET for its own use, we'll have to
+        * revisit this.
+        */
+       if (is_hyp_ctxt(vcpu))
+               kvm_emulate_nested_eret(vcpu);
+       else
+               kvm_inject_nested_sync(vcpu, kvm_vcpu_get_esr(vcpu));
+
+       return 1;
+}
+
+static int handle_svc(struct kvm_vcpu *vcpu)
+{
+       /*
+        * So far, SVC traps only for NV via HFGITR_EL2. A SVC from a
+        * 32bit guest would be caught by vcpu_mode_is_bad_32bit(), so
+        * we should only have to deal with a 64 bit exception.
+        */
+       kvm_inject_nested_sync(vcpu, kvm_vcpu_get_esr(vcpu));
        return 1;
 }
 
@@ -239,6 +265,7 @@ static exit_handle_fn arm_exit_handlers[] = {
        [ESR_ELx_EC_SMC32]      = handle_smc,
        [ESR_ELx_EC_HVC64]      = handle_hvc,
        [ESR_ELx_EC_SMC64]      = handle_smc,
+       [ESR_ELx_EC_SVC64]      = handle_svc,
        [ESR_ELx_EC_SYS64]      = kvm_handle_sys_reg,
        [ESR_ELx_EC_SVE]        = handle_sve,
        [ESR_ELx_EC_ERET]       = kvm_handle_eret,
index 4bddb85..3acf6d7 100644 (file)
@@ -70,20 +70,26 @@ static inline void __activate_traps_fpsimd32(struct kvm_vcpu *vcpu)
        }
 }
 
-static inline bool __hfgxtr_traps_required(void)
-{
-       if (cpus_have_final_cap(ARM64_SME))
-               return true;
-
-       if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
-               return true;
+#define compute_clr_set(vcpu, reg, clr, set)                           \
+       do {                                                            \
+               u64 hfg;                                                \
+               hfg = __vcpu_sys_reg(vcpu, reg) & ~__ ## reg ## _RES0;  \
+               set |= hfg & __ ## reg ## _MASK;                        \
+               clr |= ~hfg & __ ## reg ## _nMASK;                      \
+       } while(0)
 
-       return false;
-}
 
-static inline void __activate_traps_hfgxtr(void)
+static inline void __activate_traps_hfgxtr(struct kvm_vcpu *vcpu)
 {
+       struct kvm_cpu_context *hctxt = &this_cpu_ptr(&kvm_host_data)->host_ctxt;
        u64 r_clr = 0, w_clr = 0, r_set = 0, w_set = 0, tmp;
+       u64 r_val, w_val;
+
+       if (!cpus_have_final_cap(ARM64_HAS_FGT))
+               return;
+
+       ctxt_sys_reg(hctxt, HFGRTR_EL2) = read_sysreg_s(SYS_HFGRTR_EL2);
+       ctxt_sys_reg(hctxt, HFGWTR_EL2) = read_sysreg_s(SYS_HFGWTR_EL2);
 
        if (cpus_have_final_cap(ARM64_SME)) {
                tmp = HFGxTR_EL2_nSMPRI_EL1_MASK | HFGxTR_EL2_nTPIDR2_EL0_MASK;
@@ -98,26 +104,72 @@ static inline void __activate_traps_hfgxtr(void)
        if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
                w_set |= HFGxTR_EL2_TCR_EL1_MASK;
 
-       sysreg_clear_set_s(SYS_HFGRTR_EL2, r_clr, r_set);
-       sysreg_clear_set_s(SYS_HFGWTR_EL2, w_clr, w_set);
+       if (vcpu_has_nv(vcpu) && !is_hyp_ctxt(vcpu)) {
+               compute_clr_set(vcpu, HFGRTR_EL2, r_clr, r_set);
+               compute_clr_set(vcpu, HFGWTR_EL2, w_clr, w_set);
+       }
+
+       /* The default is not to trap anything but ACCDATA_EL1 */
+       r_val = __HFGRTR_EL2_nMASK & ~HFGxTR_EL2_nACCDATA_EL1;
+       r_val |= r_set;
+       r_val &= ~r_clr;
+
+       w_val = __HFGWTR_EL2_nMASK & ~HFGxTR_EL2_nACCDATA_EL1;
+       w_val |= w_set;
+       w_val &= ~w_clr;
+
+       write_sysreg_s(r_val, SYS_HFGRTR_EL2);
+       write_sysreg_s(w_val, SYS_HFGWTR_EL2);
+
+       if (!vcpu_has_nv(vcpu) || is_hyp_ctxt(vcpu))
+               return;
+
+       ctxt_sys_reg(hctxt, HFGITR_EL2) = read_sysreg_s(SYS_HFGITR_EL2);
+
+       r_set = r_clr = 0;
+       compute_clr_set(vcpu, HFGITR_EL2, r_clr, r_set);
+       r_val = __HFGITR_EL2_nMASK;
+       r_val |= r_set;
+       r_val &= ~r_clr;
+
+       write_sysreg_s(r_val, SYS_HFGITR_EL2);
+
+       ctxt_sys_reg(hctxt, HDFGRTR_EL2) = read_sysreg_s(SYS_HDFGRTR_EL2);
+       ctxt_sys_reg(hctxt, HDFGWTR_EL2) = read_sysreg_s(SYS_HDFGWTR_EL2);
+
+       r_clr = r_set = w_clr = w_set = 0;
+
+       compute_clr_set(vcpu, HDFGRTR_EL2, r_clr, r_set);
+       compute_clr_set(vcpu, HDFGWTR_EL2, w_clr, w_set);
+
+       r_val = __HDFGRTR_EL2_nMASK;
+       r_val |= r_set;
+       r_val &= ~r_clr;
+
+       w_val = __HDFGWTR_EL2_nMASK;
+       w_val |= w_set;
+       w_val &= ~w_clr;
+
+       write_sysreg_s(r_val, SYS_HDFGRTR_EL2);
+       write_sysreg_s(w_val, SYS_HDFGWTR_EL2);
 }
 
-static inline void __deactivate_traps_hfgxtr(void)
+static inline void __deactivate_traps_hfgxtr(struct kvm_vcpu *vcpu)
 {
-       u64 r_clr = 0, w_clr = 0, r_set = 0, w_set = 0, tmp;
+       struct kvm_cpu_context *hctxt = &this_cpu_ptr(&kvm_host_data)->host_ctxt;
 
-       if (cpus_have_final_cap(ARM64_SME)) {
-               tmp = HFGxTR_EL2_nSMPRI_EL1_MASK | HFGxTR_EL2_nTPIDR2_EL0_MASK;
+       if (!cpus_have_final_cap(ARM64_HAS_FGT))
+               return;
 
-               r_set |= tmp;
-               w_set |= tmp;
-       }
+       write_sysreg_s(ctxt_sys_reg(hctxt, HFGRTR_EL2), SYS_HFGRTR_EL2);
+       write_sysreg_s(ctxt_sys_reg(hctxt, HFGWTR_EL2), SYS_HFGWTR_EL2);
 
-       if (cpus_have_final_cap(ARM64_WORKAROUND_AMPERE_AC03_CPU_38))
-               w_clr |= HFGxTR_EL2_TCR_EL1_MASK;
+       if (!vcpu_has_nv(vcpu) || is_hyp_ctxt(vcpu))
+               return;
 
-       sysreg_clear_set_s(SYS_HFGRTR_EL2, r_clr, r_set);
-       sysreg_clear_set_s(SYS_HFGWTR_EL2, w_clr, w_set);
+       write_sysreg_s(ctxt_sys_reg(hctxt, HFGITR_EL2), SYS_HFGITR_EL2);
+       write_sysreg_s(ctxt_sys_reg(hctxt, HDFGRTR_EL2), SYS_HDFGRTR_EL2);
+       write_sysreg_s(ctxt_sys_reg(hctxt, HDFGWTR_EL2), SYS_HDFGWTR_EL2);
 }
 
 static inline void __activate_traps_common(struct kvm_vcpu *vcpu)
@@ -145,8 +197,21 @@ static inline void __activate_traps_common(struct kvm_vcpu *vcpu)
        vcpu->arch.mdcr_el2_host = read_sysreg(mdcr_el2);
        write_sysreg(vcpu->arch.mdcr_el2, mdcr_el2);
 
-       if (__hfgxtr_traps_required())
-               __activate_traps_hfgxtr();
+       if (cpus_have_final_cap(ARM64_HAS_HCX)) {
+               u64 hcrx = HCRX_GUEST_FLAGS;
+               if (vcpu_has_nv(vcpu) && !is_hyp_ctxt(vcpu)) {
+                       u64 clr = 0, set = 0;
+
+                       compute_clr_set(vcpu, HCRX_EL2, clr, set);
+
+                       hcrx |= set;
+                       hcrx &= ~clr;
+               }
+
+               write_sysreg_s(hcrx, SYS_HCRX_EL2);
+       }
+
+       __activate_traps_hfgxtr(vcpu);
 }
 
 static inline void __deactivate_traps_common(struct kvm_vcpu *vcpu)
@@ -162,8 +227,10 @@ static inline void __deactivate_traps_common(struct kvm_vcpu *vcpu)
                vcpu_clear_flag(vcpu, PMUSERENR_ON_CPU);
        }
 
-       if (__hfgxtr_traps_required())
-               __deactivate_traps_hfgxtr();
+       if (cpus_have_final_cap(ARM64_HAS_HCX))
+               write_sysreg_s(HCRX_HOST_FLAGS, SYS_HCRX_EL2);
+
+       __deactivate_traps_hfgxtr(vcpu);
 }
 
 static inline void ___activate_traps(struct kvm_vcpu *vcpu)
@@ -177,9 +244,6 @@ static inline void ___activate_traps(struct kvm_vcpu *vcpu)
 
        if (cpus_have_final_cap(ARM64_HAS_RAS_EXTN) && (hcr & HCR_VSE))
                write_sysreg_s(vcpu->arch.vsesr_el2, SYS_VSESR_EL2);
-
-       if (cpus_have_final_cap(ARM64_HAS_HCX))
-               write_sysreg_s(HCRX_GUEST_FLAGS, SYS_HCRX_EL2);
 }
 
 static inline void ___deactivate_traps(struct kvm_vcpu *vcpu)
@@ -194,9 +258,6 @@ static inline void ___deactivate_traps(struct kvm_vcpu *vcpu)
                vcpu->arch.hcr_el2 &= ~HCR_VSE;
                vcpu->arch.hcr_el2 |= read_sysreg(hcr_el2) & HCR_VSE;
        }
-
-       if (cpus_have_final_cap(ARM64_HAS_HCX))
-               write_sysreg_s(HCRX_HOST_FLAGS, SYS_HCRX_EL2);
 }
 
 static inline bool __populate_fault_info(struct kvm_vcpu *vcpu)
index 0a62710..b9caac3 100644 (file)
@@ -236,7 +236,7 @@ static void early_exit_filter(struct kvm_vcpu *vcpu, u64 *exit_code)
                 * KVM_ARM_VCPU_INIT, however, this is likely not possible for
                 * protected VMs.
                 */
-               vcpu->arch.target = -1;
+               vcpu_clear_flag(vcpu, VCPU_INITIALIZED);
                *exit_code &= BIT(ARM_EXIT_WITH_SERROR_BIT);
                *exit_code |= ARM_EXCEPTION_IL;
        }
index 315354d..042695a 100644 (file)
@@ -71,8 +71,9 @@ void access_nested_id_reg(struct kvm_vcpu *v, struct sys_reg_params *p,
                break;
 
        case SYS_ID_AA64MMFR0_EL1:
-               /* Hide ECV, FGT, ExS, Secure Memory */
-               val &= ~(GENMASK_ULL(63, 43)            |
+               /* Hide ECV, ExS, Secure Memory */
+               val &= ~(NV_FTR(MMFR0, ECV)             |
+                        NV_FTR(MMFR0, EXS)             |
                         NV_FTR(MMFR0, TGRAN4_2)        |
                         NV_FTR(MMFR0, TGRAN16_2)       |
                         NV_FTR(MMFR0, TGRAN64_2)       |
@@ -116,7 +117,8 @@ void access_nested_id_reg(struct kvm_vcpu *v, struct sys_reg_params *p,
                break;
 
        case SYS_ID_AA64MMFR1_EL1:
-               val &= (NV_FTR(MMFR1, PAN)      |
+               val &= (NV_FTR(MMFR1, HCX)      |
+                       NV_FTR(MMFR1, PAN)      |
                        NV_FTR(MMFR1, LO)       |
                        NV_FTR(MMFR1, HPDS)     |
                        NV_FTR(MMFR1, VH)       |
@@ -124,8 +126,7 @@ void access_nested_id_reg(struct kvm_vcpu *v, struct sys_reg_params *p,
                break;
 
        case SYS_ID_AA64MMFR2_EL1:
-               val &= ~(NV_FTR(MMFR2, EVT)     |
-                        NV_FTR(MMFR2, BBM)     |
+               val &= ~(NV_FTR(MMFR2, BBM)     |
                         NV_FTR(MMFR2, TTL)     |
                         GENMASK_ULL(47, 44)    |
                         NV_FTR(MMFR2, ST)      |
index bc8556b..7a65a35 100644 (file)
@@ -248,21 +248,16 @@ int kvm_reset_vcpu(struct kvm_vcpu *vcpu)
                }
        }
 
-       switch (vcpu->arch.target) {
-       default:
-               if (vcpu_el1_is_32bit(vcpu)) {
-                       pstate = VCPU_RESET_PSTATE_SVC;
-               } else if (vcpu_has_nv(vcpu)) {
-                       pstate = VCPU_RESET_PSTATE_EL2;
-               } else {
-                       pstate = VCPU_RESET_PSTATE_EL1;
-               }
-
-               if (kvm_vcpu_has_pmu(vcpu) && !kvm_arm_support_pmu_v3()) {
-                       ret = -EINVAL;
-                       goto out;
-               }
-               break;
+       if (vcpu_el1_is_32bit(vcpu))
+               pstate = VCPU_RESET_PSTATE_SVC;
+       else if (vcpu_has_nv(vcpu))
+               pstate = VCPU_RESET_PSTATE_EL2;
+       else
+               pstate = VCPU_RESET_PSTATE_EL1;
+
+       if (kvm_vcpu_has_pmu(vcpu) && !kvm_arm_support_pmu_v3()) {
+               ret = -EINVAL;
+               goto out;
        }
 
        /* Reset core registers */
index 2ca2973..e92ec81 100644 (file)
@@ -2151,6 +2151,8 @@ static const struct sys_reg_desc sys_reg_descs[] = {
        { SYS_DESC(SYS_CONTEXTIDR_EL1), access_vm_reg, reset_val, CONTEXTIDR_EL1, 0 },
        { SYS_DESC(SYS_TPIDR_EL1), NULL, reset_unknown, TPIDR_EL1 },
 
+       { SYS_DESC(SYS_ACCDATA_EL1), undef_access },
+
        { SYS_DESC(SYS_SCXTNUM_EL1), undef_access },
 
        { SYS_DESC(SYS_CNTKCTL_EL1), NULL, reset_val, CNTKCTL_EL1, 0},
@@ -2365,8 +2367,13 @@ static const struct sys_reg_desc sys_reg_descs[] = {
        EL2_REG(MDCR_EL2, access_rw, reset_val, 0),
        EL2_REG(CPTR_EL2, access_rw, reset_val, CPTR_NVHE_EL2_RES1),
        EL2_REG(HSTR_EL2, access_rw, reset_val, 0),
+       EL2_REG(HFGRTR_EL2, access_rw, reset_val, 0),
+       EL2_REG(HFGWTR_EL2, access_rw, reset_val, 0),
+       EL2_REG(HFGITR_EL2, access_rw, reset_val, 0),
        EL2_REG(HACR_EL2, access_rw, reset_val, 0),
 
+       EL2_REG(HCRX_EL2, access_rw, reset_val, 0),
+
        EL2_REG(TTBR0_EL2, access_rw, reset_val, 0),
        EL2_REG(TTBR1_EL2, access_rw, reset_val, 0),
        EL2_REG(TCR_EL2, access_rw, reset_val, TCR_EL2_RES1),
@@ -2374,6 +2381,8 @@ static const struct sys_reg_desc sys_reg_descs[] = {
        EL2_REG(VTCR_EL2, access_rw, reset_val, 0),
 
        { SYS_DESC(SYS_DACR32_EL2), NULL, reset_unknown, DACR32_EL2 },
+       EL2_REG(HDFGRTR_EL2, access_rw, reset_val, 0),
+       EL2_REG(HDFGWTR_EL2, access_rw, reset_val, 0),
        EL2_REG(SPSR_EL2, access_rw, reset_val, 0),
        EL2_REG(ELR_EL2, access_rw, reset_val, 0),
        { SYS_DESC(SYS_SP_EL1), access_sp_el1},
@@ -3170,6 +3179,9 @@ int kvm_handle_sys_reg(struct kvm_vcpu *vcpu)
 
        trace_kvm_handle_sys_reg(esr);
 
+       if (__check_nv_sr_forward(vcpu))
+               return 1;
+
        params = esr_sys64_to_params(esr);
        params.regval = vcpu_get_reg(vcpu, Rt);
 
@@ -3587,5 +3599,8 @@ int __init kvm_sys_reg_table_init(void)
        if (!first_idreg)
                return -EINVAL;
 
+       if (kvm_get_mode() == KVM_MODE_NV)
+               return populate_nv_trap_config();
+
        return 0;
 }
index 6ce5c02..8ad5310 100644 (file)
@@ -364,6 +364,32 @@ TRACE_EVENT(kvm_inject_nested_exception,
                  __entry->hcr_el2)
 );
 
+TRACE_EVENT(kvm_forward_sysreg_trap,
+           TP_PROTO(struct kvm_vcpu *vcpu, u32 sysreg, bool is_read),
+           TP_ARGS(vcpu, sysreg, is_read),
+
+           TP_STRUCT__entry(
+               __field(u64,    pc)
+               __field(u32,    sysreg)
+               __field(bool,   is_read)
+           ),
+
+           TP_fast_assign(
+               __entry->pc = *vcpu_pc(vcpu);
+               __entry->sysreg = sysreg;
+               __entry->is_read = is_read;
+           ),
+
+           TP_printk("%llx %c (%d,%d,%d,%d,%d)",
+                     __entry->pc,
+                     __entry->is_read ? 'R' : 'W',
+                     sys_reg_Op0(__entry->sysreg),
+                     sys_reg_Op1(__entry->sysreg),
+                     sys_reg_CRn(__entry->sysreg),
+                     sys_reg_CRm(__entry->sysreg),
+                     sys_reg_Op2(__entry->sysreg))
+);
+
 #endif /* _TRACE_ARM_ARM64_KVM_H */
 
 #undef TRACE_INCLUDE_PATH
index c80ed4f..c3f06fd 100644 (file)
@@ -26,6 +26,7 @@ HAS_ECV
 HAS_ECV_CNTPOFF
 HAS_EPAN
 HAS_EVT
+HAS_FGT
 HAS_GENERIC_AUTH
 HAS_GENERIC_AUTH_ARCH_QARMA3
 HAS_GENERIC_AUTH_ARCH_QARMA5
index 65866bf..2517ef7 100644 (file)
@@ -2156,6 +2156,135 @@ Field   1       ICIALLU
 Field  0       ICIALLUIS
 EndSysreg
 
+Sysreg HDFGRTR_EL2     3       4       3       1       4
+Field  63      PMBIDR_EL1
+Field  62      nPMSNEVFR_EL1
+Field  61      nBRBDATA
+Field  60      nBRBCTL
+Field  59      nBRBIDR
+Field  58      PMCEIDn_EL0
+Field  57      PMUSERENR_EL0
+Field  56      TRBTRG_EL1
+Field  55      TRBSR_EL1
+Field  54      TRBPTR_EL1
+Field  53      TRBMAR_EL1
+Field  52      TRBLIMITR_EL1
+Field  51      TRBIDR_EL1
+Field  50      TRBBASER_EL1
+Res0   49
+Field  48      TRCVICTLR
+Field  47      TRCSTATR
+Field  46      TRCSSCSRn
+Field  45      TRCSEQSTR
+Field  44      TRCPRGCTLR
+Field  43      TRCOSLSR
+Res0   42
+Field  41      TRCIMSPECn
+Field  40      TRCID
+Res0   39:38
+Field  37      TRCCNTVRn
+Field  36      TRCCLAIM
+Field  35      TRCAUXCTLR
+Field  34      TRCAUTHSTATUS
+Field  33      TRC
+Field  32      PMSLATFR_EL1
+Field  31      PMSIRR_EL1
+Field  30      PMSIDR_EL1
+Field  29      PMSICR_EL1
+Field  28      PMSFCR_EL1
+Field  27      PMSEVFR_EL1
+Field  26      PMSCR_EL1
+Field  25      PMBSR_EL1
+Field  24      PMBPTR_EL1
+Field  23      PMBLIMITR_EL1
+Field  22      PMMIR_EL1
+Res0   21:20
+Field  19      PMSELR_EL0
+Field  18      PMOVS
+Field  17      PMINTEN
+Field  16      PMCNTEN
+Field  15      PMCCNTR_EL0
+Field  14      PMCCFILTR_EL0
+Field  13      PMEVTYPERn_EL0
+Field  12      PMEVCNTRn_EL0
+Field  11      OSDLR_EL1
+Field  10      OSECCR_EL1
+Field  9       OSLSR_EL1
+Res0   8
+Field  7       DBGPRCR_EL1
+Field  6       DBGAUTHSTATUS_EL1
+Field  5       DBGCLAIM
+Field  4       MDSCR_EL1
+Field  3       DBGWVRn_EL1
+Field  2       DBGWCRn_EL1
+Field  1       DBGBVRn_EL1
+Field  0       DBGBCRn_EL1
+EndSysreg
+
+Sysreg HDFGWTR_EL2     3       4       3       1       5
+Res0   63
+Field  62      nPMSNEVFR_EL1
+Field  61      nBRBDATA
+Field  60      nBRBCTL
+Res0   59:58
+Field  57      PMUSERENR_EL0
+Field  56      TRBTRG_EL1
+Field  55      TRBSR_EL1
+Field  54      TRBPTR_EL1
+Field  53      TRBMAR_EL1
+Field  52      TRBLIMITR_EL1
+Res0   51
+Field  50      TRBBASER_EL1
+Field  49      TRFCR_EL1
+Field  48      TRCVICTLR
+Res0   47
+Field  46      TRCSSCSRn
+Field  45      TRCSEQSTR
+Field  44      TRCPRGCTLR
+Res0   43
+Field  42      TRCOSLAR
+Field  41      TRCIMSPECn
+Res0   40:38
+Field  37      TRCCNTVRn
+Field  36      TRCCLAIM
+Field  35      TRCAUXCTLR
+Res0   34
+Field  33      TRC
+Field  32      PMSLATFR_EL1
+Field  31      PMSIRR_EL1
+Res0   30
+Field  29      PMSICR_EL1
+Field  28      PMSFCR_EL1
+Field  27      PMSEVFR_EL1
+Field  26      PMSCR_EL1
+Field  25      PMBSR_EL1
+Field  24      PMBPTR_EL1
+Field  23      PMBLIMITR_EL1
+Res0   22
+Field  21      PMCR_EL0
+Field  20      PMSWINC_EL0
+Field  19      PMSELR_EL0
+Field  18      PMOVS
+Field  17      PMINTEN
+Field  16      PMCNTEN
+Field  15      PMCCNTR_EL0
+Field  14      PMCCFILTR_EL0
+Field  13      PMEVTYPERn_EL0
+Field  12      PMEVCNTRn_EL0
+Field  11      OSDLR_EL1
+Field  10      OSECCR_EL1
+Res0   9
+Field  8       OSLAR_EL1
+Field  7       DBGPRCR_EL1
+Res0   6
+Field  5       DBGCLAIM
+Field  4       MDSCR_EL1
+Field  3       DBGWVRn_EL1
+Field  2       DBGWCRn_EL1
+Field  1       DBGBVRn_EL1
+Field  0       DBGBCRn_EL1
+EndSysreg
+
 Sysreg ZCR_EL2 3       4       1       2       0
 Fields ZCR_ELx
 EndSysreg