arm64/sysreg: Add _EL1 into ID_AA64MMFR2_EL1 definition names
author Mark Brown <broonie@kernel.org>
Mon, 5 Sep 2022 22:54:02 +0000 (23:54 +0100)
committer Catalin Marinas <catalin.marinas@arm.com>
Fri, 9 Sep 2022 09:59:02 +0000 (10:59 +0100)
Normally we include the full register name in the defines for fields within
registers, but this has not been followed for ID registers. In preparation
for the automatic generation of these defines, add the _EL1 suffix to the
defines for ID_AA64MMFR2_EL1 to follow the convention. No functional changes.
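
As an illustration only (not part of the patch), a feature check written
against the renamed defines reads as below. The helper name cpu_has_e0pd()
is hypothetical; the sketch assumes the existing read_sysreg_s() and
cpuid_feature_extract_unsigned_field() helpers from <asm/sysreg.h> and
<asm/cpufeature.h>, and mirrors the kaslr_requires_kpti() hunk in
cpufeature.c further down:

	/* Hypothetical helper, shown only to illustrate the new naming. */
	static bool cpu_has_e0pd(void)
	{
		u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);

		/* Field defines now spell out the full register name. */
		return cpuid_feature_extract_unsigned_field(mmfr2,
					ID_AA64MMFR2_EL1_E0PD_SHIFT);
	}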

Signed-off-by: Mark Brown <broonie@kernel.org>
Reviewed-by: Kristina Martsenko <kristina.martsenko@arm.com>
Link: https://lore.kernel.org/r/20220905225425.1871461-6-broonie@kernel.org
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
arch/arm64/include/asm/assembler.h
arch/arm64/include/asm/sysreg.h
arch/arm64/kernel/cpufeature.c
arch/arm64/kernel/head.S
arch/arm64/kvm/hyp/include/nvhe/fixed_config.h

diff --git a/arch/arm64/include/asm/assembler.h b/arch/arm64/include/asm/assembler.h
index a6e7061..0d5ced9 100644
@@ -612,7 +612,7 @@ alternative_endif
        .macro  offset_ttbr1, ttbr, tmp
 #ifdef CONFIG_ARM64_VA_BITS_52
        mrs_s   \tmp, SYS_ID_AA64MMFR2_EL1
-       and     \tmp, \tmp, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
+       and     \tmp, \tmp, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
        cbnz    \tmp, .Lskipoffs_\@
        orr     \ttbr, \ttbr, #TTBR1_BADDR_4852_OFFSET
 .Lskipoffs_\@ :
diff --git a/arch/arm64/include/asm/sysreg.h b/arch/arm64/include/asm/sysreg.h
index f9af77a..bb1f9ae 100644
 #define ID_AA64MMFR1_TIDCP1_IMP                1
 
 /* id_aa64mmfr2 */
-#define ID_AA64MMFR2_E0PD_SHIFT                60
-#define ID_AA64MMFR2_EVT_SHIFT         56
-#define ID_AA64MMFR2_BBM_SHIFT         52
-#define ID_AA64MMFR2_TTL_SHIFT         48
-#define ID_AA64MMFR2_FWB_SHIFT         40
-#define ID_AA64MMFR2_IDS_SHIFT         36
-#define ID_AA64MMFR2_AT_SHIFT          32
-#define ID_AA64MMFR2_ST_SHIFT          28
-#define ID_AA64MMFR2_NV_SHIFT          24
-#define ID_AA64MMFR2_CCIDX_SHIFT       20
-#define ID_AA64MMFR2_LVA_SHIFT         16
-#define ID_AA64MMFR2_IESB_SHIFT                12
-#define ID_AA64MMFR2_LSM_SHIFT         8
-#define ID_AA64MMFR2_UAO_SHIFT         4
-#define ID_AA64MMFR2_CNP_SHIFT         0
+#define ID_AA64MMFR2_EL1_E0PD_SHIFT    60
+#define ID_AA64MMFR2_EL1_EVT_SHIFT     56
+#define ID_AA64MMFR2_EL1_BBM_SHIFT     52
+#define ID_AA64MMFR2_EL1_TTL_SHIFT     48
+#define ID_AA64MMFR2_EL1_FWB_SHIFT     40
+#define ID_AA64MMFR2_EL1_IDS_SHIFT     36
+#define ID_AA64MMFR2_EL1_AT_SHIFT      32
+#define ID_AA64MMFR2_EL1_ST_SHIFT      28
+#define ID_AA64MMFR2_EL1_NV_SHIFT      24
+#define ID_AA64MMFR2_EL1_CCIDX_SHIFT   20
+#define ID_AA64MMFR2_EL1_LVA_SHIFT     16
+#define ID_AA64MMFR2_EL1_IESB_SHIFT    12
+#define ID_AA64MMFR2_EL1_LSM_SHIFT     8
+#define ID_AA64MMFR2_EL1_UAO_SHIFT     4
+#define ID_AA64MMFR2_EL1_CNP_SHIFT     0
 
 /* id_aa64dfr0 */
 #define ID_AA64DFR0_MTPMU_SHIFT                48
diff --git a/arch/arm64/kernel/cpufeature.c b/arch/arm64/kernel/cpufeature.c
index 3f45122..eb50d52 100644
@@ -378,21 +378,21 @@ static const struct arm64_ftr_bits ftr_id_aa64mmfr1[] = {
 };
 
 static const struct arm64_ftr_bits ftr_id_aa64mmfr2[] = {
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_E0PD_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EVT_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_BBM_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_TTL_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_FWB_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_IDS_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_AT_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_ST_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_NV_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_CCIDX_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_LVA_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_IESB_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_LSM_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_UAO_SHIFT, 4, 0),
-       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_CNP_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_E0PD_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_EVT_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_BBM_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_TTL_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_FWB_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_IDS_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_AT_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_ST_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_NV_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_CCIDX_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LVA_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_NONSTRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_IESB_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_LSM_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_UAO_SHIFT, 4, 0),
+       ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64MMFR2_EL1_CNP_SHIFT, 4, 0),
        ARM64_FTR_END,
 };
 
@@ -1571,7 +1571,7 @@ bool kaslr_requires_kpti(void)
        if (IS_ENABLED(CONFIG_ARM64_E0PD)) {
                u64 mmfr2 = read_sysreg_s(SYS_ID_AA64MMFR2_EL1);
                if (cpuid_feature_extract_unsigned_field(mmfr2,
-                                               ID_AA64MMFR2_E0PD_SHIFT))
+                                               ID_AA64MMFR2_EL1_E0PD_SHIFT))
                        return false;
        }
 
@@ -2303,7 +2303,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .capability = ARM64_HAS_STAGE2_FWB,
                .sys_reg = SYS_ID_AA64MMFR2_EL1,
                .sign = FTR_UNSIGNED,
-               .field_pos = ID_AA64MMFR2_FWB_SHIFT,
+               .field_pos = ID_AA64MMFR2_EL1_FWB_SHIFT,
                .field_width = 4,
                .min_field_value = 1,
                .matches = has_cpuid_feature,
@@ -2314,7 +2314,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .capability = ARM64_HAS_ARMv8_4_TTL,
                .sys_reg = SYS_ID_AA64MMFR2_EL1,
                .sign = FTR_UNSIGNED,
-               .field_pos = ID_AA64MMFR2_TTL_SHIFT,
+               .field_pos = ID_AA64MMFR2_EL1_TTL_SHIFT,
                .field_width = 4,
                .min_field_value = 1,
                .matches = has_cpuid_feature,
@@ -2380,7 +2380,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .matches = has_useable_cnp,
                .sys_reg = SYS_ID_AA64MMFR2_EL1,
                .sign = FTR_UNSIGNED,
-               .field_pos = ID_AA64MMFR2_CNP_SHIFT,
+               .field_pos = ID_AA64MMFR2_EL1_CNP_SHIFT,
                .field_width = 4,
                .min_field_value = 1,
                .cpu_enable = cpu_enable_cnp,
@@ -2499,7 +2499,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .sys_reg = SYS_ID_AA64MMFR2_EL1,
                .sign = FTR_UNSIGNED,
                .field_width = 4,
-               .field_pos = ID_AA64MMFR2_E0PD_SHIFT,
+               .field_pos = ID_AA64MMFR2_EL1_E0PD_SHIFT,
                .matches = has_cpuid_feature,
                .min_field_value = 1,
                .cpu_enable = cpu_enable_e0pd,
@@ -2725,7 +2725,7 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
        HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_BF16_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_EBF16),
        HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_DGH_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_DGH),
        HWCAP_CAP(SYS_ID_AA64ISAR1_EL1, ID_AA64ISAR1_EL1_I8MM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_I8MM),
-       HWCAP_CAP(SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_AT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_USCAT),
+       HWCAP_CAP(SYS_ID_AA64MMFR2_EL1, ID_AA64MMFR2_EL1_AT_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_USCAT),
 #ifdef CONFIG_ARM64_SVE
        HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_SVE_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR0_SVE, CAP_HWCAP, KERNEL_HWCAP_SVE),
        HWCAP_CAP(SYS_ID_AA64ZFR0_EL1, ID_AA64ZFR0_EL1_SVEver_SHIFT, 4, FTR_UNSIGNED, ID_AA64ZFR0_EL1_SVEver_SVE2, CAP_HWCAP, KERNEL_HWCAP_SVE2),
diff --git a/arch/arm64/kernel/head.S b/arch/arm64/kernel/head.S
index bffb034..d040f57 100644
@@ -99,7 +99,7 @@ SYM_CODE_START(primary_entry)
         */
 #if VA_BITS > 48
        mrs_s   x0, SYS_ID_AA64MMFR2_EL1
-       tst     x0, #0xf << ID_AA64MMFR2_LVA_SHIFT
+       tst     x0, #0xf << ID_AA64MMFR2_EL1_LVA_SHIFT
        mov     x0, #VA_BITS
        mov     x25, #VA_BITS_MIN
        csel    x25, x25, x0, eq
@@ -677,7 +677,7 @@ SYM_FUNC_START(__cpu_secondary_check52bitva)
        b.ne    2f
 
        mrs_s   x0, SYS_ID_AA64MMFR2_EL1
-       and     x0, x0, #(0xf << ID_AA64MMFR2_LVA_SHIFT)
+       and     x0, x0, #(0xf << ID_AA64MMFR2_EL1_LVA_SHIFT)
        cbnz    x0, 2f
 
        update_early_cpu_boot_status \
diff --git a/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h b/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h
index aac538c..3dad7b2 100644
  * - E0PDx mechanism
  */
 #define PVM_ID_AA64MMFR2_ALLOW (\
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_CNP) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_UAO) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_IESB) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_AT) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_IDS) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_TTL) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_BBM) | \
-       ARM64_FEATURE_MASK(ID_AA64MMFR2_E0PD) \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_CNP) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_UAO) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_IESB) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_AT) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_IDS) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_TTL) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_BBM) | \
+       ARM64_FEATURE_MASK(ID_AA64MMFR2_EL1_E0PD) \
        )
 
 /*