arm64/sysreg: Standardise naming for ID_AA64SMFR0_EL1 enums
author	Mark Brown <broonie@kernel.org>
	Mon, 4 Jul 2022 17:02:46 +0000 (18:02 +0100)
committer	Will Deacon <will@kernel.org>
	Tue, 5 Jul 2022 10:45:46 +0000 (11:45 +0100)
We have a series of defines for the enumeration values we test for in the
fields of ID_AA64SMFR0_EL1 which do not follow our usual convention of
including the EL1 in the name and having _IMP at the end of the basic
"feature present" define. In preparation for automatic register
generation, bring these defines into sync with convention; no functional
change.

Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20220704170302.2609529-13-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
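
For illustration only (not part of this patch): a minimal, self-contained C
sketch of how the renamed defines are meant to be read together, where the
_SHIFT locates a field within ID_AA64SMFR0_EL1 and the _IMP value is the
minimum field value meaning "feature implemented". The field() helper and the
example register value are assumptions local to this sketch; in the kernel the
equivalent extraction is done by the cpufeature helpers.

#include <stdint.h>
#include <stdio.h>

/* Values mirror the renamed defines from this patch. */
#define ID_AA64SMFR0_EL1_FA64_SHIFT	63
#define ID_AA64SMFR0_EL1_FA64_IMP	0x1
#define ID_AA64SMFR0_EL1_I16I64_SHIFT	52
#define ID_AA64SMFR0_EL1_I16I64_IMP	0xf

/* Extract an unsigned field of the given width from a register value. */
static uint64_t field(uint64_t reg, unsigned int shift, unsigned int width)
{
	return (reg >> shift) & ((1ULL << width) - 1);
}

int main(void)
{
	/* Example (made-up) register value: FA64 and I16I64 both implemented. */
	uint64_t id_aa64smfr0 = (1ULL << ID_AA64SMFR0_EL1_FA64_SHIFT) |
				(0xfULL << ID_AA64SMFR0_EL1_I16I64_SHIFT);

	if (field(id_aa64smfr0, ID_AA64SMFR0_EL1_FA64_SHIFT, 1) >= ID_AA64SMFR0_EL1_FA64_IMP)
		printf("FEAT_SME_FA64: implemented\n");
	if (field(id_aa64smfr0, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4) >= ID_AA64SMFR0_EL1_I16I64_IMP)
		printf("SME I16I64: implemented\n");

	return 0;
}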
arch/arm64/include/asm/el2_setup.h
arch/arm64/include/asm/sysreg.h
arch/arm64/kernel/cpufeature.c

diff --git a/arch/arm64/include/asm/el2_setup.h b/arch/arm64/include/asm/el2_setup.h
index 34ceff0..bfd0ad6 100644
        mov     x1, #0                          // SMCR controls
 
        mrs_s   x2, SYS_ID_AA64SMFR0_EL1
-       ubfx    x2, x2, #ID_AA64SMFR0_FA64_SHIFT, #1 // Full FP in SM?
+       ubfx    x2, x2, #ID_AA64SMFR0_EL1_FA64_SHIFT, #1 // Full FP in SM?
        cbz     x2, .Lskip_sme_fa64_\@
 
        orr     x1, x1, SMCR_ELx_FA64_MASK
diff --git a/arch/arm64/include/asm/sysreg.h b/arch/arm64/include/asm/sysreg.h
index 1b1ea5b..3c77cf8 100644
 #define ID_AA64ZFR0_SVEVER_SVE2                0x1
 
 /* id_aa64smfr0 */
-#define ID_AA64SMFR0_FA64_SHIFT                63
-#define ID_AA64SMFR0_I16I64_SHIFT      52
-#define ID_AA64SMFR0_F64F64_SHIFT      48
-#define ID_AA64SMFR0_I8I32_SHIFT       36
-#define ID_AA64SMFR0_F16F32_SHIFT      35
-#define ID_AA64SMFR0_B16F32_SHIFT      34
-#define ID_AA64SMFR0_F32F32_SHIFT      32
-
-#define ID_AA64SMFR0_FA64              0x1
-#define ID_AA64SMFR0_I16I64            0xf
-#define ID_AA64SMFR0_F64F64            0x1
-#define ID_AA64SMFR0_I8I32             0xf
-#define ID_AA64SMFR0_F16F32            0x1
-#define ID_AA64SMFR0_B16F32            0x1
-#define ID_AA64SMFR0_F32F32            0x1
+#define ID_AA64SMFR0_EL1_FA64_SHIFT            63
+#define ID_AA64SMFR0_EL1_I16I64_SHIFT  52
+#define ID_AA64SMFR0_EL1_F64F64_SHIFT  48
+#define ID_AA64SMFR0_EL1_I8I32_SHIFT   36
+#define ID_AA64SMFR0_EL1_F16F32_SHIFT  35
+#define ID_AA64SMFR0_EL1_B16F32_SHIFT  34
+#define ID_AA64SMFR0_EL1_F32F32_SHIFT  32
+
+#define ID_AA64SMFR0_EL1_FA64_IMP      0x1
+#define ID_AA64SMFR0_EL1_I16I64_IMP    0xf
+#define ID_AA64SMFR0_EL1_F64F64_IMP    0x1
+#define ID_AA64SMFR0_EL1_I8I32_IMP     0xf
+#define ID_AA64SMFR0_EL1_F16F32_IMP    0x1
+#define ID_AA64SMFR0_EL1_B16F32_IMP    0x1
+#define ID_AA64SMFR0_EL1_F32F32_IMP    0x1
 
 /* id_aa64mmfr0 */
 #define ID_AA64MMFR0_ECV_SHIFT         60
diff --git a/arch/arm64/kernel/cpufeature.c b/arch/arm64/kernel/cpufeature.c
index 83f8e9d..a6c2245 100644
@@ -298,19 +298,19 @@ static const struct arm64_ftr_bits ftr_id_aa64zfr0[] = {
 
 static const struct arm64_ftr_bits ftr_id_aa64smfr0[] = {
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_FA64_SHIFT, 1, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I16I64_SHIFT, 4, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F64F64_SHIFT, 1, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I8I32_SHIFT, 4, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F16F32_SHIFT, 1, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_B16F32_SHIFT, 1, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, 0),
        ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F32F32_SHIFT, 1, 0),
+                      FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, 0),
        ARM64_FTR_END,
 };
 
@@ -2503,9 +2503,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
                .capability = ARM64_SME_FA64,
                .sys_reg = SYS_ID_AA64SMFR0_EL1,
                .sign = FTR_UNSIGNED,
-               .field_pos = ID_AA64SMFR0_FA64_SHIFT,
+               .field_pos = ID_AA64SMFR0_EL1_FA64_SHIFT,
                .field_width = 1,
-               .min_field_value = ID_AA64SMFR0_FA64,
+               .min_field_value = ID_AA64SMFR0_EL1_FA64_IMP,
                .matches = has_cpuid_feature,
                .cpu_enable = fa64_kernel_enable,
        },
@@ -2657,13 +2657,13 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
        HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_WFxT_SHIFT, 4, FTR_UNSIGNED, ID_AA64ISAR2_WFxT_IMP, CAP_HWCAP, KERNEL_HWCAP_WFXT),
 #ifdef CONFIG_ARM64_SME
        HWCAP_CAP(SYS_ID_AA64PFR1_EL1, ID_AA64PFR1_SME_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR1_SME, CAP_HWCAP, KERNEL_HWCAP_SME),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_FA64, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I16I64, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F64F64, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I8I32, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F16F32, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_B16F32, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
-       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F32F32, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_FA64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I16I64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F64F64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I8I32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_B16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
+       HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F32F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
 #endif /* CONFIG_ARM64_SME */
        {},
 };