[PATCH v3 10/26] arm64/sysreg: Standardise naming for ID_AA64SMFR0_EL1 enums

Mark Brown broonie@kernel.org
Thu Jun 9 10:08:44 PDT 2022


We have a series of defines for the enumeration values we test for in the
fields of ID_AA64SMFR0_EL1 which do not follow our usual convention of
including the EL1 in the name and adding _IMP at the end of the basic
"feature present" define. In preparation for automatic register
generation, bring these defines into line with the convention. No
functional change.

Signed-off-by: Mark Brown <broonie@kernel.org>
---
 arch/arm64/include/asm/el2_setup.h |  2 +-
 arch/arm64/include/asm/sysreg.h    | 30 ++++++++++++++--------------
 arch/arm64/kernel/cpufeature.c     | 32 +++++++++++++++---------------
 3 files changed, 32 insertions(+), 32 deletions(-)
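
Purely as an illustration of the new naming (not part of this patch), a
direct per-CPU check written against the renamed defines might look
roughly like the sketch below. The helper name this_cpu_has_sme_fa64()
is hypothetical; in practice the kernel tests this via the
ARM64_SME_FA64 capability and the ftr_id_aa64smfr0 table updated in the
hunks that follow.

	#include <asm/cpufeature.h>
	#include <asm/sysreg.h>

	static bool this_cpu_has_sme_fa64(void)
	{
		u64 smfr0 = read_sysreg_s(SYS_ID_AA64SMFR0_EL1);

		/* FA64 is a 1-bit field at bit 63; _IMP (0x1) means implemented */
		return cpuid_feature_extract_unsigned_field_width(smfr0,
					ID_AA64SMFR0_EL1_FA64_SHIFT, 1) >=
		       ID_AA64SMFR0_EL1_FA64_IMP;
	}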

diff --git a/arch/arm64/include/asm/el2_setup.h b/arch/arm64/include/asm/el2_setup.h
index 34ceff08cac4..bfd0ad64b598 100644
--- a/arch/arm64/include/asm/el2_setup.h
+++ b/arch/arm64/include/asm/el2_setup.h
@@ -161,7 +161,7 @@
 	mov	x1, #0				// SMCR controls
 
 	mrs_s	x2, SYS_ID_AA64SMFR0_EL1
-	ubfx	x2, x2, #ID_AA64SMFR0_FA64_SHIFT, #1 // Full FP in SM?
+	ubfx	x2, x2, #ID_AA64SMFR0_EL1_FA64_SHIFT, #1 // Full FP in SM?
 	cbz	x2, .Lskip_sme_fa64_\@
 
 	orr	x1, x1, SMCR_ELx_FA64_MASK
diff --git a/arch/arm64/include/asm/sysreg.h b/arch/arm64/include/asm/sysreg.h
index 2c7e86d2db9f..fd519e2de551 100644
--- a/arch/arm64/include/asm/sysreg.h
+++ b/arch/arm64/include/asm/sysreg.h
@@ -834,21 +834,21 @@
 #define ID_AA64ZFR0_SVEVER_SVE2		0x1
 
 /* id_aa64smfr0 */
-#define ID_AA64SMFR0_FA64_SHIFT		63
-#define ID_AA64SMFR0_I16I64_SHIFT	52
-#define ID_AA64SMFR0_F64F64_SHIFT	48
-#define ID_AA64SMFR0_I8I32_SHIFT	36
-#define ID_AA64SMFR0_F16F32_SHIFT	35
-#define ID_AA64SMFR0_B16F32_SHIFT	34
-#define ID_AA64SMFR0_F32F32_SHIFT	32
-
-#define ID_AA64SMFR0_FA64		0x1
-#define ID_AA64SMFR0_I16I64		0xf
-#define ID_AA64SMFR0_F64F64		0x1
-#define ID_AA64SMFR0_I8I32		0xf
-#define ID_AA64SMFR0_F16F32		0x1
-#define ID_AA64SMFR0_B16F32		0x1
-#define ID_AA64SMFR0_F32F32		0x1
+#define ID_AA64SMFR0_EL1_FA64_SHIFT		63
+#define ID_AA64SMFR0_EL1_I16I64_SHIFT	52
+#define ID_AA64SMFR0_EL1_F64F64_SHIFT	48
+#define ID_AA64SMFR0_EL1_I8I32_SHIFT	36
+#define ID_AA64SMFR0_EL1_F16F32_SHIFT	35
+#define ID_AA64SMFR0_EL1_B16F32_SHIFT	34
+#define ID_AA64SMFR0_EL1_F32F32_SHIFT	32
+
+#define ID_AA64SMFR0_EL1_FA64_IMP	0x1
+#define ID_AA64SMFR0_EL1_I16I64_IMP	0xf
+#define ID_AA64SMFR0_EL1_F64F64_IMP	0x1
+#define ID_AA64SMFR0_EL1_I8I32_IMP	0xf
+#define ID_AA64SMFR0_EL1_F16F32_IMP	0x1
+#define ID_AA64SMFR0_EL1_B16F32_IMP	0x1
+#define ID_AA64SMFR0_EL1_F32F32_IMP	0x1
 
 /* id_aa64mmfr0 */
 #define ID_AA64MMFR0_ECV_SHIFT		60
diff --git a/arch/arm64/kernel/cpufeature.c b/arch/arm64/kernel/cpufeature.c
index 18f833a90b3a..038213bfd46b 100644
--- a/arch/arm64/kernel/cpufeature.c
+++ b/arch/arm64/kernel/cpufeature.c
@@ -298,19 +298,19 @@ static const struct arm64_ftr_bits ftr_id_aa64zfr0[] = {
 
 static const struct arm64_ftr_bits ftr_id_aa64smfr0[] = {
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_FA64_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I16I64_SHIFT, 4, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F64F64_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I8I32_SHIFT, 4, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F16F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_B16F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F32F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, 0),
 	ARM64_FTR_END,
 };
 
@@ -2511,9 +2511,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.capability = ARM64_SME_FA64,
 		.sys_reg = SYS_ID_AA64SMFR0_EL1,
 		.sign = FTR_UNSIGNED,
-		.field_pos = ID_AA64SMFR0_FA64_SHIFT,
+		.field_pos = ID_AA64SMFR0_EL1_FA64_SHIFT,
 		.field_width = 1,
-		.min_field_value = ID_AA64SMFR0_FA64,
+		.min_field_value = ID_AA64SMFR0_EL1_FA64_IMP,
 		.matches = has_cpuid_feature,
 		.cpu_enable = fa64_kernel_enable,
 	},
@@ -2665,13 +2665,13 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
 	HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_WFxT_SHIFT, 4, FTR_UNSIGNED, ID_AA64ISAR2_WFxT_IMP, CAP_HWCAP, KERNEL_HWCAP_WFXT),
 #ifdef CONFIG_ARM64_SME
 	HWCAP_CAP(SYS_ID_AA64PFR1_EL1, ID_AA64PFR1_SME_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR1_SME, CAP_HWCAP, KERNEL_HWCAP_SME),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_FA64, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I16I64, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F64F64, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I8I32, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F16F32, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_B16F32, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F32F32, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_FA64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I16I64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F64F64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I8I32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_B16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F32F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
 #endif /* CONFIG_ARM64_SME */
 	{},
 };
-- 
2.30.2