Commit f13d5469 authored by Mark Brown, committed by Will Deacon

arm64/sysreg: Standardise naming for ID_AA64SMFR0_EL1 enums

We have a series of defines for the enumeration values we test for in the
fields of ID_AA64SMFR0_EL1 which do not follow our usual convention of
including the EL1 in the name and having _IMP at the end of the basic
"feature present" define. In preparation for automatic register
generation, bring the defines into sync with convention; no functional
change.
Signed-off-by: Mark Brown <broonie@kernel.org>
Link: https://lore.kernel.org/r/20220704170302.2609529-13-broonie@kernel.org
Signed-off-by: Will Deacon <will@kernel.org>
parent 9a2f3290
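
To illustrate the renamed convention in use: below is a minimal, self-contained sketch (not part of this patch; the standalone helper sme_fa64_present() and its raw-value parameter are assumptions made for the example) showing how a consumer would test the FA64 field with the new _SHIFT/_IMP defines.

/*
 * Illustrative only, not from this patch: extract the FA64 field of
 * ID_AA64SMFR0_EL1 from a raw register value and compare it against
 * the "feature present" (_IMP) value, using the renamed defines.
 */
#include <stdbool.h>
#include <stdint.h>

#define ID_AA64SMFR0_EL1_FA64_SHIFT	63
#define ID_AA64SMFR0_EL1_FA64_IMP	0x1

static bool sme_fa64_present(uint64_t id_aa64smfr0)
{
	/* FA64 is a single-bit unsigned field at bit 63. */
	uint64_t field = (id_aa64smfr0 >> ID_AA64SMFR0_EL1_FA64_SHIFT) & 0x1;

	return field >= ID_AA64SMFR0_EL1_FA64_IMP;
}
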
@@ -161,7 +161,7 @@
 	mov	x1, #0				// SMCR controls
 	mrs_s	x2, SYS_ID_AA64SMFR0_EL1
-	ubfx	x2, x2, #ID_AA64SMFR0_FA64_SHIFT, #1	// Full FP in SM?
+	ubfx	x2, x2, #ID_AA64SMFR0_EL1_FA64_SHIFT, #1 // Full FP in SM?
 	cbz	x2, .Lskip_sme_fa64_\@
 	orr	x1, x1, SMCR_ELx_FA64_MASK
...
@@ -834,21 +834,21 @@
 #define ID_AA64ZFR0_SVEVER_SVE2		0x1
 
 /* id_aa64smfr0 */
-#define ID_AA64SMFR0_FA64_SHIFT		63
-#define ID_AA64SMFR0_I16I64_SHIFT	52
-#define ID_AA64SMFR0_F64F64_SHIFT	48
-#define ID_AA64SMFR0_I8I32_SHIFT	36
-#define ID_AA64SMFR0_F16F32_SHIFT	35
-#define ID_AA64SMFR0_B16F32_SHIFT	34
-#define ID_AA64SMFR0_F32F32_SHIFT	32
+#define ID_AA64SMFR0_EL1_FA64_SHIFT	63
+#define ID_AA64SMFR0_EL1_I16I64_SHIFT	52
+#define ID_AA64SMFR0_EL1_F64F64_SHIFT	48
+#define ID_AA64SMFR0_EL1_I8I32_SHIFT	36
+#define ID_AA64SMFR0_EL1_F16F32_SHIFT	35
+#define ID_AA64SMFR0_EL1_B16F32_SHIFT	34
+#define ID_AA64SMFR0_EL1_F32F32_SHIFT	32
 
-#define ID_AA64SMFR0_FA64		0x1
-#define ID_AA64SMFR0_I16I64		0xf
-#define ID_AA64SMFR0_F64F64		0x1
-#define ID_AA64SMFR0_I8I32		0xf
-#define ID_AA64SMFR0_F16F32		0x1
-#define ID_AA64SMFR0_B16F32		0x1
-#define ID_AA64SMFR0_F32F32		0x1
+#define ID_AA64SMFR0_EL1_FA64_IMP	0x1
+#define ID_AA64SMFR0_EL1_I16I64_IMP	0xf
+#define ID_AA64SMFR0_EL1_F64F64_IMP	0x1
+#define ID_AA64SMFR0_EL1_I8I32_IMP	0xf
+#define ID_AA64SMFR0_EL1_F16F32_IMP	0x1
+#define ID_AA64SMFR0_EL1_B16F32_IMP	0x1
+#define ID_AA64SMFR0_EL1_F32F32_IMP	0x1
 
 /* id_aa64mmfr0 */
 #define ID_AA64MMFR0_ECV_SHIFT		60
...
@@ -298,19 +298,19 @@ static const struct arm64_ftr_bits ftr_id_aa64zfr0[] = {
 static const struct arm64_ftr_bits ftr_id_aa64smfr0[] = {
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_FA64_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I16I64_SHIFT, 4, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F64F64_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_I8I32_SHIFT, 4, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F16F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_B16F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, 0),
 	ARM64_FTR_BITS(FTR_VISIBLE_IF_IS_ENABLED(CONFIG_ARM64_SME),
-		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_F32F32_SHIFT, 1, 0),
+		       FTR_STRICT, FTR_EXACT, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, 0),
 	ARM64_FTR_END,
 };
@@ -2503,9 +2503,9 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
 		.capability = ARM64_SME_FA64,
 		.sys_reg = SYS_ID_AA64SMFR0_EL1,
 		.sign = FTR_UNSIGNED,
-		.field_pos = ID_AA64SMFR0_FA64_SHIFT,
+		.field_pos = ID_AA64SMFR0_EL1_FA64_SHIFT,
 		.field_width = 1,
-		.min_field_value = ID_AA64SMFR0_FA64,
+		.min_field_value = ID_AA64SMFR0_EL1_FA64_IMP,
 		.matches = has_cpuid_feature,
 		.cpu_enable = fa64_kernel_enable,
 	},
@@ -2657,13 +2657,13 @@ static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
 	HWCAP_CAP(SYS_ID_AA64ISAR2_EL1, ID_AA64ISAR2_WFxT_SHIFT, 4, FTR_UNSIGNED, ID_AA64ISAR2_WFxT_IMP, CAP_HWCAP, KERNEL_HWCAP_WFXT),
 #ifdef CONFIG_ARM64_SME
 	HWCAP_CAP(SYS_ID_AA64PFR1_EL1, ID_AA64PFR1_SME_SHIFT, 4, FTR_UNSIGNED, ID_AA64PFR1_SME, CAP_HWCAP, KERNEL_HWCAP_SME),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_FA64, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I16I64, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F64F64, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_I8I32, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F16F32, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_B16F32, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
-	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_F32F32, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_FA64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_FA64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_FA64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I16I64_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I16I64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I16I64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F64F64_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F64F64_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F64F64),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_I8I32_SHIFT, 4, FTR_UNSIGNED, ID_AA64SMFR0_EL1_I8I32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_I8I32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F16F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_B16F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_B16F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_B16F32),
+	HWCAP_CAP(SYS_ID_AA64SMFR0_EL1, ID_AA64SMFR0_EL1_F32F32_SHIFT, 1, FTR_UNSIGNED, ID_AA64SMFR0_EL1_F32F32_IMP, CAP_HWCAP, KERNEL_HWCAP_SME_F32F32),
 #endif /* CONFIG_ARM64_SME */
 	{},
 };
...