[PATCH v4 2/8] arm64/sysreg: Standardise ID_AA64ISAR0_EL1 macro names
Mark Rutland
mark.rutland at arm.com
Thu Apr 21 02:35:01 PDT 2022
On Tue, Apr 19, 2022 at 11:43:23AM +0100, Mark Brown wrote:
> The macros for accessing fields in ID_AA64ISAR0_EL1 omit the _EL1 from the
> name of the register. In preparation for converting this register to be
> automatically generated update the names to include an _EL1, there should
> be no functional change.
>
> Signed-off-by: Mark Brown <broonie at kernel.org>
Doing this consistently makes sense to me, so FWIW:
Acked-by: Mark Rutland <mark.rutland at arm.com>
Thanks,
Mark.
> ---
> arch/arm64/include/asm/archrandom.h | 2 +-
> arch/arm64/include/asm/sysreg.h | 34 ++++-----
> arch/arm64/kernel/cpufeature.c | 70 +++++++++----------
> .../arm64/kvm/hyp/include/nvhe/fixed_config.h | 28 ++++----
> 4 files changed, 67 insertions(+), 67 deletions(-)
>
> diff --git a/arch/arm64/include/asm/archrandom.h b/arch/arm64/include/asm/archrandom.h
> index d1bb5e71df25..3a6b6d38c5b8 100644
> --- a/arch/arm64/include/asm/archrandom.h
> +++ b/arch/arm64/include/asm/archrandom.h
> @@ -142,7 +142,7 @@ static inline bool __init __early_cpu_has_rndr(void)
> {
> /* Open code as we run prior to the first call to cpufeature. */
> unsigned long ftr = read_sysreg_s(SYS_ID_AA64ISAR0_EL1);
> - return (ftr >> ID_AA64ISAR0_RNDR_SHIFT) & 0xf;
> + return (ftr >> ID_AA64ISAR0_EL1_RNDR_SHIFT) & 0xf;
> }
>
> static inline bool __init __must_check
> diff --git a/arch/arm64/include/asm/sysreg.h b/arch/arm64/include/asm/sysreg.h
> index ff7693902686..1911f36773e5 100644
> --- a/arch/arm64/include/asm/sysreg.h
> +++ b/arch/arm64/include/asm/sysreg.h
> @@ -729,23 +729,23 @@
> #define MAIR_ATTRIDX(attr, idx) ((attr) << ((idx) * 8))
>
> /* id_aa64isar0 */
> -#define ID_AA64ISAR0_RNDR_SHIFT 60
> -#define ID_AA64ISAR0_TLB_SHIFT 56
> -#define ID_AA64ISAR0_TS_SHIFT 52
> -#define ID_AA64ISAR0_FHM_SHIFT 48
> -#define ID_AA64ISAR0_DP_SHIFT 44
> -#define ID_AA64ISAR0_SM4_SHIFT 40
> -#define ID_AA64ISAR0_SM3_SHIFT 36
> -#define ID_AA64ISAR0_SHA3_SHIFT 32
> -#define ID_AA64ISAR0_RDM_SHIFT 28
> -#define ID_AA64ISAR0_ATOMICS_SHIFT 20
> -#define ID_AA64ISAR0_CRC32_SHIFT 16
> -#define ID_AA64ISAR0_SHA2_SHIFT 12
> -#define ID_AA64ISAR0_SHA1_SHIFT 8
> -#define ID_AA64ISAR0_AES_SHIFT 4
> -
> -#define ID_AA64ISAR0_TLB_RANGE_NI 0x0
> -#define ID_AA64ISAR0_TLB_RANGE 0x2
> +#define ID_AA64ISAR0_EL1_RNDR_SHIFT 60
> +#define ID_AA64ISAR0_EL1_TLB_SHIFT 56
> +#define ID_AA64ISAR0_EL1_TS_SHIFT 52
> +#define ID_AA64ISAR0_EL1_FHM_SHIFT 48
> +#define ID_AA64ISAR0_EL1_DP_SHIFT 44
> +#define ID_AA64ISAR0_EL1_SM4_SHIFT 40
> +#define ID_AA64ISAR0_EL1_SM3_SHIFT 36
> +#define ID_AA64ISAR0_EL1_SHA3_SHIFT 32
> +#define ID_AA64ISAR0_EL1_RDM_SHIFT 28
> +#define ID_AA64ISAR0_EL1_ATOMICS_SHIFT 20
> +#define ID_AA64ISAR0_EL1_CRC32_SHIFT 16
> +#define ID_AA64ISAR0_EL1_SHA2_SHIFT 12
> +#define ID_AA64ISAR0_EL1_SHA1_SHIFT 8
> +#define ID_AA64ISAR0_EL1_AES_SHIFT 4
> +
> +#define ID_AA64ISAR0_EL1_TLB_RANGE_NI 0x0
> +#define ID_AA64ISAR0_EL1_TLB_RANGE 0x2
>
> /* id_aa64isar1 */
> #define ID_AA64ISAR1_I8MM_SHIFT 52
> diff --git a/arch/arm64/kernel/cpufeature.c b/arch/arm64/kernel/cpufeature.c
> index d72c4b4d389c..863a510d8944 100644
> --- a/arch/arm64/kernel/cpufeature.c
> +++ b/arch/arm64/kernel/cpufeature.c
> @@ -191,20 +191,20 @@ static bool __system_matches_cap(unsigned int n);
> * sync with the documentation of the CPU feature register ABI.
> */
> static const struct arm64_ftr_bits ftr_id_aa64isar0[] = {
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_RNDR_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_TLB_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_TS_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_FHM_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_DP_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_SM4_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_SM3_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_SHA3_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_RDM_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_ATOMICS_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_CRC32_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_SHA2_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_SHA1_SHIFT, 4, 0),
> - ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_AES_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_RNDR_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_HIDDEN, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_TLB_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_TS_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_FHM_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_DP_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_SM4_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_SM3_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_SHA3_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_RDM_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_ATOMICS_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_CRC32_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_SHA2_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_SHA1_SHIFT, 4, 0),
> + ARM64_FTR_BITS(FTR_VISIBLE, FTR_STRICT, FTR_LOWER_SAFE, ID_AA64ISAR0_EL1_AES_SHIFT, 4, 0),
> ARM64_FTR_END,
> };
>
> @@ -2013,7 +2013,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
> .type = ARM64_CPUCAP_SYSTEM_FEATURE,
> .matches = has_cpuid_feature,
> .sys_reg = SYS_ID_AA64ISAR0_EL1,
> - .field_pos = ID_AA64ISAR0_ATOMICS_SHIFT,
> + .field_pos = ID_AA64ISAR0_EL1_ATOMICS_SHIFT,
> .field_width = 4,
> .sign = FTR_UNSIGNED,
> .min_field_value = 2,
> @@ -2195,10 +2195,10 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
> .type = ARM64_CPUCAP_SYSTEM_FEATURE,
> .matches = has_cpuid_feature,
> .sys_reg = SYS_ID_AA64ISAR0_EL1,
> - .field_pos = ID_AA64ISAR0_TLB_SHIFT,
> + .field_pos = ID_AA64ISAR0_EL1_TLB_SHIFT,
> .field_width = 4,
> .sign = FTR_UNSIGNED,
> - .min_field_value = ID_AA64ISAR0_TLB_RANGE,
> + .min_field_value = ID_AA64ISAR0_EL1_TLB_RANGE,
> },
> #ifdef CONFIG_ARM64_HW_AFDBM
> {
> @@ -2227,7 +2227,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
> .type = ARM64_CPUCAP_SYSTEM_FEATURE,
> .matches = has_cpuid_feature,
> .sys_reg = SYS_ID_AA64ISAR0_EL1,
> - .field_pos = ID_AA64ISAR0_CRC32_SHIFT,
> + .field_pos = ID_AA64ISAR0_EL1_CRC32_SHIFT,
> .field_width = 4,
> .min_field_value = 1,
> },
> @@ -2382,7 +2382,7 @@ static const struct arm64_cpu_capabilities arm64_features[] = {
> .type = ARM64_CPUCAP_SYSTEM_FEATURE,
> .matches = has_cpuid_feature,
> .sys_reg = SYS_ID_AA64ISAR0_EL1,
> - .field_pos = ID_AA64ISAR0_RNDR_SHIFT,
> + .field_pos = ID_AA64ISAR0_EL1_RNDR_SHIFT,
> .field_width = 4,
> .sign = FTR_UNSIGNED,
> .min_field_value = 1,
> @@ -2514,22 +2514,22 @@ static const struct arm64_cpu_capabilities ptr_auth_hwcap_gen_matches[] = {
> #endif
>
> static const struct arm64_cpu_capabilities arm64_elf_hwcaps[] = {
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_AES_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_PMULL),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_AES_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_AES),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SHA1_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA1),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SHA2_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA2),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SHA2_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_SHA512),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_CRC32_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_CRC32),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_ATOMICS_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_ATOMICS),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_RDM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDRDM),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SHA3_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA3),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SM3_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SM3),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_SM4_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SM4),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_DP_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDDP),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_FHM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDFHM),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_TS_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FLAGM),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_TS_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_FLAGM2),
> - HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_RNDR_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_RNG),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_AES_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_PMULL),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_AES_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_AES),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SHA1_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA1),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SHA2_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA2),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SHA2_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_SHA512),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_CRC32_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_CRC32),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_ATOMICS_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_ATOMICS),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_RDM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDRDM),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SHA3_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SHA3),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SM3_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SM3),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_SM4_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_SM4),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_DP_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDDP),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_FHM_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_ASIMDFHM),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_TS_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FLAGM),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_TS_SHIFT, 4, FTR_UNSIGNED, 2, CAP_HWCAP, KERNEL_HWCAP_FLAGM2),
> + HWCAP_CAP(SYS_ID_AA64ISAR0_EL1, ID_AA64ISAR0_EL1_RNDR_SHIFT, 4, FTR_UNSIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_RNG),
> HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_FP_SHIFT, 4, FTR_SIGNED, 0, CAP_HWCAP, KERNEL_HWCAP_FP),
> HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_FP_SHIFT, 4, FTR_SIGNED, 1, CAP_HWCAP, KERNEL_HWCAP_FPHP),
> HWCAP_CAP(SYS_ID_AA64PFR0_EL1, ID_AA64PFR0_ASIMD_SHIFT, 4, FTR_SIGNED, 0, CAP_HWCAP, KERNEL_HWCAP_ASIMD),
> diff --git a/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h b/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h
> index 5ad626527d41..6cda33d23287 100644
> --- a/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h
> +++ b/arch/arm64/kvm/hyp/include/nvhe/fixed_config.h
> @@ -159,20 +159,20 @@
> * No restrictions on instructions implemented in AArch64.
> */
> #define PVM_ID_AA64ISAR0_ALLOW (\
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_AES) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_SHA1) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_SHA2) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_CRC32) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_ATOMICS) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_RDM) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_SHA3) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_SM3) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_SM4) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_DP) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_FHM) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_TS) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_TLB) | \
> - ARM64_FEATURE_MASK(ID_AA64ISAR0_RNDR) \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_AES) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_SHA1) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_SHA2) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_CRC32) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_ATOMICS) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_RDM) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_SHA3) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_SM3) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_SM4) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_DP) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_FHM) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_TS) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_TLB) | \
> + ARM64_FEATURE_MASK(ID_AA64ISAR0_EL1_RNDR) \
> )
>
> #define PVM_ID_AA64ISAR1_ALLOW (\
> --
> 2.30.2
>
More information about the linux-arm-kernel
mailing list