From: Xie XiuQi <xiexiuqi@huawei.com>
hulk inclusion
category: bugfix
bugzilla: https://bugzilla.openeuler.org/show_bug.cgi?id=28
CVE: NA
Patch "arm64: errata: add option to disable cache readunique prefetch" breaks the kabi symbols: cpu_hwcaps cpu_hwcap_keys
Fix it by using late_initcall instead arm64_errata mechanism.
Signed-off-by: Xie XiuQi <xiexiuqi@huawei.com>
Signed-off-by: Cheng Jian <cj.chengjian@huawei.com>
---
 arch/arm64/include/asm/cpucaps.h |  3 +--
 arch/arm64/kernel/cpu_errata.c   | 33 +++++++++++++++++++++-----------
 2 files changed, 23 insertions(+), 13 deletions(-)
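Note for reviewers (this note sits after the "---" cut line, so git-am
drops it along with the diffstat): below is a minimal sketch of the
late_initcall pattern this patch switches to. All identifiers in it
(apply_workaround, apply_workaround_on_cpu, workaround_needed) are
hypothetical, not symbols from this patch. The point it illustrates is
that the workaround is applied once at boot from an initcall, so no
arm64_cpu_capabilities entry, and therefore no new ARM64_NCAPS slot in
cpu_hwcaps/cpu_hwcap_keys, is required.

	#include <linux/init.h>
	#include <linux/smp.h>
	#include <linux/types.h>

	/* Hypothetical per-CPU callback; on_each_cpu() runs it on every CPU. */
	static void apply_workaround_on_cpu(void *unused)
	{
		/* the chip-specific sysreg write would go here */
	}

	/* Hypothetical gate; the real patch checks the MIDR list, CurrentEL
	 * and the "readunique_prefetch" early param instead.
	 */
	static bool workaround_needed(void)
	{
		return false;
	}

	static int __init apply_workaround(void)
	{
		if (workaround_needed())
			on_each_cpu(apply_workaround_on_cpu, NULL, 1);
		return 0;
	}
	late_initcall(apply_workaround);

By the time late initcalls run, smp_init() has already brought all boot
CPUs online, so a single on_each_cpu() call covers every CPU and no
per-CPU .cpu_enable hook is needed.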
diff --git a/arch/arm64/include/asm/cpucaps.h b/arch/arm64/include/asm/cpucaps.h
index 85446098342d..a9090f204a08 100644
--- a/arch/arm64/include/asm/cpucaps.h
+++ b/arch/arm64/include/asm/cpucaps.h
@@ -56,8 +56,7 @@
 #define ARM64_WORKAROUND_1463225	35
 #define ARM64_HAS_CRC32			36
 #define ARM64_SSBS			37
-#define ARM64_WORKAROUND_HISI_HIP08_RU_PREFETCH	38
 
-#define ARM64_NCAPS			39
+#define ARM64_NCAPS			38
 
 #endif /* __ASM_CPUCAPS_H */
diff --git a/arch/arm64/kernel/cpu_errata.c b/arch/arm64/kernel/cpu_errata.c
index 881187d5fed6..706a77a121f1 100644
--- a/arch/arm64/kernel/cpu_errata.c
+++ b/arch/arm64/kernel/cpu_errata.c
@@ -518,24 +518,44 @@ static int __init readunique_prefetch_switch(char *data)
 }
 early_param("readunique_prefetch", readunique_prefetch_switch);
 
+static const struct midr_range readunique_prefetch_cpus[] = {
+	MIDR_ALL_VERSIONS(MIDR_HISI_TSV110),
+	{},
+};
+
 static bool
-should_disable_hisi_hip08_ru_prefetch(const struct arm64_cpu_capabilities *entry, int unused)
+should_disable_hisi_hip08_ru_prefetch(void)
 {
 	u64 el;
 
 	if (readunique_prefetch_enabled)
 		return false;
 
+	if (!is_midr_in_range_list(read_cpuid_id(), readunique_prefetch_cpus))
+		return false;
+
 	el = read_sysreg(CurrentEL);
 	return el == CurrentEL_EL2;
 }
 
 #define CTLR_HISI_HIP08_RU_PREFETCH	(1L << 40)
 static void __maybe_unused
-hisi_hip08_ru_prefetch_disable(const struct arm64_cpu_capabilities *__unused)
+__hisi_hip08_ru_prefetch_disable(void *unused)
 {
 	sysreg_clear_set(S3_1_c15_c6_4, 0, CTLR_HISI_HIP08_RU_PREFETCH);
 }
+
+static int hisi_hip08_ru_prefetch_disable(void)
+{
+	if (should_disable_hisi_hip08_ru_prefetch()) {
+		on_each_cpu(__hisi_hip08_ru_prefetch_disable, NULL, 1);
+		pr_info("CPU erratum: HiSilicon HIP08 Cache Readunique Prefetch Disable\n");
+	}
+
+	return 0;
+}
+
+late_initcall(hisi_hip08_ru_prefetch_disable);
 #endif
 
 /* known invulnerable cores */
@@ -917,15 +937,6 @@ const struct arm64_cpu_capabilities arm64_errata[] = {
 		ERRATA_MIDR_RANGE_LIST(tx2_family_cpus),
 		.matches = needs_tx2_tvm_workaround,
 	},
-#endif
-#ifdef CONFIG_HISILICON_ERRATUM_HIP08_RU_PREFETCH
-	{
-		.desc = "HiSilicon HIP08 Cache Readunique Prefetch Disable",
-		.capability = ARM64_WORKAROUND_HISI_HIP08_RU_PREFETCH,
-		ERRATA_MIDR_ALL_VERSIONS(MIDR_HISI_TSV110),
-		.matches = should_disable_hisi_hip08_ru_prefetch,
-		.cpu_enable = hisi_hip08_ru_prefetch_disable,
-	},
 #endif
 	{
 	}