author		Marc Zyngier <marc.zyngier@arm.com>	2018-07-20 10:56:24 +0100
committer	Greg Kroah-Hartman <gregkh@linuxfoundation.org>	2018-07-22 14:27:41 +0200
commit		e7037bd9fc07329f8a2e6383c65876a35ba33a12 (patch)
tree		268446a6c11be11ff0142500681304eb3e2f7b57
parent		d8174bd75c8b9d3ed5598f39f0a3c7050e9cbbe6 (diff)
arm64: Add ARCH_WORKAROUND_2 probing
commit a725e3dda1813ed306734823ac4c65ca04e38500 upstream.

As for Spectre variant-2, we rely on SMCCC 1.1 to provide the
discovery mechanism for detecting the SSBD mitigation.

A new capability is also allocated for that purpose, and a
config option.

Reviewed-by: Julien Grall <julien.grall@arm.com>
Reviewed-by: Mark Rutland <mark.rutland@arm.com>
Acked-by: Will Deacon <will.deacon@arm.com>
Reviewed-by: Suzuki K Poulose <suzuki.poulose@arm.com>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Catalin Marinas <catalin.marinas@arm.com>
Signed-off-by: Marc Zyngier <marc.zyngier@arm.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
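As a reading aid only (not part of the patch), the following is a
minimal sketch of the SMCCC 1.1 discovery sequence described above,
assuming the SMC conduit and a hypothetical helper name; the actual
code added to cpu_errata.c below handles both the HVC and SMC conduits
per CPU via has_ssbd_mitigation() and arm64_set_ssbd_mitigation().

#include <linux/arm-smccc.h>

/* Hypothetical helper condensing the probe-then-enable flow. */
static bool probe_and_enable_ssbd_workaround(void)
{
	struct arm_smccc_res res;

	/* Ask firmware whether ARCH_WORKAROUND_2 is implemented. */
	arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
			  ARM_SMCCC_ARCH_WORKAROUND_2, &res);

	/*
	 * Negative: unsupported or always mitigated; positive: CPU
	 * unaffected; zero: mitigation required. Only act on zero.
	 */
	if ((int)res.a0 != 0)
		return false;

	/* Request the firmware-level mitigation on this CPU. */
	arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, 1, NULL);
	return true;
}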
-rw-r--r--	arch/arm64/Kconfig	9
-rw-r--r--	arch/arm64/include/asm/cpucaps.h	3
-rw-r--r--	arch/arm64/kernel/cpu_errata.c	69
3 files changed, 80 insertions, 1 deletions
diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index d0df3611d1e2..3e43874568f9 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -776,6 +776,15 @@ config HARDEN_BRANCH_PREDICTOR
If unsure, say Y.
+config ARM64_SSBD
+ bool "Speculative Store Bypass Disable" if EXPERT
+ default y
+ help
+ This enables mitigation of the bypassing of previous stores
+ by speculative loads.
+
+ If unsure, say Y.
+
menuconfig ARMV8_DEPRECATED
bool "Emulate deprecated/obsolete ARMv8 instructions"
depends on COMPAT
diff --git a/arch/arm64/include/asm/cpucaps.h b/arch/arm64/include/asm/cpucaps.h
index ce67bf6a0886..7010779a1429 100644
--- a/arch/arm64/include/asm/cpucaps.h
+++ b/arch/arm64/include/asm/cpucaps.h
@@ -36,7 +36,8 @@
#define ARM64_MISMATCHED_CACHE_LINE_SIZE 15
#define ARM64_UNMAP_KERNEL_AT_EL0 16
#define ARM64_HARDEN_BRANCH_PREDICTOR 17
+#define ARM64_SSBD 18
-#define ARM64_NCAPS 18
+#define ARM64_NCAPS 19
#endif /* __ASM_CPUCAPS_H */
diff --git a/arch/arm64/kernel/cpu_errata.c b/arch/arm64/kernel/cpu_errata.c
index a093907214bf..e57331726ac7 100644
--- a/arch/arm64/kernel/cpu_errata.c
+++ b/arch/arm64/kernel/cpu_errata.c
@@ -211,6 +211,67 @@ void __init arm64_update_smccc_conduit(struct alt_instr *alt,
*updptr = cpu_to_le32(insn);
}
+
+static void arm64_set_ssbd_mitigation(bool state)
+{
+ switch (psci_ops.conduit) {
+ case PSCI_CONDUIT_HVC:
+ arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
+ break;
+
+ case PSCI_CONDUIT_SMC:
+ arm_smccc_1_1_smc(ARM_SMCCC_ARCH_WORKAROUND_2, state, NULL);
+ break;
+
+ default:
+ WARN_ON_ONCE(1);
+ break;
+ }
+}
+
+static bool has_ssbd_mitigation(const struct arm64_cpu_capabilities *entry,
+ int scope)
+{
+ struct arm_smccc_res res;
+ bool supported = true;
+
+ WARN_ON(scope != SCOPE_LOCAL_CPU || preemptible());
+
+ if (psci_ops.smccc_version == SMCCC_VERSION_1_0)
+ return false;
+
+ /*
+ * The probe function return value is either negative
+ * (unsupported or mitigated), positive (unaffected), or zero
+ * (requires mitigation). We only need to do anything in the
+ * last case.
+ */
+ switch (psci_ops.conduit) {
+ case PSCI_CONDUIT_HVC:
+ arm_smccc_1_1_hvc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
+ ARM_SMCCC_ARCH_WORKAROUND_2, &res);
+ if ((int)res.a0 != 0)
+ supported = false;
+ break;
+
+ case PSCI_CONDUIT_SMC:
+ arm_smccc_1_1_smc(ARM_SMCCC_ARCH_FEATURES_FUNC_ID,
+ ARM_SMCCC_ARCH_WORKAROUND_2, &res);
+ if ((int)res.a0 != 0)
+ supported = false;
+ break;
+
+ default:
+ supported = false;
+ }
+
+ if (supported) {
+ __this_cpu_write(arm64_ssbd_callback_required, 1);
+ arm64_set_ssbd_mitigation(true);
+ }
+
+ return supported;
+}
#endif /* CONFIG_ARM64_SSBD */
#define MIDR_RANGE(model, min, max) \
@@ -336,6 +397,14 @@ const struct arm64_cpu_capabilities arm64_errata[] = {
.enable = enable_smccc_arch_workaround_1,
},
#endif
+#ifdef CONFIG_ARM64_SSBD
+ {
+ .desc = "Speculative Store Bypass Disable",
+ .def_scope = SCOPE_LOCAL_CPU,
+ .capability = ARM64_SSBD,
+ .matches = has_ssbd_mitigation,
+ },
+#endif
{
}
};