@@ -48,6 +48,10 @@ CHECKFLAGS += -D__ARMEL__
KBUILD_LDFLAGS += -EL
endif
+ifeq ($(CONFIG_HARDEN_SLS_ALL), y)
+KBUILD_CFLAGS += -mharden-sls=all
+endif
+
#
# The Scalar Replacement of Aggregates (SRA) optimization pass in GCC 4.9 and
# later may result in code being generated that handles signed short and signed
@@ -145,3 +145,7 @@
__edtcm_data = .; \
} \
. = __dtcm_start + SIZEOF(.data_dtcm);
+
+#define SLS_TEXT \
+ ALIGN_FUNCTION(); \
+ *(.text.__llvm_slsblr_thunk_*)
@@ -63,6 +63,7 @@ SECTIONS
.text : { /* Real text segment */
_stext = .; /* Text and read-only data */
ARM_TEXT
+ SLS_TEXT
}
#ifdef CONFIG_DEBUG_ALIGN_RODATA
@@ -34,6 +34,10 @@ $(warning LSE atomics not supported by binutils)
endif
endif
+ifeq ($(CONFIG_HARDEN_SLS_ALL), y)
+KBUILD_CFLAGS += -mharden-sls=all
+endif
+
cc_has_k_constraint := $(call try-run,echo \
'int main(void) { \
asm volatile("and w0, w0, %w0" :: "K" (4294967295)); \
@@ -93,6 +93,10 @@ jiffies = jiffies_64;
#define TRAMP_TEXT
#endif
+#define SLS_TEXT \
+ ALIGN_FUNCTION(); \
+ *(.text.__llvm_slsblr_thunk_*)
+
/*
* The size of the PE/COFF section that covers the kernel image, which
* runs from _stext to _edata, must be a round multiple of the PE/COFF
@@ -144,6 +148,7 @@ SECTIONS
HIBERNATE_TEXT
TRAMP_TEXT
*(.fixup)
+ SLS_TEXT
*(.gnu.warning)
. = ALIGN(16);
*(.got) /* Global offset table */
@@ -121,6 +121,16 @@ choice
endchoice
+config HARDEN_SLS_ALL
+	bool "Enable straight-line speculation (SLS) vulnerability hardening"
+	default n
+	depends on $(cc-option,-mharden-sls=all)
+	help
+	  Enable straight-line speculation (SLS) vulnerability hardening on the
+	  ARM and ARM64 architectures. The compiler inserts speculation barrier
+	  sequences (SB, or DSB+ISB depending on the target architecture) after
+	  RET and BR instructions, and replaces BLR with a BL+BR sequence.
+
config GCC_PLUGIN_STRUCTLEAK_VERBOSE
bool "Report forcefully initialized variables"
depends on GCC_PLUGIN_STRUCTLEAK
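
For illustration, a minimal sketch (not part of the patch) of the code generation the help text describes, assuming clang's -mharden-sls=all on arm64; the register (x8) and hence the exact thunk name are hypothetical and depend on register allocation:

/* sls_example.c - illustrative only */
int call_op(int (*op)(int), int x)
{
	return op(x) + 1;	/* not a tail call, so an indirect "blr" is used */
}

/*
 * Expected output (sketch):
 *
 * call_op:
 *	...
 *	bl	__llvm_slsblr_thunk_x8	// BLR x8 replaced by BL to a thunk
 *	add	w0, w0, #1
 *	...
 *	ret
 *	dsb	sy			// speculation barrier after RET
 *	isb				// (a single SB where the target supports it)
 *
 * __llvm_slsblr_thunk_x8:		// emitted in .text.__llvm_slsblr_thunk_x8
 *	br	x8
 *	dsb	sy
 *	isb
 */

The thunks land in their own .text.__llvm_slsblr_thunk_* sections, which is why the SLS_TEXT macro added to the arm and arm64 vmlinux.lds.S files above has to pull them back into the kernel's text segment.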