--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -132,6 +132,8 @@ config ARM64
select HAVE_DEBUG_KMEMLEAK
select HAVE_DMA_CONTIGUOUS
select HAVE_DYNAMIC_FTRACE
+ select HAVE_DYNAMIC_FTRACE_WITH_REGS \
+ if $(cc-option,-fpatchable-function-entry=2)
select HAVE_EFFICIENT_UNALIGNED_ACCESS
select HAVE_FTRACE_MCOUNT_RECORD
select HAVE_FUNCTION_TRACER
--- a/arch/arm64/Makefile
+++ b/arch/arm64/Makefile
@@ -89,6 +89,11 @@ ifeq ($(CONFIG_ARM64_MODULE_PLTS),y)
KBUILD_LDFLAGS_MODULE += -T $(srctree)/arch/arm64/kernel/module.lds
endif
+ifeq ($(CONFIG_DYNAMIC_FTRACE_WITH_REGS),y)
+ KBUILD_CPPFLAGS += -DCC_USING_PATCHABLE_FENTRY
+ CC_FLAGS_FTRACE := -fpatchable-function-entry=2
+endif
+
# Default value
head-y := arch/arm64/kernel/head.o
--- a/include/asm-generic/vmlinux.lds.h
+++ b/include/asm-generic/vmlinux.lds.h
@@ -113,6 +113,7 @@
#define MCOUNT_REC() . = ALIGN(8); \
__start_mcount_loc = .; \
KEEP(*(__mcount_loc)) \
+ KEEP(*(__patchable_function_entries)) \
__stop_mcount_loc = .;
#else
#define MCOUNT_REC()
--- a/include/linux/compiler_types.h
+++ b/include/linux/compiler_types.h
@@ -112,6 +112,8 @@ struct ftrace_likely_data {
#if defined(CC_USING_HOTPATCH)
#define notrace __attribute__((hotpatch(0, 0)))
+#elif defined(CC_USING_PATCHABLE_FENTRY)
+#define notrace __attribute__((patchable_function_entry(0)))
#else
#define notrace __attribute__((__no_instrument_function__))
#endif

Test whether gcc supports -fpatchable-function-entry and use it to promote
DYNAMIC_FTRACE to DYNAMIC_FTRACE_WITH_REGS. Amend support for the new object
section that holds the locations (__patchable_function_entries) and define a
proper "notrace" attribute to switch it off.

Signed-off-by: Torsten Duwe <duwe@suse.de>
---
 arch/arm64/Kconfig                | 2 ++
 arch/arm64/Makefile               | 5 +++++
 include/asm-generic/vmlinux.lds.h | 1 +
 include/linux/compiler_types.h    | 2 ++
 4 files changed, 10 insertions(+)
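
For illustration only, and not part of the patch: a minimal sketch of what
the new flag and the notrace change boil down to at the compiler level,
assuming an aarch64 gcc that passes the cc-option test above. The file and
function names (demo.c, traced_fn, untraced_fn) are made up for the example.

/*
 * demo.c - hypothetical example, not kernel code; build with e.g.:
 *   aarch64-linux-gnu-gcc -O2 -fpatchable-function-entry=2 -c demo.c
 */

/*
 * Traced by default: gcc pads the entry of this function with two NOPs
 * and records their location in the __patchable_function_entries
 * section, which MCOUNT_REC() above now folds into __mcount_loc so
 * ftrace can find and patch the NOPs at runtime.
 */
int traced_fn(int x)
{
	return x + 1;
}

/*
 * Not traced: this attribute is what "notrace" expands to when
 * CC_USING_PATCHABLE_FENTRY is defined. It overrides the command-line
 * option, so no NOPs and no section entry are emitted for this function.
 */
__attribute__((patchable_function_entry(0)))
int untraced_fn(int x)
{
	return x - 1;
}

A quick way to convince yourself: disassemble the object (e.g.
aarch64-linux-gnu-objdump -d demo.o) and look for the two NOPs at the start
of traced_fn() but not untraced_fn(), and check the section list
(readelf -S demo.o) for __patchable_function_entries.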