[v4,11/17] arm64: mask PAC bits of __builtin_return_address

Message ID 1579774839-19562-12-git-send-email-amit.kachhap@arm.com (mailing list archive)
State New, archived
Series: arm64: return address signing

Commit Message

Amit Daniel Kachhap Jan. 23, 2020, 10:20 a.m. UTC
This redefines __builtin_return_address to mask PAC bits when
Pointer Authentication is enabled. As __builtin_return_address is
mostly used to refer to the caller's function symbol address, masking
the runtime-generated PAC bits helps find the matching symbol.
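
As a minimal standalone sketch of the idea (not from this patch; it
assumes 48-bit virtual addresses, and the PAC-signed link-register
value is made up): ORing the kernel PAC mask back into a signed
return address restores the all-ones sign extension, so symbol lookup
matches again.

  #include <stdio.h>

  int main(void)
  {
  	/* GENMASK_ULL(63, 48) for a hypothetical vabits_actual of 48 */
  	unsigned long long kernel_pac_mask = 0xffff000000000000ULL;
  	/* made-up return address whose top bits were replaced by a PAC */
  	unsigned long long signed_lr = 0x24b7ffff800823a4ULL;

  	/* prints 0xffffffff800823a4, the plain kernel address */
  	printf("plain address: 0x%llx\n", signed_lr | kernel_pac_mask);
  	return 0;
  }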

This patch adds a new file (asm/compiler.h), which is transitively
included (via include/compiler_types.h) on the compiler command line,
so it is guaranteed to be loaded and users of this macro will not
pick up a wrong version.
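
For reference, the hook in include/linux/compiler_types.h that pulls
the architecture header in (already present upstream, shown here only
as a rough reminder, not part of this patch) is along these lines:

  #ifdef CONFIG_HAVE_ARCH_COMPILER_H
  #include <asm/compiler.h>
  #endif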

A helper macro ptrauth_kernel_pac_mask is created for this purpose
and added to this file. A similar macro ptrauth_user_pac_mask already
exists in pointer_auth.h and is moved here for the sake of
consistency.
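
As a rough standalone illustration (not from this patch; it assumes a
hypothetical vabits_actual of 48, whereas the real value is determined
at boot, and uses a simplified stand-in for the kernel's GENMASK_ULL),
the two helpers would expand to these constants:

  /* simplified stand-in for the kernel's GENMASK_ULL() */
  #define GENMASK_ULL(h, l) \
  	(((~0ULL) << (l)) & (~0ULL >> (63 - (h))))

  /* kernel (TTBR1) pointers: the PAC occupies bits 63:48 */
  _Static_assert(GENMASK_ULL(63, 48) == 0xffff000000000000ULL,
  	       "kernel PAC mask");
  /* user (TTBR0) pointers: the PAC occupies bits 54:48; bit 55
   * selects the address space and bits 63:56 are left for TBI */
  _Static_assert(GENMASK_ULL(54, 48) == 0x007f000000000000ULL,
  	       "user PAC mask");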

This change fixes utilities like cat /proc/vmallocinfo so that they
show the correct symbol names.

Signed-off-by: Amit Daniel Kachhap <amit.kachhap@arm.com>
---
Changes since v3:
* Changed VA_BITS to vabits_actual for kernel pacmask as pointed by Catalin [1].

[1]: http://lists.infradead.org/pipermail/linux-arm-kernel/2020-January/706604.html

 arch/arm64/Kconfig                    |  1 +
 arch/arm64/include/asm/compiler.h     | 20 ++++++++++++++++++++
 arch/arm64/include/asm/pointer_auth.h | 13 +++++--------
 3 files changed, 26 insertions(+), 8 deletions(-)
 create mode 100644 arch/arm64/include/asm/compiler.h

Patch

diff --git a/arch/arm64/Kconfig b/arch/arm64/Kconfig
index 86cb782..beded60 100644
--- a/arch/arm64/Kconfig
+++ b/arch/arm64/Kconfig
@@ -116,6 +116,7 @@  config ARM64
 	select HAVE_ALIGNED_STRUCT_PAGE if SLUB
 	select HAVE_ARCH_AUDITSYSCALL
 	select HAVE_ARCH_BITREVERSE
+	select HAVE_ARCH_COMPILER_H
 	select HAVE_ARCH_HUGE_VMAP
 	select HAVE_ARCH_JUMP_LABEL
 	select HAVE_ARCH_JUMP_LABEL_RELATIVE
diff --git a/arch/arm64/include/asm/compiler.h b/arch/arm64/include/asm/compiler.h
new file mode 100644
index 0000000..085e7cd0
--- /dev/null
+++ b/arch/arm64/include/asm/compiler.h
@@ -0,0 +1,20 @@ 
+/* SPDX-License-Identifier: GPL-2.0 */
+#ifndef __ASM_COMPILER_H
+#define __ASM_COMPILER_H
+
+#if defined(CONFIG_ARM64_PTR_AUTH)
+
+/*
+ * The EL0/EL1 pointer bits used by a pointer authentication code.
+ * This is dependent on TBI0/TBI1 being enabled, or bits 63:56 would also apply.
+ */
+#define ptrauth_user_pac_mask()		GENMASK_ULL(54, vabits_actual)
+#define ptrauth_kernel_pac_mask()	GENMASK_ULL(63, vabits_actual)
+
+#define __builtin_return_address(val)				\
+	(void *)((unsigned long)__builtin_return_address(val) |	\
+	ptrauth_kernel_pac_mask())
+
+#endif /* CONFIG_ARM64_PTR_AUTH */
+
+#endif /* __ASM_COMPILER_H */
diff --git a/arch/arm64/include/asm/pointer_auth.h b/arch/arm64/include/asm/pointer_auth.h
index 833d3f9..5340dbb 100644
--- a/arch/arm64/include/asm/pointer_auth.h
+++ b/arch/arm64/include/asm/pointer_auth.h
@@ -68,16 +68,13 @@  static __always_inline void ptrauth_keys_switch_kernel(struct ptrauth_keys_kerne
 
 extern int ptrauth_prctl_reset_keys(struct task_struct *tsk, unsigned long arg);
 
-/*
- * The EL0 pointer bits used by a pointer authentication code.
- * This is dependent on TBI0 being enabled, or bits 63:56 would also apply.
- */
-#define ptrauth_user_pac_mask()	GENMASK(54, vabits_actual)
-
-/* Only valid for EL0 TTBR0 instruction pointers */
+/* Valid for EL0 TTBR0 and EL1 TTBR1 instruction pointers */
 static inline unsigned long ptrauth_strip_insn_pac(unsigned long ptr)
 {
-	return ptr & ~ptrauth_user_pac_mask();
+	if (ptr & BIT_ULL(55))
+		return ptr | ptrauth_kernel_pac_mask();
+	else
+		return ptr & ~ptrauth_user_pac_mask();
 }
 
 #define ptrauth_thread_init_user(tsk)					\