--- a/arch/x86/boot/compressed/misc.h
+++ b/arch/x86/boot/compressed/misc.h
@@ -12,6 +12,7 @@
#undef CONFIG_PARAVIRT_XXL
#undef CONFIG_PARAVIRT_SPINLOCKS
#undef CONFIG_KASAN
+#undef CONFIG_KMSAN
/* cpu_feature_enabled() cannot be used this early */
#define USE_EARLY_PGTABLE_L5
--- a/arch/x86/include/asm/string_64.h
+++ b/arch/x86/include/asm/string_64.h
@@ -11,11 +11,23 @@
function. */
#define __HAVE_ARCH_MEMCPY 1
+#if defined(CONFIG_KMSAN)
+#undef memcpy
+/* __msan_memcpy() is declared in compiler.h */
+#define memcpy(dst, src, len) __msan_memcpy(dst, src, len)
+#else
extern void *memcpy(void *to, const void *from, size_t len);
+#endif
extern void *__memcpy(void *to, const void *from, size_t len);
#define __HAVE_ARCH_MEMSET
+#if defined(CONFIG_KMSAN)
+extern void *__msan_memset(void *s, int c, size_t n);
+#undef memset
+#define memset(dst, c, len) __msan_memset(dst, c, len)
+#else
void *memset(void *s, int c, size_t n);
+#endif
void *__memset(void *s, int c, size_t n);
#define __HAVE_ARCH_MEMSET16
@@ -55,7 +67,13 @@ static inline void *memset64(uint64_t *s, uint64_t v, size_t n)
}
#define __HAVE_ARCH_MEMMOVE
+#if defined(CONFIG_KMSAN)
+#undef memmove
+void *__msan_memmove(void *dest, const void *src, size_t len);
+#define memmove(dst, src, len) __msan_memmove(dst, src, len)
+#else
void *memmove(void *dest, const void *src, size_t count);
+#endif
void *__memmove(void *dest, const void *src, size_t count);
int memcmp(const void *cs, const void *ct, size_t count);
@@ -64,7 +82,8 @@ char *strcpy(char *dest, const char *src);
char *strcat(char *dest, const char *src);
int strcmp(const char *cs, const char *ct);
-#if defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)
+#if (defined(CONFIG_KASAN) && !defined(__SANITIZE_ADDRESS__)) || \
+ (defined(CONFIG_KMSAN) && !defined(__SANITIZE_MEMORY__))
/*
* For files that are not instrumented (e.g. mm/slub.c) we
@@ -73,7 +92,9 @@ int strcmp(const char *cs, const char *ct);
#undef memcpy
#define memcpy(dst, src, len) __memcpy(dst, src, len)
+#undef memmove
#define memmove(dst, src, len) __memmove(dst, src, len)
+#undef memset
#define memset(s, c, n) __memset(s, c, n)
#ifndef __NO_FORTIFY
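
A minimal user-space model of the redirection above may help; it is
illustrative only and not part of the patch. demo_msan_memcpy() is a
hypothetical stand-in for the runtime's __msan_memcpy(), and DEMO_KMSAN
plays the role of CONFIG_KMSAN:

/*
 * Build with -DDEMO_KMSAN to route memcpy() through the wrapper,
 * mirroring what the CONFIG_KMSAN branch of string_64.h does.
 */
#include <stdio.h>
#include <string.h>

static void *demo_msan_memcpy(void *dst, const void *src, size_t n)
{
	/* The real wrapper also copies shadow and origin metadata here. */
	printf("instrumented copy of %zu bytes\n", n);
	/* Defined before the #define below, so this is the real memcpy(). */
	return memcpy(dst, src, n);
}

#ifdef DEMO_KMSAN
#undef memcpy
#define memcpy(dst, src, len) demo_msan_memcpy(dst, src, len)
#endif

int main(void)
{
	char a[8] = "abcdefg", b[8];

	memcpy(b, a, sizeof(b));	/* with -DDEMO_KMSAN: demo_msan_memcpy() */
	puts(b);
	return 0;
}

Note the ordering trick this relies on: the wrapper is defined before the
macro, so its own memcpy() call is left untouched by the preprocessor.
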
--- a/drivers/firmware/efi/libstub/efi-stub-helper.c
+++ b/drivers/firmware/efi/libstub/efi-stub-helper.c
@@ -5,7 +5,12 @@
* implementation files.
*
* Copyright 2011 Intel Corporation; author Matt Fleming
+ *
+ * This file is not linked with the KMSAN runtime.
+ * Do not replace memcpy() with __memcpy().
*/
+#undef CONFIG_KMSAN
#include <linux/efi.h>
#include <asm/efi.h>
--- a/drivers/firmware/efi/libstub/tpm.c
+++ b/drivers/firmware/efi/libstub/tpm.c
@@ -6,7 +6,12 @@
* Copyright (C) 2017 Google, Inc.
* Matthew Garrett <mjg59@google.com>
* Thiebaud Weksteen <tweek@google.com>
+ *
+ * This file is not linked with the KMSAN runtime.
+ * Do not replace memcpy() with __memcpy().
*/
+#undef CONFIG_KMSAN
#include <linux/efi.h>
#include <linux/tpm_eventlog.h>
#include <asm/efi.h>
--- a/include/linux/compiler.h
+++ b/include/linux/compiler.h
@@ -180,6 +180,13 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
#include <uapi/linux/types.h>
#include <linux/kcsan-checks.h>
+#ifdef CONFIG_KMSAN
+void *__msan_memcpy(void *dst, const void *src, u64 size);
+#define __DO_MEMCPY(res, p, size) __msan_memcpy(res, p, size)
+#else
+#define __DO_MEMCPY(res, p, size) __builtin_memcpy(res, p, size)
+#endif
+
#define __READ_ONCE_SIZE \
({ \
switch (size) { \
@@ -189,7 +196,7 @@ void ftrace_likely_update(struct ftrace_likely_data *f, int val,
case 8: *(__u64 *)res = *(volatile __u64 *)p; break; \
default: \
barrier(); \
- __builtin_memcpy((void *)res, (const void *)p, size); \
+ __DO_MEMCPY((void *)res, (const void *)p, size); \
barrier(); \
} \
})
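
For illustration, here is the size switch above in function form (a
sketch, not kernel code): sizes 1, 2, 4 and 8 are read directly, while
any other size takes the default branch, which is exactly the copy that
__DO_MEMCPY() now routes through __msan_memcpy() under KMSAN so the
shadow of *res stays in sync with the shadow of *p:

#include <string.h>

void demo_read_once_size(const volatile void *p, void *res, int size)
{
	switch (size) {
	case 1: *(char *)res = *(const volatile char *)p; break;
	case 2: *(short *)res = *(const volatile short *)p; break;
	case 4: *(int *)res = *(const volatile int *)p; break;
	case 8: *(long long *)res = *(const volatile long long *)p; break;
	default:
		/* Kernel: barrier(); __DO_MEMCPY(res, p, size); barrier(); */
		memcpy(res, (const void *)p, size);
		break;
	}
}

Without the change, an odd-sized READ_ONCE() would copy the value with
__builtin_memcpy() and leave the destination's shadow stale, making a
fully initialized result look uninitialized.
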
--- a/include/linux/string.h
+++ b/include/linux/string.h
@@ -356,6 +356,7 @@ __FORTIFY_INLINE char *strncat(char *p, const char *q, __kernel_size_t count)
return p;
}
+#ifndef CONFIG_KMSAN
__FORTIFY_INLINE void *memset(void *p, int c, __kernel_size_t size)
{
size_t p_size = __builtin_object_size(p, 0);
@@ -395,6 +396,7 @@ __FORTIFY_INLINE void *memmove(void *p, const void *q, __kernel_size_t size)
fortify_panic(__func__);
return __builtin_memmove(p, q, size);
}
+#endif
extern void *__real_memscan(void *, int, __kernel_size_t) __RENAME(memscan);
__FORTIFY_INLINE void *memscan(void *p, int c, __kernel_size_t size)
Unless stated otherwise (by explicitly calling __memcpy(), __memset() or
__memmove()) we want all string functions to call their __msan_ versions
(e.g. __msan_memcpy() instead of memcpy()), so that shadow and origin
values are updated accordingly.

The bootloader must still use the default string functions to avoid
crashes.

Signed-off-by: Alexander Potapenko <glider@google.com>
To: Alexander Potapenko <glider@google.com>
Cc: Vegard Nossum <vegard.nossum@oracle.com>
Cc: Dmitry Vyukov <dvyukov@google.com>
Cc: Marco Elver <elver@google.com>
Cc: Andrey Konovalov <andreyknvl@google.com>
Cc: linux-mm@kvack.org

---

v3:
 - use default string functions in the bootloader

v4:
 - include kmsan-checks.h into compiler.h
 - also handle memset() and memmove()
 - fix https://github.com/google/kmsan/issues/64

v5:
 - don't compile memset() and memmove() under KMSAN

Change-Id: Ib2512ce5aa8d457453dd38caa12f58f002166813
---
 arch/x86/boot/compressed/misc.h                |  1 +
 arch/x86/include/asm/string_64.h               | 23 ++++++++++++++++++-
 .../firmware/efi/libstub/efi-stub-helper.c     |  5 ++++
 drivers/firmware/efi/libstub/tpm.c             |  5 ++++
 include/linux/compiler.h                       |  9 +++++++-
 include/linux/string.h                         |  2 ++
 6 files changed, 43 insertions(+), 2 deletions(-)
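
To make "shadow and origin values are updated accordingly" concrete,
here is a minimal user-space model of shadow propagation through a
copy. All demo_ names are hypothetical; the real runtime uses per-page
shadow mappings and tracks origins as well:

#include <stdint.h>
#include <stdio.h>
#include <string.h>

/* One shadow byte per data byte; nonzero shadow = "uninitialized". */
static uint8_t demo_data[64];
static uint8_t demo_shadow[64];

/*
 * Copy payload and shadow together, the way __msan_memcpy() keeps the
 * destination's initializedness in sync with the source's. Assumes
 * dst and src both point into demo_data.
 */
static void *demo_msan_memcpy(void *dst, const void *src, size_t n)
{
	size_t dst_off = (uint8_t *)dst - demo_data;
	size_t src_off = (const uint8_t *)src - demo_data;

	memmove(&demo_shadow[dst_off], &demo_shadow[src_off], n);
	return memmove(dst, src, n);
}

int main(void)
{
	memset(demo_shadow, 0xff, sizeof(demo_shadow));	/* all uninitialized */
	demo_data[0] = 42;
	demo_shadow[0] = 0;				/* byte 0 initialized */

	demo_msan_memcpy(&demo_data[32], &demo_data[0], 2);

	/* Byte 32 inherits "initialized"; byte 33 stays "uninitialized". */
	printf("shadow[32]=%#x shadow[33]=%#x\n",
	       demo_shadow[32], demo_shadow[33]);
	return 0;
}

Calling __memcpy() directly corresponds to skipping the shadow copy
above: fine for code that never runs with the runtime (the bootloader,
the EFI stub) and for non-instrumented files, but it would leave
destinations looking uninitialized anywhere KMSAN metadata matters.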