--- a/arch/loongarch/include/asm/asm-extable.h
+++ b/arch/loongarch/include/asm/asm-extable.h
@@ -4,6 +4,7 @@
#define EX_TYPE_NONE 0
#define EX_TYPE_FIXUP 1
+#define EX_TYPE_UACCESS_ERR_ZERO 2
#ifdef __ASSEMBLY__
@@ -24,6 +25,7 @@
#include <linux/bits.h>
#include <linux/stringify.h>
+#include <asm/gpr-num.h>
#define __ASM_EXTABLE_RAW(insn, fixup, type, data) \
".pushsection __ex_table, \"a\"\n" \
@@ -37,6 +39,26 @@
#define _ASM_EXTABLE(insn, fixup) \
__ASM_EXTABLE_RAW(#insn, #fixup, __stringify(EX_TYPE_FIXUP), "0")
+#define EX_DATA_REG_ERR_SHIFT 0
+#define EX_DATA_REG_ERR GENMASK(4, 0)
+#define EX_DATA_REG_ZERO_SHIFT 5
+#define EX_DATA_REG_ZERO GENMASK(9, 5)
+
+#define EX_DATA_REG(reg, gpr) \
+ "((.L__gpr_num_" #gpr ") << " __stringify(EX_DATA_REG_##reg##_SHIFT) ")"
+
+#define _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero) \
+ __DEFINE_ASM_GPR_NUMS \
+ __ASM_EXTABLE_RAW(#insn, #fixup, \
+ __stringify(EX_TYPE_UACCESS_ERR_ZERO), \
+ "(" \
+ EX_DATA_REG(ERR, err) " | " \
+ EX_DATA_REG(ZERO, zero) \
+ ")")
+
+#define _ASM_EXTABLE_UACCESS_ERR(insn, fixup, err) \
+ _ASM_EXTABLE_UACCESS_ERR_ZERO(insn, fixup, err, zero)
+
#endif /* __ASSEMBLY__ */
#endif /* __ASM_ASM_EXTABLE_H */
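The data word built by _ASM_EXTABLE_UACCESS_ERR_ZERO() is simply two
5-bit GPR numbers packed side by side, as the masks above define. A
minimal userspace sketch of the encoding (the register numbers 4 and 5
below are illustrative, not taken from this patch):

/* Compile-anywhere sketch of the extable data encoding. */
#include <stdio.h>
#include <stdint.h>

#define EX_DATA_REG_ERR_SHIFT   0
#define EX_DATA_REG_ZERO_SHIFT  5

int main(void)
{
        /* e.g. err lives in $r4, the zeroed destination in $r5 */
        uint32_t data = (4u << EX_DATA_REG_ERR_SHIFT) |
                        (5u << EX_DATA_REG_ZERO_SHIFT);

        printf("data = %#x\n", data);   /* prints data = 0xa4 */
        return 0;
}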
--- a/arch/loongarch/include/asm/futex.h
+++ b/arch/loongarch/include/asm/futex.h
@@ -19,16 +19,11 @@
"2: sc.w $t0, %2 \n" \
" beqz $t0, 1b \n" \
"3: \n" \
- " .section .fixup,\"ax\" \n" \
- "4: li.w %0, %6 \n" \
- " b 3b \n" \
- " .previous \n" \
- _ASM_EXTABLE(1b, 4b) \
- _ASM_EXTABLE(2b, 4b) \
+ _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0) \
+ _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0) \
: "=r" (ret), "=&r" (oldval), \
"=ZC" (*uaddr) \
- : "0" (0), "ZC" (*uaddr), "Jr" (oparg), \
- "i" (-EFAULT) \
+ : "0" (0), "ZC" (*uaddr), "Jr" (oparg) \
: "memory", "t0"); \
}
@@ -85,15 +80,10 @@ futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 oldval, u32 newv
" beqz $t0, 1b \n"
"3: \n"
__WEAK_LLSC_MB
- " .section .fixup,\"ax\" \n"
- "4: li.d %0, %6 \n"
- " b 3b \n"
- " .previous \n"
- _ASM_EXTABLE(1b, 4b)
- _ASM_EXTABLE(2b, 4b)
+ _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %0)
+ _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %0)
: "+r" (ret), "=&r" (val), "=ZC" (*uaddr)
- : "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval),
- "i" (-EFAULT)
+ : "ZC" (*uaddr), "Jr" (oldval), "Jr" (newval)
: "memory", "t0");
*uval = val;
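Both futex routines now get their fault semantics entirely from the
handler: a fault at label 1 or 2 resumes at label 3 with -EFAULT
already written into the register backing %0, which is why the
"i" (-EFAULT) input operand can be dropped. A hedged plain-C model of
that contract (model_futex_cmpxchg and the faulted flag are
illustrative names, not kernel API):

#include <errno.h>

/* Plain-C model of the new fault contract, not the real implementation. */
static int model_futex_cmpxchg(unsigned int *uval, unsigned int *uaddr,
                               unsigned int oldval, unsigned int newval,
                               int faulted)
{
        if (faulted)
                return -EFAULT;  /* handler wrote -EFAULT into %0 (ret) */

        /* the 1:/2: LL/SC retry loop, collapsed for illustration */
        __atomic_compare_exchange_n(uaddr, &oldval, newval, 0,
                                    __ATOMIC_RELAXED, __ATOMIC_RELAXED);
        *uval = oldval;          /* old value read at label 1 */
        return 0;
}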
--- a/arch/loongarch/include/asm/uaccess.h
+++ b/arch/loongarch/include/asm/uaccess.h
@@ -161,14 +161,9 @@ do { \
__asm__ __volatile__( \
"1: " insn " %1, %2 \n" \
"2: \n" \
- " .section .fixup,\"ax\" \n" \
- "3: li.w %0, %3 \n" \
- " move %1, $zero \n" \
- " b 2b \n" \
- " .previous \n" \
- _ASM_EXTABLE(1b, 3b) \
+ _ASM_EXTABLE_UACCESS_ERR_ZERO(1b, 2b, %0, %1) \
: "+r" (__gu_err), "=r" (__gu_tmp) \
- : "m" (__m(ptr)), "i" (-EFAULT)); \
+ : "m" (__m(ptr))); \
\
(val) = (__typeof__(*(ptr))) __gu_tmp; \
}
@@ -191,13 +186,9 @@ do { \
__asm__ __volatile__( \
"1: " insn " %z2, %1 # __put_user_asm\n" \
"2: \n" \
- " .section .fixup,\"ax\" \n" \
- "3: li.w %0, %3 \n" \
- " b 2b \n" \
- " .previous \n" \
- _ASM_EXTABLE(1b, 3b) \
+ _ASM_EXTABLE_UACCESS_ERR(1b, 2b, %0) \
: "+r" (__pu_err), "=m" (__m(ptr)) \
- : "Jr" (__pu_val), "i" (-EFAULT)); \
+ : "Jr" (__pu_val)); \
}
#define __get_kernel_nofault(dst, src, type, err_label) \
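After this change __get_user_asm() has a single, uniform fault
contract: the handler writes -EFAULT into the %0 register and zeroes
the %1 register, so callers always see err == -EFAULT together with a
defined (zeroed) value. A compilable sketch of that contract
(model_get_user and faulted are illustrative names):

#include <errno.h>

/* Plain-C model of the __get_user_asm() fault contract. */
static int model_get_user(long *val, const long *uaddr, int faulted)
{
        if (faulted) {
                *val = 0;       /* EX_DATA_REG_ZERO register cleared */
                return -EFAULT; /* EX_DATA_REG_ERR register set */
        }
        *val = *uaddr;          /* the "1:" load on the fast path */
        return 0;
}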
--- a/arch/loongarch/mm/extable.c
+++ b/arch/loongarch/mm/extable.c
@@ -2,11 +2,12 @@
/*
* Copyright (C) 2020-2022 Loongson Technology Corporation Limited
*/
+#include <linux/bitfield.h>
#include <linux/extable.h>
#include <linux/spinlock.h>
+#include <linux/uaccess.h>
#include <asm/asm-extable.h>
#include <asm/branch.h>
-#include <linux/uaccess.h>
static inline unsigned long
get_ex_fixup(const struct exception_table_entry *ex)
@@ -21,6 +22,28 @@ static bool ex_handler_fixup(const struct exception_table_entry *ex,
return true;
}
+static inline void regs_set_gpr(struct pt_regs *regs, unsigned int offset,
+ unsigned long val)
+{
+ if (unlikely(offset > MAX_REG_OFFSET))
+ return;
+
+ if (offset)
+ *(unsigned long *)((unsigned long)regs + offset) = val;
+}
+
+static bool ex_handler_uaccess_err_zero(const struct exception_table_entry *ex,
+ struct pt_regs *regs)
+{
+ int reg_err = FIELD_GET(EX_DATA_REG_ERR, ex->data);
+ int reg_zero = FIELD_GET(EX_DATA_REG_ZERO, ex->data);
+
+ regs_set_gpr(regs, reg_err * sizeof(unsigned long), -EFAULT);
+ regs_set_gpr(regs, reg_zero * sizeof(unsigned long), 0);
+
+ regs->csr_era = get_ex_fixup(ex);
+ return true;
+}
bool fixup_exception(struct pt_regs *regs)
{
@@ -33,6 +56,8 @@ bool fixup_exception(struct pt_regs *regs)
switch (ex->type) {
case EX_TYPE_FIXUP:
return ex_handler_fixup(ex, regs);
+ case EX_TYPE_UACCESS_ERR_ZERO:
+ return ex_handler_uaccess_err_zero(ex, regs);
}
BUG();
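The offset arithmetic works because the 32 GPRs sit at the start of
LoongArch's struct pt_regs, so register N lives at byte offset
N * sizeof(unsigned long); offset 0 is $zero, which the guard above
refuses to write. A standalone model of that write (gpr[] stands in
for pt_regs, and MAX_REG_OFFSET is redefined locally to fit the model;
the kernel's value comes from asm/ptrace.h):

#include <stdio.h>
#include <errno.h>

#define MAX_REG_OFFSET  (31 * sizeof(unsigned long))

static unsigned long gpr[32];   /* stands in for pt_regs->regs[] */

static void model_set_gpr(unsigned long offset, unsigned long val)
{
        if (offset > MAX_REG_OFFSET)
                return;         /* malformed entry: ignore */
        if (offset)             /* never write $zero (offset 0) */
                *(unsigned long *)((char *)gpr + offset) = val;
}

int main(void)
{
        model_set_gpr(4 * sizeof(unsigned long), (unsigned long)-EFAULT);
        model_set_gpr(0, 0xdead);       /* dropped: $zero stays 0 */
        printf("r4=%ld r0=%lu\n", (long)gpr[4], gpr[0]);
        return 0;
}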
Inspired by commit 2e77a62cb3a6 ("arm64: extable: add a dedicated
uaccess handler"), do the same for LoongArch: add a dedicated uaccess
exception handler that updates registers in exception context and then
returns to the function which faulted, removing the need for fixups
specialized to each faulting instruction.

Signed-off-by: Youling Tang <tangyouling@loongson.cn>
---
 arch/loongarch/include/asm/asm-extable.h | 22 +++++++++++++++++++
 arch/loongarch/include/asm/futex.h       | 22 ++++++-------------
 arch/loongarch/include/asm/uaccess.h     | 17 ++++-----------
 arch/loongarch/mm/extable.c              | 27 +++++++++++++++++++++++-
 4 files changed, 58 insertions(+), 30 deletions(-)
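As a closing note on the decode side, FIELD_GET() from
<linux/bitfield.h> extracts a field by shifting down to the mask's
lowest set bit. A plain-C stand-in (field_get is a local substitute,
not the kernel macro), decoding the example word from the encoding
sketch above:

#include <stdio.h>
#include <stdint.h>
#include <strings.h>            /* ffs() */

#define GENMASK(h, l)           ((~0u >> (31 - (h))) & (~0u << (l)))
#define EX_DATA_REG_ERR         GENMASK(4, 0)
#define EX_DATA_REG_ZERO        GENMASK(9, 5)

/* Local stand-in for the kernel's FIELD_GET() */
static unsigned int field_get(uint32_t mask, uint32_t word)
{
        return (word & mask) >> (ffs(mask) - 1);
}

int main(void)
{
        uint32_t data = 0xa4;   /* err = $r4, zero = $r5 */

        printf("err reg = r%u, zero reg = r%u\n",
               field_get(EX_DATA_REG_ERR, data),
               field_get(EX_DATA_REG_ZERO, data));
        return 0;
}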