@@ -326,7 +326,25 @@ do { \
} while (0)
#ifdef CONFIG_X86_32
-#define __get_user_asm_u64(x, ptr, retval, errret) (x) = __get_user_bad()
+#define __get_user_asm_u64(x, addr, err, errret) \
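+ /* 32-bit kernel: fetch a 64-bit value as two 32-bit loads into %edx:%eax */ \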
+ asm volatile(ASM_STAC "\n" \
+ "1: movl %2,%%eax\n" \
+ "2: movl %3,%%edx\n" \
+ "3: " ASM_CLAC "\n" \
+ ".section .fixup,\"ax\"\n" \
+ "4: mov %4,%0\n" \
+ " xorl %%eax,%%eax\n" \
+ " xorl %%edx,%%edx\n" \
+ " jmp 3b\n" \
+ ".previous\n" \
+ _ASM_EXTABLE(1b, 4b) \
+ _ASM_EXTABLE(2b, 4b) \
+ : "=r" (err), "=A"(x) \
+ : "m" (__m(addr)), "m" __m(((u32 *)addr) + 1), "i" (errret), "0" (err))
+
#define __get_user_asm_ex_u64(x, ptr) (x) = __get_user_bad()
#else
#define __get_user_asm_u64(x, ptr, retval, errret) \
@@ -407,9 +425,17 @@ do { \
#define __get_user_nocheck(x, ptr, size) \
({ \
int __gu_err; \
- unsigned long __gu_val; \
- __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT); \
- (x) = (__force __typeof__(*(ptr)))__gu_val; \
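+ /* unsigned long is 32 bits here, so 8-byte reads need the u64 helper */ \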
+ if ((size) == 8) { \
+ u64 __gu_val; \
+ __gu_err = 0; /* the asm reads err through the "0" tie, so pre-clear it */ \
+ __get_user_asm_u64(__gu_val, (ptr), __gu_err, -EFAULT); \
+ (x) = (__force __typeof__(*(ptr)))__gu_val; \
+ } else { \
+ unsigned long __gu_val; \
+ __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT); \
+ (x) = (__force __typeof__(*(ptr)))__gu_val; \
+ } \
__builtin_expect(__gu_err, 0); \
})
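
For illustration only, a hedged caller sketch (uptr is a hypothetical
u64 __user * argument, not part of this patch): with the 32-bit stub
replaced, __get_user() on an 8-byte object now works on 32-bit kernels
instead of expanding to __get_user_bad():

	u64 val;

	if (__get_user(val, (u64 __user *)uptr))	/* single fault-safe 8-byte load */
		return -EFAULT;
	/* on success, val holds the user-supplied 64-bit value */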