x86/mm/32: Add support for 64-bit __get_user() on 32-bit kernels
diff --git a/arch/x86/include/asm/uaccess.h b/arch/x86/include/asm/uaccess.h
index c0f27d7ea7ff95eaea44987bd94417fabe68ca98..8b3fb76b489b8d2df25c7a9730e9a360bd183b3d 100644
--- a/arch/x86/include/asm/uaccess.h
+++ b/arch/x86/include/asm/uaccess.h
@@ -105,9 +105,8 @@ static inline bool __chk_range_not_ok(unsigned long addr, unsigned long size, un
 struct exception_table_entry {
        int insn, fixup, handler;
 };
-/* This is not the generic standard exception_table_entry format */
-#define ARCH_HAS_SORT_EXTABLE
-#define ARCH_HAS_SEARCH_EXTABLE
+
+#define ARCH_HAS_RELATIVE_EXTABLE
 
 extern int fixup_exception(struct pt_regs *regs, int trapnr);
 extern bool ex_has_fault_handler(unsigned long ip);
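
Note: the hunk above moves x86 to the generic relative exception-table format. With ARCH_HAS_RELATIVE_EXTABLE, each int field of struct exception_table_entry holds an offset from its own address rather than an absolute pointer, so the generic lib/extable.c sort/search routines apply and the arch-private ARCH_HAS_SORT_EXTABLE / ARCH_HAS_SEARCH_EXTABLE hooks are no longer needed. A minimal decoding sketch, mirroring the ex_fixup_addr() helper in the x86 extable code; shown here only for reference, it is not part of this hunk:

/* Recover an absolute address from a self-relative extable field. */
static inline unsigned long ex_fixup_addr(const struct exception_table_entry *x)
{
	return (unsigned long)&x->fixup + x->fixup;
}
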
@@ -179,10 +178,11 @@ __typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
 ({                                                                     \
        int __ret_gu;                                                   \
        register __inttype(*(ptr)) __val_gu asm("%"_ASM_DX);            \
+       register void *__sp asm(_ASM_SP);                               \
        __chk_user_ptr(ptr);                                            \
        might_fault();                                                  \
-       asm volatile("call __get_user_%P3"                              \
-                    : "=a" (__ret_gu), "=r" (__val_gu)                 \
+       asm volatile("call __get_user_%P4"                              \
+                    : "=a" (__ret_gu), "=r" (__val_gu), "+r" (__sp)    \
                     : "0" (ptr), "i" (sizeof(*(ptr))));                \
        (x) = (__force __typeof__(*(ptr))) __val_gu;                    \
        __builtin_expect(__ret_gu, 0);                                  \
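
Note: the get_user() hunk adds a dummy register variable pinned to the stack pointer and lists it as an in/out operand ("+r" (__sp)). This is the usual kernel idiom for inline asm that contains a call: it tells the compiler the asm depends on the stack, so a proper frame is set up before the call and frame-pointer based unwinding through __get_user_* stays reliable. Inserting that extra output operand shifts the size operand from %3 to %4, hence __get_user_%P3 becoming __get_user_%P4. Callers are unchanged; an illustrative user (not from the patch) looks like this:

#include <linux/types.h>
#include <linux/uaccess.h>

/* Illustrative caller: get_user() returns 0 on success, -EFAULT on a
 * faulting user access, and zeroes the destination on failure. */
static int read_user_u32(u32 __user *uaddr, u32 *out)
{
	u32 val;

	if (get_user(val, uaddr))
		return -EFAULT;

	*out = val;
	return 0;
}
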
@@ -333,7 +333,26 @@ do {                                                                       \
 } while (0)
 
 #ifdef CONFIG_X86_32
-#define __get_user_asm_u64(x, ptr, retval, errret)     (x) = __get_user_bad()
+#define __get_user_asm_u64(x, ptr, retval, errret)                     \
+({                                                                     \
+       __typeof__(ptr) __ptr = (ptr);                                  \
+       asm volatile(ASM_STAC "\n"                                      \
+                    "1:        movl %2,%%eax\n"                        \
+                    "2:        movl %3,%%edx\n"                        \
+                    "3: " ASM_CLAC "\n"                                \
+                    ".section .fixup,\"ax\"\n"                         \
+                    "4:        mov %4,%0\n"                            \
+                    "  xorl %%eax,%%eax\n"                             \
+                    "  xorl %%edx,%%edx\n"                             \
+                    "  jmp 3b\n"                                       \
+                    ".previous\n"                                      \
+                    _ASM_EXTABLE(1b, 4b)                               \
+                    _ASM_EXTABLE(2b, 4b)                               \
+                    : "=r" (retval), "=A"(x)                           \
+                    : "m" (__m(__ptr)), "m" __m(((u32 *)(__ptr)) + 1), \
+                      "i" (errret), "0" (retval));                     \
+})
+
 #define __get_user_asm_ex_u64(x, ptr)                  (x) = __get_user_bad()
 #else
 #define __get_user_asm_u64(x, ptr, retval, errret) \
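
Note: the new 32-bit __get_user_asm_u64() reads the 64-bit value as two 32-bit loads into the EDX:EAX pair (the "=A" output constraint), and both movl instructions get exception-table entries pointing at a shared fixup that stores errret (-EFAULT) in retval and zeroes the result. With this in place, __get_user() accepts 64-bit destinations on 32-bit kernels. An illustrative caller, not part of the patch; __get_user() skips the access_ok() check, so the range has to be validated first:

#include <linux/types.h>
#include <linux/uaccess.h>

/* Illustrative only: read a u64 from userspace on a 32-bit kernel. */
static int read_user_u64(u64 __user *uaddr, u64 *val)
{
	if (!access_ok(VERIFY_READ, uaddr, sizeof(*uaddr)))
		return -EFAULT;

	/* Returns 0 on success, -EFAULT if the access faults. */
	return __get_user(*val, uaddr);
}
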
@@ -420,7 +439,7 @@ do {                                                                        \
 #define __get_user_nocheck(x, ptr, size)                               \
 ({                                                                     \
        int __gu_err;                                                   \
-       unsigned long __gu_val;                                         \
+       __inttype(*(ptr)) __gu_val;                                     \
        __uaccess_begin();                                              \
        __get_user_size(__gu_val, (ptr), (size), __gu_err, -EFAULT);    \
        __uaccess_end();                                                \
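
Note: the last hunk is what actually lets the 64-bit case through __get_user(): the temporary in __get_user_nocheck() now has the width of the access instead of always being unsigned long, so a u64 read is no longer truncated to 32 bits on a 32-bit kernel before being assigned to (x). For reference, __inttype() is defined earlier in this header (its body is visible as the context line of the second hunk) and picks an integer type wide enough for its argument:

#define __inttype(x) \
	__typeof__(__builtin_choose_expr(sizeof(x) > sizeof(0UL), 0ULL, 0UL))
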