/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Authors: Hans-Peter Nilsson (hp@axis.com)
 */
6 #ifndef _CRIS_ARCH_UACCESS_H
7 #define _CRIS_ARCH_UACCESS_H
/*
 * We don't tell gcc that we are accessing memory, but this is OK
 * because we do not write to any memory gcc knows about, so there
 * are no aliasing issues.
 *
 * Note that PC at a fault is the address *at* the faulting
 * instruction for CRISv32.
 */
/*
 * __put_user_asm: store value "x" to user address "addr" with an
 * exception-table fixup; on a faulting access the .fixup code is
 * expected to set "err" to -EFAULT (the "g" (-EFAULT) input and the
 * "0" (err) tie support this).  "op" is presumably the move-size
 * suffix (b/w/d) -- the expansion line using it is not visible here;
 * TODO confirm against the pristine header.
 *
 * NOTE(review): this listing is truncated -- the store instruction,
 * the .fixup body, the __ex_table .dword entries, the ".previous"
 * directives and the output-operand list are all missing between the
 * visible lines, and each line carries a stray leading line number.
 * Code left byte-identical; restore from the original file before use.
 */
17 #define __put_user_asm(x, addr, err, op) \
18 __asm__ __volatile__( \
21 " .section .fixup,\"ax\"\n" \
26 " .section __ex_table,\"a\"\n" \
30 : "r" (x), "r" (addr), "g" (-EFAULT), "0" (err))
/*
 * __put_user_asm_64: store a 64-bit value to user space as two 32-bit
 * moves -- %M2 / %H2 are presumably the low/high halves of "x" (TODO
 * confirm), written via post-increment then plain addressing through
 * the dummy "b"-class address register operand.  Faults are handled
 * via .fixup/__ex_table, setting "err" to -EFAULT.
 *
 * NOTE(review): truncated listing -- the .fixup body, __ex_table
 * entries, ".previous" directives, the final input line and the
 * closing "} while (0)" are missing, and each line carries a stray
 * leading line number.  Code left byte-identical.
 */
32 #define __put_user_asm_64(x, addr, err) do { \
33 int dummy_for_put_user_asm_64_; \
34 __asm__ __volatile__( \
35 "2: move.d %M2,[%1+]\n" \
36 "4: move.d %H2,[%1]\n" \
38 " .section .fixup,\"ax\"\n" \
42 " .section __ex_table,\"a\"\n" \
46 : "=r" (err), "=b" (dummy_for_put_user_asm_64_) \
47 : "r" (x), "1" (addr), "g" (-EFAULT), \
/* See comment before __put_user_asm. */
/*
 * __get_user_asm: load a value from user address "addr" into "x"
 * (outputs "=r" (err), "=r" (x)); on a fault the .fixup code is
 * expected to set "err" to -EFAULT via the __ex_table entry.  "op" is
 * presumably the move-size suffix -- its use is not visible here.
 *
 * NOTE(review): truncated listing -- the load instruction, .fixup
 * body, __ex_table entries and ".previous" directives are missing,
 * and each line carries a stray leading line number.  Code left
 * byte-identical; restore from the original file before use.
 */
53 #define __get_user_asm(x, addr, err, op) \
54 __asm__ __volatile__( \
57 " .section .fixup,\"ax\"\n" \
62 " .section __ex_table,\"a\"\n" \
65 : "=r" (err), "=r" (x) \
66 : "r" (addr), "g" (-EFAULT), "0" (err))
/*
 * __get_user_asm_64: load a 64-bit user value as two 32-bit moves into
 * the %M1/%H1 halves of "x" (presumably low then high -- TODO confirm),
 * through a dummy "b"-class address register tied to "addr".  Faults
 * set "err" to -EFAULT via .fixup/__ex_table.
 *
 * NOTE(review): truncated listing -- the .fixup body, __ex_table
 * entries, ".previous" directives and the closing "} while (0)" are
 * missing, and each line carries a stray leading line number.  Code
 * left byte-identical.
 */
68 #define __get_user_asm_64(x, addr, err) do { \
69 int dummy_for_get_user_asm_64_; \
70 __asm__ __volatile__( \
71 "2: move.d [%2+],%M1\n" \
72 "4: move.d [%2],%H1\n" \
74 " .section .fixup,\"ax\"\n" \
79 " .section __ex_table,\"a\"\n" \
83 : "=r" (err), "=r" (x), \
84 "=b" (dummy_for_get_user_asm_64_) \
85 : "2" (addr), "g" (-EFAULT), "0" (err));\
/*
 * Copy a null terminated string from userspace.
 *
 * Returns:
 * -EFAULT for an exception
 * count if we hit the buffer limit
 * bytes copied if we hit a null byte
 * (without the null byte)
 */
/*
 * __do_strncpy_from_user: byte-copy a NUL-terminated string from user
 * "src" into kernel "dst", at most "count" bytes, with fault fixup
 * (the "g" (-EFAULT) input supports the -EFAULT-on-fault contract).
 * The copy goes through $acr via post-increment addressing; the
 * in-function comment explains the fault-PC/delay-slot subtlety.
 *
 * NOTE(review): truncated listing -- the return type line, locals
 * (res/tmp1 appear only in the operand lists), most of the asm loop,
 * the .fixup body, the __ex_table entries and the function tail are
 * missing, and each line carries a stray leading line number.  Code
 * left byte-identical; do not attempt to build from this fragment.
 */
98 __do_strncpy_from_user(char *dst, const char *src, long count)
106 * Currently, in 2.4.0-test9, most ports use a simple byte-copy loop.
109 * This code is deduced from:
114 * while ((*dst++ = (tmp2 = *src++)) != 0
118 * res = count - tmp1;
123 __asm__ __volatile__ (
125 "5: move.b [%2+],$acr\n"
127 " move.b $acr,[%1+]\n"
131 " move.b [%2+],$acr\n"
136 " .section .fixup,\"ax\"\n"
141 /* The address for a fault at the first move is trivial.
142 The address for a fault at the second move is that of
143 the preceding branch insn, since the move insn is in
144 its delay-slot. Just so you don't get confused... */
146 " .section __ex_table,\"a\"\n"
150 : "=r" (res), "=b" (dst), "=b" (src), "=r" (count)
151 : "3" (count), "1" (dst), "2" (src), "g" (-EFAULT)
/* A few copy asms to build up the more complex ones from.

   Note again, a post-increment is performed regardless of whether a bus
   fault occurred in that instruction, and PC for a faulted insn is the
   address for the insn, or for the preceding branch when in a delay-slot. */
/*
 * __asm_copy_user_cont: common asm frame for the fixed-size user-copy
 * macros below.  Callers splice in a COPY insn sequence, a FIXUP
 * sequence (for .fixup) and a TENTRY (__ex_table .dword entry); "to",
 * "from" and "ret" are tied input/output operands ("0"/"1"/"2") in
 * "b"-class address registers, updated by the post-increment copies.
 *
 * NOTE(review): truncated listing -- the lines where COPY, FIXUP and
 * TENTRY are expanded, the ".previous" directives and the closing ")"
 * are missing, and each line carries a stray leading line number.
 * Code left byte-identical.
 */
163 #define __asm_copy_user_cont(to, from, ret, COPY, FIXUP, TENTRY) \
164 __asm__ __volatile__ ( \
167 " .section .fixup,\"ax\"\n" \
170 " .section __ex_table,\"a\"\n" \
173 : "=b" (to), "=b" (from), "=r" (ret) \
174 : "0" (to), "1" (from), "2" (ret) \
/*
 * __asm_copy_from_user_{1..24} and their *_x_cont builders: fixed-size
 * copies from user space, built by chaining smaller copies.  Each COPY
 * step loads from user memory ([%1+], the faultable side, hence the
 * numeric labels for __ex_table) into $acr and stores to kernel memory
 * ([%0+]).  The "*x_cont" forms accept extra COPY/FIXUP/TENTRY strings
 * so larger sizes extend smaller ones; the plain-size forms pass
 * "", "", "" to terminate the chain.
 *
 * NOTE(review): truncated listing -- the FIXUP and TENTRY argument
 * strings (and the closing parentheses) of the chained invocations are
 * missing for every macro here, and each line carries a stray leading
 * line number.  Code left byte-identical; restore from the pristine
 * header before building.
 */
177 #define __asm_copy_from_user_1(to, from, ret) \
178 __asm_copy_user_cont(to, from, ret, \
179 "2: move.b [%1+],$acr\n" \
180 " move.b $acr,[%0+]\n", \
185 #define __asm_copy_from_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
186 __asm_copy_user_cont(to, from, ret, \
188 "2: move.w [%1+],$acr\n" \
189 " move.w $acr,[%0+]\n", \
196 #define __asm_copy_from_user_2(to, from, ret) \
197 __asm_copy_from_user_2x_cont(to, from, ret, "", "", "")
199 #define __asm_copy_from_user_3(to, from, ret) \
200 __asm_copy_from_user_2x_cont(to, from, ret, \
201 "4: move.b [%1+],$acr\n" \
202 " move.b $acr,[%0+]\n", \
206 #define __asm_copy_from_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
207 __asm_copy_user_cont(to, from, ret, \
209 "2: move.d [%1+],$acr\n" \
210 " move.d $acr,[%0+]\n", \
217 #define __asm_copy_from_user_4(to, from, ret) \
218 __asm_copy_from_user_4x_cont(to, from, ret, "", "", "")
220 #define __asm_copy_from_user_5(to, from, ret) \
221 __asm_copy_from_user_4x_cont(to, from, ret, \
222 "4: move.b [%1+],$acr\n" \
223 " move.b $acr,[%0+]\n", \
227 #define __asm_copy_from_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
228 __asm_copy_from_user_4x_cont(to, from, ret, \
230 "4: move.w [%1+],$acr\n" \
231 " move.w $acr,[%0+]\n", \
237 #define __asm_copy_from_user_6(to, from, ret) \
238 __asm_copy_from_user_6x_cont(to, from, ret, "", "", "")
240 #define __asm_copy_from_user_7(to, from, ret) \
241 __asm_copy_from_user_6x_cont(to, from, ret, \
242 "6: move.b [%1+],$acr\n" \
243 " move.b $acr,[%0+]\n", \
247 #define __asm_copy_from_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
248 __asm_copy_from_user_4x_cont(to, from, ret, \
250 "4: move.d [%1+],$acr\n" \
251 " move.d $acr,[%0+]\n", \
257 #define __asm_copy_from_user_8(to, from, ret) \
258 __asm_copy_from_user_8x_cont(to, from, ret, "", "", "")
260 #define __asm_copy_from_user_9(to, from, ret) \
261 __asm_copy_from_user_8x_cont(to, from, ret, \
262 "6: move.b [%1+],$acr\n" \
263 " move.b $acr,[%0+]\n", \
267 #define __asm_copy_from_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
268 __asm_copy_from_user_8x_cont(to, from, ret, \
270 "6: move.w [%1+],$acr\n" \
271 " move.w $acr,[%0+]\n", \
277 #define __asm_copy_from_user_10(to, from, ret) \
278 __asm_copy_from_user_10x_cont(to, from, ret, "", "", "")
280 #define __asm_copy_from_user_11(to, from, ret) \
281 __asm_copy_from_user_10x_cont(to, from, ret, \
282 "8: move.b [%1+],$acr\n" \
283 " move.b $acr,[%0+]\n", \
287 #define __asm_copy_from_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
288 __asm_copy_from_user_8x_cont(to, from, ret, \
290 "6: move.d [%1+],$acr\n" \
291 " move.d $acr,[%0+]\n", \
297 #define __asm_copy_from_user_12(to, from, ret) \
298 __asm_copy_from_user_12x_cont(to, from, ret, "", "", "")
300 #define __asm_copy_from_user_13(to, from, ret) \
301 __asm_copy_from_user_12x_cont(to, from, ret, \
302 "8: move.b [%1+],$acr\n" \
303 " move.b $acr,[%0+]\n", \
307 #define __asm_copy_from_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
308 __asm_copy_from_user_12x_cont(to, from, ret, \
310 "8: move.w [%1+],$acr\n" \
311 " move.w $acr,[%0+]\n", \
317 #define __asm_copy_from_user_14(to, from, ret) \
318 __asm_copy_from_user_14x_cont(to, from, ret, "", "", "")
320 #define __asm_copy_from_user_15(to, from, ret) \
321 __asm_copy_from_user_14x_cont(to, from, ret, \
322 "10: move.b [%1+],$acr\n" \
323 " move.b $acr,[%0+]\n", \
327 #define __asm_copy_from_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
328 __asm_copy_from_user_12x_cont(to, from, ret, \
330 "8: move.d [%1+],$acr\n" \
331 " move.d $acr,[%0+]\n", \
337 #define __asm_copy_from_user_16(to, from, ret) \
338 __asm_copy_from_user_16x_cont(to, from, ret, "", "", "")
340 #define __asm_copy_from_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
341 __asm_copy_from_user_16x_cont(to, from, ret, \
343 "10: move.d [%1+],$acr\n" \
344 " move.d $acr,[%0+]\n", \
350 #define __asm_copy_from_user_20(to, from, ret) \
351 __asm_copy_from_user_20x_cont(to, from, ret, "", "", "")
353 #define __asm_copy_from_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
354 __asm_copy_from_user_20x_cont(to, from, ret, \
356 "12: move.d [%1+],$acr\n" \
357 " move.d $acr,[%0+]\n", \
363 #define __asm_copy_from_user_24(to, from, ret) \
364 __asm_copy_from_user_24x_cont(to, from, ret, "", "", "")
/* And now, the to-user ones. */
/*
 * __asm_copy_to_user_{1..24} and their *_x_cont builders: fixed-size
 * copies to user space, mirroring the from-user family above but with
 * the numeric labels on the store side ("N: move.X $acr,[%0+]") --
 * here the user-memory write is the faultable access covered by the
 * __ex_table entries.  The "*x_cont" forms chain extra COPY/FIXUP/
 * TENTRY strings; the plain-size forms pass "", "", "".
 *
 * NOTE(review): truncated listing -- the FIXUP and TENTRY argument
 * strings and the closing parentheses of the chained invocations are
 * missing for every macro here, and each line carries a stray leading
 * line number.  Code left byte-identical; restore from the pristine
 * header before building.
 */
368 #define __asm_copy_to_user_1(to, from, ret) \
369 __asm_copy_user_cont(to, from, ret, \
370 " move.b [%1+],$acr\n" \
371 "2: move.b $acr,[%0+]\n", \
376 #define __asm_copy_to_user_2x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
377 __asm_copy_user_cont(to, from, ret, \
379 " move.w [%1+],$acr\n" \
380 "2: move.w $acr,[%0+]\n", \
387 #define __asm_copy_to_user_2(to, from, ret) \
388 __asm_copy_to_user_2x_cont(to, from, ret, "", "", "")
390 #define __asm_copy_to_user_3(to, from, ret) \
391 __asm_copy_to_user_2x_cont(to, from, ret, \
392 " move.b [%1+],$acr\n" \
393 "4: move.b $acr,[%0+]\n", \
397 #define __asm_copy_to_user_4x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
398 __asm_copy_user_cont(to, from, ret, \
400 " move.d [%1+],$acr\n" \
401 "2: move.d $acr,[%0+]\n", \
408 #define __asm_copy_to_user_4(to, from, ret) \
409 __asm_copy_to_user_4x_cont(to, from, ret, "", "", "")
411 #define __asm_copy_to_user_5(to, from, ret) \
412 __asm_copy_to_user_4x_cont(to, from, ret, \
413 " move.b [%1+],$acr\n" \
414 "4: move.b $acr,[%0+]\n", \
418 #define __asm_copy_to_user_6x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
419 __asm_copy_to_user_4x_cont(to, from, ret, \
421 " move.w [%1+],$acr\n" \
422 "4: move.w $acr,[%0+]\n", \
428 #define __asm_copy_to_user_6(to, from, ret) \
429 __asm_copy_to_user_6x_cont(to, from, ret, "", "", "")
431 #define __asm_copy_to_user_7(to, from, ret) \
432 __asm_copy_to_user_6x_cont(to, from, ret, \
433 " move.b [%1+],$acr\n" \
434 "6: move.b $acr,[%0+]\n", \
438 #define __asm_copy_to_user_8x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
439 __asm_copy_to_user_4x_cont(to, from, ret, \
441 " move.d [%1+],$acr\n" \
442 "4: move.d $acr,[%0+]\n", \
448 #define __asm_copy_to_user_8(to, from, ret) \
449 __asm_copy_to_user_8x_cont(to, from, ret, "", "", "")
451 #define __asm_copy_to_user_9(to, from, ret) \
452 __asm_copy_to_user_8x_cont(to, from, ret, \
453 " move.b [%1+],$acr\n" \
454 "6: move.b $acr,[%0+]\n", \
458 #define __asm_copy_to_user_10x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
459 __asm_copy_to_user_8x_cont(to, from, ret, \
461 " move.w [%1+],$acr\n" \
462 "6: move.w $acr,[%0+]\n", \
468 #define __asm_copy_to_user_10(to, from, ret) \
469 __asm_copy_to_user_10x_cont(to, from, ret, "", "", "")
471 #define __asm_copy_to_user_11(to, from, ret) \
472 __asm_copy_to_user_10x_cont(to, from, ret, \
473 " move.b [%1+],$acr\n" \
474 "8: move.b $acr,[%0+]\n", \
478 #define __asm_copy_to_user_12x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
479 __asm_copy_to_user_8x_cont(to, from, ret, \
481 " move.d [%1+],$acr\n" \
482 "6: move.d $acr,[%0+]\n", \
488 #define __asm_copy_to_user_12(to, from, ret) \
489 __asm_copy_to_user_12x_cont(to, from, ret, "", "", "")
491 #define __asm_copy_to_user_13(to, from, ret) \
492 __asm_copy_to_user_12x_cont(to, from, ret, \
493 " move.b [%1+],$acr\n" \
494 "8: move.b $acr,[%0+]\n", \
498 #define __asm_copy_to_user_14x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
499 __asm_copy_to_user_12x_cont(to, from, ret, \
501 " move.w [%1+],$acr\n" \
502 "8: move.w $acr,[%0+]\n", \
508 #define __asm_copy_to_user_14(to, from, ret) \
509 __asm_copy_to_user_14x_cont(to, from, ret, "", "", "")
511 #define __asm_copy_to_user_15(to, from, ret) \
512 __asm_copy_to_user_14x_cont(to, from, ret, \
513 " move.b [%1+],$acr\n" \
514 "10: move.b $acr,[%0+]\n", \
518 #define __asm_copy_to_user_16x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
519 __asm_copy_to_user_12x_cont(to, from, ret, \
521 " move.d [%1+],$acr\n" \
522 "8: move.d $acr,[%0+]\n", \
528 #define __asm_copy_to_user_16(to, from, ret) \
529 __asm_copy_to_user_16x_cont(to, from, ret, "", "", "")
531 #define __asm_copy_to_user_20x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
532 __asm_copy_to_user_16x_cont(to, from, ret, \
534 " move.d [%1+],$acr\n" \
535 "10: move.d $acr,[%0+]\n", \
541 #define __asm_copy_to_user_20(to, from, ret) \
542 __asm_copy_to_user_20x_cont(to, from, ret, "", "", "")
544 #define __asm_copy_to_user_24x_cont(to, from, ret, COPY, FIXUP, TENTRY) \
545 __asm_copy_to_user_20x_cont(to, from, ret, \
547 " move.d [%1+],$acr\n" \
548 "12: move.d $acr,[%0+]\n", \
554 #define __asm_copy_to_user_24(to, from, ret) \
555 __asm_copy_to_user_24x_cont(to, from, ret, "", "", "")
/* Define a few clearing asms with exception handlers. */

/* This frame-asm is like the __asm_copy_user_cont one, but has one less
   input: there is no "from" operand, only "to" and "ret". */
/*
 * __asm_clear: asm frame for the fixed-size user-clear macros below.
 * Like __asm_copy_user_cont but with only "to" and "ret" operands
 * (tied "0"/"1" in/outs); callers splice in a CLEAR insn sequence, a
 * FIXUP sequence and a TENTRY (__ex_table entry).
 *
 * NOTE(review): truncated listing -- the expansion lines for CLEAR,
 * FIXUP and TENTRY, the ".previous" directives and the closing ")"
 * are missing, and each line carries a stray leading line number.
 * Code left byte-identical.
 */
562 #define __asm_clear(to, ret, CLEAR, FIXUP, TENTRY) \
563 __asm__ __volatile__ ( \
566 " .section .fixup,\"ax\"\n" \
569 " .section __ex_table,\"a\"\n" \
572 : "=b" (to), "=r" (ret) \
573 : "0" (to), "1" (ret) \
/*
 * __asm_clear_{1..24} and their *_x_cont builders: fixed-size clears
 * of user memory via post-increment "clear.b/w/d [%0+]" insns, each
 * labelled for an __ex_table entry since the user-memory write is the
 * faultable access.  The "*x_cont" forms chain extra CLEAR/FIXUP/
 * TENTRY strings to extend smaller clears; the plain-size forms pass
 * "", "", "" to terminate the chain.
 *
 * NOTE(review): truncated listing -- the FIXUP and TENTRY argument
 * strings and the closing parentheses of the chained invocations are
 * missing for every macro here, and each line carries a stray leading
 * line number.  Code left byte-identical; restore from the pristine
 * header before building.
 */
576 #define __asm_clear_1(to, ret) \
577 __asm_clear(to, ret, \
578 "2: clear.b [%0+]\n", \
583 #define __asm_clear_2(to, ret) \
584 __asm_clear(to, ret, \
585 "2: clear.w [%0+]\n", \
590 #define __asm_clear_3(to, ret) \
591 __asm_clear(to, ret, \
592 "2: clear.w [%0+]\n" \
593 "3: clear.b [%0+]\n", \
600 #define __asm_clear_4x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
601 __asm_clear(to, ret, \
603 "2: clear.d [%0+]\n", \
610 #define __asm_clear_4(to, ret) \
611 __asm_clear_4x_cont(to, ret, "", "", "")
613 #define __asm_clear_8x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
614 __asm_clear_4x_cont(to, ret, \
616 "4: clear.d [%0+]\n", \
622 #define __asm_clear_8(to, ret) \
623 __asm_clear_8x_cont(to, ret, "", "", "")
625 #define __asm_clear_12x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
626 __asm_clear_8x_cont(to, ret, \
628 "6: clear.d [%0+]\n", \
634 #define __asm_clear_12(to, ret) \
635 __asm_clear_12x_cont(to, ret, "", "", "")
637 #define __asm_clear_16x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
638 __asm_clear_12x_cont(to, ret, \
640 "8: clear.d [%0+]\n", \
646 #define __asm_clear_16(to, ret) \
647 __asm_clear_16x_cont(to, ret, "", "", "")
649 #define __asm_clear_20x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
650 __asm_clear_16x_cont(to, ret, \
652 "10: clear.d [%0+]\n", \
658 #define __asm_clear_20(to, ret) \
659 __asm_clear_20x_cont(to, ret, "", "", "")
661 #define __asm_clear_24x_cont(to, ret, CLEAR, FIXUP, TENTRY) \
662 __asm_clear_20x_cont(to, ret, \
664 "12: clear.d [%0+]\n", \
670 #define __asm_clear_24(to, ret) \
671 __asm_clear_24x_cont(to, ret, "", "", "")
/*
 * Return the size of a string (including the ending 0)
 *
 * Return length of string in userspace including terminating 0
 * or 0 for error. Return a value greater than N if too long.
 */
681 strnlen_user(const char *s, long n)
685 if (!access_ok(VERIFY_READ, s, 0))
689 * This code is deduced from:
692 * while (tmp1-- > 0 && *s++)
700 __asm__ __volatile__ (
714 " .section .fixup,\"ax\"\n"
720 " .section __ex_table,\"a\"\n"
723 : "=r" (res), "=r" (tmp1)