1 #ifndef __ASM_SH_ATOMIC_GRB_H
2 #define __ASM_SH_ATOMIC_GRB_H
/*
 * ATOMIC_OP(op): expands to atomic_##op(int i, atomic_t *v), a
 * read-modify-write on *v made atomic with the SH "gUSA"-style rollback
 * sequence suggested by the LOGIN/LOGOUT comments: r0 is loaded with the
 * end label (1:), the real stack pointer is saved in r1, and r15 is set
 * to a negative "size" to mark the critical region; r15 is restored at
 * the end label.  Presumably an interrupt taken inside the region causes
 * a restart at the load — TODO confirm against gUSA documentation.
 *
 * NOTE(review): this extract is missing interior lines — the function's
 * opening brace/local declarations and the asm output/input operand
 * lists (only the clobber list ": "memory", "r0", "r1"" is visible).
 * Do not edit the asm without recovering the full operand constraints.
 */
4 #define ATOMIC_OP(op) \
5 static inline void atomic_##op(int i, atomic_t *v) \
9 __asm__ __volatile__ ( \
11 " mova 1f, r0 \n\t" /* r0 = end point */ \
12 " mov r15, r1 \n\t" /* r1 = saved sp */ \
13 " mov #-6, r15 \n\t" /* LOGIN: r15 = size */ \
14 " mov.l @%1, %0 \n\t" /* load old value */ \
15 " " #op " %2, %0 \n\t" /* $op */ \
16 " mov.l %0, @%1 \n\t" /* store new value */ \
17 "1: mov r1, r15 \n\t" /* LOGOUT */ \
21 : "memory" , "r0", "r1"); \
/*
 * ATOMIC_OP_RETURN(op): like ATOMIC_OP(op), but generates
 * atomic_##op##_return(int i, atomic_t *v), which presumably returns the
 * updated value left in %0 after the store — the return statement is not
 * visible in this extract, so confirm against the full file.
 *
 * NOTE(review): interior lines are missing here as well (locals, the asm
 * output/input operand lists, closing braces); only the clobber list is
 * visible before the macro ends.
 */
25 static inline int atomic_##op##_return(int i, atomic_t *v) \
29 __asm__ __volatile__ ( \
31 " mova 1f, r0 \n\t" /* r0 = end point */ \
32 " mov r15, r1 \n\t" /* r1 = saved sp */ \
33 " mov #-6, r15 \n\t" /* LOGIN: r15 = size */ \
34 " mov.l @%1, %0 \n\t" /* load old value */ \
35 " " #op " %2, %0 \n\t" /* $op */ \
36 " mov.l %0, @%1 \n\t" /* store new value */ \
37 "1: mov r1, r15 \n\t" /* LOGOUT */ \
41 : "memory" , "r0", "r1"); \
/* Instantiate both the void and the value-returning variant for an op. */
46 #define ATOMIC_OPS(op) ATOMIC_OP(op) ATOMIC_OP_RETURN(op)
/*
 * NOTE(review): the ATOMIC_OPS(...) invocations (e.g. for add/sub) and
 * the matching #undef ATOMIC_OPS / #undef ATOMIC_OP lines are not
 * visible in this extract (embedded numbering jumps 46 -> 52); only the
 * #undef below survives.
 */
52 #undef ATOMIC_OP_RETURN
/*
 * atomic_clear_mask(): atomically clear the bits in @mask from *v,
 * i.e. *v &= ~mask, using the same LOGIN/LOGOUT rollback sequence as
 * the ATOMIC_OP() macros above.
 *
 * NOTE(review): the function's opening brace and the asm output/input
 * operand lists are missing from this extract; only the clobber list
 * is visible.
 */
55 static inline void atomic_clear_mask(unsigned int mask, atomic_t *v)
/* Complement once up front so the asm body can use a plain AND. */
58 unsigned int _mask = ~mask;
60 __asm__ __volatile__ (
62 " mova 1f, r0 \n\t" /* r0 = end point */
63 " mov r15, r1 \n\t" /* r1 = saved sp */
64 " mov #-6, r15 \n\t" /* LOGIN: r15 = size */
65 " mov.l @%1, %0 \n\t" /* load old value */
66 " and %2, %0 \n\t" /* and: clear the masked bits */
67 " mov.l %0, @%1 \n\t" /* store new value */
68 "1: mov r1, r15 \n\t" /* LOGOUT */
72 : "memory" , "r0", "r1");
/*
 * atomic_set_mask(): atomically set the bits in @mask in *v,
 * i.e. *v |= mask, using the same LOGIN/LOGOUT rollback sequence as
 * the ATOMIC_OP() macros above.
 *
 * NOTE(review): the function's opening brace, local declarations, and
 * the asm output/input operand lists are missing from this extract;
 * only the clobber list is visible.
 */
75 static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
79 __asm__ __volatile__ (
81 " mova 1f, r0 \n\t" /* r0 = end point */
82 " mov r15, r1 \n\t" /* r1 = saved sp */
83 " mov #-6, r15 \n\t" /* LOGIN: r15 = size */
84 " mov.l @%1, %0 \n\t" /* load old value */
85 " or %2, %0 \n\t" /* or: set the masked bits */
86 " mov.l %0, @%1 \n\t" /* store new value */
87 "1: mov r1, r15 \n\t" /* LOGOUT */
91 : "memory" , "r0", "r1");
94 #endif /* __ASM_SH_ATOMIC_GRB_H */