/* arch/alpha/include/asm/spinlock.h */
#ifndef _ALPHA_SPINLOCK_H
#define _ALPHA_SPINLOCK_H

#include <linux/kernel.h>
#include <asm/current.h>
#include <asm/barrier.h>
#include <asm/processor.h>

/*
 * Simple spin lock operations.  There are two variants: one clears IRQs
 * on the local processor, the other does not.
 *
 * We make no fairness assumptions; fairness would have a cost.
 */

#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)
#define arch_spin_is_locked(x)  ((x)->lock != 0)

/* Spin until the lock is seen released, with acquire ordering. */
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
        smp_cond_load_acquire(&lock->lock, !VAL);
}

static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
{
        return lock.lock == 0;
}

/* Release: full barrier, then a plain store of 0. */
static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        mb();
        lock->lock = 0;
}

/*
 * Acquire with a load-locked/store-conditional (ldl_l/stl_c) loop:
 * spin until the word reads zero, then try to store 1; the trailing
 * mb orders the critical section after the acquire.
 */
static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        long tmp;

        __asm__ __volatile__(
        "1:     ldl_l   %0,%1\n"
        "       bne     %0,2f\n"
        "       lda     %0,1\n"
        "       stl_c   %0,%1\n"
        "       beq     %0,2f\n"
        "       mb\n"
        ".subsection 2\n"
        "2:     ldl     %0,%1\n"
        "       bne     %0,2b\n"
        "       br      1b\n"
        ".previous"
        : "=&r" (tmp), "=m" (lock->lock)
        : "m" (lock->lock) : "memory");
}
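
/*
 * Illustrative sketch (not used anywhere, not part of the kernel API):
 * at the C level, the ldl_l/stl_c loop in arch_spin_lock() behaves
 * roughly like the compare-and-swap loop below, assuming GCC's
 * __atomic builtins.
 */
static inline void __arch_spin_lock_sketch(arch_spinlock_t *lock)
{
        unsigned int expected;

        do {
                /* Spin on plain loads until the lock looks free. */
                while (__atomic_load_n(&lock->lock, __ATOMIC_RELAXED))
                        ;
                expected = 0;
                /* Try to move the word 0 -> 1; retry if someone beat us. */
        } while (!__atomic_compare_exchange_n(&lock->lock, &expected, 1,
                                              false, __ATOMIC_ACQUIRE,
                                              __ATOMIC_RELAXED));
}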

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
        return !test_and_set_bit(0, &lock->lock);
}

/***********************************************************/

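/*
 * Read-write lock word encoding: bit 0 is set while a writer holds the
 * lock, and each active reader subtracts 2 from the word, so the word
 * is zero only when the lock is completely free.
 */
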
static inline int arch_read_can_lock(arch_rwlock_t *lock)
{
        return (lock->lock & 1) == 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *lock)
{
        return lock->lock == 0;
}

/*
 * Reader acquire: retry while a writer holds bit 0 of the word,
 * otherwise subtract 2 to register one more reader.
 */
static inline void arch_read_lock(arch_rwlock_t *lock)
{
        long regx;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       blbs    %1,6f\n"
        "       subl    %1,2,%1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        "       mb\n"
        ".subsection 2\n"
        "6:     ldl     %1,%0\n"
        "       blbs    %1,6b\n"
        "       br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}
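
/*
 * Illustrative sketch (not used anywhere): the reader acquire above,
 * expressed at the C level with GCC's __atomic builtins.
 */
static inline void __arch_read_lock_sketch(arch_rwlock_t *lock)
{
        unsigned int old;

        for (;;) {
                old = __atomic_load_n(&lock->lock, __ATOMIC_RELAXED);
                if (old & 1)    /* a writer holds the lock: keep spinning */
                        continue;
                /* Register one more reader by moving old -> old - 2. */
                if (__atomic_compare_exchange_n(&lock->lock, &old, old - 2,
                                                false, __ATOMIC_ACQUIRE,
                                                __ATOMIC_RELAXED))
                        break;
        }
}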

/*
 * Writer acquire: wait for the whole word to read zero, then store 1
 * (bit 0 marks the writer).
 */
static inline void arch_write_lock(arch_rwlock_t *lock)
{
        long regx;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       bne     %1,6f\n"
        "       lda     %1,1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        "       mb\n"
        ".subsection 2\n"
        "6:     ldl     %1,%0\n"
        "       bne     %1,6b\n"
        "       br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}

/*
 * Reader trylock: returns nonzero on success.  Fails (success == 0)
 * if a writer holds bit 0; spurious stl_c failures are retried.
 */
static inline int arch_read_trylock(arch_rwlock_t *lock)
{
        long regx;
        int success;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       lda     %2,0\n"
        "       blbs    %1,2f\n"
        "       subl    %1,2,%2\n"
        "       stl_c   %2,%0\n"
        "       beq     %2,6f\n"
        "2:     mb\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx), "=&r" (success)
        : "m" (*lock) : "memory");

        return success;
}

/*
 * Writer trylock: returns nonzero on success.  Fails (success == 0)
 * if any reader or writer holds the lock.
 */
static inline int arch_write_trylock(arch_rwlock_t *lock)
{
        long regx;
        int success;

        __asm__ __volatile__(
        "1:     ldl_l   %1,%0\n"
        "       lda     %2,0\n"
        "       bne     %1,2f\n"
        "       lda     %2,1\n"
        "       stl_c   %2,%0\n"
        "       beq     %2,6f\n"
        "2:     mb\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx), "=&r" (success)
        : "m" (*lock) : "memory");

        return success;
}
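
/*
 * Illustrative sketch (not used anywhere): arch_write_trylock() above
 * amounts to a single 0 -> 1 compare-and-swap on the lock word, shown
 * here with GCC's __atomic builtins.
 */
static inline int __arch_write_trylock_sketch(arch_rwlock_t *lock)
{
        unsigned int expected = 0;

        /* Succeed only if no reader or writer currently holds the lock. */
        return __atomic_compare_exchange_n(&lock->lock, &expected, 1,
                                           false, __ATOMIC_ACQUIRE,
                                           __ATOMIC_RELAXED);
}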

/*
 * Reader release: add 2 back to the lock word; the leading mb orders
 * the critical section before the release.
 */
static inline void arch_read_unlock(arch_rwlock_t *lock)
{
        long regx;
        __asm__ __volatile__(
        "       mb\n"
        "1:     ldl_l   %1,%0\n"
        "       addl    %1,2,%1\n"
        "       stl_c   %1,%0\n"
        "       beq     %1,6f\n"
        ".subsection 2\n"
        "6:     br      1b\n"
        ".previous"
        : "=m" (*lock), "=&r" (regx)
        : "m" (*lock) : "memory");
}

/* Writer release: full barrier, then a plain store of 0. */
static inline void arch_write_unlock(arch_rwlock_t *lock)
{
        mb();
        lock->lock = 0;
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)
#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

#endif /* _ALPHA_SPINLOCK_H */