arch/blackfin/include/asm/spinlock.h
/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#ifndef CONFIG_SMP
# include <asm-generic/spinlock.h>
#else

#include <asm/atomic.h>

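/* Low-level SMP lock primitives, implemented in Blackfin assembly. */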
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void arch_read_lock_asm(volatile int *ptr);
asmlinkage int arch_read_trylock_asm(volatile int *ptr);
asmlinkage void arch_read_unlock_asm(volatile int *ptr);
asmlinkage void arch_write_lock_asm(volatile int *ptr);
asmlinkage int arch_write_trylock_asm(volatile int *ptr);
asmlinkage void arch_write_unlock_asm(volatile int *ptr);

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
        return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        __raw_spin_lock_asm(&lock->lock);
}

#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
        return __raw_spin_trylock_asm(&lock->lock);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        __raw_spin_unlock_asm(&lock->lock);
}

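/* Busy-wait until the lock is released, without trying to take it. */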
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
        while (arch_spin_is_locked(lock))
                cpu_relax();
}

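/*
 * rwlock state is kept in rw->lock: RW_LOCK_BIAS means the lock is
 * completely free, and any value above zero means new readers may still
 * enter (see the can_lock helpers below).
 */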
static inline int arch_read_can_lock(arch_rwlock_t *rw)
{
        return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *rw)
{
        return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}

static inline void arch_read_lock(arch_rwlock_t *rw)
{
        arch_read_lock_asm(&rw->lock);
}

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
        return arch_read_trylock_asm(&rw->lock);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
        arch_read_unlock_asm(&rw->lock);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
        arch_write_lock_asm(&rw->lock);
}

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
        return arch_write_trylock_asm(&rw->lock);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
        arch_write_unlock_asm(&rw->lock);
}

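/* No special back-off while spinning: just pause the CPU. */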
#define arch_spin_relax(lock)   cpu_relax()
#define arch_read_relax(lock)   cpu_relax()
#define arch_write_relax(lock)  cpu_relax()

#endif /* !CONFIG_SMP */

#endif /* !__BFIN_SPINLOCK_H */