/*
 * Copyright 2004-2009 Analog Devices Inc.
 *
 * Licensed under the GPL-2 or later.
 */

#ifndef __BFIN_SPINLOCK_H
#define __BFIN_SPINLOCK_H

#ifndef CONFIG_SMP
# include <asm-generic/spinlock.h>
#else

#include <linux/atomic.h>
#include <asm/processor.h>
#include <asm/barrier.h>

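/*
 * On SMP (the dual-core BF561) the low-level lock operations are
 * implemented in assembly; the inline wrappers below only forward
 * the address of the 32-bit lock word to these routines.
 */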
asmlinkage int __raw_spin_is_locked_asm(volatile int *ptr);
asmlinkage void __raw_spin_lock_asm(volatile int *ptr);
asmlinkage int __raw_spin_trylock_asm(volatile int *ptr);
asmlinkage void __raw_spin_unlock_asm(volatile int *ptr);
asmlinkage void __raw_read_lock_asm(volatile int *ptr);
asmlinkage int __raw_read_trylock_asm(volatile int *ptr);
asmlinkage void __raw_read_unlock_asm(volatile int *ptr);
asmlinkage void __raw_write_lock_asm(volatile int *ptr);
asmlinkage int __raw_write_trylock_asm(volatile int *ptr);
asmlinkage void __raw_write_unlock_asm(volatile int *ptr);

static inline int arch_spin_is_locked(arch_spinlock_t *lock)
{
        return __raw_spin_is_locked_asm(&lock->lock);
}

static inline void arch_spin_lock(arch_spinlock_t *lock)
{
        __raw_spin_lock_asm(&lock->lock);
}

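/*
 * The _lock_flags() variants exist so that an architecture can
 * re-enable interrupts while waiting for a contended lock; Blackfin
 * does not do this, so the saved flags are ignored and the plain
 * lock routines are used (likewise for the rwlock variants below).
 */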
#define arch_spin_lock_flags(lock, flags) arch_spin_lock(lock)

static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
        return __raw_spin_trylock_asm(&lock->lock);
}

static inline void arch_spin_unlock(arch_spinlock_t *lock)
{
        __raw_spin_unlock_asm(&lock->lock);
}

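/*
 * Busy-wait until the lock is released: smp_cond_load_acquire()
 * spins until the lock word reads zero, and gives the load ACQUIRE
 * ordering so later accesses cannot be hoisted above it.
 */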
static inline void arch_spin_unlock_wait(arch_spinlock_t *lock)
{
        smp_cond_load_acquire(&lock->lock, !VAL);
}

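/*
 * The rwlock word is initialised to RW_LOCK_BIAS: any value greater
 * than zero means a read lock can still be granted, and the full
 * bias means the lock is completely free (no readers, no writer).
 * __raw_uncached_fetch_asm() samples the word with an uncached load
 * so that a stale cached copy is never tested.
 */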
static inline int arch_read_can_lock(arch_rwlock_t *rw)
{
        return __raw_uncached_fetch_asm(&rw->lock) > 0;
}

static inline int arch_write_can_lock(arch_rwlock_t *rw)
{
        return __raw_uncached_fetch_asm(&rw->lock) == RW_LOCK_BIAS;
}

static inline void arch_read_lock(arch_rwlock_t *rw)
{
        __raw_read_lock_asm(&rw->lock);
}

#define arch_read_lock_flags(lock, flags) arch_read_lock(lock)

static inline int arch_read_trylock(arch_rwlock_t *rw)
{
        return __raw_read_trylock_asm(&rw->lock);
}

static inline void arch_read_unlock(arch_rwlock_t *rw)
{
        __raw_read_unlock_asm(&rw->lock);
}

static inline void arch_write_lock(arch_rwlock_t *rw)
{
        __raw_write_lock_asm(&rw->lock);
}

#define arch_write_lock_flags(lock, flags) arch_write_lock(lock)

static inline int arch_write_trylock(arch_rwlock_t *rw)
{
        return __raw_write_trylock_asm(&rw->lock);
}

static inline void arch_write_unlock(arch_rwlock_t *rw)
{
        __raw_write_unlock_asm(&rw->lock);
}

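/* No smarter backoff is available; just issue the standard
 * spin-wait hint while the lock is contended. */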
#define arch_spin_relax(lock)   cpu_relax()
#define arch_read_relax(lock)   cpu_relax()
#define arch_write_relax(lock)  cpu_relax()

#endif /* !CONFIG_SMP */

#endif /* !__BFIN_SPINLOCK_H */