/*
 * arch/arm64/include/asm/barrier.h
 *
 * Based on arch/arm/include/asm/barrier.h
 *
 * Copyright (C) 2012 ARM Ltd.
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program.  If not, see <http://www.gnu.org/licenses/>.
 */
#ifndef __ASM_BARRIER_H
#define __ASM_BARRIER_H

#ifndef __ASSEMBLY__

#define sev()		asm volatile("sev" : : : "memory")
#define wfe()		asm volatile("wfe" : : : "memory")
#define wfi()		asm volatile("wfi" : : : "memory")
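
/*
 * A minimal spin-wait sketch using wfe()/sev(), assuming a hypothetical
 * shared 'flag' (illustrative only; generic code should prefer the
 * smp_cond_load_acquire() helper further down):
 *
 *	while (!READ_ONCE(flag))
 *		wfe();			// suspend until an event arrives
 *
 * and on the signalling side, after updating 'flag':
 *
 *	sev();				// wake all CPUs waiting in wfe()
 */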

#define isb()		asm volatile("isb" : : : "memory")
#define dmb(opt)	asm volatile("dmb " #opt : : : "memory")
#define dsb(opt)	asm volatile("dsb " #opt : : : "memory")
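
/*
 * dmb() and dsb() stringize their argument, so the barrier option is
 * written bare at the call site. For example:
 *
 *	dmb(ishld);	// emits asm volatile("dmb ishld" : : : "memory")
 *	dsb(sy);	// emits asm volatile("dsb sy" : : : "memory")
 */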

#define mb()		dsb(sy)
#define rmb()		dsb(ld)
#define wmb()		dsb(st)
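
/*
 * The mandatory barriers are built on dsb so that, unlike the dmb-based
 * SMP variants below, they also order accesses against device memory
 * and wait for completion of the prior accesses.
 */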

#define dma_rmb()	dmb(oshld)
#define dma_wmb()	dmb(oshst)
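
/*
 * A hedged sketch of the producer pattern the dma_* barriers target,
 * assuming a hypothetical descriptor shared coherently with a device:
 *
 *	desc->addr = buf_dma;
 *	desc->len  = buf_len;
 *	dma_wmb();			// publish payload before ownership
 *	desc->status = OWN_DEVICE;
 *
 * The consumer reads 'status' first and issues dma_rmb() before
 * trusting the payload fields.
 */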

#define __smp_mb()	dmb(ish)
#define __smp_rmb()	dmb(ishld)
#define __smp_wmb()	dmb(ishst)
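
/*
 * The SMP barriers are scoped to the inner-shareable domain, which
 * contains every CPU the kernel runs on, making them cheaper than the
 * full-system mb()/rmb()/wmb() above.
 */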

#define __smp_store_release(p, v)					\
do {									\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("stlrb %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 2:								\
		asm volatile ("stlrh %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 4:								\
		asm volatile ("stlr %w1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	case 8:								\
		asm volatile ("stlr %1, %0"				\
				: "=Q" (*p) : "r" (v) : "memory");	\
		break;							\
	}								\
} while (0)
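
/*
 * asm-generic/barrier.h (included below) wraps this as
 * smp_store_release(); on arm64 the stlr family provides the release
 * semantics directly, so no separate dmb is required.
 */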

#define __smp_load_acquire(p)						\
({									\
	union { typeof(*p) __val; char __c[1]; } __u;			\
	compiletime_assert_atomic_type(*p);				\
	switch (sizeof(*p)) {						\
	case 1:								\
		asm volatile ("ldarb %w0, %1"				\
			: "=r" (*(__u8 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 2:								\
		asm volatile ("ldarh %w0, %1"				\
			: "=r" (*(__u16 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 4:								\
		asm volatile ("ldar %w0, %1"				\
			: "=r" (*(__u32 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	case 8:								\
		asm volatile ("ldar %0, %1"				\
			: "=r" (*(__u64 *)__u.__c)			\
			: "Q" (*p) : "memory");				\
		break;							\
	}								\
	__u.__val;							\
})
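
/*
 * A minimal message-passing sketch pairing the two primitives, via the
 * smp_store_release()/smp_load_acquire() wrappers and hypothetical
 * 'data'/'ready' variables:
 *
 *	Producer:
 *		WRITE_ONCE(data, 42);
 *		smp_store_release(&ready, 1);	// stlr orders the pair
 *
 *	Consumer:
 *		while (!smp_load_acquire(&ready))
 *			cpu_relax();
 *		r = READ_ONCE(data);		// guaranteed to see 42
 */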

#define smp_cond_load_acquire(ptr, cond_expr)				\
({									\
	typeof(ptr) __PTR = (ptr);					\
	typeof(*ptr) VAL;						\
	for (;;) {							\
		VAL = smp_load_acquire(__PTR);				\
		if (cond_expr)						\
			break;						\
		__cmpwait_relaxed(__PTR, VAL);				\
	}								\
	VAL;								\
})
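
/*
 * A hedged usage sketch, assuming a hypothetical lock word: spin until
 * the value reaches zero, taking acquire semantics on the final read.
 * __cmpwait_relaxed() lets the CPU wait in wfe rather than busy-poll:
 *
 *	val = smp_cond_load_acquire(&lock->val, VAL == 0);
 */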

#include <asm-generic/barrier.h>

#endif	/* __ASSEMBLY__ */

#endif	/* __ASM_BARRIER_H */