#ifndef _ASM_POWERPC_ATOMIC_H_
#define _ASM_POWERPC_ATOMIC_H_

/*
 * PowerPC atomic operations
 */

typedef struct { volatile int counter; } atomic_t;

#ifdef __KERNEL__
#include <asm/synch.h>
#include <asm/asm-compat.h>

#define ATOMIC_INIT(i)		{ (i) }

#define atomic_read(v)		((v)->counter)
#define atomic_set(v,i)		(((v)->counter) = (i))

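/*
 * Usage sketch (editor's illustration, not part of the original header):
 * atomic_read()/atomic_set() are plain accesses on the volatile counter;
 * only the lwarx/stwcx. routines below are atomic read-modify-write.
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_inc(&nr_users);
 *	if (atomic_read(&nr_users) > max_users)	(max_users is hypothetical)
 *		...;
 */
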
static __inline__ void atomic_add(int a, atomic_t *v)
{
	int t;

	/*
	 * lwarx/stwcx. form a load-reserve/store-conditional pair: the
	 * stwcx. fails and we loop back to 1: if another CPU has touched
	 * the reservation granule since the lwarx.
	 */
	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_add\n\
	add	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_add_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_add_return\n\
	add	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_add_negative(a, v)	(atomic_add_return((a), (v)) < 0)

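/*
 * Sketch (editor's illustration): unlike atomic_add(), the *_return
 * variants act as full barriers and hand back the new value, so a
 * caller can update and test in one atomic step.  `pending' and
 * wake_worker() are hypothetical.
 *
 *	if (atomic_add_return(1, &pending) == 1)
 *		wake_worker();		(first item queued: kick the worker)
 */
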
static __inline__ void atomic_sub(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%3		# atomic_sub\n\
	subf	%0,%2,%0\n"
	PPC405_ERR77(0,%3)
"	stwcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_sub_return(int a, atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%2		# atomic_sub_return\n\
	subf	%0,%1,%0\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic_inc(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_inc\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_inc_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_inc_return\n\
	addic	%0,%0,1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

static __inline__ void atomic_dec(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
"1:	lwarx	%0,0,%2		# atomic_dec\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%2)
"	stwcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ int atomic_dec_return(atomic_t *v)
{
	int t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_return\n\
	addic	%0,%0,-1\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic_sub_and_test(a, v)	(atomic_sub_return((a), (v)) == 0)
#define atomic_dec_and_test(v)		(atomic_dec_return((v)) == 0)

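/*
 * Sketch (editor's illustration): the classic refcount pattern built on
 * atomic_dec_and_test().  `obj', its `refs' field and free_obj() are
 * hypothetical.
 *
 *	atomic_inc(&obj->refs);			(take a reference)
 *	...
 *	if (atomic_dec_and_test(&obj->refs))	(true only for the last put)
 *		free_obj(obj);
 */
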
/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ int atomic_dec_if_positive(atomic_t *v)
{
	int t;

	/* addic. sets CR0, so blt- skips the store (leaving *v untouched)
	 * when the decremented value would be negative. */
	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	lwarx	%0,0,%1		# atomic_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n"
	PPC405_ERR77(0,%1)
"	stwcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()

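/*
 * Sketch (editor's illustration): plain atomic_inc()/atomic_dec() imply
 * no memory barrier, so callers that need ordering bracket them with the
 * macros above.  `flag' and `v' here are hypothetical.
 *
 *	flag = 1;
 *	smp_mb__before_atomic_dec();	(order the store to flag vs. the dec)
 *	atomic_dec(&v);
 */
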
#ifdef __powerpc64__

typedef struct { volatile long counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

#define atomic64_read(v)	((v)->counter)
#define atomic64_set(v,i)	(((v)->counter) = (i))

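/*
 * Sketch (editor's illustration): the atomic64_* API mirrors atomic_t on
 * a 64-bit long, for counters that would wrap a 32-bit type.  `bytes_rx'
 * and `len' are hypothetical.
 *
 *	static atomic64_t bytes_rx = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_rx);
 */
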
static __inline__ void atomic64_add(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_add\n\
	add	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_add_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_add_return\n\
	add	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_add_negative(a, v)	(atomic64_add_return((a), (v)) < 0)

static __inline__ void atomic64_sub(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%3		# atomic64_sub\n\
	subf	%0,%2,%0\n\
	stdcx.	%0,0,%3 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (a), "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_sub_return(long a, atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%2		# atomic64_sub_return\n\
	subf	%0,%1,%0\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (a), "r" (&v->counter)
	: "cc", "memory");

	return t;
}

static __inline__ void atomic64_inc(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_inc\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%2 \n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_inc_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_inc_return\n\
	addic	%0,%0,1\n\
	stdcx.	%0,0,%1 \n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

static __inline__ void atomic64_dec(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
"1:	ldarx	%0,0,%2		# atomic64_dec\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%2\n\
	bne-	1b"
	: "=&r" (t), "=m" (v->counter)
	: "r" (&v->counter), "m" (v->counter)
	: "cc");
}

static __inline__ long atomic64_dec_return(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_return\n\
	addic	%0,%0,-1\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#define atomic64_sub_and_test(a, v)	(atomic64_sub_return((a), (v)) == 0)
#define atomic64_dec_and_test(v)	(atomic64_dec_return((v)) == 0)

/*
 * Atomically test *v and decrement if it is greater than 0.
 * The function returns the old value of *v minus 1.
 */
static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
{
	long t;

	__asm__ __volatile__(
	LWSYNC_ON_SMP
"1:	ldarx	%0,0,%1		# atomic64_dec_if_positive\n\
	addic.	%0,%0,-1\n\
	blt-	2f\n\
	stdcx.	%0,0,%1\n\
	bne-	1b"
	ISYNC_ON_SMP
	"\n\
2:"	: "=&r" (t)
	: "r" (&v->counter)
	: "cc", "memory");

	return t;
}

#endif /* __powerpc64__ */

#endif /* __KERNEL__ */
#endif /* _ASM_POWERPC_ATOMIC_H_ */