/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */
/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <asm/cpu-features.h>
#include <asm/war.h>

extern spinlock_t atomic_lock;
typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)	{ (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
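/*
 * Illustrative usage sketch (not part of this header's API; the identifiers
 * foo_refs and foo_reset are made up for the example): declaring and
 * (re)initialising an atomic counter with the macros above.
 *
 *	static atomic_t foo_refs = ATOMIC_INIT(1);
 *
 *	static void foo_reset(void)
 *	{
 *		atomic_set(&foo_refs, 1);
 *		if (atomic_read(&foo_refs) != 1)
 *			return;		(never true here, read shown for form)
 *	}
 */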
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add		\n"
		"	addu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}
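/*
 * All of the read-modify-write operations in this file pick one of three
 * paths at run time: on CPUs with ll/sc the update runs in a load-linked /
 * store-conditional retry loop (beqz on a failed sc); on R10000 parts that
 * need the R10000_LLSC_WAR workaround the retry uses the branch-likely form
 * beqzl; and on CPUs without ll/sc the operation falls back to taking the
 * global atomic_lock spinlock with interrupts disabled.
 */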
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		"	subu	%0, %2					\n"
		"	sc	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}
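/*
 * Illustrative sketch (hypothetical identifiers): resource counting with
 * atomic_add()/atomic_sub(), as suggested by the comment at the top of this
 * file.
 *
 *	static atomic_t pages_in_flight = ATOMIC_INIT(0);
 *
 *	static void start_io(int nr_pages)
 *	{
 *		atomic_add(nr_pages, &pages_in_flight);
 *	}
 *
 *	static void end_io(int nr_pages)
 *	{
 *		atomic_sub(nr_pages, &pages_in_flight);
 *	}
 */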
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	addu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	subu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
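/*
 * Illustrative sketch (hypothetical identifiers): the *_return variants are
 * what callers use when they need the value produced by the update, e.g. to
 * act on the transition they just caused.  first_user_setup() and
 * last_user_teardown() are placeholders.
 *
 *	static atomic_t active = ATOMIC_INIT(0);
 *
 *	static void get_slot(void)
 *	{
 *		if (atomic_add_return(1, &active) == 1)
 *			first_user_setup();
 *	}
 *
 *	static void put_slot(void)
 *	{
 *		if (atomic_sub_return(1, &active) == 0)
 *			last_user_teardown();
 *	}
 */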
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	sc	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
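/*
 * Illustrative sketch (hypothetical identifiers): atomic_sub_if_positive()
 * is the building block for semaphore-style "take a unit only if one is
 * available" logic; a negative return value means the subtraction was not
 * performed.
 *
 *	static atomic_t free_slots = ATOMIC_INIT(4);
 *
 *	static int try_take_slot(void)
 *	{
 *		return atomic_sub_if_positive(1, &free_slots) >= 0;
 *	}
 */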
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
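/*
 * Illustrative sketch (hypothetical struct foo and refcnt member):
 * atomic_inc_not_zero() is the usual way to take a reference only while an
 * object is still live, i.e. while its refcount has not already dropped to
 * zero.
 *
 *	struct foo { atomic_t refcnt; };
 *
 *	static int foo_get(struct foo *f)
 *	{
 *		return atomic_inc_not_zero(&f->refcnt);
 *	}
 */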
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
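/*
 * Illustrative sketch (hypothetical struct foo as above):
 * atomic_dec_and_test() is the classic "drop a reference and free on the
 * final put" idiom.
 *
 *	static void foo_put(struct foo *f)
 *	{
 *		if (atomic_dec_and_test(&f->refcnt))
 *			kfree(f);
 *	}
 */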
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))
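/*
 * The atomic64_* interface below mirrors the 32-bit atomic_* interface on a
 * 64-bit counter and is only built on CONFIG_64BIT kernels.  Illustrative
 * sketch (hypothetical identifiers):
 *
 *	static atomic64_t bytes_written = ATOMIC64_INIT(0);
 *
 *	static void account_write(long nbytes)
 *	{
 *		atomic64_add(nbytes, &bytes_written);
 *	}
 */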
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		"	daddu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter += i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqzl	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		"	dsubu	%0, %2					\n"
		"	scd	%0, %1					\n"
		"	beqz	%0, 1b					\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		v->counter -= i;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}
}
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	daddu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result += i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	dsubu	%0, %1, %3				\n"
		"	sync						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqzl	%0, 1b					\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		"	bltz	%0, 1f					\n"
		"	scd	%0, %2					\n"
		"	beqz	%0, 1b					\n"
		"	sync						\n"
		"1:							\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		spin_lock_irqsave(&atomic_lock, flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		spin_unlock_irqrestore(&atomic_lock, flags);
	}

	return result;
}
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)
/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
#endif /* CONFIG_64BIT */
/*
 * The atomic*_return operations are serializing; the plain (non-*_return)
 * operations are not, so explicit smp_mb__* barriers are provided for use
 * around them.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
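/*
 * Illustrative sketch (hypothetical obj, state and pending members): because
 * atomic_dec() does not imply a memory barrier here, code that needs its
 * prior stores to be visible before the decrement must say so explicitly:
 *
 *	obj->state = DONE;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */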
#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */