/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04 by Ralf Baechle
 */
/*
 * As a workaround for the ATOMIC_DEC_AND_LOCK / atomic_dec_and_lock mess in
 * <linux/spinlock.h> we have to include <linux/spinlock.h> outside the
 * main big wrapper ...
 */
#include <linux/config.h>
#include <linux/spinlock.h>

#include <asm/cpu-features.h>

extern spinlock_t atomic_lock;

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)    { (i) }
/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)
/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
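/*
 * Usage sketch (illustrative only, not part of this interface; the
 * variable and function names below are hypothetical).  It shows a
 * counter being declared, reset and tested with the macros above:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	void example_reset(void)
 *	{
 *		atomic_set(&nr_users, 0);
 *	}
 *
 *	int example_busy(void)
 *	{
 *		return atomic_read(&nr_users) != 0;
 *	}
 */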
/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		"1:	ll	%0, %1		# atomic_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * Same as above, but return the result value
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3				\n"
		"	addu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
static __inline__ int atomic_sub_return(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3				\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if the result is greater than or equal to 0.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
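/*
 * Sketch of a compare-and-swap retry loop built on atomic_cmpxchg()
 * (illustrative only; the helper name is hypothetical).  It adds @a to
 * @v unless @v currently equals @u, and reports whether the addition
 * happened:
 *
 *	static inline int example_add_unless(atomic_t *v, int a, int u)
 *	{
 *		int old;
 *
 *		for (;;) {
 *			old = atomic_read(v);
 *			if (old == u)
 *				return 0;
 *			if (atomic_cmpxchg(v, old, old + a) == old)
 *				return 1;
 *		}
 *	}
 */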
#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))
/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)
/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)
/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
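/*
 * Typical reference-counting pattern using atomic_dec_and_test()
 * (illustrative sketch; the struct and function names are hypothetical).
 * The last dropper of the reference frees the object:
 *
 *	struct example_obj {
 *		atomic_t refcnt;
 *	};
 *
 *	void example_put(struct example_obj *obj)
 *	{
 *		if (atomic_dec_and_test(&obj->refcnt))
 *			kfree(obj);
 *	}
 */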
/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v)	atomic_sub_if_positive(1, v)
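/*
 * Sketch of consuming a credit only while credits remain (illustrative
 * only; the names are hypothetical).  atomic_dec_if_positive() returns
 * the decremented value, which is negative when no credit was taken:
 *
 *	static atomic_t credits = ATOMIC_INIT(4);
 *
 *	int example_take_credit(void)
 *	{
 *		return atomic_dec_if_positive(&credits) >= 0;
 *	}
 */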
/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))
/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))
/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)
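/*
 * Sketch of detecting when a signed budget drops below zero with
 * atomic_add_negative() (illustrative only; the names are hypothetical).
 * The helper returns nonzero when the charge exhausted the budget:
 *
 *	static atomic_t budget = ATOMIC_INIT(100);
 *
 *	int example_charge(int cost)
 *	{
 *		return atomic_add_negative(-cost, &budget);
 *	}
 */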
#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)    { (i) }
/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)
/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))
/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub		\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * Same as above, but return the result value
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3				\n"
		"	daddu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3				\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if the result is greater than or equal to 0.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
	} else if (cpu_has_llsc) {
		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3				\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		spin_lock_irqsave(&atomic_lock, flags);
		spin_unlock_irqrestore(&atomic_lock, flags);
#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))
/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)
/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)
/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)
/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v)	atomic64_sub_if_positive(1, v)
/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))
/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))
/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)
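/*
 * On 64-bit kernels the atomic64_* variants mirror the 32-bit API but
 * operate on a 64-bit counter, e.g. for totals that may exceed 2^31.
 * Illustrative sketch only; the names below are hypothetical:
 *
 *	static atomic64_t bytes_transferred = ATOMIC64_INIT(0);
 *
 *	void example_account(long nbytes)
 *	{
 *		atomic64_add(nbytes, &bytes_transferred);
 *	}
 */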
#endif /* CONFIG_64BIT */
/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
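/*
 * Sketch of pairing the explicit barriers with a non-returning atomic
 * op when ordering is needed (illustrative only; the field names are
 * hypothetical).  atomic_dec() alone does not order the earlier store,
 * so a barrier is placed before it:
 *
 *	obj->data = value;
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */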
#endif /* _ASM_ATOMIC_H */