/*
 * Atomic operations that C can't guarantee us.  Useful for
 * resource counting etc.
 *
 * But use these as seldom as possible since they are much slower
 * than regular operations.
 *
 * This file is subject to the terms and conditions of the GNU General Public
 * License.  See the file "COPYING" in the main directory of this archive
 * for more details.
 *
 * Copyright (C) 1996, 97, 99, 2000, 03, 04, 06 by Ralf Baechle
 */
#ifndef _ASM_ATOMIC_H
#define _ASM_ATOMIC_H

#include <linux/irqflags.h>
#include <asm/cpu-features.h>
#include <asm/war.h>

typedef struct { volatile int counter; } atomic_t;

#define ATOMIC_INIT(i)		{ (i) }

/*
 * atomic_read - read atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically reads the value of @v.
 */
#define atomic_read(v)		((v)->counter)

/*
 * atomic_set - set atomic variable
 * @v: pointer of type atomic_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic_set(v,i)		((v)->counter = (i))
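
/*
 * Usage sketch (illustrative only, not part of this header; the names
 * nr_users and do_something() are hypothetical).  atomic_read() and
 * atomic_set() are plain accesses and need no ll/sc sequence:
 *
 *	static atomic_t nr_users = ATOMIC_INIT(0);
 *
 *	atomic_set(&nr_users, 5);
 *	if (atomic_read(&nr_users) > 0)
 *		do_something();
 */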

/*
 * atomic_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic_add(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_add	\n"
		"	addu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}
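
/*
 * The ll/sc pairs above are, in effect, the C loop below: sc stores
 * only if no other CPU wrote the location since the matching ll, and
 * leaves 0 in its register on failure, so we retry until it succeeds.
 * store_conditional() is a hypothetical helper used for illustration:
 *
 *	do {
 *		temp = v->counter;				// ll
 *		temp += i;					// addu
 *	} while (!store_conditional(&v->counter, temp));	// sc + beqz
 */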

/*
 * atomic_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic_sub(int i, atomic_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%0, %1		# atomic_sub	\n"
		"	subu	%0, %2				\n"
		"	sc	%0, %1				\n"
		"	beqz	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}
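
/*
 * Usage sketch (illustrative; the names are hypothetical): a counter
 * that several CPUs update concurrently without locking:
 *
 *	static atomic_t pkts_in_flight = ATOMIC_INIT(0);
 *
 *	atomic_add(nfrags, &pkts_in_flight);	// on submit
 *	atomic_sub(nfrags, &pkts_in_flight);	// on completion
 */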

/*
 * Same as above, but return the result value.
 */
static __inline__ int atomic_add_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3			\n"
		"	sc	%0, %2				\n"
		"	beqzl	%0, 1b				\n"
		"	addu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_add_return	\n"
		"	addu	%0, %1, %3			\n"
		"	sc	%0, %2				\n"
		"	beqz	%0, 1b				\n"
		"	addu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ int atomic_sub_return(int i, atomic_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3			\n"
		"	sc	%0, %2				\n"
		"	beqzl	%0, 1b				\n"
		"	subu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_return	\n"
		"	subu	%0, %1, %3			\n"
		"	sc	%0, %2				\n"
		"	beqz	%0, 1b				\n"
		"	subu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
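
/*
 * Usage sketch (illustrative; active_conns and MAX_CONNS are
 * hypothetical): the returned value lets a caller act on the
 * post-operation count without a separate, racy atomic_read():
 *
 *	if (atomic_add_return(1, &active_conns) > MAX_CONNS)
 *		atomic_sub(1, &active_conns);	// over limit, back off
 */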

/*
 * atomic_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ int atomic_sub_if_positive(int i, atomic_t * v)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"	sync					\n"
		"1:						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	ll	%1, %2		# atomic_sub_if_positive\n"
		"	subu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	sc	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 subu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"	sync					\n"
		"1:						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}
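
/*
 * Usage sketch (illustrative; sem_count is hypothetical): a counting
 * semaphore trylock.  The store only happens when the count stays
 * non-negative, and the caller sees the would-be result either way:
 *
 *	if (atomic_sub_if_positive(1, &sem_count) < 0)
 *		return -EBUSY;	// no resources left, count untouched
 */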

#define atomic_cmpxchg(v, o, n) ((int)cmpxchg(&((v)->counter), (o), (n)))
#define atomic_xchg(v, new) (xchg(&((v)->counter), (new)))
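
/*
 * Usage sketch (illustrative; atomic_max() is a hypothetical helper):
 * the classic compare-and-swap loop, re-reading and retrying until no
 * other CPU modified the value between the read and the cmpxchg:
 *
 *	static inline void atomic_max(atomic_t *v, int new)
 *	{
 *		int old = atomic_read(v);
 *
 *		while (old < new) {
 *			int seen = atomic_cmpxchg(v, old, new);
 *			if (seen == old)
 *				break;		// our value stuck
 *			old = seen;		// lost a race, retry
 *		}
 *	}
 */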

/*
 * atomic_add_unless - add unless the number is a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as it was not @u.
 * Returns non-zero if @v was not @u, and zero otherwise.
 */
#define atomic_add_unless(v, a, u)				\
({								\
	int c, old;						\
	c = atomic_read(v);					\
	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
		c = old;					\
	c != (u);						\
})
#define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
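
/*
 * Usage sketch (illustrative; obj and its refs field are hypothetical):
 * take a reference to an object found under a lockless lookup only if
 * it is still live, i.e. its refcount has not already dropped to zero:
 *
 *	if (!atomic_inc_not_zero(&obj->refs))
 *		return NULL;	// object already being freed
 */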

#define atomic_dec_return(v) atomic_sub_return(1,(v))
#define atomic_inc_return(v) atomic_add_return(1,(v))

/*
 * atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic_sub_and_test(i,v) (atomic_sub_return((i), (v)) == 0)

/*
 * atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic_inc_and_test(v) (atomic_inc_return(v) == 0)

/*
 * atomic_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
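
/*
 * Usage sketch (illustrative; obj and free_object() are hypothetical):
 * the canonical refcount-release idiom.  Exactly one CPU sees the
 * count hit zero, so exactly one CPU frees the object:
 *
 *	if (atomic_dec_and_test(&obj->refs))
 *		free_object(obj);
 */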

/*
 * atomic_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic_t
 */
#define atomic_dec_if_positive(v) atomic_sub_if_positive(1, v)

/*
 * atomic_inc - increment atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1.
 */
#define atomic_inc(v) atomic_add(1,(v))

/*
 * atomic_dec - decrement atomic variable
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic_dec(v) atomic_sub(1,(v))

/*
 * atomic_add_negative - add and test if negative
 * @v: pointer of type atomic_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic_add_negative(i,v) (atomic_add_return(i, (v)) < 0)

#ifdef CONFIG_64BIT

typedef struct { volatile __s64 counter; } atomic64_t;

#define ATOMIC64_INIT(i)	{ (i) }

/*
 * atomic64_read - read atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically reads the value of @v.
 */
#define atomic64_read(v)	((v)->counter)

/*
 * atomic64_set - set atomic variable
 * @v: pointer of type atomic64_t
 * @i: required value
 *
 * Atomically sets the value of @v to @i.
 */
#define atomic64_set(v,i)	((v)->counter = (i))
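
/*
 * Usage sketch (illustrative; bytes_done is hypothetical): a counter
 * that can legitimately exceed 32 bits, e.g. total bytes transferred:
 *
 *	static atomic64_t bytes_done = ATOMIC64_INIT(0);
 *
 *	atomic64_add(len, &bytes_done);
 */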

/*
 * atomic64_add - add integer to atomic variable
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v.
 */
static __inline__ void atomic64_add(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_add	\n"
		"	daddu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter += i;
		local_irq_restore(flags);
	}
}

/*
 * atomic64_sub - subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v.
 */
static __inline__ void atomic64_sub(long i, atomic64_t * v)
{
	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqzl	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%0, %1		# atomic64_sub	\n"
		"	dsubu	%0, %2				\n"
		"	scd	%0, %1				\n"
		"	beqz	%0, 1b				\n"
		: "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter));
	} else {
		unsigned long flags;

		local_irq_save(flags);
		v->counter -= i;
		local_irq_restore(flags);
	}
}

/*
 * Same as above, but return the result value.
 */
static __inline__ long atomic64_add_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3			\n"
		"	scd	%0, %2				\n"
		"	beqzl	%0, 1b				\n"
		"	daddu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_add_return	\n"
		"	daddu	%0, %1, %3			\n"
		"	scd	%0, %2				\n"
		"	beqz	%0, 1b				\n"
		"	daddu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result += i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
{
	unsigned long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3			\n"
		"	scd	%0, %2				\n"
		"	beqzl	%0, 1b				\n"
		"	dsubu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_return	\n"
		"	dsubu	%0, %1, %3			\n"
		"	scd	%0, %2				\n"
		"	beqz	%0, 1b				\n"
		"	dsubu	%0, %1, %3			\n"
		"	sync					\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

/*
 * atomic64_sub_if_positive - conditionally subtract integer from atomic variable
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically test @v and subtract @i if @v is greater than or equal to @i.
 * The function returns the old value of @v minus @i.
 */
static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
{
	long result;

	if (cpu_has_llsc && R10000_LLSC_WAR) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqzl	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"	sync					\n"
		"1:						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else if (cpu_has_llsc) {
		unsigned long temp;

		__asm__ __volatile__(
		"1:	lld	%1, %2		# atomic64_sub_if_positive\n"
		"	dsubu	%0, %1, %3			\n"
		"	bltz	%0, 1f				\n"
		"	scd	%0, %2				\n"
		"	.set	noreorder			\n"
		"	beqz	%0, 1b				\n"
		"	 dsubu	%0, %1, %3			\n"
		"	.set	reorder				\n"
		"	sync					\n"
		"1:						\n"
		: "=&r" (result), "=&r" (temp), "=m" (v->counter)
		: "Ir" (i), "m" (v->counter)
		: "memory");
	} else {
		unsigned long flags;

		local_irq_save(flags);
		result = v->counter;
		result -= i;
		if (result >= 0)
			v->counter = result;
		local_irq_restore(flags);
	}

	return result;
}

#define atomic64_dec_return(v) atomic64_sub_return(1,(v))
#define atomic64_inc_return(v) atomic64_add_return(1,(v))

/*
 * atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_sub_and_test(i,v) (atomic64_sub_return((i), (v)) == 0)

/*
 * atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
#define atomic64_inc_and_test(v) (atomic64_inc_return(v) == 0)

/*
 * atomic64_dec_and_test - decrement by 1 and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
#define atomic64_dec_and_test(v) (atomic64_sub_return(1, (v)) == 0)

/*
 * atomic64_dec_if_positive - decrement by 1 if old value positive
 * @v: pointer of type atomic64_t
 */
#define atomic64_dec_if_positive(v) atomic64_sub_if_positive(1, v)

/*
 * atomic64_inc - increment atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1.
 */
#define atomic64_inc(v) atomic64_add(1,(v))

/*
 * atomic64_dec - decrement atomic variable
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1.
 */
#define atomic64_dec(v) atomic64_sub(1,(v))

/*
 * atomic64_add_negative - add and test if negative
 * @v: pointer of type atomic64_t
 * @i: integer value to add
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * the result is greater than or equal to zero.
 */
#define atomic64_add_negative(i,v) (atomic64_add_return(i, (v)) < 0)

#endif /* CONFIG_64BIT */

/*
 * atomic*_return operations are serializing but not the non-*_return
 * ones.
 */
#define smp_mb__before_atomic_dec()	smp_mb()
#define smp_mb__after_atomic_dec()	smp_mb()
#define smp_mb__before_atomic_inc()	smp_mb()
#define smp_mb__after_atomic_inc()	smp_mb()
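
/*
 * Usage sketch (illustrative; obj, its fields and DONE are
 * hypothetical): since plain atomic_dec() does not serialize, a
 * caller that needs its earlier stores to be visible before the
 * decrement must add the barrier explicitly:
 *
 *	obj->state = DONE;		// must be visible before the dec
 *	smp_mb__before_atomic_dec();
 *	atomic_dec(&obj->pending);
 */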

#include <asm-generic/atomic.h>

#endif /* _ASM_ATOMIC_H */