X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-ia64%2Fatomic.h;h=50c2b83fd5a04cccf72996add92fae7b2ba0b407;hb=05e31754d18169555f2c8b54b5fa8631c6be6e7f;hp=2fbebf85c31d55a1b1ac8b5f7d69a62f5c554860;hpb=97f2aab6698f3ab2552c41c1024a65ffd0763a6d;p=linux-2.6

diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index 2fbebf85c3..50c2b83fd5 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -15,6 +15,7 @@
 #include <linux/types.h>
 
 #include <asm/intrinsics.h>
+#include <asm/system.h>
 
 /*
  * On IA-64, counter must always be volatile to ensure that that the
@@ -54,7 +55,7 @@ ia64_atomic64_add (__s64 i, atomic64_t *v)
 
 	do {
 		CMPXCHG_BUGCHECK(v);
-		old = atomic_read(v);
+		old = atomic64_read(v);
 		new = old + i;
 	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
 	return new;
@@ -82,24 +83,53 @@ ia64_atomic64_sub (__s64 i, atomic64_t *v)
 
 	do {
 		CMPXCHG_BUGCHECK(v);
-		old = atomic_read(v);
+		old = atomic64_read(v);
 		new = old - i;
 	} while (ia64_cmpxchg(acq, v, old, new, sizeof(atomic64_t)) != old);
 	return new;
 }
 
-#define atomic_cmpxchg(v, old, new) ((int)cmpxchg(&((v)->counter), old, new))
+#define atomic_cmpxchg(v, old, new) (cmpxchg(&((v)->counter), old, new))
+#define atomic_xchg(v, new) (xchg(&((v)->counter), new))
+
+#define atomic64_cmpxchg(v, old, new) \
+	(cmpxchg(&((v)->counter), old, new))
+#define atomic64_xchg(v, new) (xchg(&((v)->counter), new))
+
+static __inline__ int atomic_add_unless(atomic_t *v, int a, int u)
+{
+	int c, old;
+	c = atomic_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
 
-#define atomic_add_unless(v, a, u)				\
-({								\
-	int c, old;						\
-	c = atomic_read(v);					\
-	while (c != (u) && (old = atomic_cmpxchg((v), c, c + (a))) != c) \
-		c = old;					\
-	c != (u);						\
-})
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+static __inline__ int atomic64_add_unless(atomic64_t *v, long a, long u)
+{
+	long c, old;
+	c = atomic64_read(v);
+	for (;;) {
+		if (unlikely(c == (u)))
+			break;
+		old = atomic64_cmpxchg((v), c, c + (a));
+		if (likely(old == c))
+			break;
+		c = old;
+	}
+	return c != (u);
+}
+
+#define atomic64_inc_not_zero(v) atomic64_add_unless((v), 1, 0)
+
 #define atomic_add_return(i,v)						\
 ({									\
 	int __ia64_aar_i = (i);						\
@@ -192,4 +222,5 @@ atomic64_add_negative (__s64 i, atomic64_t *v)
 #define smp_mb__before_atomic_inc()	barrier()
 #define smp_mb__after_atomic_inc()	barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_IA64_ATOMIC_H */
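
Every read-modify-write above is the same compare-and-swap retry loop: read the counter, compute the new value, and publish it with cmpxchg, starting over if another CPU changed the counter in the meantime. The following is a minimal standalone sketch of that pattern, not kernel code: it assumes the GCC/Clang __atomic builtins as stand-ins for ia64_cmpxchg()/atomic_cmpxchg(), a plain long in place of atomic64_t, and the hypothetical names add_return() and add_unless().

/* cas_loop.c - userspace sketch of the cmpxchg retry loops above.
 * Build: cc -std=c11 -O2 cas_loop.c
 */
#include <stdio.h>

/* Shape of ia64_atomic64_add(): re-read, compute, publish; retry on a race. */
static long add_return(long i, long *v)
{
	long old, new;

	do {
		old = __atomic_load_n(v, __ATOMIC_RELAXED);
		new = old + i;
		/* Publish new only if *v still equals old; the acquire
		 * ordering on success mirrors ia64_cmpxchg(acq, ...). */
	} while (!__atomic_compare_exchange_n(v, &old, new, 0,
					      __ATOMIC_ACQUIRE,
					      __ATOMIC_RELAXED));
	return new;
}

/* Shape of the new inline atomic_add_unless(): add a to *v unless it
 * currently holds u; returns nonzero iff the add happened. */
static int add_unless(long *v, long a, long u)
{
	long c = __atomic_load_n(v, __ATOMIC_RELAXED);

	for (;;) {
		if (c == u)
			break;
		/* On failure the builtin rewrites c with the value it saw,
		 * which is exactly the "c = old;" step of the kernel loop. */
		if (__atomic_compare_exchange_n(v, &c, c + a, 0,
						__ATOMIC_ACQUIRE,
						__ATOMIC_RELAXED))
			break;
	}
	return c != u;
}

int main(void)
{
	long counter = 40;

	printf("add_return(2): %ld\n", add_return(2, &counter));          /* 42 */
	printf("add_unless(1, u=42): %d\n", add_unless(&counter, 1, 42)); /* 0: no add */
	printf("counter: %ld\n", counter);                                /* still 42 */
	return 0;
}

The kernel versions add two things the sketch leaves out: the counter is declared volatile (per the comment at the top of the header) and CMPXCHG_BUGCHECK() catches runaway retry loops in debug builds.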
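
Two design points in the patch itself are worth noting. First, atomic_add_unless() moves from a statement-expression macro to a static inline: the loop semantics are unchanged (the result is still "c != u", nonzero iff the add was performed), but the arguments get real types and are evaluated exactly once, and the likely()/unlikely() annotations mark the expected exit paths for the compiler. Second, atomic_inc_not_zero() and its new 64-bit sibling are the usual building blocks for lockless reference counting: a reference is taken only if the count has not already fallen to zero. The trailing include of asm-generic/atomic.h, added across architectures in this era of the tree, supplies the generic atomic_long_t wrappers, which is why it must land after every arch-specific definition.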