#define atomic_dec_and_test(v) (atomic_sub_return(1, (v)) == 0)
#define atomic_inc_and_test(v) (atomic_add_return(1, (v)) == 0)
-#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
-static inline
-unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
-{
-	unsigned long old, tmp;
-
-	asm volatile(
-		"0:						\n"
-		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
-		"	ckeq		icc3,cc7		\n"
-		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
-		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
-		"	and%I3		%1,%3,%2		\n"
-		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
-		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
-		"	beq		icc3,#0,0b		\n"
-		: "+U"(*v), "=&r"(old), "=r"(tmp)
-		: "NPr"(~mask)
-		: "memory", "cc7", "cc3", "icc3"
-		);
-
-	return old;
-}
-
-static inline
-unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
-{
-	unsigned long old, tmp;
-
-	asm volatile(
-		"0:						\n"
-		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
-		"	ckeq		icc3,cc7		\n"
-		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
-		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
-		"	or%I3		%1,%3,%2		\n"
-		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
-		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
-		"	beq		icc3,#0,0b		\n"
-		: "+U"(*v), "=&r"(old), "=r"(tmp)
-		: "NPr"(mask)
-		: "memory", "cc7", "cc3", "icc3"
-		);
-
-	return old;
-}
-
-static inline
-unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
-{
-	unsigned long old, tmp;
-
-	asm volatile(
-		"0:						\n"
-		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
-		"	ckeq		icc3,cc7		\n"
-		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
-		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
-		"	xor%I3		%1,%3,%2		\n"
-		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
-		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
-		"	beq		icc3,#0,0b		\n"
-		: "+U"(*v), "=&r"(old), "=r"(tmp)
-		: "NPr"(mask)
-		: "memory", "cc7", "cc3", "icc3"
-		);
-
-	return old;
-}
-
-#else
-
-extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
-extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
-extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
-
-#endif
-
-#define atomic_clear_mask(mask, v)	atomic_test_and_ANDNOT_mask((mask), (v))
-#define atomic_set_mask(mask, v)	atomic_test_and_OR_mask((mask), (v))
-
/*****************************************************************************/
/*
 * exchange value with memory
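
The banner above opens the file's exchange-with-memory section, i.e. the
xchg() family. As a hedged illustration of what that API looks like from the
caller's side (the function and variable here are invented for the example,
and are not part of this patch):

/* Hypothetical caller: xchg() atomically stores a new value and
 * returns whatever the memory word held immediately beforehand.
 */
static unsigned long current_token;

static unsigned long install_token(unsigned long new)
{
	return xchg(&current_token, new);	/* old value comes back */
}
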
#include <linux/compiler.h>
#include <asm/byteorder.h>
-#include <asm/system.h>
-#include <asm/atomic.h>
#ifdef __KERNEL__
#define smp_mb__before_clear_bit() barrier()
#define smp_mb__after_clear_bit() barrier()
+#ifndef CONFIG_FRV_OUTOFLINE_ATOMIC_OPS
+static inline
+unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	and%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(~mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+static inline
+unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	or%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+static inline
+unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v)
+{
+	unsigned long old, tmp;
+
+	asm volatile(
+		"0:						\n"
+		"	orcc		gr0,gr0,gr0,icc3	\n"	/* set ICC3.Z */
+		"	ckeq		icc3,cc7		\n"
+		"	ld.p		%M0,%1			\n"	/* LD.P/ORCR are atomic */
+		"	orcr		cc7,cc7,cc3		\n"	/* set CC3 to true */
+		"	xor%I3		%1,%3,%2		\n"
+		"	cst.p		%2,%M0		,cc3,#1	\n"	/* if store happens... */
+		"	corcc		gr29,gr29,gr0	,cc3,#1	\n"	/* ... clear ICC3.Z */
+		"	beq		icc3,#0,0b		\n"
+		: "+U"(*v), "=&r"(old), "=r"(tmp)
+		: "NPr"(mask)
+		: "memory", "cc7", "cc3", "icc3"
+		);
+
+	return old;
+}
+
+#else
+
+extern unsigned long atomic_test_and_ANDNOT_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_OR_mask(unsigned long mask, volatile unsigned long *v);
+extern unsigned long atomic_test_and_XOR_mask(unsigned long mask, volatile unsigned long *v);
+
+#endif
+
+#define atomic_clear_mask(mask, v)	atomic_test_and_ANDNOT_mask((mask), (v))
+#define atomic_set_mask(mask, v)	atomic_test_and_OR_mask((mask), (v))
+
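
These three helpers move verbatim from atomic.h into bitops.h. For readers
who do not speak FRV assembly, the LD.P/ORCR/CST.P sequence is a
load-locked/store-conditional style retry loop: the conditional store only
completes if nothing disturbed the word since the load, otherwise ICC3.Z
stays set and the branch retries. A minimal portable sketch of the same
semantics, using GCC's __sync compare-and-swap builtin purely for
illustration (the sketch is not part of this patch):

/* Hypothetical C-level equivalent of atomic_test_and_OR_mask():
 * keep retrying a compare-and-swap until (old | mask) is installed,
 * then return the value observed before the OR.
 */
static inline unsigned long test_and_OR_mask_sketch(unsigned long mask,
						    volatile unsigned long *v)
{
	unsigned long old;

	do {
		old = *v;		/* snapshot the word, like ld.p */
	} while (__sync_val_compare_and_swap(v, old, old | mask) != old);

	return old;			/* pre-OR value, like operand %1 */
}

The atomic_set_mask()/atomic_clear_mask() wrappers simply discard that
returned old value; e.g. atomic_set_mask(0x4UL, &flags) sets bit 2 of flags.
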
static inline int test_and_clear_bit(int nr, volatile void *addr)
{
	volatile unsigned long *ptr = addr;
#include <linux/types.h>
#include <linux/linkage.h>
+#include <linux/kernel.h>
struct thread_struct;
{
	switch (size) {
	case 4:
-		return cmpxchg(ptr, old, new);
+		return cmpxchg((unsigned long *)ptr, old, new);
	default:
		return __cmpxchg_local_generic(ptr, old, new, size);
	}
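
The cast added above matters because FRV's cmpxchg() operates on unsigned
long words, while this local helper takes a void pointer. As a hedged usage
sketch of the cmpxchg_local() interface this helper backs (the caller code
is invented for the example):

/* Hypothetical caller: cmpxchg_local() is only atomic with respect to
 * the local CPU, which suffices for per-CPU data.  The 4-byte case is
 * dispatched to the hardware-assisted cmpxchg() path above; other sizes
 * fall back to __cmpxchg_local_generic(), which masks interrupts around
 * a plain read-modify-write.
 */
static unsigned long local_counter;

static void bump_if_unchanged(unsigned long expected)
{
	cmpxchg_local(&local_counter, expected, expected + 1);
}
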