From: Christoph Lameter
Date: Fri, 6 Jan 2006 08:11:20 +0000 (-0800)
Subject: [PATCH] atomic_long_t & include/asm-generic/atomic.h V2
X-Git-Tag: v2.6.16-rc1~936^2~239
X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=commitdiff_plain;h=d3cb487149bd706aa6aeb02042332a450978dc1c;p=linux-2.6

[PATCH] atomic_long_t & include/asm-generic/atomic.h V2

Several counters already need 64 bit atomic variables on 64 bit platforms
(see mm_counter_t in sched.h).  We have to do ugly ifdefs to fall back to
32 bit atomics on 32 bit platforms.

The VM statistics patch that I am working on will also make more extensive
use of atomic64.

This patch introduces a new type, atomic_long_t, by providing definitions
in asm-generic/atomic.h that work similarly to the C "long" type.  It is
32 bits on 32 bit platforms and 64 bits on 64 bit platforms.

Also cleans up the determination of mm_counter_t in sched.h.

Signed-off-by: Christoph Lameter
Signed-off-by: Andrew Morton
Signed-off-by: Linus Torvalds
---

diff --git a/include/asm-alpha/atomic.h b/include/asm-alpha/atomic.h
index 6183eab006..cb03bbe92c 100644
--- a/include/asm-alpha/atomic.h
+++ b/include/asm-alpha/atomic.h
@@ -216,4 +216,5 @@ static __inline__ long atomic64_sub_return(long i, atomic64_t * v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* _ALPHA_ATOMIC_H */
diff --git a/include/asm-arm/atomic.h b/include/asm-arm/atomic.h
index d586f65c82..f72b63309b 100644
--- a/include/asm-arm/atomic.h
+++ b/include/asm-arm/atomic.h
@@ -205,5 +205,6 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
 #endif
diff --git a/include/asm-arm26/atomic.h b/include/asm-arm26/atomic.h
index a47cadc596..3074b0e763 100644
--- a/include/asm-arm26/atomic.h
+++ b/include/asm-arm26/atomic.h
@@ -118,5 +118,6 @@ static inline void atomic_clear_mask(unsigned long mask, unsigned long *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
 #endif
diff --git a/include/asm-cris/atomic.h b/include/asm-cris/atomic.h
index 683b05a57d..2df2c7aa19 100644
--- a/include/asm-cris/atomic.h
+++ b/include/asm-cris/atomic.h
@@ -156,4 +156,5 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-frv/atomic.h b/include/asm-frv/atomic.h
index f6539ff569..3f54fea2b0 100644
--- a/include/asm-frv/atomic.h
+++ b/include/asm-frv/atomic.h
@@ -426,4 +426,5 @@ extern uint32_t __cmpxchg_32(uint32_t *v, uint32_t test, uint32_t new);
 })
 #define atomic_inc_not_zero(v) atomic_add_unless((v), 1, 0)
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
diff --git a/include/asm-generic/atomic.h b/include/asm-generic/atomic.h
new file mode 100644
index 0000000000..e0a28b925e
--- /dev/null
+++ b/include/asm-generic/atomic.h
@@ -0,0 +1,116 @@
+#ifndef _ASM_GENERIC_ATOMIC_H
+#define _ASM_GENERIC_ATOMIC_H
+/*
+ * Copyright (C) 2005 Silicon Graphics, Inc.
+ * Christoph Lameter
+ *
+ * Allows providing arch independent atomic definitions without the need to
+ * edit all arch specific atomic.h files.
+ */
+
+
+/*
+ * Support for atomic_long_t
+ *
+ * Casts for parameters are avoided for existing atomic functions in order to
+ * avoid issues with cast-as-lval under gcc 4.x and other limitations that the
+ * macros of a platform may have.
+ */
+
+#if BITS_PER_LONG == 64
+
+typedef atomic64_t atomic_long_t;
+
+#define ATOMIC_LONG_INIT(i) ATOMIC64_INIT(i)
+
+static inline long atomic_long_read(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	return (long)atomic64_read(v);
+}
+
+static inline void atomic_long_set(atomic_long_t *l, long i)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_set(v, i);
+}
+
+static inline void atomic_long_inc(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_inc(v);
+}
+
+static inline void atomic_long_dec(atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_dec(v);
+}
+
+static inline void atomic_long_add(long i, atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_add(i, v);
+}
+
+static inline void atomic_long_sub(long i, atomic_long_t *l)
+{
+	atomic64_t *v = (atomic64_t *)l;
+
+	atomic64_sub(i, v);
+}
+
+#else
+
+typedef atomic_t atomic_long_t;
+
+#define ATOMIC_LONG_INIT(i) ATOMIC_INIT(i)
+static inline long atomic_long_read(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	return (long)atomic_read(v);
+}
+
+static inline void atomic_long_set(atomic_long_t *l, long i)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_set(v, i);
+}
+
+static inline void atomic_long_inc(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_inc(v);
+}
+
+static inline void atomic_long_dec(atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_dec(v);
+}
+
+static inline void atomic_long_add(long i, atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_add(i, v);
+}
+
+static inline void atomic_long_sub(long i, atomic_long_t *l)
+{
+	atomic_t *v = (atomic_t *)l;
+
+	atomic_sub(i, v);
+}
+
+#endif
+#endif
diff --git a/include/asm-h8300/atomic.h b/include/asm-h8300/atomic.h
index f23d86819e..d891541e89 100644
--- a/include/asm-h8300/atomic.h
+++ b/include/asm-h8300/atomic.h
@@ -137,4 +137,5 @@ static __inline__ void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_H8300_ATOMIC __ */
diff --git a/include/asm-i386/atomic.h b/include/asm-i386/atomic.h
index c68557aa04..7a5472d770 100644
--- a/include/asm-i386/atomic.h
+++ b/include/asm-i386/atomic.h
@@ -254,4 +254,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-ia64/atomic.h b/include/asm-ia64/atomic.h
index 2fbebf85c3..15cf7984c4 100644
--- a/include/asm-ia64/atomic.h
+++ b/include/asm-ia64/atomic.h
@@ -192,4 +192,5 @@ atomic64_add_negative (__s64 i, atomic64_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_IA64_ATOMIC_H */
diff --git a/include/asm-m32r/atomic.h b/include/asm-m32r/atomic.h
index ef1fb8ea47..70761278b6 100644
--- a/include/asm-m32r/atomic.h
+++ b/include/asm-m32r/atomic.h
@@ -313,4 +313,5 @@ static __inline__ void atomic_set_mask(unsigned long mask, atomic_t *addr)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_M32R_ATOMIC_H */
diff --git a/include/asm-m68k/atomic.h b/include/asm-m68k/atomic.h
index e3c962eeab..b8a4e75d67 100644
--- a/include/asm-m68k/atomic.h
+++ b/include/asm-m68k/atomic.h
@@ -157,4 +157,5 @@ static inline void atomic_set_mask(unsigned long mask, unsigned long *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_M68K_ATOMIC __ */
diff --git a/include/asm-m68knommu/atomic.h b/include/asm-m68knommu/atomic.h
index 3c1cc153c4..1702dbe931 100644
--- a/include/asm-m68knommu/atomic.h
+++ b/include/asm-m68knommu/atomic.h
@@ -143,4 +143,5 @@ static inline int atomic_sub_return(int i, atomic_t * v)
 #define atomic_dec_return(v) atomic_sub_return(1,(v))
 #define atomic_inc_return(v) atomic_add_return(1,(v))
 
+#include <asm-generic/atomic.h>
 #endif /* __ARCH_M68KNOMMU_ATOMIC __ */
diff --git a/include/asm-mips/atomic.h b/include/asm-mips/atomic.h
index 55c37c106e..92256e43a9 100644
--- a/include/asm-mips/atomic.h
+++ b/include/asm-mips/atomic.h
@@ -713,4 +713,5 @@ static __inline__ long atomic64_sub_if_positive(long i, atomic64_t * v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* _ASM_ATOMIC_H */
diff --git a/include/asm-parisc/atomic.h b/include/asm-parisc/atomic.h
index 983e9a2b60..64ebd086c4 100644
--- a/include/asm-parisc/atomic.h
+++ b/include/asm-parisc/atomic.h
@@ -216,4 +216,5 @@ static __inline__ int atomic_read(const atomic_t *v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-powerpc/atomic.h b/include/asm-powerpc/atomic.h
index ec4b144689..ae395a0632 100644
--- a/include/asm-powerpc/atomic.h
+++ b/include/asm-powerpc/atomic.h
@@ -402,5 +402,6 @@ static __inline__ long atomic64_dec_if_positive(atomic64_t *v)
 #endif /* __powerpc64__ */
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _ASM_POWERPC_ATOMIC_H_ */
diff --git a/include/asm-s390/atomic.h b/include/asm-s390/atomic.h
index b3bd4f679f..6d07c7df4b 100644
--- a/include/asm-s390/atomic.h
+++ b/include/asm-s390/atomic.h
@@ -215,5 +215,6 @@ atomic_compare_and_swap(int expected_oldval,int new_val,atomic_t *v)
 #define smp_mb__before_atomic_inc() smp_mb()
 #define smp_mb__after_atomic_inc() smp_mb()
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* __ARCH_S390_ATOMIC__ */
diff --git a/include/asm-sh/atomic.h b/include/asm-sh/atomic.h
index aabfd33446..618d8e0de3 100644
--- a/include/asm-sh/atomic.h
+++ b/include/asm-sh/atomic.h
@@ -140,4 +140,5 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ASM_SH_ATOMIC_H */
diff --git a/include/asm-sh64/atomic.h b/include/asm-sh64/atomic.h
index 927a2bc27b..f3ce5c0df1 100644
--- a/include/asm-sh64/atomic.h
+++ b/include/asm-sh64/atomic.h
@@ -152,4 +152,5 @@ static __inline__ void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __ASM_SH64_ATOMIC_H */
diff --git a/include/asm-sparc/atomic.h b/include/asm-sparc/atomic.h
index 62bec7ad27..accb4967e9 100644
--- a/include/asm-sparc/atomic.h
+++ b/include/asm-sparc/atomic.h
@@ -159,4 +159,5 @@ static inline int __atomic24_sub(int i, atomic24_t *v)
 #endif /* !(__KERNEL__) */
 
+#include <asm-generic/atomic.h>
 #endif /* !(__ARCH_SPARC_ATOMIC__) */
diff --git a/include/asm-sparc64/atomic.h b/include/asm-sparc64/atomic.h
index 3789fe3159..11f5aa5d10 100644
--- a/include/asm-sparc64/atomic.h
+++ b/include/asm-sparc64/atomic.h
@@ -96,4 +96,5 @@ extern int atomic64_sub_ret(int, atomic64_t *);
 #define smp_mb__after_atomic_inc() barrier()
 #endif
 
+#include <asm-generic/atomic.h>
 #endif /* !(__ARCH_SPARC64_ATOMIC__) */
diff --git a/include/asm-v850/atomic.h b/include/asm-v850/atomic.h
index bede3172ce..f5b9ab6f4e 100644
--- a/include/asm-v850/atomic.h
+++ b/include/asm-v850/atomic.h
@@ -126,4 +126,5 @@ static inline int atomic_add_unless(atomic_t *v, int a, int u)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __V850_ATOMIC_H__ */
diff --git a/include/asm-x86_64/atomic.h b/include/asm-x86_64/atomic.h
index 50db9f3927..72eb071488 100644
--- a/include/asm-x86_64/atomic.h
+++ b/include/asm-x86_64/atomic.h
@@ -424,4 +424,5 @@ __asm__ __volatile__(LOCK "orl %0,%1" \
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif
diff --git a/include/asm-xtensa/atomic.h b/include/asm-xtensa/atomic.h
index 3670cc7695..e2ce06b101 100644
--- a/include/asm-xtensa/atomic.h
+++ b/include/asm-xtensa/atomic.h
@@ -286,6 +286,7 @@ static inline void atomic_set_mask(unsigned int mask, atomic_t *v)
 #define smp_mb__before_atomic_inc() barrier()
 #define smp_mb__after_atomic_inc() barrier()
 
+#include <asm-generic/atomic.h>
 #endif /* __KERNEL__ */
 #endif /* _XTENSA_ATOMIC_H */
diff --git a/include/linux/sched.h b/include/linux/sched.h
index b0ad6f3067..7da33619d5 100644
--- a/include/linux/sched.h
+++ b/include/linux/sched.h
@@ -254,25 +254,12 @@ extern void arch_unmap_area_topdown(struct mm_struct *, unsigned long);
  * The mm counters are not protected by its page_table_lock,
  * so must be incremented atomically.
  */
-#ifdef ATOMIC64_INIT
-#define set_mm_counter(mm, member, value) atomic64_set(&(mm)->_##member, value)
-#define get_mm_counter(mm, member) ((unsigned long)atomic64_read(&(mm)->_##member))
-#define add_mm_counter(mm, member, value) atomic64_add(value, &(mm)->_##member)
-#define inc_mm_counter(mm, member) atomic64_inc(&(mm)->_##member)
-#define dec_mm_counter(mm, member) atomic64_dec(&(mm)->_##member)
-typedef atomic64_t mm_counter_t;
-#else /* !ATOMIC64_INIT */
-/*
- * The counters wrap back to 0 at 2^32 * PAGE_SIZE,
- * that is, at 16TB if using 4kB page size.
- */
-#define set_mm_counter(mm, member, value) atomic_set(&(mm)->_##member, value)
-#define get_mm_counter(mm, member) ((unsigned long)atomic_read(&(mm)->_##member))
-#define add_mm_counter(mm, member, value) atomic_add(value, &(mm)->_##member)
-#define inc_mm_counter(mm, member) atomic_inc(&(mm)->_##member)
-#define dec_mm_counter(mm, member) atomic_dec(&(mm)->_##member)
-typedef atomic_t mm_counter_t;
-#endif /* !ATOMIC64_INIT */
+#define set_mm_counter(mm, member, value) atomic_long_set(&(mm)->_##member, value)
+#define get_mm_counter(mm, member) ((unsigned long)atomic_long_read(&(mm)->_##member))
+#define add_mm_counter(mm, member, value) atomic_long_add(value, &(mm)->_##member)
+#define inc_mm_counter(mm, member) atomic_long_inc(&(mm)->_##member)
+#define dec_mm_counter(mm, member) atomic_long_dec(&(mm)->_##member)
+typedef atomic_long_t mm_counter_t;
 #else /* NR_CPUS < CONFIG_SPLIT_PTLOCK_CPUS */
 
 /*
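
For illustration, a minimal sketch of how a caller might use the new atomic_long_t
interface once the arch headers pull in asm-generic/atomic.h.  The counter name and
helper functions below are hypothetical examples, not part of this patch; only the
ATOMIC_LONG_INIT/atomic_long_* names come from the header added above.

	#include <asm/atomic.h>	/* the arch atomic.h now includes asm-generic/atomic.h */

	/* A counter that is 32 bits wide on 32 bit and 64 bits wide on 64 bit platforms. */
	static atomic_long_t nr_tracked = ATOMIC_LONG_INIT(0);

	static inline void track_one(void)
	{
		atomic_long_inc(&nr_tracked);		/* atomically increment the counter */
	}

	static inline void untrack(long n)
	{
		atomic_long_sub(n, &nr_tracked);	/* atomically subtract n */
	}

	static inline long tracked_count(void)
	{
		return atomic_long_read(&nr_tracked);	/* read the current value */
	}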