X-Git-Url: https://err.no/cgi-bin/gitweb.cgi?a=blobdiff_plain;f=include%2Fasm-sparc64%2Fspinlock.h;h=9cb93a5c2b4feac61ab2245c9ca17cb74997ded0;hb=f7f24758ac98a506770bc5910d33567610fa3403;hp=11efa474865bd20eef9b0a13f42749a6e999da2a;hpb=1da177e4c3f41524e886b7f1b8a0c1fc7321cac2;p=linux-2.6 diff --git a/include/asm-sparc64/spinlock.h b/include/asm-sparc64/spinlock.h index 11efa47486..9cb93a5c2b 100644 --- a/include/asm-sparc64/spinlock.h +++ b/include/asm-sparc64/spinlock.h @@ -31,15 +31,20 @@ #ifndef CONFIG_DEBUG_SPINLOCK -typedef unsigned char spinlock_t; -#define SPIN_LOCK_UNLOCKED 0 +typedef struct { + volatile unsigned char lock; +#ifdef CONFIG_PREEMPT + unsigned int break_lock; +#endif +} spinlock_t; +#define SPIN_LOCK_UNLOCKED (spinlock_t) {0,} -#define spin_lock_init(lock) (*((unsigned char *)(lock)) = 0) -#define spin_is_locked(lock) (*((volatile unsigned char *)(lock)) != 0) +#define spin_lock_init(lp) do { *(lp)= SPIN_LOCK_UNLOCKED; } while(0) +#define spin_is_locked(lp) ((lp)->lock != 0) -#define spin_unlock_wait(lock) \ +#define spin_unlock_wait(lp) \ do { membar("#LoadLoad"); \ -} while(*((volatile unsigned char *)lock)) +} while((lp)->lock) static inline void _raw_spin_lock(spinlock_t *lock) { @@ -47,12 +52,14 @@ static inline void _raw_spin_lock(spinlock_t *lock) __asm__ __volatile__( "1: ldstub [%1], %0\n" +" membar #StoreLoad | #StoreStore\n" " brnz,pn %0, 2f\n" -" membar #StoreLoad | #StoreStore\n" +" nop\n" " .subsection 2\n" "2: ldub [%1], %0\n" +" membar #LoadLoad\n" " brnz,pt %0, 2b\n" -" membar #LoadLoad\n" +" nop\n" " ba,a,pt %%xcc, 1b\n" " .previous" : "=&r" (tmp) @@ -90,16 +97,18 @@ static inline void _raw_spin_lock_flags(spinlock_t *lock, unsigned long flags) __asm__ __volatile__( "1: ldstub [%2], %0\n" -" brnz,pn %0, 2f\n" " membar #StoreLoad | #StoreStore\n" +" brnz,pn %0, 2f\n" +" nop\n" " .subsection 2\n" "2: rdpr %%pil, %1\n" " wrpr %3, %%pil\n" "3: ldub [%2], %0\n" -" brnz,pt %0, 3b\n" " membar #LoadLoad\n" +" brnz,pt %0, 3b\n" +" nop\n" " ba,pt %%xcc, 1b\n" -" wrpr %1, %%pil\n" +" wrpr %1, %%pil\n" " .previous" : "=&r" (tmp1), "=&r" (tmp2) : "r"(lock), "r"(flags) @@ -109,20 +118,19 @@ static inline void _raw_spin_lock_flags(spinlock_t *lock, unsigned long flags) #else /* !(CONFIG_DEBUG_SPINLOCK) */ typedef struct { - unsigned char lock; + volatile unsigned char lock; unsigned int owner_pc, owner_cpu; +#ifdef CONFIG_PREEMPT + unsigned int break_lock; +#endif } spinlock_t; #define SPIN_LOCK_UNLOCKED (spinlock_t) { 0, 0, 0xff } -#define spin_lock_init(__lock) \ -do { (__lock)->lock = 0; \ - (__lock)->owner_pc = 0; \ - (__lock)->owner_cpu = 0xff; \ -} while(0) -#define spin_is_locked(__lock) (*((volatile unsigned char *)(&((__lock)->lock))) != 0) +#define spin_lock_init(lp) do { *(lp)= SPIN_LOCK_UNLOCKED; } while(0) +#define spin_is_locked(__lock) ((__lock)->lock != 0) #define spin_unlock_wait(__lock) \ do { \ membar("#LoadLoad"); \ -} while(*((volatile unsigned char *)(&((__lock)->lock)))) +} while((__lock)->lock) extern void _do_spin_lock (spinlock_t *lock, char *str); extern void _do_spin_unlock (spinlock_t *lock); @@ -139,8 +147,13 @@ extern int _do_spin_trylock (spinlock_t *lock); #ifndef CONFIG_DEBUG_SPINLOCK -typedef unsigned int rwlock_t; -#define RW_LOCK_UNLOCKED 0 +typedef struct { + volatile unsigned int lock; +#ifdef CONFIG_PREEMPT + unsigned int break_lock; +#endif +} rwlock_t; +#define RW_LOCK_UNLOCKED (rwlock_t) {0,} #define rwlock_init(lp) do { *(lp) = RW_LOCK_UNLOCKED; } while(0) static void inline __read_lock(rwlock_t *lock) @@ -153,12 +166,14 @@ 
static void inline __read_lock(rwlock_t *lock) "4: add %0, 1, %1\n" " cas [%2], %0, %1\n" " cmp %0, %1\n" +" membar #StoreLoad | #StoreStore\n" " bne,pn %%icc, 1b\n" -" membar #StoreLoad | #StoreStore\n" +" nop\n" " .subsection 2\n" "2: ldsw [%2], %0\n" +" membar #LoadLoad\n" " brlz,pt %0, 2b\n" -" membar #LoadLoad\n" +" nop\n" " ba,a,pt %%xcc, 4b\n" " .previous" : "=&r" (tmp1), "=&r" (tmp2) @@ -195,12 +210,14 @@ static void inline __write_lock(rwlock_t *lock) "4: or %0, %3, %1\n" " cas [%2], %0, %1\n" " cmp %0, %1\n" +" membar #StoreLoad | #StoreStore\n" " bne,pn %%icc, 1b\n" -" membar #StoreLoad | #StoreStore\n" +" nop\n" " .subsection 2\n" "2: lduw [%2], %0\n" +" membar #LoadLoad\n" " brnz,pt %0, 2b\n" -" membar #LoadLoad\n" +" nop\n" " ba,a,pt %%xcc, 4b\n" " .previous" : "=&r" (tmp1), "=&r" (tmp2) @@ -231,8 +248,9 @@ static int inline __write_trylock(rwlock_t *lock) " or %0, %4, %1\n" " cas [%3], %0, %1\n" " cmp %0, %1\n" +" membar #StoreLoad | #StoreStore\n" " bne,pn %%icc, 1b\n" -" membar #StoreLoad | #StoreStore\n" +" nop\n" " mov 1, %2\n" "2:" : "=&r" (tmp1), "=&r" (tmp2), "=&r" (result) @@ -251,9 +269,12 @@ static int inline __write_trylock(rwlock_t *lock) #else /* !(CONFIG_DEBUG_SPINLOCK) */ typedef struct { - unsigned long lock; + volatile unsigned long lock; unsigned int writer_pc, writer_cpu; unsigned int reader_pc[NR_CPUS]; +#ifdef CONFIG_PREEMPT + unsigned int break_lock; +#endif } rwlock_t; #define RW_LOCK_UNLOCKED (rwlock_t) { 0, 0, 0xff, { } } #define rwlock_init(lp) do { *(lp) = RW_LOCK_UNLOCKED; } while(0) @@ -304,6 +325,8 @@ do { unsigned long flags; \ #endif /* CONFIG_DEBUG_SPINLOCK */ #define _raw_read_trylock(lock) generic_raw_read_trylock(lock) +#define read_can_lock(rw) (!((rw)->lock & 0x80000000UL)) +#define write_can_lock(rw) (!(rw)->lock) #endif /* !(__ASSEMBLY__) */
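The read_can_lock()/write_can_lock() macros added at the end of the patch suggest how the non-debug rwlock_t word is encoded: bit 31 is set while a writer holds (or is taking) the lock, and the remaining bits carry the reader count, so readers may enter whenever bit 31 is clear and a writer only when the whole word is zero. Below is a minimal user-space C sketch of just those two predicates — an illustration under that assumed encoding, not the kernel code; the struct, helper names, and sample values are invented for the example.

#include <stdio.h>

/* Hypothetical stand-in for the patched non-debug rwlock_t. */
typedef struct {
	volatile unsigned int lock;	/* assumed: bit 31 = writer, bits 0..30 = reader count */
} rwlock_sketch_t;

/* Same tests as the macros added by the patch. */
#define read_can_lock(rw)	(!((rw)->lock & 0x80000000U))
#define write_can_lock(rw)	(!(rw)->lock)

int main(void)
{
	rwlock_sketch_t unlocked    = { 0 };		/* no readers, no writer */
	rwlock_sketch_t two_readers = { 2 };		/* two readers hold the lock */
	rwlock_sketch_t writer_held = { 0x80000000U };	/* a writer holds the lock */

	/* Readers are admitted unless the writer bit is set;
	 * a writer is admitted only when the word is entirely zero. */
	printf("unlocked:    read_can_lock=%d write_can_lock=%d\n",
	       read_can_lock(&unlocked), write_can_lock(&unlocked));
	printf("two readers: read_can_lock=%d write_can_lock=%d\n",
	       read_can_lock(&two_readers), write_can_lock(&two_readers));
	printf("writer held: read_can_lock=%d write_can_lock=%d\n",
	       read_can_lock(&writer_held), write_can_lock(&writer_held));
	return 0;
}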