[MIPS] Replace generic__raw_read_trylock usage
author    Ralf Baechle <ralf@linux-mips.org>
          Thu, 31 Aug 2006 13:16:06 +0000 (14:16 +0100)
committer Ralf Baechle <ralf@linux-mips.org>
          Wed, 27 Sep 2006 12:37:39 +0000 (13:37 +0100)
generic__raw_read_trylock() is a defective generic function that actually does
a __raw_read_lock ...
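
For context, the generic fallback being replaced here never actually tried the
lock: it took it unconditionally and reported success. A minimal sketch of the
defect (paraphrased from the generic code of that era; the exact body shown is
an assumption):

    /* Sketch of the defective generic fallback: it cannot fail. */
    int generic__raw_read_trylock(raw_rwlock_t *lock)
    {
            __raw_read_lock(lock);  /* spins until acquired -- not a "try" */
            return 1;               /* always claims success */
    }

A correct trylock must instead return 0 without spinning when the lock is
unavailable, which is what the MIPS implementation below provides.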

Signed-off-by: Ralf Baechle <ralf@linux-mips.org>
include/asm-mips/spinlock.h

index 669b8e349ff29544fce6a35c91307950ba660f4a..4c1a1b53aeaf1c3db24c8f01fab27147600e0020 100644
@@ -239,7 +239,51 @@ static inline void __raw_write_unlock(raw_rwlock_t *rw)
        : "memory");
 }
 
-#define __raw_read_trylock(lock) generic__raw_read_trylock(lock)
+static inline int __raw_read_trylock(raw_rwlock_t *rw)
+{
+       unsigned int tmp;
+       int ret;
+
+       if (R10000_LLSC_WAR) {
+               __asm__ __volatile__(
+               "       .set    noreorder       # __raw_read_trylock    \n"
+               "       li      %2, 0                                   \n"
+               "1:     ll      %1, %3                                  \n"
+               "       bnez    %1, 2f                                  \n"
+               "        addu   %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqzl   %1, 1b                                  \n"
+               "       .set    reorder                                 \n"
+#ifdef CONFIG_SMP
+               "        sync                                           \n"
+#endif
+               "       li      %2, 1                                   \n"
+               "2:                                                     \n"
+               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : "m" (rw->lock)
+               : "memory");
+       } else {
+               __asm__ __volatile__(
+               "       .set    noreorder       # __raw_read_trylock    \n"
+               "       li      %2, 0                                   \n"
+               "1:     ll      %1, %3                                  \n"
+               "       bnez    %1, 2f                                  \n"
+               "        addu   %1, 1                                   \n"
+               "       sc      %1, %0                                  \n"
+               "       beqz    %1, 1b                                  \n"
+               "       .set    reorder                                 \n"
+#ifdef CONFIG_SMP
+               "        sync                                           \n"
+#endif
+               "       li      %2, 1                                   \n"
+               "2:                                                     \n"
+               : "=m" (rw->lock), "=&r" (tmp), "=&r" (ret)
+               : "m" (rw->lock)
+               : "memory");
+       }
+
+       return ret;
+}
 
 static inline int __raw_write_trylock(raw_rwlock_t *rw)
 {
@@ -283,4 +327,5 @@ static inline int __raw_write_trylock(raw_rwlock_t *rw)
        return ret;
 }
 
+
 #endif /* _ASM_SPINLOCK_H */
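
The two inline-assembly variants above differ only in the retry branch: beqzl
(branch-likely) when R10000_LLSC_WAR is set, working around the ll/sc erratum
on early R10000 silicon, and a plain beqz otherwise. In C terms the new
assembly implements roughly the following (a sketch, not kernel code:
__sync_bool_compare_and_swap stands in for the ll/sc pair and already implies
the full barrier the asm gets from sync under CONFIG_SMP; the function name is
illustrative):

    static inline int read_trylock_sketch(volatile unsigned int *lock)
    {
            for (;;) {
                    unsigned int old = *lock;       /* ll   %1, %3 */

                    if (old != 0)                   /* bnez %1, 2f */
                            return 0;               /* held: fail without spinning */
                    if (__sync_bool_compare_and_swap(lock, old, old + 1))
                            return 1;               /* sc succeeded: li %2, 1 */
                    /* sc failed: another CPU touched the word, retry the ll */
            }
    }

Note the bnez check: the trylock gives up whenever the lock word is nonzero,
so it fails not only against a writer but also while other readers hold the
lock; only a completely free lock is taken.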