Diffstat (limited to 'include/asm-alpha/spinlock.h')
-rw-r--r--  include/asm-alpha/spinlock.h | 158
 1 file changed, 138 insertions(+), 20 deletions(-)
diff --git a/include/asm-alpha/spinlock.h b/include/asm-alpha/spinlock.h
index a38477426..1c7afd449 100644
--- a/include/asm-alpha/spinlock.h
+++ b/include/asm-alpha/spinlock.h
@@ -45,7 +45,10 @@ typedef struct { int dummy; } rwlock_t;
#define write_lock_irqsave(lock, flags) do { (flags) = swpipl(7); } while (0)
#define write_unlock_irqrestore(lock, flags) setipl(flags)
-#else
+#else /* __SMP__ */
+
+#include <linux/kernel.h>
+#include <asm/current.h>
/* Simple spin lock operations. There are two variants, one clears IRQ's
* on the local processor, one does not.
@@ -56,12 +59,15 @@ typedef struct { int dummy; } rwlock_t;
typedef struct {
volatile unsigned long lock;
unsigned long previous;
+ unsigned long task;
} spinlock_t;
#define SPIN_LOCK_UNLOCKED { 0, 0 }
-#define spin_lock_init(lock) do { (lock)->lock = 0; (lock)->previous = 0; } while(0)
-#define spin_unlock_wait(lock) do { barrier(); } while(((volatile spinlock_t *)lock)->lock)
+#define spin_lock_init(x) \
+ do { (x)->lock = 0; (x)->previous = 0; } while(0)
+#define spin_unlock_wait(x) \
+ do { barrier(); } while(((volatile spinlock_t *)x)->lock)
typedef struct { unsigned long a[100]; } __dummy_lock_t;
#define __dummy_lock(lock) (*(__dummy_lock_t *)(lock))
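
The __dummy_lock cast above is the usual kernel trick for memory-clobber constraints: handing the asm an "=m" operand whose type spans a large region (100 longs) tells GCC that the asm may read or write the whole lock object, so it must not cache lock fields in registers across the asm. A minimal stand-alone sketch of the same idiom (names below are illustrative only, not part of this header):

/* Hypothetical illustration of the __dummy_lock idiom. */
typedef struct { unsigned long a[100]; } wide_mem_t;

static inline void sketch_clear_word(unsigned long *p)
{
        /* The oversized "=m" operand makes the compiler assume the asm
         * may touch anything reachable through p, exactly as
         * __dummy_lock(lock) does for spin_unlock() above. */
        __asm__ __volatile__("stq $31,%0" : "=m" (*(wide_mem_t *)p));
}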
@@ -73,40 +79,38 @@ static inline void spin_unlock(spinlock_t * lock)
:"=m" (__dummy_lock(lock)));
}
+#if 1
+#define DEBUG_SPINLOCK
+#else
+#undef DEBUG_SPINLOCK
+#endif
+
+#ifdef DEBUG_SPINLOCK
+extern void spin_lock(spinlock_t * lock);
+#else
static inline void spin_lock(spinlock_t * lock)
{
- __label__ l1;
long tmp;
- long stuck = 0x100000000;
-l1:
+
/* Use sub-sections to put the actual loop at the end
of this object file's text section so as to perfect
branch prediction. */
__asm__ __volatile__(
"1: ldq_l %0,%1\n"
- " subq %2,1,%2\n"
" blbs %0,2f\n"
" or %0,1,%0\n"
" stq_c %0,%1\n"
- " beq %0,3f\n"
+ " beq %0,2f\n"
"4: mb\n"
".section .text2,\"ax\"\n"
"2: ldq %0,%1\n"
- " subq %2,1,%2\n"
- "3: blt %2,4b\n"
" blbs %0,2b\n"
" br 1b\n"
".previous"
: "=r" (tmp),
- "=m" (__dummy_lock(lock)),
- "=r" (stuck)
- : "2" (stuck));
-
- if (stuck < 0)
- printk("spinlock stuck at %p (%lx)\n",&&l1,lock->previous);
- else
- lock->previous = (unsigned long) &&l1;
+ "=m" (__dummy_lock(lock)));
}
+#endif /* DEBUG_SPINLOCK */
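
Stripped of the .text2 placement, the ldq_l/stq_c sequence above is an acquire test-and-set on bit 0 of lock->lock, with the retry loop spinning on plain ldq loads so contended CPUs do not keep issuing locked stores. A rough C equivalent using GCC's __atomic builtins (a sketch of the protocol only, not a replacement for the asm):

/* Hedged C sketch of the spin_lock() protocol; 'sketch_spin_lock' is a
 * hypothetical name, not part of this header. */
static inline void sketch_spin_lock(volatile unsigned long *word)
{
        for (;;) {
                /* Fast path: atomically set bit 0; ACQUIRE plays the
                 * role of the "mb" after a successful stq_c. */
                if (!(__atomic_fetch_or(word, 1UL, __ATOMIC_ACQUIRE) & 1))
                        return;
                /* Slow path (the ".text2" loop): poll with ordinary
                 * loads until the low bit clears, then try again. */
                while (__atomic_load_n(word, __ATOMIC_RELAXED) & 1)
                        ;
        }
}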
#define spin_trylock(lock) (!test_and_set_bit(0,(lock)))
@@ -117,10 +121,124 @@ l1:
do { spin_unlock(lock); __sti(); } while (0)
#define spin_lock_irqsave(lock, flags) \
- do { flags = swpipl(7); spin_lock(lock); } while (0)
+ do { __save_and_cli(flags); spin_lock(lock); } while (0)
#define spin_unlock_irqrestore(lock, flags) \
- do { spin_unlock(lock); setipl(flags); } while (0)
+ do { spin_unlock(lock); __restore_flags(flags); } while (0)
+
+/***********************************************************/
+
+#if 1
+#define DEBUG_RWLOCK
+#else
+#undef DEBUG_RWLOCK
+#endif
+
+typedef struct { volatile int write_lock:1, read_counter:31; } rwlock_t;
+
+#define RW_LOCK_UNLOCKED { 0, 0 }
+
+#ifdef DEBUG_RWLOCK
+extern void write_lock(rwlock_t * lock);
+#else
+static inline void write_lock(rwlock_t * lock)
+{
+ long regx, regy;
+
+ __asm__ __volatile__(
+ "1: ldl_l %1,%0;"
+ " blbs %1,6f;"
+ " or %1,1,%2;"
+ " stl_c %2,%0;"
+ " beq %2,6f;"
+ " blt %1,8f;"
+ "4: mb\n"
+ ".section .text2,\"ax\"\n"
+ "6: ldl %1,%0;"
+ " blbs %1,6b;"
+ " br 1b;"
+ "8: ldl %1,%0;"
+ " blt %1,8b;"
+ "9: br 4b\n"
+ ".previous"
+ : "=m" (__dummy_lock(lock)), "=&r" (regx), "=&r" (regy)
+ : "0" (__dummy_lock(lock))
+ );
+}
+#endif /* DEBUG_RWLOCK */
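
The rwlock_t above packs both sides into one 32-bit word: bit 0 is the writer flag and each reader subtracts 2, so any active reader leaves the word negative. write_lock() first claims the writer bit (spinning in the "6:" loop while another writer holds it or the stl_c fails) and then waits in the "8:" loop for the word to become non-negative, i.e. for existing readers to drain. A hedged C model of that write-side state machine, with hypothetical names:

/* Illustrative write-side model of the rwlock word (hypothetical names):
 * bit 0 = writer flag, each reader subtracts 2, so readers => negative. */
static inline void sketch_write_lock(volatile int *word)
{
        /* Claim the writer bit, waiting out any other writer. */
        while (__atomic_fetch_or(word, 1, __ATOMIC_ACQUIRE) & 1)
                while (__atomic_load_n(word, __ATOMIC_RELAXED) & 1)
                        ;
        /* Now wait for readers already inside to drain (word < 0),
         * mirroring the "blt %1,8f" path in the asm above. */
        while (__atomic_load_n(word, __ATOMIC_RELAXED) < 0)
                ;
}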
+
+static inline void write_unlock(rwlock_t * lock)
+{
+ __asm__ __volatile__("mb; stl $31,%0" : "=m" (__dummy_lock(lock)));
+}
+
+#ifdef DEBUG_RWLOCK
+extern void _read_lock(rwlock_t * lock);
+#else
+static inline void _read_lock(rwlock_t * lock)
+{
+ long regx;
+
+ __asm__ __volatile__(
+ "1: ldl_l %1,%0;"
+ " blbs %1,6f;"
+ " subl %1,2,%1;"
+ " stl_c %1,%0;"
+ " beq %1,6f;"
+ "4: mb\n"
+ ".section .text2,\"ax\"\n"
+ "6: ldl %1,%0;"
+ " blbs %1,6b;"
+ " br 1b\n"
+ ".previous"
+ : "=m" (__dummy_lock(lock)), "=&r" (regx)
+ : "0" (__dummy_lock(lock))
+ );
+}
+#endif /* DEBUG_RWLOCK */
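
_read_lock() is the mirror image: it backs off while the writer bit is set and otherwise subtracts 2, keeping the reader count in the upper bits of the same word; _read_unlock() further down simply adds the 2 back. The reader side of the same hypothetical model:

/* Hedged reader-side sketch for the same hypothetical rwlock model. */
static inline void sketch_read_lock(volatile int *word)
{
        for (;;) {
                int old = __atomic_load_n(word, __ATOMIC_RELAXED);
                if (old & 1)            /* a writer holds the lock */
                        continue;
                /* Publish "one more reader" by storing old - 2, just as
                 * the ldl_l/subl/stl_c sequence above does. */
                if (__atomic_compare_exchange_n(word, &old, old - 2, 0,
                                __ATOMIC_ACQUIRE, __ATOMIC_RELAXED))
                        return;
        }
}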
+
+#define read_lock(lock) \
+do { unsigned long flags; \
+ __save_and_cli(flags); \
+ _read_lock(lock); \
+ __restore_flags(flags); \
+} while(0)
+
+static inline void _read_unlock(rwlock_t * lock)
+{
+ long regx;
+ __asm__ __volatile__(
+ "1: ldl_l %1,%0;"
+ " addl %1,2,%1;"
+ " stl_c %1,%0;"
+ " beq %1,6f;"
+ ".section .text2,\"ax\"\n"
+ "6: br 1b\n"
+ ".previous"
+ : "=m" (__dummy_lock(lock)), "=&r" (regx)
+ : "0" (__dummy_lock(lock)));
+}
+
+#define read_unlock(lock) \
+do { unsigned long flags; \
+ __save_and_cli(flags); \
+ _read_unlock(lock); \
+ __restore_flags(flags); \
+} while(0)
+
+#define read_lock_irq(lock) do { __cli(); _read_lock(lock); } while (0)
+#define read_unlock_irq(lock) do { _read_unlock(lock); __sti(); } while (0)
+#define write_lock_irq(lock) do { __cli(); write_lock(lock); } while (0)
+#define write_unlock_irq(lock) do { write_unlock(lock); __sti(); } while (0)
+
+#define read_lock_irqsave(lock, flags) \
+ do { __save_and_cli(flags); _read_lock(lock); } while (0)
+#define read_unlock_irqrestore(lock, flags) \
+ do { _read_unlock(lock); __restore_flags(flags); } while (0)
+#define write_lock_irqsave(lock, flags) \
+ do { __save_and_cli(flags); write_lock(lock); } while (0)
+#define write_unlock_irqrestore(lock, flags) \
+ do { write_unlock(lock); __restore_flags(flags); } while (0)
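+
Typical use of the irqsave variants added above looks like the following sketch (the lock name and the protected work are illustrative only, not from this patch):

/* Illustrative only; 'my_rwlock' and the traversal are hypothetical. */
rwlock_t my_rwlock = RW_LOCK_UNLOCKED;

void reader_path(void)
{
        unsigned long flags;

        read_lock_irqsave(&my_rwlock, flags);
        /* ... traverse the shared structure ... */
        read_unlock_irqrestore(&my_rwlock, flags);
}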
#endif /* SMP */
#endif /* _ALPHA_SPINLOCK_H */