0001
0002 #ifndef _ALPHA_SPINLOCK_H
0003 #define _ALPHA_SPINLOCK_H
0004
0005 #include <linux/kernel.h>
0006 #include <asm/current.h>
0007 #include <asm/barrier.h>
0008 #include <asm/processor.h>
0009
0010
0011
0012
0013
0014
0015
0016
/* A non-zero lock word means the spinlock is currently held. */
#define arch_spin_is_locked(x) ((x)->lock != 0)
0018
0019 static inline int arch_spin_value_unlocked(arch_spinlock_t lock)
0020 {
0021 return lock.lock == 0;
0022 }
0023
/*
 * Release the spinlock.  The mb() must come first: it orders all
 * critical-section memory accesses before the plain store that
 * clears the lock word, so other CPUs never observe the release
 * ahead of the protected updates.
 */
static inline void arch_spin_unlock(arch_spinlock_t * lock)
{
	mb();
	lock->lock = 0;
}
0029
/*
 * Acquire the spinlock with an Alpha load-locked/store-conditional
 * (ldl_l/stl_c) loop.  The slow path is placed in .subsection 2 so
 * the uncontended acquire is straight-line code: on contention (lock
 * word non-zero) or on a failed stl_c, it spins with plain ldl reads
 * until the word looks free, then branches back to retry the ll/sc.
 * The trailing mb orders the acquisition before the critical section.
 */
static inline void arch_spin_lock(arch_spinlock_t * lock)
{
	long tmp;

	__asm__ __volatile__(
	"1: ldl_l %0,%1\n"		/* load-locked the lock word */
	" bne %0,2f\n"			/* held -> spin on slow path */
	" lda %0,1\n"			/* try to store 1 ... */
	" stl_c %0,%1\n"		/* ... conditionally */
	" beq %0,2f\n"			/* stl_c failed -> slow path */
	" mb\n"				/* acquire barrier */
	".subsection 2\n"
	"2: ldl %0,%1\n"		/* cheap read-only spin */
	" bne %0,2b\n"
	" br 1b\n"			/* looks free: retry ll/sc */
	".previous"
	: "=&r" (tmp), "=m" (lock->lock)
	: "m"(lock->lock) : "memory");
}
0049
/*
 * Make one attempt to take the lock.  Returns non-zero on success,
 * 0 if the lock was already held.  Atomicity (and, presumably, the
 * acquire barrier on success) is provided by test_and_set_bit from
 * asm/bitops.h -- NOTE(review): barrier semantics not visible here,
 * confirm against that helper.
 */
static inline int arch_spin_trylock(arch_spinlock_t *lock)
{
	return !test_and_set_bit(0, &lock->lock);
}
0054
0055
0056
/*
 * Take the rwlock for reading.  Encoding visible in the asm: the low
 * bit of the lock word is the writer flag (blbs tests it), and each
 * reader subtracts 2, so reader accounting never disturbs that bit.
 * Contended/failed-sc paths spin on plain ldl in .subsection 2 until
 * the writer bit clears, then retry the ll/sc.  The mb orders the
 * acquire before the read-side critical section.
 */
static inline void arch_read_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1: ldl_l %1,%0\n"		/* load-locked lock word */
	" blbs %1,6f\n"			/* writer holds it -> spin */
	" subl %1,2,%1\n"		/* add ourselves as a reader */
	" stl_c %1,%0\n"
	" beq %1,6f\n"			/* sc failed -> slow path */
	" mb\n"				/* acquire barrier */
	".subsection 2\n"
	"6: ldl %1,%0\n"		/* read-only spin */
	" blbs %1,6b\n"
	" br 1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
0076
/*
 * Take the rwlock for writing.  A writer needs the whole word to be
 * zero (no readers, no writer -- bne tests the full value) and then
 * stores 1, setting the writer bit.  Contention and failed stl_c
 * both fall to the .subsection 2 spin loop, which waits with plain
 * loads for the word to drain to zero before retrying the ll/sc.
 * The mb orders the acquire before the write-side critical section.
 */
static inline void arch_write_lock(arch_rwlock_t *lock)
{
	long regx;

	__asm__ __volatile__(
	"1: ldl_l %1,%0\n"		/* load-locked lock word */
	" bne %1,6f\n"			/* readers or writer -> spin */
	" lda %1,1\n"			/* claim the writer bit */
	" stl_c %1,%0\n"
	" beq %1,6f\n"			/* sc failed -> slow path */
	" mb\n"				/* acquire barrier */
	".subsection 2\n"
	"6: ldl %1,%0\n"		/* read-only spin */
	" bne %1,6b\n"
	" br 1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
0096
/*
 * One attempt to take the rwlock for reading.  Returns non-zero on
 * success, 0 if a writer holds the lock (low bit set).  "success" is
 * preset to 0; if the writer bit is clear it receives the stl_c
 * result (1 on success).  A failed stl_c -- not a held lock -- is
 * the only retry case (branch to 6f then back to 1b).  The mb at
 * label 2 runs on both outcomes.
 */
static inline int arch_read_trylock(arch_rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1: ldl_l %1,%0\n"		/* load-locked lock word */
	" lda %2,0\n"			/* assume failure */
	" blbs %1,2f\n"			/* writer held -> give up */
	" subl %1,2,%2\n"		/* add a reader */
	" stl_c %2,%0\n"		/* %2 := 1 on sc success */
	" beq %2,6f\n"			/* sc failed -> retry */
	"2: mb\n"
	".subsection 2\n"
	"6: br 1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
0118
/*
 * One attempt to take the rwlock for writing.  Returns non-zero on
 * success, 0 if the word is non-zero (readers or writer present).
 * "success" is preset to 0; when the word is zero it is set to 1 and
 * conditionally stored, so it doubles as the sc result.  Only a
 * failed stl_c retries (6f -> 1b); a busy lock returns immediately.
 * The mb at label 2 runs on both outcomes.
 */
static inline int arch_write_trylock(arch_rwlock_t * lock)
{
	long regx;
	int success;

	__asm__ __volatile__(
	"1: ldl_l %1,%0\n"		/* load-locked lock word */
	" lda %2,0\n"			/* assume failure */
	" bne %1,2f\n"			/* busy -> give up */
	" lda %2,1\n"			/* claim the writer bit */
	" stl_c %2,%0\n"		/* %2 := 1 on sc success */
	" beq %2,6f\n"			/* sc failed -> retry */
	"2: mb\n"
	".subsection 2\n"
	"6: br 1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx), "=&r" (success)
	: "m" (*lock) : "memory");

	return success;
}
0140
/*
 * Drop a read hold on the rwlock: atomically add 2 back to the lock
 * word (readers are counted in units of 2 -- see arch_read_lock's
 * subl).  The leading mb orders critical-section accesses before the
 * release; a failed stl_c simply retries the ll/sc.
 */
static inline void arch_read_unlock(arch_rwlock_t * lock)
{
	long regx;
	__asm__ __volatile__(
	" mb\n"				/* release barrier */
	"1: ldl_l %1,%0\n"
	" addl %1,2,%1\n"		/* remove one reader */
	" stl_c %1,%0\n"
	" beq %1,6f\n"			/* sc failed -> retry */
	".subsection 2\n"
	"6: br 1b\n"
	".previous"
	: "=m" (*lock), "=&r" (regx)
	: "m" (*lock) : "memory");
}
0156
/*
 * Release the write side.  The sole writer owns the whole word, so a
 * plain store of 0 suffices -- no ll/sc needed.  The mb() must come
 * first so the protected updates are visible before the release.
 */
static inline void arch_write_unlock(arch_rwlock_t * lock)
{
	mb();
	lock->lock = 0;
}
0162
0163 #endif