0001
0002
0003
0004
0005
0006
0007
0008 #ifndef _ASM_FUTEX_H
0009 #define _ASM_FUTEX_H
0010
0011 #include <linux/futex.h>
0012 #include <linux/preempt.h>
0013 #include <linux/uaccess.h>
0014 #include <asm/errno.h>
0015
0016 #ifdef CONFIG_ARC_HAS_LLSC
0017
0018 #define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
0019 \
0020 smp_mb(); \
0021 __asm__ __volatile__( \
0022 "1: llock %1, [%2] \n" \
0023 insn "\n" \
0024 "2: scond %0, [%2] \n" \
0025 " bnz 1b \n" \
0026 " mov %0, 0 \n" \
0027 "3: \n" \
0028 " .section .fixup,\"ax\" \n" \
0029 " .align 4 \n" \
0030 "4: mov %0, %4 \n" \
0031 " j 3b \n" \
0032 " .previous \n" \
0033 " .section __ex_table,\"a\" \n" \
0034 " .align 4 \n" \
0035 " .word 1b, 4b \n" \
0036 " .word 2b, 4b \n" \
0037 " .previous \n" \
0038 \
0039 : "=&r" (ret), "=&r" (oldval) \
0040 : "r" (uaddr), "r" (oparg), "ir" (-EFAULT) \
0041 : "cc", "memory"); \
0042 smp_mb() \
0043
0044 #else
0045
/*
 * __futex_atomic_op() - plain ld/st flavour (no LLOCK/SCOND hardware).
 *
 * Same contract as the LLSC variant: oldval = *uaddr, then the new
 * value computed by 'insn' is stored back, with 'ret' set to 0 on
 * success or -EFAULT (%4, via the .fixup/__ex_table machinery) if the
 * user access at label 1 or 2 faults.
 *
 * The ld/st pair is NOT atomic by itself; the caller
 * (arch_futex_atomic_op_inuser) brackets the operation with
 * preempt_disable()/preempt_enable() in this configuration, which is
 * sufficient only against same-CPU interleaving — presumably this
 * build is UP-only when LLSC is absent (NOTE(review): can't confirm
 * from this file alone).
 */
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)\
\
	smp_mb();						\
	__asm__ __volatile__(					\
	"1: ld %1, [%2] \n"					\
	insn "\n"						\
	"2: st %0, [%2] \n"					\
	" mov %0, 0 \n"						\
	"3: \n"							\
	" .section .fixup,\"ax\" \n"				\
	" .align 4 \n"						\
	"4: mov %0, %4 \n"					\
	" j 3b \n"						\
	" .previous \n"						\
	" .section __ex_table,\"a\" \n"				\
	" .align 4 \n"						\
	" .word 1b, 4b \n"					\
	" .word 2b, 4b \n"					\
	" .previous \n"						\
\
	: "=&r" (ret), "=&r" (oldval)				\
	: "r" (uaddr), "r" (oparg), "ir" (-EFAULT)		\
	: "cc", "memory");					\
	smp_mb() \
0070
0071 #endif
0072
/*
 * arch_futex_atomic_op_inuser() - atomically apply 'op' to the user word
 * at @uaddr with operand @oparg, returning the pre-op value via @oval.
 *
 * @op:    one of FUTEX_OP_{SET,ADD,OR,ANDN,XOR}; anything else -> -ENOSYS.
 * @oparg: second operand fed to the selected instruction (%3 in the asm).
 * @oval:  out parameter; written with the old value only when the
 *         operation succeeded (ret == 0).
 * @uaddr: user-space address; validated with access_ok() first, and the
 *         actual access is fault-protected inside __futex_atomic_op().
 *
 * Returns 0 on success, -EFAULT on a bad/faulting user address,
 * -ENOSYS for an unknown op.
 */
static inline int arch_futex_atomic_op_inuser(int op, int oparg, int *oval,
						u32 __user *uaddr)
{
	int oldval = 0, ret;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	/*
	 * Without LLSC the ld/st in __futex_atomic_op is not atomic;
	 * disabling preemption keeps the read-modify-write from being
	 * interleaved on this CPU.  With LLSC the hardware provides
	 * atomicity and no preemption bracket is needed.
	 */
#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();
#endif

	switch (op) {
	case FUTEX_OP_SET:
		/* *uaddr = oparg (old value still captured in %1/oldval) */
		__futex_atomic_op("mov %0, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		/* *uaddr += oparg */
		__futex_atomic_op("add %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		/* *uaddr |= oparg */
		__futex_atomic_op("or %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		/* *uaddr &= ~oparg (bic = bit-clear) */
		__futex_atomic_op("bic %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_XOR:
		/* *uaddr ^= oparg */
		__futex_atomic_op("xor %0, %1, %3", ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif

	/* Report the pre-operation value only if the op actually ran. */
	if (!ret)
		*oval = oldval;

	return ret;
}
0115
0116
0117
0118
0119
/*
 * futex_atomic_cmpxchg_inatomic() - compare-and-exchange on a user word.
 *
 * If *uaddr == @expval, store @newval there; either way, return the
 * value that was observed at @uaddr through @uval.
 *
 * @uval:   out parameter; always written with the loaded value on the
 *          non-faulting path (note: written even if ret == -EFAULT,
 *          in which case it holds whatever the asm left in existval).
 * @uaddr:  user-space address, access_ok()-checked up front; the load
 *          and store are additionally fault-protected via __ex_table.
 * @expval: expected current value.
 * @newval: replacement value stored only on match.
 *
 * Returns 0 on success (match or mismatch alike — the caller compares
 * *uval against expval to distinguish), -EFAULT on a faulting access.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr, u32 expval,
					u32 newval)
{
	int ret = 0;
	u32 existval;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	/* No LLSC: the ld/brne/st sequence below is only protected from
	 * same-CPU interleaving by this preemption bracket. */
#ifndef CONFIG_ARC_HAS_LLSC
	preempt_disable();
#endif
	smp_mb();

	__asm__ __volatile__(
#ifdef CONFIG_ARC_HAS_LLSC
	/* llock/scond loop: retry until the conditional store lands,
	 * bailing to 3f as soon as the loaded value mismatches expval. */
	"1:	llock	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	scond	%3, [%4]		\n"
	"	bnz	1b			\n"
#else
	/* Plain load/compare/store; atomicity relies on the caller-side
	 * preempt_disable() above. */
	"1:	ld	%1, [%4]		\n"
	"	brne	%1, %2, 3f		\n"
	"2:	st	%3, [%4]		\n"
#endif
	"3:	\n"
	/* Fault fixup: a fault at 1b or 2b jumps to 4b, which sets
	 * ret = -EFAULT (%5) and resumes at 3b.
	 * NOTE(review): unlike __futex_atomic_op there is no ".align 4"
	 * after ".section .fixup" here — presumably harmless, but worth
	 * confirming against the other fixup blocks in this file. */
	"	.section .fixup,\"ax\"	\n"
	"4:	mov %0, %5	\n"
	"	j   3b	\n"
	"	.previous	\n"
	"	.section __ex_table,\"a\"	\n"
	"	.align  4	\n"
	"	.word   1b, 4b	\n"
	"	.word   2b, 4b	\n"
	"	.previous\n"
	: "+&r"(ret), "=&r"(existval)
	: "r"(expval), "r"(newval), "r"(uaddr), "ir"(-EFAULT)
	: "cc", "memory");

	smp_mb();

#ifndef CONFIG_ARC_HAS_LLSC
	preempt_enable();
#endif
	*uval = existval;
	return ret;
}
0168
0169 #endif