/* SPDX-License-Identifier: GPL-2.0 */

#ifndef __ASM_CSKY_FUTEX_H
#define __ASM_CSKY_FUTEX_H

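/*
 * UP kernels can use the asm-generic futex ops, which rely on
 * disabling preemption rather than the ldex/stex exclusives needed
 * on SMP.
 */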
#ifndef CONFIG_SMP
#include <asm-generic/futex.h>
#else
#include <linux/atomic.h>
#include <linux/futex.h>
#include <linux/uaccess.h>
#include <linux/errno.h>

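/*
 * Perform @insn atomically on the user word at @uaddr using a
 * ldex.w/stex.w (load-/store-exclusive) retry loop: stex.w leaves 1 in
 * %[t] on success and 0 on failure, so "bez %[t], 1b" retries until the
 * exclusive store succeeds.  A fault at label 1 or 2 is redirected via
 * the __ex_table entries to label 3, which sets @ret to -EFAULT.  The
 * pre/post fences give the whole operation full-barrier semantics.
 */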
#define __futex_atomic_op(insn, ret, oldval, uaddr, oparg)	\
{								\
	u32 tmp;						\
								\
	__atomic_pre_full_fence();				\
								\
	__asm__ __volatile__ (					\
	"1:	ldex.w	%[ov], %[u]		\n"		\
	"	"insn"				\n"		\
	"2:	stex.w	%[t], %[u]		\n"		\
	"	bez	%[t], 1b		\n"		\
	"	br	4f			\n"		\
	"3:	mov	%[r], %[e]		\n"		\
	"4:					\n"		\
	"	.section __ex_table,\"a\"	\n"		\
	"	.balign 4			\n"		\
	"	.long	1b, 3b			\n"		\
	"	.long	2b, 3b			\n"		\
	"	.previous			\n"		\
	: [r] "+r" (ret), [ov] "=&r" (oldval),			\
	  [u] "+m" (*uaddr), [t] "=&r" (tmp)			\
	: [op] "Jr" (oparg), [e] "jr" (-EFAULT)			\
	: "memory");						\
								\
	__atomic_post_full_fence();				\
}

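/*
 * Run one of the FUTEX_OP_* read-modify-write operations on the user
 * word at @uaddr and hand the previous value back through @oval.
 * FUTEX_OP_ANDN is built from plain "and" by complementing @oparg.
 */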
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret = 0;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	switch (op) {
	case FUTEX_OP_SET:
0051 __futex_atomic_op("mov %[t], %[ov]",
0052 ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("or	%[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%[t], %[ov], %[op]",
				  ret, oldval, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("xor	%[t], %[ov], %[op]",
				  ret, oldval, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

	if (!ret)
		*oval = oldval;

	return ret;
}

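/*
 * Compare-and-exchange on a user word: store @newval only if *uaddr
 * still equals @oldval, and return whatever was read through @uval.
 * "cmpne" sets the condition bit when the values differ, so "bt 4f"
 * skips the store; the caller detects that case by comparing *uval
 * with @oldval.  Faults are fixed up to return -EFAULT, as above.
 */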
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	__atomic_pre_full_fence();

	__asm__ __volatile__ (
	"1:	ldex.w	%[v], %[u]		\n"
	"	cmpne	%[v], %[ov]		\n"
	"	bt	4f			\n"
	"	mov	%[t], %[nv]		\n"
	"2:	stex.w	%[t], %[u]		\n"
	"	bez	%[t], 1b		\n"
	"	br	4f			\n"
	"3:	mov	%[r], %[e]		\n"
	"4:					\n"
	"	.section __ex_table,\"a\"	\n"
	"	.balign 4			\n"
	"	.long	1b, 3b			\n"
	"	.long	2b, 3b			\n"
	"	.previous			\n"
	: [r] "+r" (ret), [v] "=&r" (val), [u] "+m" (*uaddr),
	  [t] "=&r" (tmp)
	: [ov] "Jr" (oldval), [nv] "Jr" (newval), [e] "Jr" (-EFAULT)
	: "memory");

	__atomic_post_full_fence();

	*uval = val;
	return ret;
}

#endif /* CONFIG_SMP */
#endif /* __ASM_CSKY_FUTEX_H */