#ifndef _ASM_ARM_FUTEX_H
#define _ASM_ARM_FUTEX_H

#ifdef __KERNEL__

#include <linux/futex.h>
#include <linux/uaccess.h>
#include <asm/errno.h>

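/*
 * Exception-table fixup shared by the accessors below.  The inline
 * assembly that uses this macro marks its user accesses with the local
 * labels 1: and 2:; the __ex_table entries pair each of them with the
 * fixup code at 4:, which loads err_reg (-EFAULT at every call site)
 * into operand %0 and branches back to 3:, i.e. to just after the
 * access sequence.
 */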
#define __futex_atomic_ex_table(err_reg)			\
	"3:\n"							\
	"	.pushsection __ex_table,\"a\"\n"		\
	"	.align	3\n"					\
	"	.long	1b, 4f, 2b, 4f\n"			\
	"	.popsection\n"					\
	"	.pushsection .text.fixup,\"ax\"\n"		\
	"	.align	2\n"					\
	"4:	mov	%0, " err_reg "\n"			\
	"	b	3b\n"					\
	"	.popsection"

#ifdef CONFIG_SMP

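/*
 * SMP variant: a classic LDREX/STREX retry loop.  "insn" computes the
 * new value in %0 from the old value loaded into %1 (oldval) and the
 * operand in %4 (oparg); the exclusive store retries until it succeeds,
 * after which %0 is cleared so "ret" reports success.  A faulting user
 * access lands in the fixup above and yields -EFAULT instead.
 */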
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({									\
	unsigned int __ua_flags;					\
	smp_mb();							\
	prefetchw(uaddr);						\
	__ua_flags = uaccess_save_and_enable();				\
	__asm__ __volatile__(						\
	"1:	ldrex	%1, [%3]\n"					\
	"	" insn "\n"						\
	"2:	strex	%2, %0, [%3]\n"					\
	"	teq	%2, #0\n"					\
	"	bne	1b\n"						\
	"	mov	%0, #0\n"					\
	__futex_atomic_ex_table("%5")					\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)			\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)			\
	: "cc", "memory");						\
	uaccess_restore(__ua_flags);					\
})

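/*
 * Atomically compare the user word at uaddr with oldval and, if they
 * match, replace it with newval.  The value actually observed is
 * returned through *uval; the return value is 0 on success (including
 * a failed comparison) or -EFAULT if the user access faulted.
 */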
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	smp_mb();

	prefetchw(uaddr);
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"1:	ldrex	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	ite	eq	@ explicit IT needed for the 2b label\n"
	"2:	strexeq	%0, %3, [%4]\n"
	"	movne	%0, #0\n"
	"	teq	%0, #0\n"
	"	bne	1b\n"
	__futex_atomic_ex_table("%5")
	: "=&r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);
	smp_mb();

	*uval = val;
	return ret;
}
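
/*
 * Illustrative only, not part of this header: a caller wanting to claim
 * a futex word that is expected to still hold 0 might use the helper
 * roughly as sketched below (the surrounding names are hypothetical).
 *
 *	u32 seen;
 *	int err = futex_atomic_cmpxchg_inatomic(&seen, uaddr, 0, new_owner);
 *	if (err)
 *		return err;		// -EFAULT from the user access
 *	if (seen != 0)
 *		// lost the race; "seen" is the value found in the word
 */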

#else /* !CONFIG_SMP */

#include <linux/preempt.h>
#include <asm/domain.h>

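/*
 * UP variant: the caller runs this with preemption disabled, so a plain
 * load/modify/store of the user word cannot be interleaved with another
 * thread.  TUSER() from <asm/domain.h> selects the appropriate
 * user-access form of ldr/str for the configuration.
 */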
#define __futex_atomic_op(insn, ret, oldval, tmp, uaddr, oparg)	\
({									\
	unsigned int __ua_flags = uaccess_save_and_enable();		\
	__asm__ __volatile__(						\
	"1:	" TUSER(ldr) "	%1, [%3]\n"				\
	"	" insn "\n"						\
	"2:	" TUSER(str) "	%0, [%3]\n"				\
	"	mov	%0, #0\n"					\
	__futex_atomic_ex_table("%5")					\
	: "=&r" (ret), "=&r" (oldval), "=&r" (tmp)			\
	: "r" (uaddr), "r" (oparg), "Ir" (-EFAULT)			\
	: "cc", "memory");						\
	uaccess_restore(__ua_flags);					\
})

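/*
 * UP compare-and-exchange: same contract as the SMP version above, but
 * implemented as a user load, a compare, and a conditional user store,
 * made atomic by disabling preemption for the duration.
 */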
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	unsigned int __ua_flags;
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	preempt_disable();
	__ua_flags = uaccess_save_and_enable();
	__asm__ __volatile__("@futex_atomic_cmpxchg_inatomic\n"
	"	.syntax unified\n"
	"1:	" TUSER(ldr) "	%1, [%4]\n"
	"	teq	%1, %2\n"
	"	it	eq	@ explicit IT needed for the 2b label\n"
	"2:	" TUSERCOND(str, eq) "	%3, [%4]\n"
	__futex_atomic_ex_table("%5")
	: "+r" (ret), "=&r" (val)
	: "r" (oldval), "r" (newval), "r" (uaddr), "Ir" (-EFAULT)
	: "cc", "memory");
	uaccess_restore(__ua_flags);

	*uval = val;
	preempt_enable();

	return ret;
}

#endif /* !CONFIG_SMP */

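/*
 * Perform one of the FUTEX_OP_* operations on the user word at uaddr
 * and return the previous value through *oval.  Each case hands the
 * arithmetic/logical instruction to __futex_atomic_op(); for example,
 * FUTEX_OP_ADD makes the SMP loop effectively do (illustrative only):
 *
 *	do {
 *		old = ldrex(uaddr);		// %1
 *		new = old + oparg;		// "add %0, %1, %4"
 *	} while (strex(uaddr, new));		// retry on contention
 *	ret = 0;
 *
 * FUTEX_OP_ANDN is implemented as AND with the complemented operand.
 */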
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
{
	int oldval = 0, ret, tmp;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

#ifndef CONFIG_SMP
	preempt_disable();
#endif

	switch (op) {
	case FUTEX_OP_SET:
		__futex_atomic_op("mov	%0, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ADD:
		__futex_atomic_op("add	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_OR:
		__futex_atomic_op("orr	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	case FUTEX_OP_ANDN:
		__futex_atomic_op("and	%0, %1, %4", ret, oldval, tmp, uaddr, ~oparg);
		break;
	case FUTEX_OP_XOR:
		__futex_atomic_op("eor	%0, %1, %4", ret, oldval, tmp, uaddr, oparg);
		break;
	default:
		ret = -ENOSYS;
	}

#ifndef CONFIG_SMP
	preempt_enable();
#endif
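	/*
	 * Store the observed old value unconditionally; if the operation
	 * failed, "oldval" still holds its zero initialisation and the
	 * caller sees the error in the return value anyway.
	 */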
	*oval = oldval;

	return ret;
}

#endif /* __KERNEL__ */
#endif /* _ASM_ARM_FUTEX_H */