0001
0002
0003
0004
0005
0006
0007
0008 #ifndef _ASM_FUTEX_H
0009 #define _ASM_FUTEX_H
0010
0011 #ifdef __KERNEL__
0012
0013 #include <linux/futex.h>
0014 #include <linux/uaccess.h>
0015 #include <asm/asm-eva.h>
0016 #include <asm/barrier.h>
0017 #include <asm/compiler.h>
0018 #include <asm/errno.h>
0019 #include <asm/sync.h>
0020
0021 #define arch_futex_atomic_op_inuser arch_futex_atomic_op_inuser
0022 #define futex_atomic_cmpxchg_inatomic futex_atomic_cmpxchg_inatomic
0023 #include <asm-generic/futex.h>
0024
/*
 * __futex_atomic_op() - atomically apply "insn" to the user word at
 * "uaddr" with operand "oparg".
 *
 * On success "ret" is 0 and "oldval" holds the previous value of the
 * word; on a fault "ret" is -EFAULT.  Three variants are selected:
 *
 *  - LL/SC with the R10000 workaround (branch-likely "beqzl" retry),
 *  - plain LL/SC for all other LL/SC-capable CPUs (with the Loongson3
 *    SYNC workaround emitted before the LL),
 *  - a non-atomic fallback for CPUs without LL/SC, which punts to the
 *    generic futex_atomic_op_inuser_local() helper.
 *
 * The .fixup/__ex_table entries turn a fault on the LL (1:) or SC (2:)
 * into a jump to 4:, which loads -EFAULT into "ret" and exits via 3:.
 *
 * NOTE(review): two fixes in the fallback branch.
 * futex_atomic_op_inuser_local() applies the FUTEX_OP_ANDN complement
 * itself, but our FUTEX_OP_ANDN call site already hands this macro
 * ~oparg (the asm variants need it pre-complemented), so the
 * complement must be undone here or the AND mask is inverted twice.
 * The previous value is also returned through the macro's "oldval"
 * argument instead of being written straight to the caller-scope
 * "oval": the caller finishes with "*oval = oldval", which would
 * otherwise clobber the helper's result with the initial zero.
 */
#define __futex_atomic_op(op, insn, ret, oldval, uaddr, oparg)		\
{									\
	if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {	\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	arch=r4000			\n"	\
		"1:	ll	%1, %4	# __futex_atomic_op	\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	arch=r4000			\n"	\
		"2:	sc	$1, %2				\n"	\
		"	beqzl	$1, 1b				\n"	\
		__stringify(__WEAK_LLSC_MB) "			\n"	\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else if (cpu_has_llsc) {					\
		__asm__ __volatile__(					\
		"	.set	push				\n"	\
		"	.set	noat				\n"	\
		"	.set	push				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"	" __SYNC(full, loongson3_war) "		\n"	\
		"1:	"user_ll("%1", "%4")" # __futex_atomic_op\n"	\
		"	.set	pop				\n"	\
		"	" insn	"				\n"	\
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"	\
		"2:	"user_sc("$1", "%2")"			\n"	\
		"	beqz	$1, 1b				\n"	\
		__stringify(__WEAK_LLSC_MB) "			\n"	\
		"3:						\n"	\
		"	.insn					\n"	\
		"	.set	pop				\n"	\
		"	.section .fixup,\"ax\"			\n"	\
		"4:	li	%0, %6				\n"	\
		"	j	3b				\n"	\
		"	.previous				\n"	\
		"	.section __ex_table,\"a\"		\n"	\
		"	"__UA_ADDR "\t1b, 4b			\n"	\
		"	"__UA_ADDR "\t2b, 4b			\n"	\
		"	.previous				\n"	\
		: "=r" (ret), "=&r" (oldval),				\
		  "=" GCC_OFF_SMALL_ASM() (*uaddr)			\
		: "0" (0), GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oparg),	\
		  "i" (-EFAULT)						\
		: "memory");						\
	} else {							\
		/* fallback for non-LL/SC CPUs; see NOTE above */	\
		ret = futex_atomic_op_inuser_local(op,			\
				(op) == FUTEX_OP_ANDN ? ~(oparg)	\
						      : (oparg),	\
				&(oldval), uaddr);			\
	}								\
}
0091
0092 static inline int
0093 arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *uaddr)
0094 {
0095 int oldval = 0, ret;
0096
0097 if (!access_ok(uaddr, sizeof(u32)))
0098 return -EFAULT;
0099
0100 switch (op) {
0101 case FUTEX_OP_SET:
0102 __futex_atomic_op(op, "move $1, %z5", ret, oldval, uaddr, oparg);
0103 break;
0104
0105 case FUTEX_OP_ADD:
0106 __futex_atomic_op(op, "addu $1, %1, %z5",
0107 ret, oldval, uaddr, oparg);
0108 break;
0109 case FUTEX_OP_OR:
0110 __futex_atomic_op(op, "or $1, %1, %z5",
0111 ret, oldval, uaddr, oparg);
0112 break;
0113 case FUTEX_OP_ANDN:
0114 __futex_atomic_op(op, "and $1, %1, %z5",
0115 ret, oldval, uaddr, ~oparg);
0116 break;
0117 case FUTEX_OP_XOR:
0118 __futex_atomic_op(op, "xor $1, %1, %z5",
0119 ret, oldval, uaddr, oparg);
0120 break;
0121 default:
0122 ret = -ENOSYS;
0123 }
0124
0125 if (!ret)
0126 *oval = oldval;
0127
0128 return ret;
0129 }
0130
/*
 * futex_atomic_cmpxchg_inatomic - compare-and-exchange on a user-space
 * futex word.
 * @uval:   out: value observed at @uaddr (set whether or not the
 *          exchange happened)
 * @uaddr:  user-space address of the futex word
 * @oldval: expected value
 * @newval: value to store if the word still equals @oldval
 *
 * Returns 0 on success or -EFAULT if @uaddr faults.  On a value
 * mismatch the LL/SC loop exits via label 3 without storing, and the
 * caller distinguishes that case by comparing *uval with @oldval.
 *
 * A fault on the LL (1:) or SC (2:) is redirected by the __ex_table
 * entries to label 4, which loads -EFAULT into ret and jumps back to
 * the common exit at 3.
 */
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *uaddr,
			      u32 oldval, u32 newval)
{
	int ret = 0;
	u32 val;

	if (!access_ok(uaddr, sizeof(u32)))
		return -EFAULT;

	if (cpu_has_llsc && IS_ENABLED(CONFIG_WAR_R10000_LLSC)) {
		/* LL/SC with the R10000 branch-likely (beqzl) retry. */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic		\n"
		"	.set	push				\n"
		"	.set	noat				\n"
		"	.set	push				\n"
		"	.set	arch=r4000			\n"
		"1:	ll	%1, %3				\n"
		"	bne	%1, %z4, 3f			\n"
		"	.set	pop				\n"
		"	move	$1, %z5				\n"
		"	.set	arch=r4000			\n"
		"2:	sc	$1, %2				\n"
		"	beqzl	$1, 1b				\n"
		__stringify(__WEAK_LLSC_MB) "			\n"
		"3:						\n"
		"	.insn					\n"
		"	.set	pop				\n"
		"	.section .fixup,\"ax\"			\n"
		"4:	li	%0, %6				\n"
		"	j	3b				\n"
		"	.previous				\n"
		"	.section __ex_table,\"a\"		\n"
		"	"__UA_ADDR "\t1b, 4b			\n"
		"	"__UA_ADDR "\t2b, 4b			\n"
		"	.previous				\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else if (cpu_has_llsc) {
		/*
		 * Plain LL/SC.  __SYNC(full, loongson3_war) emits the
		 * Loongson3 barrier before the LL, and the exit barrier
		 * at 3: is placed so it is executed on the mismatch
		 * path as well as after a successful SC.
		 */
		__asm__ __volatile__(
		"# futex_atomic_cmpxchg_inatomic		\n"
		"	.set	push				\n"
		"	.set	noat				\n"
		"	.set	push				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"	" __SYNC(full, loongson3_war) "		\n"
		"1:	"user_ll("%1", "%3")"			\n"
		"	bne	%1, %z4, 3f			\n"
		"	.set	pop				\n"
		"	move	$1, %z5				\n"
		"	.set	"MIPS_ISA_ARCH_LEVEL"		\n"
		"2:	"user_sc("$1", "%2")"			\n"
		"	beqz	$1, 1b				\n"
		"3: " __SYNC_ELSE(full, loongson3_war, __WEAK_LLSC_MB) "\n"
		"	.insn					\n"
		"	.set	pop				\n"
		"	.section .fixup,\"ax\"			\n"
		"4:	li	%0, %6				\n"
		"	j	3b				\n"
		"	.previous				\n"
		"	.section __ex_table,\"a\"		\n"
		"	"__UA_ADDR "\t1b, 4b			\n"
		"	"__UA_ADDR "\t2b, 4b			\n"
		"	.previous				\n"
		: "+r" (ret), "=&r" (val), "=" GCC_OFF_SMALL_ASM() (*uaddr)
		: GCC_OFF_SMALL_ASM() (*uaddr), "Jr" (oldval), "Jr" (newval),
		  "i" (-EFAULT)
		: "memory");
	} else {
		/* No LL/SC: use the generic preempt-disabled helper. */
		return futex_atomic_cmpxchg_inatomic_local(uval, uaddr, oldval, newval);
	}

	*uval = val;
	return ret;
}
0208
0209 #endif
0210 #endif