/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_FUTEX_H
#define __ASM_FUTEX_H

#include <linux/futex.h>
#include <linux/uaccess.h>

#include <asm/errno.h>

#define FUTEX_MAX_LOOPS 128 /* What's the largest number you can think of? */

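/*
 * LL/SC read-modify-write on the user futex word: 'insn' computes the new
 * value (tmp) from the loaded old value and oparg, and the loop retries the
 * store-exclusive until it succeeds or FUTEX_MAX_LOOPS attempts are used up,
 * at which point ret is set to -EAGAIN so the caller can back off.  A fault
 * on the user access is fixed up via the exception-table entries, which
 * branch to label 3 with -EFAULT in ret.
 */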
#define __futex_atomic_op(insn, ret, oldval, uaddr, tmp, oparg)     \
do {                                    \
    unsigned int loops = FUTEX_MAX_LOOPS;               \
                                    \
    uaccess_enable_privileged();                    \
    asm volatile(                           \
"   prfm    pstl1strm, %2\n"                    \
"1: ldxr    %w1, %2\n"                      \
    insn "\n"                           \
"2: stlxr   %w0, %w3, %2\n"                     \
"   cbz %w0, 3f\n"                      \
"   sub %w4, %w4, %w0\n"                    \
"   cbnz    %w4, 1b\n"                      \
"   mov %w0, %w6\n"                     \
"3:\n"                                  \
"   dmb ish\n"                          \
    _ASM_EXTABLE_UACCESS_ERR(1b, 3b, %w0)               \
    _ASM_EXTABLE_UACCESS_ERR(2b, 3b, %w0)               \
    : "=&r" (ret), "=&r" (oldval), "+Q" (*uaddr), "=&r" (tmp),  \
      "+r" (loops)                          \
    : "r" (oparg), "Ir" (-EAGAIN)                   \
    : "memory");                            \
    uaccess_disable_privileged();                   \
} while (0)

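/*
 * Atomically apply 'op' with operand 'oparg' to the user word at _uaddr
 * and, on success, return the previous value through *oval.  FUTEX_OP_ANDN
 * reuses the AND path with an inverted operand; unknown ops return -ENOSYS.
 */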
static inline int
arch_futex_atomic_op_inuser(int op, int oparg, int *oval, u32 __user *_uaddr)
{
    int oldval = 0, ret, tmp;
    u32 __user *uaddr = __uaccess_mask_ptr(_uaddr);

    if (!access_ok(_uaddr, sizeof(u32)))
        return -EFAULT;

    switch (op) {
    case FUTEX_OP_SET:
        __futex_atomic_op("mov  %w3, %w5",
                  ret, oldval, uaddr, tmp, oparg);
        break;
    case FUTEX_OP_ADD:
        __futex_atomic_op("add  %w3, %w1, %w5",
                  ret, oldval, uaddr, tmp, oparg);
        break;
    case FUTEX_OP_OR:
        __futex_atomic_op("orr  %w3, %w1, %w5",
                  ret, oldval, uaddr, tmp, oparg);
        break;
    case FUTEX_OP_ANDN:
        __futex_atomic_op("and  %w3, %w1, %w5",
                  ret, oldval, uaddr, tmp, ~oparg);
        break;
    case FUTEX_OP_XOR:
        __futex_atomic_op("eor  %w3, %w1, %w5",
                  ret, oldval, uaddr, tmp, oparg);
        break;
    default:
        ret = -ENOSYS;
    }

    if (!ret)
        *oval = oldval;

    return ret;
}

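/*
 * Compare-and-exchange on the user futex word: store newval only if the
 * current value equals oldval.  The value actually loaded is returned
 * through *uval so the caller can see whether the exchange took place.
 * As above, the LL/SC loop is bounded by FUTEX_MAX_LOOPS and gives up
 * with -EAGAIN under sustained contention; a value mismatch branches
 * straight to label 4 without storing.
 */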
static inline int
futex_atomic_cmpxchg_inatomic(u32 *uval, u32 __user *_uaddr,
                  u32 oldval, u32 newval)
{
    int ret = 0;
    unsigned int loops = FUTEX_MAX_LOOPS;
    u32 val, tmp;
    u32 __user *uaddr;

    if (!access_ok(_uaddr, sizeof(u32)))
        return -EFAULT;

    uaddr = __uaccess_mask_ptr(_uaddr);
    uaccess_enable_privileged();
    asm volatile("// futex_atomic_cmpxchg_inatomic\n"
"   prfm    pstl1strm, %2\n"
"1: ldxr    %w1, %2\n"
"   sub %w3, %w1, %w5\n"
"   cbnz    %w3, 4f\n"
"2: stlxr   %w3, %w6, %2\n"
"   cbz %w3, 3f\n"
"   sub %w4, %w4, %w3\n"
"   cbnz    %w4, 1b\n"
"   mov %w0, %w7\n"
"3:\n"
"   dmb ish\n"
"4:\n"
    _ASM_EXTABLE_UACCESS_ERR(1b, 4b, %w0)
    _ASM_EXTABLE_UACCESS_ERR(2b, 4b, %w0)
    : "+r" (ret), "=&r" (val), "+Q" (*uaddr), "=&r" (tmp), "+r" (loops)
    : "r" (oldval), "r" (newval), "Ir" (-EAGAIN)
    : "memory");
    uaccess_disable_privileged();

    if (!ret)
        *uval = val;

    return ret;
}

#endif /* __ASM_FUTEX_H */