Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0 */
0002 #ifndef _ALPHA_LOCAL_H
0003 #define _ALPHA_LOCAL_H
0004 
0005 #include <linux/percpu.h>
0006 #include <linux/atomic.h>
0007 
/*
 * local_t wraps an atomic_long_t so that per-CPU counters get their
 * own type, preventing accidental mixing with generic atomics.
 */
typedef struct
{
    atomic_long_t a;
} local_t;
0012 
/*
 * Initializer and the simple operations: each one delegates directly
 * to the matching atomic_long_* primitive on the embedded counter.
 */
#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }
#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l,i)  atomic_long_set(&(l)->a, (i))
#define local_inc(l)    atomic_long_inc(&(l)->a)
#define local_dec(l)    atomic_long_dec(&(l)->a)
#define local_add(i,l)  atomic_long_add((i),(&(l)->a))
#define local_sub(i,l)  atomic_long_sub((i),(&(l)->a))
0020 
/*
 * local_add_return - atomically add @i to @l and return the new value.
 *
 * Implemented with an Alpha load-locked/store-conditional loop:
 *   ldq_l   loads the counter and sets the lock flag;
 *   the first addq computes the return value into %2 (result) before
 *   %0 is reused;
 *   the second addq builds the new counter value in %0;
 *   stq_c   conditionally stores %0 and writes the success flag back
 *           into %0 — beq retries (via subsection 2) if the store
 *           failed because another CPU touched the location.
 */
static __inline__ long local_add_return(long i, local_t * l)
{
    long temp, result;
    __asm__ __volatile__(
    "1: ldq_l %0,%1\n"
    "   addq %0,%3,%2\n"
    "   addq %0,%3,%0\n"
    "   stq_c %0,%1\n"
    "   beq %0,2f\n"
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    :"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
    :"Ir" (i), "m" (l->a.counter) : "memory");
    return result;
}
0037 
/*
 * local_sub_return - atomically subtract @i from @l and return the new
 * value.  Mirror image of local_add_return: same LL/SC retry loop with
 * subq in place of addq; %2 receives the result before %0 is clobbered
 * by the store-conditional's success flag.
 */
static __inline__ long local_sub_return(long i, local_t * l)
{
    long temp, result;
    __asm__ __volatile__(
    "1: ldq_l %0,%1\n"
    "   subq %0,%3,%2\n"
    "   subq %0,%3,%0\n"
    "   stq_c %0,%1\n"
    "   beq %0,2f\n"
    ".subsection 2\n"
    "2: br 1b\n"
    ".previous"
    :"=&r" (temp), "=m" (l->a.counter), "=&r" (result)
    :"Ir" (i), "m" (l->a.counter) : "memory");
    return result;
}
0054 
/*
 * Compare-and-exchange / exchange on the raw counter, using the
 * cpu-local (non-SMP-ordered) cmpxchg/xchg variants.
 * local_cmpxchg returns the value previously held.
 */
#define local_cmpxchg(l, o, n) \
    (cmpxchg_local(&((l)->a.counter), (o), (n)))
#define local_xchg(l, n) (xchg_local(&((l)->a.counter), (n)))
0058 
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u, using a
 * cmpxchg retry loop: re-reads the counter each time another CPU
 * wins the race, bailing out if the value ever equals @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
#define local_add_unless(l, a, u)               \
({                              \
    long c, old;                        \
    c = local_read(l);                  \
    for (;;) {                      \
        if (unlikely(c == (u)))             \
            break;                  \
        old = local_cmpxchg((l), c, c + (a));   \
        if (likely(old == c))               \
            break;                  \
        c = old;                    \
    }                           \
    c != (u);                       \
})
/* Increment only if the counter is non-zero; non-zero on success. */
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
0083 
/* True if the counter is negative after adding @a. */
#define local_add_negative(a, l) (local_add_return((a), (l)) < 0)

/* Decrement and return the new value. */
#define local_dec_return(l) local_sub_return(1,(l))

/* Increment and return the new value. */
#define local_inc_return(l) local_add_return(1,(l))

/* True if the counter reached zero after subtracting @i. */
#define local_sub_and_test(i,l) (local_sub_return((i), (l)) == 0)

/* True if the counter reached zero after incrementing. */
#define local_inc_and_test(l) (local_add_return(1, (l)) == 0)

/* True if the counter reached zero after decrementing. */
#define local_dec_and_test(l) (local_sub_return(1, (l)) == 0)
0095 
/*
 * Non-atomic variants: plain read-modify-write on the counter, safe
 * only when the caller already excludes concurrent updaters.
 * Verify if faster than atomic ops.
 */
#define __local_inc(l)      ((l)->a.counter++)
/* Fixed: previously expanded to counter++, silently incrementing. */
#define __local_dec(l)      ((l)->a.counter--)
#define __local_add(i,l)    ((l)->a.counter+=(i))
#define __local_sub(i,l)    ((l)->a.counter-=(i))
0101 
0102 #endif /* _ALPHA_LOCAL_H */