Back to home page

OSCL-LXR

 
 

    


0001 /* SPDX-License-Identifier: GPL-2.0 */
0002 #ifndef _ASM_X86_LOCAL_H
0003 #define _ASM_X86_LOCAL_H
0004 
0005 #include <linux/percpu.h>
0006 
0007 #include <linux/atomic.h>
0008 #include <asm/asm.h>
0009 
/*
 * local_t - counter type for cheap, CPU-local atomic RMW operations.
 *
 * Wraps atomic_long_t so the ops below can be implemented with plain
 * (non-LOCK-prefixed) x86 instructions; see the note near __local_inc()
 * at the bottom of this file.
 */
typedef struct {
    atomic_long_t a;
} local_t;
0013 
/* Static initializer: local_t v = LOCAL_INIT(0); */
#define LOCAL_INIT(i)   { ATOMIC_LONG_INIT(i) }

/* Plain read/write of the counter, delegated to the atomic_long helpers. */
#define local_read(l)   atomic_long_read(&(l)->a)
#define local_set(l, i) atomic_long_set(&(l)->a, (i))
0018 
/**
 * local_inc - increment local variable
 * @l: pointer to type local_t
 *
 * Increments @l by 1 with a single inc instruction on the counter's
 * memory location. No LOCK prefix is emitted, so this is cheaper than
 * the atomic_long variant but not serialized against other CPUs.
 */
static inline void local_inc(local_t *l)
{
    asm volatile(_ASM_INC "%0"
             : "+m" (l->a.counter)); /* "+m": counter is both read and written */
}
0024 
/**
 * local_dec - decrement local variable
 * @l: pointer to type local_t
 *
 * Decrements @l by 1 with a single dec instruction on the counter's
 * memory location; no LOCK prefix (see comment near __local_inc below).
 */
static inline void local_dec(local_t *l)
{
    asm volatile(_ASM_DEC "%0"
             : "+m" (l->a.counter)); /* "+m": counter is both read and written */
}
0030 
/**
 * local_add - add long to local variable
 * @i: long value to add
 * @l: pointer to type local_t
 *
 * Adds @i to @l with a single add instruction; no LOCK prefix.
 */
static inline void local_add(long i, local_t *l)
{
    asm volatile(_ASM_ADD "%1,%0"
             : "+m" (l->a.counter)
             : "ir" (i)); /* "ir": @i may be an immediate or a register */
}
0037 
/**
 * local_sub - subtract long from local variable
 * @i: long value to subtract
 * @l: pointer to type local_t
 *
 * Subtracts @i from @l with a single sub instruction; no LOCK prefix.
 */
static inline void local_sub(long i, local_t *l)
{
    asm volatile(_ASM_SUB "%1,%0"
             : "+m" (l->a.counter)
             : "ir" (i)); /* "ir": @i may be an immediate or a register */
}
0044 
/**
 * local_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @l: pointer to type local_t
 *
 * Atomically subtracts @i from @l and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static inline bool local_sub_and_test(long i, local_t *l)
{
    /* sub into memory, then return the "e" (result == zero) condition. */
    return GEN_BINARY_RMWcc(_ASM_SUB, l->a.counter, e, "er", i);
}
0058 
/**
 * local_dec_and_test - decrement and test
 * @l: pointer to type local_t
 *
 * Atomically decrements @l by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static inline bool local_dec_and_test(local_t *l)
{
    /* dec in memory, then return the "e" (result == zero) condition. */
    return GEN_UNARY_RMWcc(_ASM_DEC, l->a.counter, e);
}
0071 
/**
 * local_inc_and_test - increment and test
 * @l: pointer to type local_t
 *
 * Atomically increments @l by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static inline bool local_inc_and_test(local_t *l)
{
    /* inc in memory, then return the "e" (result == zero) condition. */
    return GEN_UNARY_RMWcc(_ASM_INC, l->a.counter, e);
}
0084 
/**
 * local_add_negative - add and test if negative
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static inline bool local_add_negative(long i, local_t *l)
{
    /* add into memory, then return the "s" (result is negative) condition. */
    return GEN_BINARY_RMWcc(_ASM_ADD, l->a.counter, s, "er", i);
}
0098 
/**
 * local_add_return - add and return
 * @i: integer value to add
 * @l: pointer to type local_t
 *
 * Atomically adds @i to @l and returns @i + @l
 */
static inline long local_add_return(long i, local_t *l)
{
    long __i = i;   /* save the addend; xadd overwrites @i below */
    asm volatile(_ASM_XADD "%0, %1;"
             : "+r" (i), "+m" (l->a.counter)
             : : "memory");
    /* xadd leaves the counter's previous value in @i: old + addend = new. */
    return i + __i;
}
0114 
0115 static inline long local_sub_return(long i, local_t *l)
0116 {
0117     return local_add_return(-i, l);
0118 }
0119 
/* Post-operation value after adding/subtracting one. */
#define local_inc_return(l)  (local_add_return(1, l))
#define local_dec_return(l)  (local_sub_return(1, l))

/* Compare-and-exchange on the counter via the CPU-local cmpxchg variant. */
#define local_cmpxchg(l, o, n) \
    (cmpxchg_local(&((l)->a.counter), (o), (n)))
/* Always has a lock prefix: x86 xchg with a memory operand implies LOCK. */
#define local_xchg(l, n) (xchg(&((l)->a.counter), (n)))
0127 
/**
 * local_add_unless - add unless the number is a given value
 * @l: pointer of type local_t
 * @a: the amount to add to l...
 * @u: ...unless l is equal to u.
 *
 * Atomically adds @a to @l, so long as it was not @u.
 * Returns non-zero if @l was not @u, and zero otherwise.
 */
/*
 * Implemented as a local_cmpxchg() loop: if the counter changed between
 * the read and the cmpxchg, retry with the freshly observed value until
 * either the add succeeds or the counter is seen to equal @u.
 */
#define local_add_unless(l, a, u)               \
({                              \
    long c, old;                        \
    c = local_read((l));                    \
    for (;;) {                      \
        if (unlikely(c == (u)))             \
            break;                  \
        old = local_cmpxchg((l), c, c + (a));       \
        if (likely(old == c))               \
            break;                  \
        c = old;                    \
    }                           \
    c != (u);                       \
})
/* Increment only when the counter is currently non-zero. */
#define local_inc_not_zero(l) local_add_unless((l), 1, 0)
0152 
/*
 * Non-"atomic" variants. On x86_32, these are no better than the ops
 * above. On x86-64 these are better than the atomic variants on SMP
 * kernels because they don't use a lock prefix.
 */
#define __local_inc(l)      local_inc(l)
#define __local_dec(l)      local_dec(l)
#define __local_add(i, l)   local_add((i), (l))
#define __local_sub(i, l)   local_sub((i), (l))
0161 
0162 #endif /* _ASM_X86_LOCAL_H */