/* SPDX-License-Identifier: GPL-2.0 */
#ifndef _ASM_GENERIC_LOCAL64_H
#define _ASM_GENERIC_LOCAL64_H

#include <linux/percpu.h>
#include <asm/types.h>

/*
 * A signed long type for operations which are atomic for a single CPU.
 * Usually used in combination with per-cpu variables.
 *
 * This is the generic implementation: on 64-bit architectures it wraps
 * the architecture's local_t, and on 32-bit ones it falls back to
 * atomic64_t, which rather defeats the purpose.  The whole point behind
 * local64_t is that some processors can perform atomic adds and subtracts
 * in a manner which is atomic wrt IRQs running on this CPU, and local64_t
 * allows such capabilities to be exploited.
 */
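
/*
 * Illustrative usage sketch (not part of the original header; the per-cpu
 * variable and helper names below are made up).  The typical pattern is a
 * per-cpu counter that is updated cheaply on the local CPU and summed
 * from any CPU:
 *
 *	static DEFINE_PER_CPU(local64_t, nr_hits);
 *
 *	static void record_hit(void)
 *	{
 *		local64_inc(this_cpu_ptr(&nr_hits));	// IRQ-safe on this CPU
 *	}
 *
 *	static s64 total_hits(void)
 *	{
 *		s64 sum = 0;
 *		int cpu;
 *
 *		for_each_possible_cpu(cpu)
 *			sum += local64_read(per_cpu_ptr(&nr_hits, cpu));
 *		return sum;
 *	}
 */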

/* Implement in terms of atomics. */

#if BITS_PER_LONG == 64

#include <asm/local.h>

typedef struct {
    local_t a;
} local64_t;

#define LOCAL64_INIT(i) { LOCAL_INIT(i) }
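
/*
 * Illustrative example (not part of the original header): LOCAL64_INIT()
 * is meant for static initialization:
 *
 *	static local64_t total = LOCAL64_INIT(0);
 */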

#define local64_read(l)     local_read(&(l)->a)
#define local64_set(l,i)    local_set((&(l)->a),(i))
#define local64_inc(l)      local_inc(&(l)->a)
#define local64_dec(l)      local_dec(&(l)->a)
#define local64_add(i,l)    local_add((i),(&(l)->a))
#define local64_sub(i,l)    local_sub((i),(&(l)->a))

#define local64_sub_and_test(i, l) local_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) local_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) local_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) local_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) local_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) local_sub_return((i), (&(l)->a))
#define local64_inc_return(l)   local_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) local_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)  local_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) local_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l) local_inc_not_zero(&(l)->a)
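
/*
 * Illustrative example (not part of the original header; the helper name
 * is made up): local64_cmpxchg() returns the value it found, so the usual
 * compare-and-swap retry loop looks like this, here recording a running
 * maximum:
 *
 *	static void local64_track_max(local64_t *l, s64 v)
 *	{
 *		s64 old = local64_read(l);
 *
 *		while (v > old) {
 *			s64 seen = local64_cmpxchg(l, old, v);
 *			if (seen == old)
 *				break;		// swap succeeded
 *			old = seen;		// lost a race; retry with new value
 *		}
 *	}
 */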

/*
 * Non-atomic variants, i.e. for use only when preemption is disabled and
 * the counter won't be touched from interrupt context.  Some archs can
 * optimize this case well.
 */
#define __local64_inc(l)    local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)    local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)  local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)  local64_set((l), local64_read(l) - (i))
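
/*
 * Illustrative example (not part of the original header; variable names
 * are made up): the __local64_*() variants are a plain read-modify-write,
 * so they are only safe when both preemption and any interrupt handlers
 * that touch the counter are excluded, e.g.:
 *
 *	unsigned long flags;
 *
 *	local_irq_save(flags);
 *	__local64_add(len, this_cpu_ptr(&tx_bytes));
 *	local_irq_restore(flags);
 */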

#else /* BITS_PER_LONG != 64 */

#include <linux/atomic.h>

/*
 * Wrap atomic64_t in a struct rather than using a plain typedef, so that
 * local64_t values can't be mixed up with plain atomic64_t's.
 */
typedef struct {
    atomic64_t a;
} local64_t;

#define LOCAL64_INIT(i) { ATOMIC64_INIT(i) }

#define local64_read(l)     atomic64_read(&(l)->a)
#define local64_set(l,i)    atomic64_set((&(l)->a),(i))
#define local64_inc(l)      atomic64_inc(&(l)->a)
#define local64_dec(l)      atomic64_dec(&(l)->a)
#define local64_add(i,l)    atomic64_add((i),(&(l)->a))
#define local64_sub(i,l)    atomic64_sub((i),(&(l)->a))

#define local64_sub_and_test(i, l) atomic64_sub_and_test((i), (&(l)->a))
#define local64_dec_and_test(l) atomic64_dec_and_test(&(l)->a)
#define local64_inc_and_test(l) atomic64_inc_and_test(&(l)->a)
#define local64_add_negative(i, l) atomic64_add_negative((i), (&(l)->a))
#define local64_add_return(i, l) atomic64_add_return((i), (&(l)->a))
#define local64_sub_return(i, l) atomic64_sub_return((i), (&(l)->a))
#define local64_inc_return(l)   atomic64_inc_return(&(l)->a)

#define local64_cmpxchg(l, o, n) atomic64_cmpxchg((&(l)->a), (o), (n))
#define local64_xchg(l, n)  atomic64_xchg((&(l)->a), (n))
#define local64_add_unless(l, _a, u) atomic64_add_unless((&(l)->a), (_a), (u))
#define local64_inc_not_zero(l) atomic64_inc_not_zero(&(l)->a)

/*
 * Non-atomic variants, i.e. for use only when preemption is disabled and
 * the counter won't be touched from interrupt context.  Some archs can
 * optimize this case well.
 */
#define __local64_inc(l)    local64_set((l), local64_read(l) + 1)
#define __local64_dec(l)    local64_set((l), local64_read(l) - 1)
#define __local64_add(i,l)  local64_set((l), local64_read(l) + (i))
#define __local64_sub(i,l)  local64_set((l), local64_read(l) - (i))

#endif /* BITS_PER_LONG != 64 */

#endif /* _ASM_GENERIC_LOCAL64_H */