/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2013 ARM Ltd.
 */
#ifndef __ASM_PERCPU_H
#define __ASM_PERCPU_H

#include <linux/preempt.h>

#include <asm/alternative.h>
#include <asm/cmpxchg.h>
#include <asm/stack_pointer.h>
#include <asm/sysreg.h>

static inline void set_my_cpu_offset(unsigned long off)
{
	asm volatile(ALTERNATIVE("msr tpidr_el1, %0",
				 "msr tpidr_el2, %0",
				 ARM64_HAS_VIRT_HOST_EXTN)
			:: "r" (off) : "memory");
}

static inline unsigned long __hyp_my_cpu_offset(void)
{
	/*
	 * Non-VHE hyp code runs with preemption disabled. No need to hazard
	 * the register access against barrier() as in __kern_my_cpu_offset.
	 */
	return read_sysreg(tpidr_el2);
}

static inline unsigned long __kern_my_cpu_offset(void)
{
	unsigned long off;

	/*
	 * We want to allow caching the value, so avoid using volatile and
	 * instead use a fake stack read to hazard against barrier().
	 */
	asm(ALTERNATIVE("mrs %0, tpidr_el1",
			"mrs %0, tpidr_el2",
			ARM64_HAS_VIRT_HOST_EXTN)
		: "=r" (off) :
		"Q" (*(const unsigned long *)current_stack_pointer));

	return off;
}

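/*
 * Non-VHE (nVHE) hyp code always runs at EL2 with preemption disabled, so it
 * reads tpidr_el2 directly; kernel code picks between tpidr_el1 and tpidr_el2
 * via the ARM64_HAS_VIRT_HOST_EXTN (VHE) alternative above.
 */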
#ifdef __KVM_NVHE_HYPERVISOR__
#define __my_cpu_offset __hyp_my_cpu_offset()
#else
#define __my_cpu_offset __kern_my_cpu_offset()
#endif

#define PERCPU_RW_OPS(sz)						\
static inline unsigned long __percpu_read_##sz(void *ptr)		\
{									\
	return READ_ONCE(*(u##sz *)ptr);				\
}									\
									\
static inline void __percpu_write_##sz(void *ptr, unsigned long val)	\
{									\
	WRITE_ONCE(*(u##sz *)ptr, (u##sz)val);				\
}

#define __PERCPU_OP_CASE(w, sfx, name, sz, op_llsc, op_lse)		\
static inline void							\
__percpu_##name##_case_##sz(void *ptr, unsigned long val)		\
{									\
	unsigned int loop;						\
	u##sz tmp;							\
									\
	asm volatile (ARM64_LSE_ATOMIC_INSN(				\
	/* LL/SC */							\
	"1:	ldxr" #sfx "\t%" #w "[tmp], %[ptr]\n"			\
		#op_llsc "\t%" #w "[tmp], %" #w "[tmp], %" #w "[val]\n"	\
	"	stxr" #sfx "\t%w[loop], %" #w "[tmp], %[ptr]\n"		\
	"	cbnz	%w[loop], 1b",					\
	/* LSE atomics */						\
		#op_lse "\t%" #w "[val], %[ptr]\n"			\
		__nops(3))						\
	: [loop] "=&r" (loop), [tmp] "=&r" (tmp),			\
	  [ptr] "+Q"(*(u##sz *)ptr)					\
	: [val] "r" ((u##sz)(val)));					\
}

#define __PERCPU_RET_OP_CASE(w, sfx, name, sz, op_llsc, op_lse)	\
static inline u##sz							\
__percpu_##name##_return_case_##sz(void *ptr, unsigned long val)	\
{									\
	unsigned int loop;						\
	u##sz ret;							\
									\
	asm volatile (ARM64_LSE_ATOMIC_INSN(				\
	/* LL/SC */							\
	"1:	ldxr" #sfx "\t%" #w "[ret], %[ptr]\n"			\
		#op_llsc "\t%" #w "[ret], %" #w "[ret], %" #w "[val]\n"	\
	"	stxr" #sfx "\t%w[loop], %" #w "[ret], %[ptr]\n"		\
	"	cbnz	%w[loop], 1b",					\
	/* LSE atomics */						\
		#op_lse "\t%" #w "[val], %" #w "[ret], %[ptr]\n"	\
		#op_llsc "\t%" #w "[ret], %" #w "[ret], %" #w "[val]\n"	\
		__nops(2))						\
	: [loop] "=&r" (loop), [ret] "=&r" (ret),			\
	  [ptr] "+Q"(*(u##sz *)ptr)					\
	: [val] "r" ((u##sz)(val)));					\
									\
	return ret;							\
}

#define PERCPU_OP(name, op_llsc, op_lse)				\
	__PERCPU_OP_CASE(w, b, name,  8, op_llsc, op_lse)		\
	__PERCPU_OP_CASE(w, h, name, 16, op_llsc, op_lse)		\
	__PERCPU_OP_CASE(w,  , name, 32, op_llsc, op_lse)		\
	__PERCPU_OP_CASE( ,  , name, 64, op_llsc, op_lse)

#define PERCPU_RET_OP(name, op_llsc, op_lse)				\
	__PERCPU_RET_OP_CASE(w, b, name,  8, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE(w, h, name, 16, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE(w,  , name, 32, op_llsc, op_lse)		\
	__PERCPU_RET_OP_CASE( ,  , name, 64, op_llsc, op_lse)

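/*
 * The instantiations below generate the size-specific helpers used by the
 * this_cpu_*() macros further down, e.g. PERCPU_OP(add, add, stadd) yields
 * __percpu_add_case_8/16/32/64(): an LDXR/ADD/STXR loop on LL/SC systems,
 * or a single STADD when LSE atomics are available.
 */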
PERCPU_RW_OPS(8)
PERCPU_RW_OPS(16)
PERCPU_RW_OPS(32)
PERCPU_RW_OPS(64)
PERCPU_OP(add, add, stadd)
PERCPU_OP(andnot, bic, stclr)
PERCPU_OP(or, orr, stset)
PERCPU_RET_OP(add, add, ldadd)

#undef PERCPU_RW_OPS
#undef __PERCPU_OP_CASE
#undef __PERCPU_RET_OP_CASE
#undef PERCPU_OP
#undef PERCPU_RET_OP

/*
 * It would be nice to avoid the conditional call into the scheduler when
 * re-enabling preemption for preemptible kernels, but doing that in a way
 * which builds inside a module would mean messing directly with the preempt
 * count. If you do this, peterz and tglx will hunt you down.
 */
#define this_cpu_cmpxchg_double_8(ptr1, ptr2, o1, o2, n1, n2)		\
({									\
	int __ret;							\
	preempt_disable_notrace();					\
	__ret = cmpxchg_double_local(	raw_cpu_ptr(&(ptr1)),		\
					raw_cpu_ptr(&(ptr2)),		\
					o1, o2, n1, n2);		\
	preempt_enable_notrace();					\
	__ret;								\
})
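
/*
 * Usage sketch (hypothetical example, not taken from this file): given two
 * adjacent, naturally aligned 8-byte per-CPU fields, e.g.
 *
 *	struct { void *obj; unsigned long seq; } pair;
 *
 * this_cpu_cmpxchg_double_8(pair.obj, pair.seq, old_obj, old_seq,
 *			     new_obj, new_seq)
 * updates both fields atomically and returns non-zero only if both still
 * held their expected old values.
 */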

#define _pcp_protect(op, pcp, ...)					\
({									\
	preempt_disable_notrace();					\
	op(raw_cpu_ptr(&(pcp)), __VA_ARGS__);				\
	preempt_enable_notrace();					\
})

#define _pcp_protect_return(op, pcp, args...)				\
({									\
	typeof(pcp) __retval;						\
	preempt_disable_notrace();					\
	__retval = (typeof(pcp))op(raw_cpu_ptr(&(pcp)), ##args);	\
	preempt_enable_notrace();					\
	__retval;							\
})
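
/*
 * Every accessor below follows the same pattern: disable preemption with the
 * _notrace variant, operate on raw_cpu_ptr(&pcp), then re-enable preemption.
 * For a hypothetical per-CPU u32 "foo", this_cpu_add_4(foo, 1) expands
 * roughly to:
 *
 *	preempt_disable_notrace();
 *	__percpu_add_case_32(raw_cpu_ptr(&foo), 1);
 *	preempt_enable_notrace();
 */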

#define this_cpu_read_1(pcp)		\
	_pcp_protect_return(__percpu_read_8, pcp)
#define this_cpu_read_2(pcp)		\
	_pcp_protect_return(__percpu_read_16, pcp)
#define this_cpu_read_4(pcp)		\
	_pcp_protect_return(__percpu_read_32, pcp)
#define this_cpu_read_8(pcp)		\
	_pcp_protect_return(__percpu_read_64, pcp)

#define this_cpu_write_1(pcp, val)	\
	_pcp_protect(__percpu_write_8, pcp, (unsigned long)val)
#define this_cpu_write_2(pcp, val)	\
	_pcp_protect(__percpu_write_16, pcp, (unsigned long)val)
#define this_cpu_write_4(pcp, val)	\
	_pcp_protect(__percpu_write_32, pcp, (unsigned long)val)
#define this_cpu_write_8(pcp, val)	\
	_pcp_protect(__percpu_write_64, pcp, (unsigned long)val)

#define this_cpu_add_1(pcp, val)	\
	_pcp_protect(__percpu_add_case_8, pcp, val)
#define this_cpu_add_2(pcp, val)	\
	_pcp_protect(__percpu_add_case_16, pcp, val)
#define this_cpu_add_4(pcp, val)	\
	_pcp_protect(__percpu_add_case_32, pcp, val)
#define this_cpu_add_8(pcp, val)	\
	_pcp_protect(__percpu_add_case_64, pcp, val)

#define this_cpu_add_return_1(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_8, pcp, val)
#define this_cpu_add_return_2(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_16, pcp, val)
#define this_cpu_add_return_4(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_32, pcp, val)
#define this_cpu_add_return_8(pcp, val)	\
	_pcp_protect_return(__percpu_add_return_case_64, pcp, val)

#define this_cpu_and_1(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_8, pcp, ~val)
#define this_cpu_and_2(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_16, pcp, ~val)
#define this_cpu_and_4(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_32, pcp, ~val)
#define this_cpu_and_8(pcp, val)	\
	_pcp_protect(__percpu_andnot_case_64, pcp, ~val)

#define this_cpu_or_1(pcp, val)		\
	_pcp_protect(__percpu_or_case_8, pcp, val)
#define this_cpu_or_2(pcp, val)		\
	_pcp_protect(__percpu_or_case_16, pcp, val)
#define this_cpu_or_4(pcp, val)		\
	_pcp_protect(__percpu_or_case_32, pcp, val)
#define this_cpu_or_8(pcp, val)		\
	_pcp_protect(__percpu_or_case_64, pcp, val)

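/*
 * The xchg/cmpxchg accessors below reuse the generic _relaxed primitives:
 * with preemption disabled across the access, the operation is CPU-local and
 * no memory ordering against other CPUs is implied, so the relaxed forms are
 * sufficient.
 */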
#define this_cpu_xchg_1(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_2(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_4(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)
#define this_cpu_xchg_8(pcp, val)	\
	_pcp_protect_return(xchg_relaxed, pcp, val)

#define this_cpu_cmpxchg_1(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_2(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_4(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)
#define this_cpu_cmpxchg_8(pcp, o, n)	\
	_pcp_protect_return(cmpxchg_relaxed, pcp, o, n)

#ifdef __KVM_NVHE_HYPERVISOR__
extern unsigned long __hyp_per_cpu_offset(unsigned int cpu);
#define __per_cpu_offset
#define per_cpu_offset(cpu)	__hyp_per_cpu_offset((cpu))
#endif

#include <asm-generic/percpu.h>

/* Redefine macros for nVHE hyp under DEBUG_PREEMPT to avoid its dependencies. */
#if defined(__KVM_NVHE_HYPERVISOR__) && defined(CONFIG_DEBUG_PREEMPT)
#undef	this_cpu_ptr
#define	this_cpu_ptr		raw_cpu_ptr
#undef	__this_cpu_read
#define	__this_cpu_read		raw_cpu_read
#undef	__this_cpu_write
#define	__this_cpu_write	raw_cpu_write
#endif

#endif /* __ASM_PERCPU_H */