0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013
0014
0015
0016
#ifndef _LINUX_PERCPU_DEFS_H
#define _LINUX_PERCPU_DEFS_H

/*
 * Linker-section name *suffixes* appended to the per-CPU base section
 * by the DECLARE/DEFINE_PER_CPU_*() families below.  An empty string
 * means the variable lands in the plain per-CPU section.
 */
#ifdef CONFIG_SMP

#ifdef MODULE
/*
 * NOTE(review): modules get no dedicated sub-sections here — presumably
 * the module per-CPU loader only handles the base per-CPU section, so
 * cacheline separation relies solely on the ____cacheline_aligned*
 * attribute attached at the DEFINE_* site.  Confirm against the module
 * loader before depending on this rationale.
 */
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION ""
#else
#define PER_CPU_SHARED_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#endif
#define PER_CPU_FIRST_SECTION "..first"

#else

/* UP: only one copy of each variable exists; "..first" is meaningless. */
#define PER_CPU_SHARED_ALIGNED_SECTION ""
#define PER_CPU_ALIGNED_SECTION "..shared_aligned"
#define PER_CPU_FIRST_SECTION ""

#endif
0038
0039
0040
0041
0042
0043
0044
0045
0046
0047
0048
/*
 * __PCPU_ATTRS() builds the attribute list for a per-CPU declaration or
 * definition: the sparse address-space marker (__percpu), placement in
 * the per-CPU base section with the sub-section suffix @sec appended,
 * plus PER_CPU_ATTRIBUTES.  PER_CPU_BASE_SECTION and PER_CPU_ATTRIBUTES
 * are defined elsewhere (not visible in this file).
 */
#define __PCPU_ATTRS(sec) \
__percpu __attribute__((section(PER_CPU_BASE_SECTION sec))) \
PER_CPU_ATTRIBUTES

/*
 * Attributes for the dummy bookkeeping symbols used by the weak
 * per-CPU scheme below: placed in .discard so they are dropped at link
 * time, and marked unused to silence warnings.
 */
#define __PCPU_DUMMY_ATTRS \
__section(".discard") __attribute__((unused))
0055
0056
0057
0058
0059
0060
0061
0062
0063
0064
0065
0066
0067
0068
0069
0070
0071
0072
0073
0074
/*
 * When weak per-CPU definitions are required (the arch sets
 * ARCH_NEEDS_WEAK_PER_CPU, or CONFIG_DEBUG_FORCE_WEAK_PER_CPU forces it
 * everywhere for debugging), each per-CPU variable is defined __weak.
 * Weak symbols do not clash at link time, so two accidental definitions
 * of the same name would silently merge.  The strong dummy symbols
 * below restore link-time error reporting:
 *
 * - __pcpu_scope_##name is defined (strong) by every DEFINE_ and
 *   declared extern by DECLARE_, so a definition colliding with a
 *   same-named definition in another object fails the link even though
 *   the real variable is weak.
 * - __pcpu_unique_##name is both declared and defined (strong) by
 *   DEFINE_ only, catching duplicate DEFINEs.
 *
 * Both dummies carry __PCPU_DUMMY_ATTRS (.discard), so they add nothing
 * to the final image.  NOTE(review): the arch-level reason for needing
 * weak definitions (reportedly GOT-based references on some arches) is
 * not visible in this file — verify in the arch percpu headers.
 */
#if defined(ARCH_NEEDS_WEAK_PER_CPU) || defined(CONFIG_DEBUG_FORCE_WEAK_PER_CPU)

#define DECLARE_PER_CPU_SECTION(type, name, sec) \
extern __PCPU_DUMMY_ATTRS char __pcpu_scope_##name; \
extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec) \
__PCPU_DUMMY_ATTRS char __pcpu_scope_##name; \
extern __PCPU_DUMMY_ATTRS char __pcpu_unique_##name; \
__PCPU_DUMMY_ATTRS char __pcpu_unique_##name; \
extern __PCPU_ATTRS(sec) __typeof__(type) name; \
__PCPU_ATTRS(sec) __weak __typeof__(type) name
#else
/*
 * Normal case: a plain extern declaration, and a plain strong
 * definition placed in the requested per-CPU sub-section.
 */
#define DECLARE_PER_CPU_SECTION(type, name, sec) \
extern __PCPU_ATTRS(sec) __typeof__(type) name

#define DEFINE_PER_CPU_SECTION(type, name, sec) \
__PCPU_ATTRS(sec) __typeof__(type) name
#endif
0106
0107
0108
0109
0110
/*
 * Convenience wrappers around DECLARE/DEFINE_PER_CPU_SECTION() for the
 * common section choices.
 *
 * DECLARE_PER_CPU()/DEFINE_PER_CPU(): ordinary per-CPU variable in the
 * base per-CPU section.
 */
#define DECLARE_PER_CPU(type, name) \
DECLARE_PER_CPU_SECTION(type, name, "")

#define DEFINE_PER_CPU(type, name) \
DEFINE_PER_CPU_SECTION(type, name, "")

/*
 * Per-CPU variable placed at the start of the per-CPU area (the
 * "..first" section on SMP — see PER_CPU_FIRST_SECTION above;
 * presumably so its offset from the per-CPU base is minimal — the
 * linker-script side is not visible here).
 */
#define DECLARE_PER_CPU_FIRST(type, name) \
DECLARE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)

#define DEFINE_PER_CPU_FIRST(type, name) \
DEFINE_PER_CPU_SECTION(type, name, PER_CPU_FIRST_SECTION)
0126
0127
0128
0129
0130
0131
0132
0133
0134
0135
0136
0137
/*
 * Cacheline-aligned per-CPU variables, placed in a dedicated
 * sub-section so alignment padding does not interleave with ordinary
 * per-CPU data.
 *
 * _SHARED_ALIGNED aligns only on SMP (____cacheline_aligned_in_smp);
 * _ALIGNED aligns unconditionally (____cacheline_aligned).
 */
#define DECLARE_PER_CPU_SHARED_ALIGNED(type, name) \
DECLARE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
____cacheline_aligned_in_smp

#define DEFINE_PER_CPU_SHARED_ALIGNED(type, name) \
DEFINE_PER_CPU_SECTION(type, name, PER_CPU_SHARED_ALIGNED_SECTION) \
____cacheline_aligned_in_smp

#define DECLARE_PER_CPU_ALIGNED(type, name) \
DECLARE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION) \
____cacheline_aligned

#define DEFINE_PER_CPU_ALIGNED(type, name) \
DEFINE_PER_CPU_SECTION(type, name, PER_CPU_ALIGNED_SECTION) \
____cacheline_aligned

/*
 * Page-aligned per-CPU variables in their own "..page_aligned"
 * sub-section.
 */
#define DECLARE_PER_CPU_PAGE_ALIGNED(type, name) \
DECLARE_PER_CPU_SECTION(type, name, "..page_aligned") \
__aligned(PAGE_SIZE)

#define DEFINE_PER_CPU_PAGE_ALIGNED(type, name) \
DEFINE_PER_CPU_SECTION(type, name, "..page_aligned") \
__aligned(PAGE_SIZE)

/*
 * Mostly-read per-CPU variables, grouped in "..read_mostly" to keep
 * them away from frequently written per-CPU data.
 */
#define DECLARE_PER_CPU_READ_MOSTLY(type, name) \
DECLARE_PER_CPU_SECTION(type, name, "..read_mostly")

#define DEFINE_PER_CPU_READ_MOSTLY(type, name) \
DEFINE_PER_CPU_SECTION(type, name, "..read_mostly")
0173
0174
0175
0176
0177
/*
 * Per-CPU variables that must live in memory mapped decrypted when the
 * kernel runs with AMD memory encryption: placed in the "..decrypted"
 * sub-section when CONFIG_AMD_MEM_ENCRYPT is enabled, and ordinary
 * per-CPU variables otherwise.
 */
#ifdef CONFIG_AMD_MEM_ENCRYPT
#define DECLARE_PER_CPU_DECRYPTED(type, name) \
	DECLARE_PER_CPU_SECTION(type, name, "..decrypted")

#define DEFINE_PER_CPU_DECRYPTED(type, name) \
	DEFINE_PER_CPU_SECTION(type, name, "..decrypted")
#else
/*
 * Provide the DECLARE_ fallback too (the original only provided
 * DEFINE_ here), so users need not wrap their declarations in
 * #ifdef CONFIG_AMD_MEM_ENCRYPT themselves.
 */
#define DECLARE_PER_CPU_DECRYPTED(type, name)	DECLARE_PER_CPU(type, name)
#define DEFINE_PER_CPU_DECRYPTED(type, name)	DEFINE_PER_CPU(type, name)
#endif
0187
0188
0189
0190
0191
0192
/*
 * Export a per-CPU symbol to modules.  Under sparse (__CHECKER__) the
 * exports are compiled out entirely — presumably to avoid address-space
 * complaints from the checker; the exact rationale is not visible in
 * this file.
 */
#ifndef __CHECKER__
#define EXPORT_PER_CPU_SYMBOL(var) EXPORT_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var) EXPORT_SYMBOL_GPL(var)
#else
#define EXPORT_PER_CPU_SYMBOL(var)
#define EXPORT_PER_CPU_SYMBOL_GPL(var)
#endif
0200
0201
0202
0203
/* Accessors and operations below are C only. */
#ifndef __ASSEMBLY__

/*
 * __verify_pcpu_ptr() statically checks that @ptr is a per-CPU pointer
 * without evaluating it at run time.  Assigning (typeof((ptr) + 0))NULL
 * to a const void __percpu * makes sparse's address-space checking
 * reject any @ptr that is not __percpu; the "+ 0" forces array types to
 * decay to pointers so they are accepted too.  Generates no code.
 */
#define __verify_pcpu_ptr(ptr) \
do { \
const void __percpu *__vpp_verify = (typeof((ptr) + 0))NULL; \
(void)__vpp_verify; \
} while (0)
0222
#ifdef CONFIG_SMP

/*
 * SMP: each CPU's copy of a per-CPU variable lives at the canonical
 * address plus that CPU's offset.  SHIFT_PERCPU_PTR() performs the
 * addition via RELOC_HIDE(), which hides the arithmetic from the
 * compiler so it cannot make aliasing/relocation assumptions about the
 * result.  The __kernel __force cast strips the __percpu address space
 * so the returned pointer is an ordinary, dereferenceable one.
 */
#define SHIFT_PERCPU_PTR(__p, __offset) \
RELOC_HIDE((typeof(*(__p)) __kernel __force *)(__p), (__offset))

/* Pointer to @ptr's instance on CPU @cpu. */
#define per_cpu_ptr(ptr, cpu) \
({ \
__verify_pcpu_ptr(ptr); \
SHIFT_PERCPU_PTR((ptr), per_cpu_offset((cpu))); \
})

/*
 * Pointer to the current CPU's instance, with no preemption check —
 * the caller must guarantee it cannot migrate.
 */
#define raw_cpu_ptr(ptr) \
({ \
__verify_pcpu_ptr(ptr); \
arch_raw_cpu_ptr(ptr); \
})

#ifdef CONFIG_DEBUG_PREEMPT
/*
 * With preemption debugging, go through my_cpu_offset (defined outside
 * this file) instead of the raw arch accessor, so misuse in a
 * preemptible context can be caught.
 */
#define this_cpu_ptr(ptr) \
({ \
__verify_pcpu_ptr(ptr); \
SHIFT_PERCPU_PTR(ptr, my_cpu_offset); \
})
#else
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)
#endif

#else	/* !CONFIG_SMP */

/*
 * UP: only one copy exists, so "finding CPU @cpu's instance" is just a
 * type check plus stripping the __percpu address space.
 */
#define VERIFY_PERCPU_PTR(__p) \
({ \
__verify_pcpu_ptr(__p); \
(typeof(*(__p)) __kernel __force *)(__p); \
})

#define per_cpu_ptr(ptr, cpu) ({ (void)(cpu); VERIFY_PERCPU_PTR(ptr); })
#define raw_cpu_ptr(ptr) per_cpu_ptr(ptr, 0)
#define this_cpu_ptr(ptr) raw_cpu_ptr(ptr)

#endif	/* CONFIG_SMP */
0268
/* Lvalue of @var's instance on CPU @cpu. */
#define per_cpu(var, cpu) (*per_cpu_ptr(&(var), cpu))

/*
 * get_cpu_var()/put_cpu_var(): lvalue access to this CPU's instance of
 * @var with preemption disabled between the get and the matching put.
 */
#define get_cpu_var(var) \
(*({ \
preempt_disable(); \
this_cpu_ptr(&var); \
}))

/*
 * The (void)&(var) evaluates nothing at run time; presumably it is
 * there so the macro still references @var (catching typos/mismatched
 * pairs at compile time) — confirm against upstream intent.
 */
#define put_cpu_var(var) \
do { \
(void)&(var); \
preempt_enable(); \
} while (0)

/* Pointer flavors of the above: @var is itself a __percpu pointer. */
#define get_cpu_ptr(var) \
({ \
preempt_disable(); \
this_cpu_ptr(var); \
})

#define put_cpu_ptr(var) \
do { \
(void)(var); \
preempt_enable(); \
} while (0)
0302
0303
0304
0305
0306
0307
/*
 * Deliberately never defined: referencing it produces a link-time
 * error, which the size-dispatch macros below use to reject operand
 * sizes other than 1, 2, 4 or 8 bytes.
 */
extern void __bad_size_call_parameter(void);

#ifdef CONFIG_DEBUG_PREEMPT
/*
 * Out-of-line preemption check used by the __this_cpu_*() wrappers
 * below; implementation lives elsewhere.  @op names the operation for
 * diagnostics.
 */
extern void __this_cpu_preempt_check(const char *op);
#else
static inline void __this_cpu_preempt_check(const char *op) { }
#endif

/*
 * Dispatch a value-returning per-CPU operation to its size-specific
 * implementation stem##1/2/4/8 based on sizeof(variable).  The switch
 * is resolved at compile time, so only the matching case survives and
 * unsupported sizes hit the link error above.
 */
#define __pcpu_size_call_return(stem, variable) \
({ \
typeof(variable) pscr_ret__; \
__verify_pcpu_ptr(&(variable)); \
switch(sizeof(variable)) { \
case 1: pscr_ret__ = stem##1(variable); break; \
case 2: pscr_ret__ = stem##2(variable); break; \
case 4: pscr_ret__ = stem##4(variable); break; \
case 8: pscr_ret__ = stem##8(variable); break; \
default: \
__bad_size_call_parameter(); break; \
} \
pscr_ret__; \
})

/* Same as above, for operations that take extra arguments. */
#define __pcpu_size_call_return2(stem, variable, ...) \
({ \
typeof(variable) pscr2_ret__; \
__verify_pcpu_ptr(&(variable)); \
switch(sizeof(variable)) { \
case 1: pscr2_ret__ = stem##1(variable, __VA_ARGS__); break; \
case 2: pscr2_ret__ = stem##2(variable, __VA_ARGS__); break; \
case 4: pscr2_ret__ = stem##4(variable, __VA_ARGS__); break; \
case 8: pscr2_ret__ = stem##8(variable, __VA_ARGS__); break; \
default: \
__bad_size_call_parameter(); break; \
} \
pscr2_ret__; \
})
0345
0346
0347
0348
0349
0350
0351
0352
0353
/*
 * Size dispatch for double-word operations (cmpxchg_double) returning
 * bool.  The embedded checks enforce the operands' layout contract:
 * both variables must have the same size (BUILD_BUG_ON), the pair must
 * be aligned to twice that size, and @pcp2 must immediately follow
 * @pcp1 in memory (the two VM_BUG_ONs).
 */
#define __pcpu_double_call_return_bool(stem, pcp1, pcp2, ...) \
({ \
bool pdcrb_ret__; \
__verify_pcpu_ptr(&(pcp1)); \
BUILD_BUG_ON(sizeof(pcp1) != sizeof(pcp2)); \
VM_BUG_ON((unsigned long)(&(pcp1)) % (2 * sizeof(pcp1))); \
VM_BUG_ON((unsigned long)(&(pcp2)) != \
(unsigned long)(&(pcp1)) + sizeof(pcp1)); \
switch(sizeof(pcp1)) { \
case 1: pdcrb_ret__ = stem##1(pcp1, pcp2, __VA_ARGS__); break; \
case 2: pdcrb_ret__ = stem##2(pcp1, pcp2, __VA_ARGS__); break; \
case 4: pdcrb_ret__ = stem##4(pcp1, pcp2, __VA_ARGS__); break; \
case 8: pdcrb_ret__ = stem##8(pcp1, pcp2, __VA_ARGS__); break; \
default: \
__bad_size_call_parameter(); break; \
} \
pdcrb_ret__; \
})

/* Size dispatch for operations that return nothing. */
#define __pcpu_size_call(stem, variable, ...) \
do { \
__verify_pcpu_ptr(&(variable)); \
switch(sizeof(variable)) { \
case 1: stem##1(variable, __VA_ARGS__);break; \
case 2: stem##2(variable, __VA_ARGS__);break; \
case 4: stem##4(variable, __VA_ARGS__);break; \
case 8: stem##8(variable, __VA_ARGS__);break; \
default: \
__bad_size_call_parameter();break; \
} \
} while (0)
0385
0386
0387
0388
0389
0390
0391
0392
0393
0394
0395
0396
0397
0398
0399
0400
0401
0402
0403
0404
0405
0406
0407
0408
0409
0410
0411
0412
0413
0414
0415
0416
0417
0418
0419
/*
 * raw_cpu_*() operations: no preemption checking whatsoever.  Use only
 * in contexts that already guarantee the CPU cannot change (or where
 * the occasional lost/misattributed update is acceptable); otherwise
 * prefer the __this_cpu_*()/this_cpu_*() variants below.  The
 * size-specific raw_cpu_*_1/2/4/8 implementations are supplied by arch
 * or generic code outside this file.
 */
#define raw_cpu_read(pcp) __pcpu_size_call_return(raw_cpu_read_, pcp)
#define raw_cpu_write(pcp, val) __pcpu_size_call(raw_cpu_write_, pcp, val)
#define raw_cpu_add(pcp, val) __pcpu_size_call(raw_cpu_add_, pcp, val)
#define raw_cpu_and(pcp, val) __pcpu_size_call(raw_cpu_and_, pcp, val)
#define raw_cpu_or(pcp, val) __pcpu_size_call(raw_cpu_or_, pcp, val)
#define raw_cpu_add_return(pcp, val) __pcpu_size_call_return2(raw_cpu_add_return_, pcp, val)
#define raw_cpu_xchg(pcp, nval) __pcpu_size_call_return2(raw_cpu_xchg_, pcp, nval)
#define raw_cpu_cmpxchg(pcp, oval, nval) \
__pcpu_size_call_return2(raw_cpu_cmpxchg_, pcp, oval, nval)
#define raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
__pcpu_double_call_return_bool(raw_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/*
 * Fix: cast @val to the variable's type before negating, as
 * raw_cpu_sub_return(), __this_cpu_sub() and this_cpu_sub() in this
 * file already do.  Without the cast, negating an unsigned @val
 * narrower than @pcp happens in the promoted unsigned type and is not
 * sign-extended to @pcp's width, so wider counters are corrupted
 * instead of decremented.
 */
#define raw_cpu_sub(pcp, val) raw_cpu_add(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc(pcp) raw_cpu_add(pcp, 1)
#define raw_cpu_dec(pcp) raw_cpu_sub(pcp, 1)
#define raw_cpu_sub_return(pcp, val) raw_cpu_add_return(pcp, -(typeof(pcp))(val))
#define raw_cpu_inc_return(pcp) raw_cpu_add_return(pcp, 1)
#define raw_cpu_dec_return(pcp) raw_cpu_add_return(pcp, -1)
0438
0439
0440
0441
0442
/*
 * __this_cpu_*() operations: identical to raw_cpu_*() except that,
 * under CONFIG_DEBUG_PREEMPT, __this_cpu_preempt_check() verifies the
 * context first.  The caller is still responsible for actually having
 * preemption disabled; these wrappers only check, they do not protect.
 */
#define __this_cpu_read(pcp) \
({ \
__this_cpu_preempt_check("read"); \
raw_cpu_read(pcp); \
})

#define __this_cpu_write(pcp, val) \
({ \
__this_cpu_preempt_check("write"); \
raw_cpu_write(pcp, val); \
})

#define __this_cpu_add(pcp, val) \
({ \
__this_cpu_preempt_check("add"); \
raw_cpu_add(pcp, val); \
})

#define __this_cpu_and(pcp, val) \
({ \
__this_cpu_preempt_check("and"); \
raw_cpu_and(pcp, val); \
})

#define __this_cpu_or(pcp, val) \
({ \
__this_cpu_preempt_check("or"); \
raw_cpu_or(pcp, val); \
})

#define __this_cpu_add_return(pcp, val) \
({ \
__this_cpu_preempt_check("add_return"); \
raw_cpu_add_return(pcp, val); \
})

#define __this_cpu_xchg(pcp, nval) \
({ \
__this_cpu_preempt_check("xchg"); \
raw_cpu_xchg(pcp, nval); \
})

#define __this_cpu_cmpxchg(pcp, oval, nval) \
({ \
__this_cpu_preempt_check("cmpxchg"); \
raw_cpu_cmpxchg(pcp, oval, nval); \
})

#define __this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
({ __this_cpu_preempt_check("cmpxchg_double"); \
raw_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2); \
})

/*
 * Derived ops.  The -(typeof(pcp))(val) cast makes subtraction of an
 * unsigned @val narrower than @pcp negate in the variable's own type,
 * so the value sign-extends correctly.
 */
#define __this_cpu_sub(pcp, val) __this_cpu_add(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc(pcp) __this_cpu_add(pcp, 1)
#define __this_cpu_dec(pcp) __this_cpu_sub(pcp, 1)
#define __this_cpu_sub_return(pcp, val) __this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define __this_cpu_inc_return(pcp) __this_cpu_add_return(pcp, 1)
#define __this_cpu_dec_return(pcp) __this_cpu_add_return(pcp, -1)
0502
0503
0504
0505
0506
/*
 * this_cpu_*() operations: forms a caller may use without disabling
 * preemption itself.  Any required protection is the responsibility of
 * the size-specific this_cpu_*_1/2/4/8 implementations, which are
 * supplied by arch or generic code outside this file.
 */
#define this_cpu_read(pcp) __pcpu_size_call_return(this_cpu_read_, pcp)
#define this_cpu_write(pcp, val) __pcpu_size_call(this_cpu_write_, pcp, val)
#define this_cpu_add(pcp, val) __pcpu_size_call(this_cpu_add_, pcp, val)
#define this_cpu_and(pcp, val) __pcpu_size_call(this_cpu_and_, pcp, val)
#define this_cpu_or(pcp, val) __pcpu_size_call(this_cpu_or_, pcp, val)
#define this_cpu_add_return(pcp, val) __pcpu_size_call_return2(this_cpu_add_return_, pcp, val)
#define this_cpu_xchg(pcp, nval) __pcpu_size_call_return2(this_cpu_xchg_, pcp, nval)
#define this_cpu_cmpxchg(pcp, oval, nval) \
__pcpu_size_call_return2(this_cpu_cmpxchg_, pcp, oval, nval)
#define this_cpu_cmpxchg_double(pcp1, pcp2, oval1, oval2, nval1, nval2) \
__pcpu_double_call_return_bool(this_cpu_cmpxchg_double_, pcp1, pcp2, oval1, oval2, nval1, nval2)

/* Derived ops; see the sign-extension note on __this_cpu_sub() style casts. */
#define this_cpu_sub(pcp, val) this_cpu_add(pcp, -(typeof(pcp))(val))
#define this_cpu_inc(pcp) this_cpu_add(pcp, 1)
#define this_cpu_dec(pcp) this_cpu_sub(pcp, 1)
#define this_cpu_sub_return(pcp, val) this_cpu_add_return(pcp, -(typeof(pcp))(val))
#define this_cpu_inc_return(pcp) this_cpu_add_return(pcp, 1)
#define this_cpu_dec_return(pcp) this_cpu_add_return(pcp, -1)

#endif	/* __ASSEMBLY__ */
#endif	/* _LINUX_PERCPU_DEFS_H */