// SPDX-License-Identifier: GPL-2.0-only

// Generated by scripts/atomic/gen-atomic-fallback.sh
// DO NOT MODIFY THIS FILE DIRECTLY

#ifndef _LINUX_ATOMIC_FALLBACK_H
#define _LINUX_ATOMIC_FALLBACK_H

#include <linux/compiler.h>
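
/*
 * Each operation below is provided in up to four orderings: fully
 * ordered, _acquire, _release and _relaxed.  Where an architecture
 * supplies only the fully ordered form, the weaker variants simply
 * alias it, since a fully ordered operation trivially provides
 * acquire/release/relaxed semantics.  Where the architecture supplies
 * a _relaxed form, the stronger variants are built around it with
 * explicit barriers.
 */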

#ifndef arch_xchg_relaxed
#define arch_xchg_acquire arch_xchg
#define arch_xchg_release arch_xchg
#define arch_xchg_relaxed arch_xchg
#else

#ifndef arch_xchg_acquire
#define arch_xchg_acquire(...) \
	__atomic_op_acquire(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg_release
#define arch_xchg_release(...) \
	__atomic_op_release(arch_xchg, __VA_ARGS__)
#endif

#ifndef arch_xchg
#define arch_xchg(...) \
	__atomic_op_fence(arch_xchg, __VA_ARGS__)
#endif

#endif

#ifndef arch_cmpxchg_relaxed
#define arch_cmpxchg_acquire arch_cmpxchg
#define arch_cmpxchg_release arch_cmpxchg
#define arch_cmpxchg_relaxed arch_cmpxchg
#else

#ifndef arch_cmpxchg_acquire
#define arch_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg_release
#define arch_cmpxchg_release(...) \
	__atomic_op_release(arch_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg
#define arch_cmpxchg(...) \
	__atomic_op_fence(arch_cmpxchg, __VA_ARGS__)
#endif

#endif

#ifndef arch_cmpxchg64_relaxed
#define arch_cmpxchg64_acquire arch_cmpxchg64
#define arch_cmpxchg64_release arch_cmpxchg64
#define arch_cmpxchg64_relaxed arch_cmpxchg64
#else

#ifndef arch_cmpxchg64_acquire
#define arch_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64_release
#define arch_cmpxchg64_release(...) \
	__atomic_op_release(arch_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_cmpxchg64
#define arch_cmpxchg64(...) \
	__atomic_op_fence(arch_cmpxchg64, __VA_ARGS__)
#endif

#endif

#ifndef arch_try_cmpxchg_relaxed
#ifdef arch_try_cmpxchg
#define arch_try_cmpxchg_acquire arch_try_cmpxchg
#define arch_try_cmpxchg_release arch_try_cmpxchg
#define arch_try_cmpxchg_relaxed arch_try_cmpxchg
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif
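
/*
 * Where an architecture lacks a native try_cmpxchg(), it is emulated
 * with a plain cmpxchg(): on failure, the value actually found in
 * memory is written back through @_oldp so that callers can retry
 * cheaply in a loop.  A typical caller (illustrative sketch only,
 * func() stands for the caller's update of choice) looks like:
 *
 *	old = *ptr;
 *	do {
 *		new = func(old);
 *	} while (!arch_try_cmpxchg(ptr, &old, new));
 */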

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#ifndef arch_try_cmpxchg_relaxed
#define arch_try_cmpxchg_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#else

#ifndef arch_try_cmpxchg_acquire
#define arch_try_cmpxchg_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg_release
#define arch_try_cmpxchg_release(...) \
	__atomic_op_release(arch_try_cmpxchg, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg
#define arch_try_cmpxchg(...) \
	__atomic_op_fence(arch_try_cmpxchg, __VA_ARGS__)
#endif

#endif

#ifndef arch_try_cmpxchg64_relaxed
#ifdef arch_try_cmpxchg64
#define arch_try_cmpxchg64_acquire arch_try_cmpxchg64
#define arch_try_cmpxchg64_release arch_try_cmpxchg64
#define arch_try_cmpxchg64_relaxed arch_try_cmpxchg64
#endif

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_acquire((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_release((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#ifndef arch_try_cmpxchg64_relaxed
#define arch_try_cmpxchg64_relaxed(_ptr, _oldp, _new) \
({ \
	typeof(*(_ptr)) *___op = (_oldp), ___o = *___op, ___r; \
	___r = arch_cmpxchg64_relaxed((_ptr), ___o, (_new)); \
	if (unlikely(___r != ___o)) \
		*___op = ___r; \
	likely(___r == ___o); \
})
#endif

#else

#ifndef arch_try_cmpxchg64_acquire
#define arch_try_cmpxchg64_acquire(...) \
	__atomic_op_acquire(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64_release
#define arch_try_cmpxchg64_release(...) \
	__atomic_op_release(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#ifndef arch_try_cmpxchg64
#define arch_try_cmpxchg64(...) \
	__atomic_op_fence(arch_try_cmpxchg64, __VA_ARGS__)
#endif

#endif

#ifndef arch_atomic_read_acquire
static __always_inline int
arch_atomic_read_acquire(const atomic_t *v)
{
	int ret;

	if (__native_word(atomic_t)) {
		ret = smp_load_acquire(&(v)->counter);
	} else {
		ret = arch_atomic_read(v);
		__atomic_acquire_fence();
	}

	return ret;
}
#define arch_atomic_read_acquire arch_atomic_read_acquire
#endif
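
/*
 * For types the CPU can load in one naturally aligned access
 * (__native_word), acquire ordering comes directly from
 * smp_load_acquire(); otherwise a plain read is followed by an
 * explicit acquire barrier.  arch_atomic_set_release() below mirrors
 * this with smp_store_release() or a release barrier before the store.
 */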

#ifndef arch_atomic_set_release
static __always_inline void
arch_atomic_set_release(atomic_t *v, int i)
{
	if (__native_word(atomic_t)) {
		smp_store_release(&(v)->counter, i);
	} else {
		__atomic_release_fence();
		arch_atomic_set(v, i);
	}
}
#define arch_atomic_set_release arch_atomic_set_release
#endif

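/*
 * The remaining read-modify-write fallbacks all follow one pattern
 * when only a _relaxed implementation is provided:
 *
 *   _acquire:      relaxed op, then __atomic_acquire_fence()
 *   _release:      __atomic_release_fence(), then relaxed op
 *   fully ordered: __atomic_pre_full_fence(), relaxed op,
 *                  __atomic_post_full_fence()
 */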
#ifndef arch_atomic_add_return_relaxed
#define arch_atomic_add_return_acquire arch_atomic_add_return
#define arch_atomic_add_return_release arch_atomic_add_return
#define arch_atomic_add_return_relaxed arch_atomic_add_return
#else

#ifndef arch_atomic_add_return_acquire
static __always_inline int
arch_atomic_add_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_add_return_acquire arch_atomic_add_return_acquire
#endif

#ifndef arch_atomic_add_return_release
static __always_inline int
arch_atomic_add_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_add_return_relaxed(i, v);
}
#define arch_atomic_add_return_release arch_atomic_add_return_release
#endif

#ifndef arch_atomic_add_return
static __always_inline int
arch_atomic_add_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_add_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_add_return arch_atomic_add_return
#endif

#endif

#ifndef arch_atomic_fetch_add_relaxed
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add
#define arch_atomic_fetch_add_release arch_atomic_fetch_add
#define arch_atomic_fetch_add_relaxed arch_atomic_fetch_add
#else

#ifndef arch_atomic_fetch_add_acquire
static __always_inline int
arch_atomic_fetch_add_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_add_acquire arch_atomic_fetch_add_acquire
#endif

#ifndef arch_atomic_fetch_add_release
static __always_inline int
arch_atomic_fetch_add_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_add_relaxed(i, v);
}
#define arch_atomic_fetch_add_release arch_atomic_fetch_add_release
#endif

#ifndef arch_atomic_fetch_add
static __always_inline int
arch_atomic_fetch_add(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_add_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_add arch_atomic_fetch_add
#endif

#endif

#ifndef arch_atomic_sub_return_relaxed
#define arch_atomic_sub_return_acquire arch_atomic_sub_return
#define arch_atomic_sub_return_release arch_atomic_sub_return
#define arch_atomic_sub_return_relaxed arch_atomic_sub_return
#else

#ifndef arch_atomic_sub_return_acquire
static __always_inline int
arch_atomic_sub_return_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_sub_return_acquire arch_atomic_sub_return_acquire
#endif

#ifndef arch_atomic_sub_return_release
static __always_inline int
arch_atomic_sub_return_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_sub_return_relaxed(i, v);
}
#define arch_atomic_sub_return_release arch_atomic_sub_return_release
#endif

#ifndef arch_atomic_sub_return
static __always_inline int
arch_atomic_sub_return(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_sub_return_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_sub_return arch_atomic_sub_return
#endif

#endif

#ifndef arch_atomic_fetch_sub_relaxed
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub
#define arch_atomic_fetch_sub_relaxed arch_atomic_fetch_sub
#else

#ifndef arch_atomic_fetch_sub_acquire
static __always_inline int
arch_atomic_fetch_sub_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_sub_acquire arch_atomic_fetch_sub_acquire
#endif

#ifndef arch_atomic_fetch_sub_release
static __always_inline int
arch_atomic_fetch_sub_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_sub_relaxed(i, v);
}
#define arch_atomic_fetch_sub_release arch_atomic_fetch_sub_release
#endif

#ifndef arch_atomic_fetch_sub
static __always_inline int
arch_atomic_fetch_sub(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_sub_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_sub arch_atomic_fetch_sub
#endif

#endif

#ifndef arch_atomic_inc
static __always_inline void
arch_atomic_inc(atomic_t *v)
{
	arch_atomic_add(1, v);
}
#define arch_atomic_inc arch_atomic_inc
#endif

#ifndef arch_atomic_inc_return_relaxed
#ifdef arch_atomic_inc_return
#define arch_atomic_inc_return_acquire arch_atomic_inc_return
#define arch_atomic_inc_return_release arch_atomic_inc_return
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	return arch_atomic_add_return(1, v);
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	return arch_atomic_add_return_acquire(1, v);
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	return arch_atomic_add_return_release(1, v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return_relaxed
static __always_inline int
arch_atomic_inc_return_relaxed(atomic_t *v)
{
	return arch_atomic_add_return_relaxed(1, v);
}
#define arch_atomic_inc_return_relaxed arch_atomic_inc_return_relaxed
#endif

#else

#ifndef arch_atomic_inc_return_acquire
static __always_inline int
arch_atomic_inc_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_inc_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_inc_return_acquire arch_atomic_inc_return_acquire
#endif

#ifndef arch_atomic_inc_return_release
static __always_inline int
arch_atomic_inc_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_inc_return_relaxed(v);
}
#define arch_atomic_inc_return_release arch_atomic_inc_return_release
#endif

#ifndef arch_atomic_inc_return
static __always_inline int
arch_atomic_inc_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_inc_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_inc_return arch_atomic_inc_return
#endif

#endif

#ifndef arch_atomic_fetch_inc_relaxed
#ifdef arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	return arch_atomic_fetch_add(1, v);
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	return arch_atomic_fetch_add_acquire(1, v);
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	return arch_atomic_fetch_add_release(1, v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc_relaxed
static __always_inline int
arch_atomic_fetch_inc_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_add_relaxed(1, v);
}
#define arch_atomic_fetch_inc_relaxed arch_atomic_fetch_inc_relaxed
#endif

#else

#ifndef arch_atomic_fetch_inc_acquire
static __always_inline int
arch_atomic_fetch_inc_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_inc_acquire arch_atomic_fetch_inc_acquire
#endif

#ifndef arch_atomic_fetch_inc_release
static __always_inline int
arch_atomic_fetch_inc_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_inc_relaxed(v);
}
#define arch_atomic_fetch_inc_release arch_atomic_fetch_inc_release
#endif

#ifndef arch_atomic_fetch_inc
static __always_inline int
arch_atomic_fetch_inc(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_inc_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_inc arch_atomic_fetch_inc
#endif

#endif

#ifndef arch_atomic_dec
static __always_inline void
arch_atomic_dec(atomic_t *v)
{
	arch_atomic_sub(1, v);
}
#define arch_atomic_dec arch_atomic_dec
#endif

#ifndef arch_atomic_dec_return_relaxed
#ifdef arch_atomic_dec_return
#define arch_atomic_dec_return_acquire arch_atomic_dec_return
#define arch_atomic_dec_return_release arch_atomic_dec_return
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	return arch_atomic_sub_return(1, v);
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	return arch_atomic_sub_return_acquire(1, v);
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	return arch_atomic_sub_return_release(1, v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return_relaxed
static __always_inline int
arch_atomic_dec_return_relaxed(atomic_t *v)
{
	return arch_atomic_sub_return_relaxed(1, v);
}
#define arch_atomic_dec_return_relaxed arch_atomic_dec_return_relaxed
#endif

#else

#ifndef arch_atomic_dec_return_acquire
static __always_inline int
arch_atomic_dec_return_acquire(atomic_t *v)
{
	int ret = arch_atomic_dec_return_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_dec_return_acquire arch_atomic_dec_return_acquire
#endif

#ifndef arch_atomic_dec_return_release
static __always_inline int
arch_atomic_dec_return_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_dec_return_relaxed(v);
}
#define arch_atomic_dec_return_release arch_atomic_dec_return_release
#endif

#ifndef arch_atomic_dec_return
static __always_inline int
arch_atomic_dec_return(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_dec_return_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_dec_return arch_atomic_dec_return
#endif

#endif

#ifndef arch_atomic_fetch_dec_relaxed
#ifdef arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	return arch_atomic_fetch_sub(1, v);
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	return arch_atomic_fetch_sub_acquire(1, v);
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	return arch_atomic_fetch_sub_release(1, v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec_relaxed
static __always_inline int
arch_atomic_fetch_dec_relaxed(atomic_t *v)
{
	return arch_atomic_fetch_sub_relaxed(1, v);
}
#define arch_atomic_fetch_dec_relaxed arch_atomic_fetch_dec_relaxed
#endif

#else

#ifndef arch_atomic_fetch_dec_acquire
static __always_inline int
arch_atomic_fetch_dec_acquire(atomic_t *v)
{
	int ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_dec_acquire arch_atomic_fetch_dec_acquire
#endif

#ifndef arch_atomic_fetch_dec_release
static __always_inline int
arch_atomic_fetch_dec_release(atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_dec_relaxed(v);
}
#define arch_atomic_fetch_dec_release arch_atomic_fetch_dec_release
#endif

#ifndef arch_atomic_fetch_dec
static __always_inline int
arch_atomic_fetch_dec(atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_dec_relaxed(v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_dec arch_atomic_fetch_dec
#endif

#endif

#ifndef arch_atomic_fetch_and_relaxed
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and
#define arch_atomic_fetch_and_release arch_atomic_fetch_and
#define arch_atomic_fetch_and_relaxed arch_atomic_fetch_and
#else

#ifndef arch_atomic_fetch_and_acquire
static __always_inline int
arch_atomic_fetch_and_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_and_acquire arch_atomic_fetch_and_acquire
#endif

#ifndef arch_atomic_fetch_and_release
static __always_inline int
arch_atomic_fetch_and_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_and_relaxed(i, v);
}
#define arch_atomic_fetch_and_release arch_atomic_fetch_and_release
#endif

#ifndef arch_atomic_fetch_and
static __always_inline int
arch_atomic_fetch_and(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_and_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_and arch_atomic_fetch_and
#endif

#endif

#ifndef arch_atomic_andnot
static __always_inline void
arch_atomic_andnot(int i, atomic_t *v)
{
	arch_atomic_and(~i, v);
}
#define arch_atomic_andnot arch_atomic_andnot
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
#ifdef arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	return arch_atomic_fetch_and(~i, v);
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_acquire(~i, v);
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_release(~i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot_relaxed
static __always_inline int
arch_atomic_fetch_andnot_relaxed(int i, atomic_t *v)
{
	return arch_atomic_fetch_and_relaxed(~i, v);
}
#define arch_atomic_fetch_andnot_relaxed arch_atomic_fetch_andnot_relaxed
#endif

#else

#ifndef arch_atomic_fetch_andnot_acquire
static __always_inline int
arch_atomic_fetch_andnot_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_andnot_acquire arch_atomic_fetch_andnot_acquire
#endif

#ifndef arch_atomic_fetch_andnot_release
static __always_inline int
arch_atomic_fetch_andnot_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_andnot_relaxed(i, v);
}
#define arch_atomic_fetch_andnot_release arch_atomic_fetch_andnot_release
#endif

#ifndef arch_atomic_fetch_andnot
static __always_inline int
arch_atomic_fetch_andnot(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_andnot_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_andnot arch_atomic_fetch_andnot
#endif

#endif

#ifndef arch_atomic_fetch_or_relaxed
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or
#define arch_atomic_fetch_or_release arch_atomic_fetch_or
#define arch_atomic_fetch_or_relaxed arch_atomic_fetch_or
#else

#ifndef arch_atomic_fetch_or_acquire
static __always_inline int
arch_atomic_fetch_or_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_or_acquire arch_atomic_fetch_or_acquire
#endif

#ifndef arch_atomic_fetch_or_release
static __always_inline int
arch_atomic_fetch_or_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_or_relaxed(i, v);
}
#define arch_atomic_fetch_or_release arch_atomic_fetch_or_release
#endif

#ifndef arch_atomic_fetch_or
static __always_inline int
arch_atomic_fetch_or(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_or_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_or arch_atomic_fetch_or
#endif

#endif

#ifndef arch_atomic_fetch_xor_relaxed
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor
#define arch_atomic_fetch_xor_relaxed arch_atomic_fetch_xor
#else

#ifndef arch_atomic_fetch_xor_acquire
static __always_inline int
arch_atomic_fetch_xor_acquire(int i, atomic_t *v)
{
	int ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_fetch_xor_acquire arch_atomic_fetch_xor_acquire
#endif

#ifndef arch_atomic_fetch_xor_release
static __always_inline int
arch_atomic_fetch_xor_release(int i, atomic_t *v)
{
	__atomic_release_fence();
	return arch_atomic_fetch_xor_relaxed(i, v);
}
#define arch_atomic_fetch_xor_release arch_atomic_fetch_xor_release
#endif

#ifndef arch_atomic_fetch_xor
static __always_inline int
arch_atomic_fetch_xor(int i, atomic_t *v)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_fetch_xor_relaxed(i, v);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_fetch_xor arch_atomic_fetch_xor
#endif

#endif

#ifndef arch_atomic_xchg_relaxed
#define arch_atomic_xchg_acquire arch_atomic_xchg
#define arch_atomic_xchg_release arch_atomic_xchg
#define arch_atomic_xchg_relaxed arch_atomic_xchg
#else

#ifndef arch_atomic_xchg_acquire
static __always_inline int
arch_atomic_xchg_acquire(atomic_t *v, int i)
{
	int ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_xchg_acquire arch_atomic_xchg_acquire
#endif

#ifndef arch_atomic_xchg_release
static __always_inline int
arch_atomic_xchg_release(atomic_t *v, int i)
{
	__atomic_release_fence();
	return arch_atomic_xchg_relaxed(v, i);
}
#define arch_atomic_xchg_release arch_atomic_xchg_release
#endif

#ifndef arch_atomic_xchg
static __always_inline int
arch_atomic_xchg(atomic_t *v, int i)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_xchg_relaxed(v, i);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_xchg arch_atomic_xchg
#endif

#endif

#ifndef arch_atomic_cmpxchg_relaxed
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg
#define arch_atomic_cmpxchg_relaxed arch_atomic_cmpxchg
#else

#ifndef arch_atomic_cmpxchg_acquire
static __always_inline int
arch_atomic_cmpxchg_acquire(atomic_t *v, int old, int new)
{
	int ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_cmpxchg_acquire arch_atomic_cmpxchg_acquire
#endif

#ifndef arch_atomic_cmpxchg_release
static __always_inline int
arch_atomic_cmpxchg_release(atomic_t *v, int old, int new)
{
	__atomic_release_fence();
	return arch_atomic_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_cmpxchg_release arch_atomic_cmpxchg_release
#endif

#ifndef arch_atomic_cmpxchg
static __always_inline int
arch_atomic_cmpxchg(atomic_t *v, int old, int new)
{
	int ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_cmpxchg arch_atomic_cmpxchg
#endif

#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
#ifdef arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_acquire(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_release(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic_try_cmpxchg_relaxed(atomic_t *v, int *old, int new)
{
	int r, o = *old;
	r = arch_atomic_cmpxchg_relaxed(v, o, new);
	if (unlikely(r != o))
		*old = r;
	return likely(r == o);
}
#define arch_atomic_try_cmpxchg_relaxed arch_atomic_try_cmpxchg_relaxed
#endif

#else

#ifndef arch_atomic_try_cmpxchg_acquire
static __always_inline bool
arch_atomic_try_cmpxchg_acquire(atomic_t *v, int *old, int new)
{
	bool ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_acquire_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg_acquire arch_atomic_try_cmpxchg_acquire
#endif

#ifndef arch_atomic_try_cmpxchg_release
static __always_inline bool
arch_atomic_try_cmpxchg_release(atomic_t *v, int *old, int new)
{
	__atomic_release_fence();
	return arch_atomic_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic_try_cmpxchg_release arch_atomic_try_cmpxchg_release
#endif

#ifndef arch_atomic_try_cmpxchg
static __always_inline bool
arch_atomic_try_cmpxchg(atomic_t *v, int *old, int new)
{
	bool ret;
	__atomic_pre_full_fence();
	ret = arch_atomic_try_cmpxchg_relaxed(v, old, new);
	__atomic_post_full_fence();
	return ret;
}
#define arch_atomic_try_cmpxchg arch_atomic_try_cmpxchg
#endif

#endif

#ifndef arch_atomic_sub_and_test
/**
 * arch_atomic_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_sub_and_test(int i, atomic_t *v)
{
	return arch_atomic_sub_return(i, v) == 0;
}
#define arch_atomic_sub_and_test arch_atomic_sub_and_test
#endif

#ifndef arch_atomic_dec_and_test
/**
 * arch_atomic_dec_and_test - decrement and test
 * @v: pointer of type atomic_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic_dec_and_test(atomic_t *v)
{
	return arch_atomic_dec_return(v) == 0;
}
#define arch_atomic_dec_and_test arch_atomic_dec_and_test
#endif

#ifndef arch_atomic_inc_and_test
/**
 * arch_atomic_inc_and_test - increment and test
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic_inc_and_test(atomic_t *v)
{
	return arch_atomic_inc_return(v) == 0;
}
#define arch_atomic_inc_and_test arch_atomic_inc_and_test
#endif

#ifndef arch_atomic_add_negative
/**
 * arch_atomic_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic_add_negative(int i, atomic_t *v)
{
	return arch_atomic_add_return(i, v) < 0;
}
#define arch_atomic_add_negative arch_atomic_add_negative
#endif

#ifndef arch_atomic_fetch_add_unless
/**
 * arch_atomic_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v
 */
static __always_inline int
arch_atomic_fetch_add_unless(atomic_t *v, int a, int u)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c == u))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + a));

	return c;
}
#define arch_atomic_fetch_add_unless arch_atomic_fetch_add_unless
#endif
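
/*
 * arch_atomic_fetch_add_unless() above is the canonical try_cmpxchg()
 * loop: read the value once, then let each failed try_cmpxchg() refresh
 * @c with the value actually found in memory, until either the update
 * succeeds or the exclusion value @u is observed.  The remaining
 * conditional operations below follow the same shape.
 */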

#ifndef arch_atomic_add_unless
/**
 * arch_atomic_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic_add_unless(atomic_t *v, int a, int u)
{
	return arch_atomic_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic_add_unless arch_atomic_add_unless
#endif

#ifndef arch_atomic_inc_not_zero
/**
 * arch_atomic_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic_inc_not_zero(atomic_t *v)
{
	return arch_atomic_add_unless(v, 1, 0);
}
#define arch_atomic_inc_not_zero arch_atomic_inc_not_zero
#endif

#ifndef arch_atomic_inc_unless_negative
static __always_inline bool
arch_atomic_inc_unless_negative(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c < 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c + 1));

	return true;
}
#define arch_atomic_inc_unless_negative arch_atomic_inc_unless_negative
#endif

#ifndef arch_atomic_dec_unless_positive
static __always_inline bool
arch_atomic_dec_unless_positive(atomic_t *v)
{
	int c = arch_atomic_read(v);

	do {
		if (unlikely(c > 0))
			return false;
	} while (!arch_atomic_try_cmpxchg(v, &c, c - 1));

	return true;
}
#define arch_atomic_dec_unless_positive arch_atomic_dec_unless_positive
#endif

#ifndef arch_atomic_dec_if_positive
static __always_inline int
arch_atomic_dec_if_positive(atomic_t *v)
{
	int dec, c = arch_atomic_read(v);

	do {
		dec = c - 1;
		if (unlikely(dec < 0))
			break;
	} while (!arch_atomic_try_cmpxchg(v, &c, dec));

	return dec;
}
#define arch_atomic_dec_if_positive arch_atomic_dec_if_positive
#endif
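
/*
 * Note that arch_atomic_dec_if_positive() returns the old value minus
 * one whether or not the decrement was performed; a negative return
 * value means @v was left unchanged.
 */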

#ifdef CONFIG_GENERIC_ATOMIC64
#include <asm-generic/atomic64.h>
#endif
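
/*
 * Architectures without native 64-bit atomics select
 * CONFIG_GENERIC_ATOMIC64 above and get a generic, lock-based
 * atomic64_t implementation (see asm-generic/atomic64.h); the
 * fallbacks below then only need to fill in the ordering variants,
 * mirroring the atomic_t fallbacks above.
 */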
1340
1341 #ifndef arch_atomic64_read_acquire
1342 static __always_inline s64
1343 arch_atomic64_read_acquire(const atomic64_t *v)
1344 {
1345 s64 ret;
1346
1347 if (__native_word(atomic64_t)) {
1348 ret = smp_load_acquire(&(v)->counter);
1349 } else {
1350 ret = arch_atomic64_read(v);
1351 __atomic_acquire_fence();
1352 }
1353
1354 return ret;
1355 }
1356 #define arch_atomic64_read_acquire arch_atomic64_read_acquire
1357 #endif
1358
1359 #ifndef arch_atomic64_set_release
1360 static __always_inline void
1361 arch_atomic64_set_release(atomic64_t *v, s64 i)
1362 {
1363 if (__native_word(atomic64_t)) {
1364 smp_store_release(&(v)->counter, i);
1365 } else {
1366 __atomic_release_fence();
1367 arch_atomic64_set(v, i);
1368 }
1369 }
1370 #define arch_atomic64_set_release arch_atomic64_set_release
1371 #endif
1372
1373 #ifndef arch_atomic64_add_return_relaxed
1374 #define arch_atomic64_add_return_acquire arch_atomic64_add_return
1375 #define arch_atomic64_add_return_release arch_atomic64_add_return
1376 #define arch_atomic64_add_return_relaxed arch_atomic64_add_return
1377 #else
1378
1379 #ifndef arch_atomic64_add_return_acquire
1380 static __always_inline s64
1381 arch_atomic64_add_return_acquire(s64 i, atomic64_t *v)
1382 {
1383 s64 ret = arch_atomic64_add_return_relaxed(i, v);
1384 __atomic_acquire_fence();
1385 return ret;
1386 }
1387 #define arch_atomic64_add_return_acquire arch_atomic64_add_return_acquire
1388 #endif
1389
1390 #ifndef arch_atomic64_add_return_release
1391 static __always_inline s64
1392 arch_atomic64_add_return_release(s64 i, atomic64_t *v)
1393 {
1394 __atomic_release_fence();
1395 return arch_atomic64_add_return_relaxed(i, v);
1396 }
1397 #define arch_atomic64_add_return_release arch_atomic64_add_return_release
1398 #endif
1399
1400 #ifndef arch_atomic64_add_return
1401 static __always_inline s64
1402 arch_atomic64_add_return(s64 i, atomic64_t *v)
1403 {
1404 s64 ret;
1405 __atomic_pre_full_fence();
1406 ret = arch_atomic64_add_return_relaxed(i, v);
1407 __atomic_post_full_fence();
1408 return ret;
1409 }
1410 #define arch_atomic64_add_return arch_atomic64_add_return
1411 #endif
1412
1413 #endif
1414
1415 #ifndef arch_atomic64_fetch_add_relaxed
1416 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add
1417 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add
1418 #define arch_atomic64_fetch_add_relaxed arch_atomic64_fetch_add
1419 #else
1420
1421 #ifndef arch_atomic64_fetch_add_acquire
1422 static __always_inline s64
1423 arch_atomic64_fetch_add_acquire(s64 i, atomic64_t *v)
1424 {
1425 s64 ret = arch_atomic64_fetch_add_relaxed(i, v);
1426 __atomic_acquire_fence();
1427 return ret;
1428 }
1429 #define arch_atomic64_fetch_add_acquire arch_atomic64_fetch_add_acquire
1430 #endif
1431
1432 #ifndef arch_atomic64_fetch_add_release
1433 static __always_inline s64
1434 arch_atomic64_fetch_add_release(s64 i, atomic64_t *v)
1435 {
1436 __atomic_release_fence();
1437 return arch_atomic64_fetch_add_relaxed(i, v);
1438 }
1439 #define arch_atomic64_fetch_add_release arch_atomic64_fetch_add_release
1440 #endif
1441
1442 #ifndef arch_atomic64_fetch_add
1443 static __always_inline s64
1444 arch_atomic64_fetch_add(s64 i, atomic64_t *v)
1445 {
1446 s64 ret;
1447 __atomic_pre_full_fence();
1448 ret = arch_atomic64_fetch_add_relaxed(i, v);
1449 __atomic_post_full_fence();
1450 return ret;
1451 }
1452 #define arch_atomic64_fetch_add arch_atomic64_fetch_add
1453 #endif
1454
1455 #endif
1456
1457 #ifndef arch_atomic64_sub_return_relaxed
1458 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return
1459 #define arch_atomic64_sub_return_release arch_atomic64_sub_return
1460 #define arch_atomic64_sub_return_relaxed arch_atomic64_sub_return
1461 #else
1462
1463 #ifndef arch_atomic64_sub_return_acquire
1464 static __always_inline s64
1465 arch_atomic64_sub_return_acquire(s64 i, atomic64_t *v)
1466 {
1467 s64 ret = arch_atomic64_sub_return_relaxed(i, v);
1468 __atomic_acquire_fence();
1469 return ret;
1470 }
1471 #define arch_atomic64_sub_return_acquire arch_atomic64_sub_return_acquire
1472 #endif
1473
1474 #ifndef arch_atomic64_sub_return_release
1475 static __always_inline s64
1476 arch_atomic64_sub_return_release(s64 i, atomic64_t *v)
1477 {
1478 __atomic_release_fence();
1479 return arch_atomic64_sub_return_relaxed(i, v);
1480 }
1481 #define arch_atomic64_sub_return_release arch_atomic64_sub_return_release
1482 #endif
1483
1484 #ifndef arch_atomic64_sub_return
1485 static __always_inline s64
1486 arch_atomic64_sub_return(s64 i, atomic64_t *v)
1487 {
1488 s64 ret;
1489 __atomic_pre_full_fence();
1490 ret = arch_atomic64_sub_return_relaxed(i, v);
1491 __atomic_post_full_fence();
1492 return ret;
1493 }
1494 #define arch_atomic64_sub_return arch_atomic64_sub_return
1495 #endif
1496
1497 #endif
1498
1499 #ifndef arch_atomic64_fetch_sub_relaxed
1500 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub
1501 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub
1502 #define arch_atomic64_fetch_sub_relaxed arch_atomic64_fetch_sub
1503 #else
1504
1505 #ifndef arch_atomic64_fetch_sub_acquire
1506 static __always_inline s64
1507 arch_atomic64_fetch_sub_acquire(s64 i, atomic64_t *v)
1508 {
1509 s64 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1510 __atomic_acquire_fence();
1511 return ret;
1512 }
1513 #define arch_atomic64_fetch_sub_acquire arch_atomic64_fetch_sub_acquire
1514 #endif
1515
1516 #ifndef arch_atomic64_fetch_sub_release
1517 static __always_inline s64
1518 arch_atomic64_fetch_sub_release(s64 i, atomic64_t *v)
1519 {
1520 __atomic_release_fence();
1521 return arch_atomic64_fetch_sub_relaxed(i, v);
1522 }
1523 #define arch_atomic64_fetch_sub_release arch_atomic64_fetch_sub_release
1524 #endif
1525
1526 #ifndef arch_atomic64_fetch_sub
1527 static __always_inline s64
1528 arch_atomic64_fetch_sub(s64 i, atomic64_t *v)
1529 {
1530 s64 ret;
1531 __atomic_pre_full_fence();
1532 ret = arch_atomic64_fetch_sub_relaxed(i, v);
1533 __atomic_post_full_fence();
1534 return ret;
1535 }
1536 #define arch_atomic64_fetch_sub arch_atomic64_fetch_sub
1537 #endif
1538
1539 #endif
1540
1541 #ifndef arch_atomic64_inc
1542 static __always_inline void
1543 arch_atomic64_inc(atomic64_t *v)
1544 {
1545 arch_atomic64_add(1, v);
1546 }
1547 #define arch_atomic64_inc arch_atomic64_inc
1548 #endif
1549
1550 #ifndef arch_atomic64_inc_return_relaxed
1551 #ifdef arch_atomic64_inc_return
1552 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return
1553 #define arch_atomic64_inc_return_release arch_atomic64_inc_return
1554 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return
1555 #endif
1556
1557 #ifndef arch_atomic64_inc_return
1558 static __always_inline s64
1559 arch_atomic64_inc_return(atomic64_t *v)
1560 {
1561 return arch_atomic64_add_return(1, v);
1562 }
1563 #define arch_atomic64_inc_return arch_atomic64_inc_return
1564 #endif
1565
1566 #ifndef arch_atomic64_inc_return_acquire
1567 static __always_inline s64
1568 arch_atomic64_inc_return_acquire(atomic64_t *v)
1569 {
1570 return arch_atomic64_add_return_acquire(1, v);
1571 }
1572 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1573 #endif
1574
1575 #ifndef arch_atomic64_inc_return_release
1576 static __always_inline s64
1577 arch_atomic64_inc_return_release(atomic64_t *v)
1578 {
1579 return arch_atomic64_add_return_release(1, v);
1580 }
1581 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1582 #endif
1583
1584 #ifndef arch_atomic64_inc_return_relaxed
1585 static __always_inline s64
1586 arch_atomic64_inc_return_relaxed(atomic64_t *v)
1587 {
1588 return arch_atomic64_add_return_relaxed(1, v);
1589 }
1590 #define arch_atomic64_inc_return_relaxed arch_atomic64_inc_return_relaxed
1591 #endif
1592
1593 #else
1594
1595 #ifndef arch_atomic64_inc_return_acquire
1596 static __always_inline s64
1597 arch_atomic64_inc_return_acquire(atomic64_t *v)
1598 {
1599 s64 ret = arch_atomic64_inc_return_relaxed(v);
1600 __atomic_acquire_fence();
1601 return ret;
1602 }
1603 #define arch_atomic64_inc_return_acquire arch_atomic64_inc_return_acquire
1604 #endif
1605
1606 #ifndef arch_atomic64_inc_return_release
1607 static __always_inline s64
1608 arch_atomic64_inc_return_release(atomic64_t *v)
1609 {
1610 __atomic_release_fence();
1611 return arch_atomic64_inc_return_relaxed(v);
1612 }
1613 #define arch_atomic64_inc_return_release arch_atomic64_inc_return_release
1614 #endif
1615
1616 #ifndef arch_atomic64_inc_return
1617 static __always_inline s64
1618 arch_atomic64_inc_return(atomic64_t *v)
1619 {
1620 s64 ret;
1621 __atomic_pre_full_fence();
1622 ret = arch_atomic64_inc_return_relaxed(v);
1623 __atomic_post_full_fence();
1624 return ret;
1625 }
1626 #define arch_atomic64_inc_return arch_atomic64_inc_return
1627 #endif
1628
1629 #endif
1630
1631 #ifndef arch_atomic64_fetch_inc_relaxed
1632 #ifdef arch_atomic64_fetch_inc
1633 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc
1634 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc
1635 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc
1636 #endif
1637
1638 #ifndef arch_atomic64_fetch_inc
1639 static __always_inline s64
1640 arch_atomic64_fetch_inc(atomic64_t *v)
1641 {
1642 return arch_atomic64_fetch_add(1, v);
1643 }
1644 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1645 #endif
1646
1647 #ifndef arch_atomic64_fetch_inc_acquire
1648 static __always_inline s64
1649 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1650 {
1651 return arch_atomic64_fetch_add_acquire(1, v);
1652 }
1653 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1654 #endif
1655
1656 #ifndef arch_atomic64_fetch_inc_release
1657 static __always_inline s64
1658 arch_atomic64_fetch_inc_release(atomic64_t *v)
1659 {
1660 return arch_atomic64_fetch_add_release(1, v);
1661 }
1662 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1663 #endif
1664
1665 #ifndef arch_atomic64_fetch_inc_relaxed
1666 static __always_inline s64
1667 arch_atomic64_fetch_inc_relaxed(atomic64_t *v)
1668 {
1669 return arch_atomic64_fetch_add_relaxed(1, v);
1670 }
1671 #define arch_atomic64_fetch_inc_relaxed arch_atomic64_fetch_inc_relaxed
1672 #endif
1673
1674 #else
1675
1676 #ifndef arch_atomic64_fetch_inc_acquire
1677 static __always_inline s64
1678 arch_atomic64_fetch_inc_acquire(atomic64_t *v)
1679 {
1680 s64 ret = arch_atomic64_fetch_inc_relaxed(v);
1681 __atomic_acquire_fence();
1682 return ret;
1683 }
1684 #define arch_atomic64_fetch_inc_acquire arch_atomic64_fetch_inc_acquire
1685 #endif
1686
1687 #ifndef arch_atomic64_fetch_inc_release
1688 static __always_inline s64
1689 arch_atomic64_fetch_inc_release(atomic64_t *v)
1690 {
1691 __atomic_release_fence();
1692 return arch_atomic64_fetch_inc_relaxed(v);
1693 }
1694 #define arch_atomic64_fetch_inc_release arch_atomic64_fetch_inc_release
1695 #endif
1696
1697 #ifndef arch_atomic64_fetch_inc
1698 static __always_inline s64
1699 arch_atomic64_fetch_inc(atomic64_t *v)
1700 {
1701 s64 ret;
1702 __atomic_pre_full_fence();
1703 ret = arch_atomic64_fetch_inc_relaxed(v);
1704 __atomic_post_full_fence();
1705 return ret;
1706 }
1707 #define arch_atomic64_fetch_inc arch_atomic64_fetch_inc
1708 #endif
1709
1710 #endif
1711
1712 #ifndef arch_atomic64_dec
1713 static __always_inline void
1714 arch_atomic64_dec(atomic64_t *v)
1715 {
1716 arch_atomic64_sub(1, v);
1717 }
1718 #define arch_atomic64_dec arch_atomic64_dec
1719 #endif
1720
1721 #ifndef arch_atomic64_dec_return_relaxed
1722 #ifdef arch_atomic64_dec_return
1723 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return
1724 #define arch_atomic64_dec_return_release arch_atomic64_dec_return
1725 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return
1726 #endif
1727
1728 #ifndef arch_atomic64_dec_return
1729 static __always_inline s64
1730 arch_atomic64_dec_return(atomic64_t *v)
1731 {
1732 return arch_atomic64_sub_return(1, v);
1733 }
1734 #define arch_atomic64_dec_return arch_atomic64_dec_return
1735 #endif
1736
1737 #ifndef arch_atomic64_dec_return_acquire
1738 static __always_inline s64
1739 arch_atomic64_dec_return_acquire(atomic64_t *v)
1740 {
1741 return arch_atomic64_sub_return_acquire(1, v);
1742 }
1743 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1744 #endif
1745
1746 #ifndef arch_atomic64_dec_return_release
1747 static __always_inline s64
1748 arch_atomic64_dec_return_release(atomic64_t *v)
1749 {
1750 return arch_atomic64_sub_return_release(1, v);
1751 }
1752 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1753 #endif
1754
1755 #ifndef arch_atomic64_dec_return_relaxed
1756 static __always_inline s64
1757 arch_atomic64_dec_return_relaxed(atomic64_t *v)
1758 {
1759 return arch_atomic64_sub_return_relaxed(1, v);
1760 }
1761 #define arch_atomic64_dec_return_relaxed arch_atomic64_dec_return_relaxed
1762 #endif
1763
1764 #else
1765
1766 #ifndef arch_atomic64_dec_return_acquire
1767 static __always_inline s64
1768 arch_atomic64_dec_return_acquire(atomic64_t *v)
1769 {
1770 s64 ret = arch_atomic64_dec_return_relaxed(v);
1771 __atomic_acquire_fence();
1772 return ret;
1773 }
1774 #define arch_atomic64_dec_return_acquire arch_atomic64_dec_return_acquire
1775 #endif
1776
1777 #ifndef arch_atomic64_dec_return_release
1778 static __always_inline s64
1779 arch_atomic64_dec_return_release(atomic64_t *v)
1780 {
1781 __atomic_release_fence();
1782 return arch_atomic64_dec_return_relaxed(v);
1783 }
1784 #define arch_atomic64_dec_return_release arch_atomic64_dec_return_release
1785 #endif
1786
1787 #ifndef arch_atomic64_dec_return
1788 static __always_inline s64
1789 arch_atomic64_dec_return(atomic64_t *v)
1790 {
1791 s64 ret;
1792 __atomic_pre_full_fence();
1793 ret = arch_atomic64_dec_return_relaxed(v);
1794 __atomic_post_full_fence();
1795 return ret;
1796 }
1797 #define arch_atomic64_dec_return arch_atomic64_dec_return
1798 #endif
1799
1800 #endif
1801
1802 #ifndef arch_atomic64_fetch_dec_relaxed
1803 #ifdef arch_atomic64_fetch_dec
1804 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec
1805 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec
1806 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec
1807 #endif
1808
1809 #ifndef arch_atomic64_fetch_dec
1810 static __always_inline s64
1811 arch_atomic64_fetch_dec(atomic64_t *v)
1812 {
1813 return arch_atomic64_fetch_sub(1, v);
1814 }
1815 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1816 #endif
1817
1818 #ifndef arch_atomic64_fetch_dec_acquire
1819 static __always_inline s64
1820 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1821 {
1822 return arch_atomic64_fetch_sub_acquire(1, v);
1823 }
1824 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1825 #endif
1826
1827 #ifndef arch_atomic64_fetch_dec_release
1828 static __always_inline s64
1829 arch_atomic64_fetch_dec_release(atomic64_t *v)
1830 {
1831 return arch_atomic64_fetch_sub_release(1, v);
1832 }
1833 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1834 #endif
1835
1836 #ifndef arch_atomic64_fetch_dec_relaxed
1837 static __always_inline s64
1838 arch_atomic64_fetch_dec_relaxed(atomic64_t *v)
1839 {
1840 return arch_atomic64_fetch_sub_relaxed(1, v);
1841 }
1842 #define arch_atomic64_fetch_dec_relaxed arch_atomic64_fetch_dec_relaxed
1843 #endif
1844
1845 #else
1846
1847 #ifndef arch_atomic64_fetch_dec_acquire
1848 static __always_inline s64
1849 arch_atomic64_fetch_dec_acquire(atomic64_t *v)
1850 {
1851 s64 ret = arch_atomic64_fetch_dec_relaxed(v);
1852 __atomic_acquire_fence();
1853 return ret;
1854 }
1855 #define arch_atomic64_fetch_dec_acquire arch_atomic64_fetch_dec_acquire
1856 #endif
1857
1858 #ifndef arch_atomic64_fetch_dec_release
1859 static __always_inline s64
1860 arch_atomic64_fetch_dec_release(atomic64_t *v)
1861 {
1862 __atomic_release_fence();
1863 return arch_atomic64_fetch_dec_relaxed(v);
1864 }
1865 #define arch_atomic64_fetch_dec_release arch_atomic64_fetch_dec_release
1866 #endif
1867
1868 #ifndef arch_atomic64_fetch_dec
1869 static __always_inline s64
1870 arch_atomic64_fetch_dec(atomic64_t *v)
1871 {
1872 s64 ret;
1873 __atomic_pre_full_fence();
1874 ret = arch_atomic64_fetch_dec_relaxed(v);
1875 __atomic_post_full_fence();
1876 return ret;
1877 }
1878 #define arch_atomic64_fetch_dec arch_atomic64_fetch_dec
1879 #endif
1880
1881 #endif
1882
1883 #ifndef arch_atomic64_fetch_and_relaxed
1884 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and
1885 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and
1886 #define arch_atomic64_fetch_and_relaxed arch_atomic64_fetch_and
1887 #else
1888
1889 #ifndef arch_atomic64_fetch_and_acquire
1890 static __always_inline s64
1891 arch_atomic64_fetch_and_acquire(s64 i, atomic64_t *v)
1892 {
1893 s64 ret = arch_atomic64_fetch_and_relaxed(i, v);
1894 __atomic_acquire_fence();
1895 return ret;
1896 }
1897 #define arch_atomic64_fetch_and_acquire arch_atomic64_fetch_and_acquire
1898 #endif
1899
1900 #ifndef arch_atomic64_fetch_and_release
1901 static __always_inline s64
1902 arch_atomic64_fetch_and_release(s64 i, atomic64_t *v)
1903 {
1904 __atomic_release_fence();
1905 return arch_atomic64_fetch_and_relaxed(i, v);
1906 }
1907 #define arch_atomic64_fetch_and_release arch_atomic64_fetch_and_release
1908 #endif
1909
1910 #ifndef arch_atomic64_fetch_and
1911 static __always_inline s64
1912 arch_atomic64_fetch_and(s64 i, atomic64_t *v)
1913 {
1914 s64 ret;
1915 __atomic_pre_full_fence();
1916 ret = arch_atomic64_fetch_and_relaxed(i, v);
1917 __atomic_post_full_fence();
1918 return ret;
1919 }
1920 #define arch_atomic64_fetch_and arch_atomic64_fetch_and
1921 #endif
1922
1923 #endif
1924
1925 #ifndef arch_atomic64_andnot
1926 static __always_inline void
1927 arch_atomic64_andnot(s64 i, atomic64_t *v)
1928 {
1929 arch_atomic64_and(~i, v);
1930 }
1931 #define arch_atomic64_andnot arch_atomic64_andnot
1932 #endif
1933
#ifndef arch_atomic64_fetch_andnot_relaxed
#ifdef arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
        return arch_atomic64_fetch_and(~i, v);
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
        return arch_atomic64_fetch_and_acquire(~i, v);
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
        return arch_atomic64_fetch_and_release(~i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot_relaxed
static __always_inline s64
arch_atomic64_fetch_andnot_relaxed(s64 i, atomic64_t *v)
{
        return arch_atomic64_fetch_and_relaxed(~i, v);
}
#define arch_atomic64_fetch_andnot_relaxed arch_atomic64_fetch_andnot_relaxed
#endif

#else

#ifndef arch_atomic64_fetch_andnot_acquire
static __always_inline s64
arch_atomic64_fetch_andnot_acquire(s64 i, atomic64_t *v)
{
        s64 ret = arch_atomic64_fetch_andnot_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_fetch_andnot_acquire arch_atomic64_fetch_andnot_acquire
#endif

#ifndef arch_atomic64_fetch_andnot_release
static __always_inline s64
arch_atomic64_fetch_andnot_release(s64 i, atomic64_t *v)
{
        __atomic_release_fence();
        return arch_atomic64_fetch_andnot_relaxed(i, v);
}
#define arch_atomic64_fetch_andnot_release arch_atomic64_fetch_andnot_release
#endif

#ifndef arch_atomic64_fetch_andnot
static __always_inline s64
arch_atomic64_fetch_andnot(s64 i, atomic64_t *v)
{
        s64 ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_fetch_andnot_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_fetch_andnot arch_atomic64_fetch_andnot
#endif

#endif

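/*
 * Note that fetch_andnot falls back in both directions: when an
 * architecture provides no fetch_andnot at all, each ordering variant is
 * derived from the matching fetch_and with an inverted mask; when only
 * fetch_andnot_relaxed exists, the stronger orderings are synthesized
 * with fences as usual.
 */
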
#ifndef arch_atomic64_fetch_or_relaxed
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or
#define arch_atomic64_fetch_or_relaxed arch_atomic64_fetch_or
#else

#ifndef arch_atomic64_fetch_or_acquire
static __always_inline s64
arch_atomic64_fetch_or_acquire(s64 i, atomic64_t *v)
{
        s64 ret = arch_atomic64_fetch_or_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_fetch_or_acquire arch_atomic64_fetch_or_acquire
#endif

#ifndef arch_atomic64_fetch_or_release
static __always_inline s64
arch_atomic64_fetch_or_release(s64 i, atomic64_t *v)
{
        __atomic_release_fence();
        return arch_atomic64_fetch_or_relaxed(i, v);
}
#define arch_atomic64_fetch_or_release arch_atomic64_fetch_or_release
#endif

#ifndef arch_atomic64_fetch_or
static __always_inline s64
arch_atomic64_fetch_or(s64 i, atomic64_t *v)
{
        s64 ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_fetch_or_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_fetch_or arch_atomic64_fetch_or
#endif

#endif

#ifndef arch_atomic64_fetch_xor_relaxed
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor
#define arch_atomic64_fetch_xor_relaxed arch_atomic64_fetch_xor
#else

#ifndef arch_atomic64_fetch_xor_acquire
static __always_inline s64
arch_atomic64_fetch_xor_acquire(s64 i, atomic64_t *v)
{
        s64 ret = arch_atomic64_fetch_xor_relaxed(i, v);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_fetch_xor_acquire arch_atomic64_fetch_xor_acquire
#endif

#ifndef arch_atomic64_fetch_xor_release
static __always_inline s64
arch_atomic64_fetch_xor_release(s64 i, atomic64_t *v)
{
        __atomic_release_fence();
        return arch_atomic64_fetch_xor_relaxed(i, v);
}
#define arch_atomic64_fetch_xor_release arch_atomic64_fetch_xor_release
#endif

#ifndef arch_atomic64_fetch_xor
static __always_inline s64
arch_atomic64_fetch_xor(s64 i, atomic64_t *v)
{
        s64 ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_fetch_xor_relaxed(i, v);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_fetch_xor arch_atomic64_fetch_xor
#endif

#endif

#ifndef arch_atomic64_xchg_relaxed
#define arch_atomic64_xchg_acquire arch_atomic64_xchg
#define arch_atomic64_xchg_release arch_atomic64_xchg
#define arch_atomic64_xchg_relaxed arch_atomic64_xchg
#else

#ifndef arch_atomic64_xchg_acquire
static __always_inline s64
arch_atomic64_xchg_acquire(atomic64_t *v, s64 i)
{
        s64 ret = arch_atomic64_xchg_relaxed(v, i);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_xchg_acquire arch_atomic64_xchg_acquire
#endif

#ifndef arch_atomic64_xchg_release
static __always_inline s64
arch_atomic64_xchg_release(atomic64_t *v, s64 i)
{
        __atomic_release_fence();
        return arch_atomic64_xchg_relaxed(v, i);
}
#define arch_atomic64_xchg_release arch_atomic64_xchg_release
#endif

#ifndef arch_atomic64_xchg
static __always_inline s64
arch_atomic64_xchg(atomic64_t *v, s64 i)
{
        s64 ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_xchg_relaxed(v, i);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_xchg arch_atomic64_xchg
#endif

#endif

#ifndef arch_atomic64_cmpxchg_relaxed
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg
#define arch_atomic64_cmpxchg_relaxed arch_atomic64_cmpxchg
#else

#ifndef arch_atomic64_cmpxchg_acquire
static __always_inline s64
arch_atomic64_cmpxchg_acquire(atomic64_t *v, s64 old, s64 new)
{
        s64 ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_cmpxchg_acquire arch_atomic64_cmpxchg_acquire
#endif

#ifndef arch_atomic64_cmpxchg_release
static __always_inline s64
arch_atomic64_cmpxchg_release(atomic64_t *v, s64 old, s64 new)
{
        __atomic_release_fence();
        return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_cmpxchg_release arch_atomic64_cmpxchg_release
#endif

#ifndef arch_atomic64_cmpxchg
static __always_inline s64
arch_atomic64_cmpxchg(atomic64_t *v, s64 old, s64 new)
{
        s64 ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_cmpxchg_relaxed(v, old, new);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_cmpxchg arch_atomic64_cmpxchg
#endif

#endif

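/*
 * arch_atomic64_cmpxchg() returns the value it observed in @v whether or
 * not the exchange happened, so success is checked by comparing against
 * the expected value; a minimal sketch with illustrative names:
 *
 *      if (arch_atomic64_cmpxchg(&obj->state, old, new) == old)
 *              ...                     exchange succeeded
 *
 * The try_cmpxchg forms below fold that comparison in and also update
 * the expected value on failure, which makes retry loops cheaper to write.
 */
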
#ifndef arch_atomic64_try_cmpxchg_relaxed
#ifdef arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        s64 r, o = *old;
        r = arch_atomic64_cmpxchg(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
        s64 r, o = *old;
        r = arch_atomic64_cmpxchg_acquire(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
        s64 r, o = *old;
        r = arch_atomic64_cmpxchg_release(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg_relaxed
static __always_inline bool
arch_atomic64_try_cmpxchg_relaxed(atomic64_t *v, s64 *old, s64 new)
{
        s64 r, o = *old;
        r = arch_atomic64_cmpxchg_relaxed(v, o, new);
        if (unlikely(r != o))
                *old = r;
        return likely(r == o);
}
#define arch_atomic64_try_cmpxchg_relaxed arch_atomic64_try_cmpxchg_relaxed
#endif

#else

#ifndef arch_atomic64_try_cmpxchg_acquire
static __always_inline bool
arch_atomic64_try_cmpxchg_acquire(atomic64_t *v, s64 *old, s64 new)
{
        bool ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
        __atomic_acquire_fence();
        return ret;
}
#define arch_atomic64_try_cmpxchg_acquire arch_atomic64_try_cmpxchg_acquire
#endif

#ifndef arch_atomic64_try_cmpxchg_release
static __always_inline bool
arch_atomic64_try_cmpxchg_release(atomic64_t *v, s64 *old, s64 new)
{
        __atomic_release_fence();
        return arch_atomic64_try_cmpxchg_relaxed(v, old, new);
}
#define arch_atomic64_try_cmpxchg_release arch_atomic64_try_cmpxchg_release
#endif

#ifndef arch_atomic64_try_cmpxchg
static __always_inline bool
arch_atomic64_try_cmpxchg(atomic64_t *v, s64 *old, s64 new)
{
        bool ret;
        __atomic_pre_full_fence();
        ret = arch_atomic64_try_cmpxchg_relaxed(v, old, new);
        __atomic_post_full_fence();
        return ret;
}
#define arch_atomic64_try_cmpxchg arch_atomic64_try_cmpxchg
#endif

#endif

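/*
 * Typical try_cmpxchg usage is a read-once retry loop; because *old is
 * refreshed with the observed value on failure, the loop needs no
 * explicit reload. A minimal sketch (variable names illustrative):
 *
 *      s64 old = arch_atomic64_read(v);
 *
 *      do {
 *              new = func(old);
 *      } while (!arch_atomic64_try_cmpxchg(v, &old, new));
 *
 * The helpers toward the end of this file (fetch_add_unless,
 * dec_if_positive, etc.) are all instances of this idiom.
 */
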
#ifndef arch_atomic64_sub_and_test
/**
 * arch_atomic64_sub_and_test - subtract value from variable and test result
 * @i: integer value to subtract
 * @v: pointer of type atomic64_t
 *
 * Atomically subtracts @i from @v and returns
 * true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_sub_and_test(s64 i, atomic64_t *v)
{
        return arch_atomic64_sub_return(i, v) == 0;
}
#define arch_atomic64_sub_and_test arch_atomic64_sub_and_test
#endif

#ifndef arch_atomic64_dec_and_test
/**
 * arch_atomic64_dec_and_test - decrement and test
 * @v: pointer of type atomic64_t
 *
 * Atomically decrements @v by 1 and
 * returns true if the result is 0, or false for all other
 * cases.
 */
static __always_inline bool
arch_atomic64_dec_and_test(atomic64_t *v)
{
        return arch_atomic64_dec_return(v) == 0;
}
#define arch_atomic64_dec_and_test arch_atomic64_dec_and_test
#endif

#ifndef arch_atomic64_inc_and_test
/**
 * arch_atomic64_inc_and_test - increment and test
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1
 * and returns true if the result is zero, or false for all
 * other cases.
 */
static __always_inline bool
arch_atomic64_inc_and_test(atomic64_t *v)
{
        return arch_atomic64_inc_return(v) == 0;
}
#define arch_atomic64_inc_and_test arch_atomic64_inc_and_test
#endif

#ifndef arch_atomic64_add_negative
/**
 * arch_atomic64_add_negative - add and test if negative
 * @i: integer value to add
 * @v: pointer of type atomic64_t
 *
 * Atomically adds @i to @v and returns true
 * if the result is negative, or false when
 * result is greater than or equal to zero.
 */
static __always_inline bool
arch_atomic64_add_negative(s64 i, atomic64_t *v)
{
        return arch_atomic64_add_return(i, v) < 0;
}
#define arch_atomic64_add_negative arch_atomic64_add_negative
#endif

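/*
 * The *_and_test and add_negative fallbacks above are thin wrappers over
 * the fully ordered *_return ops, so they inherit full ordering. A common
 * use is the final-reference check of a refcount-style counter; a sketch
 * with hypothetical names:
 *
 *      if (arch_atomic64_dec_and_test(&obj->refs))
 *              free_object(obj);       free_object() is illustrative
 */
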
#ifndef arch_atomic64_fetch_add_unless
/**
 * arch_atomic64_fetch_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, so long as @v was not already @u.
 * Returns original value of @v.
 */
static __always_inline s64
arch_atomic64_fetch_add_unless(atomic64_t *v, s64 a, s64 u)
{
        s64 c = arch_atomic64_read(v);

        do {
                if (unlikely(c == u))
                        break;
        } while (!arch_atomic64_try_cmpxchg(v, &c, c + a));

        return c;
}
#define arch_atomic64_fetch_add_unless arch_atomic64_fetch_add_unless
#endif

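/*
 * On a failed try_cmpxchg, @c is refreshed with the value just observed,
 * so each iteration re-checks the @u condition against fresh data without
 * an explicit re-read. A sketch of taking a reference only while one is
 * already held (names illustrative):
 *
 *      if (arch_atomic64_fetch_add_unless(&obj->refs, 1, 0) == 0)
 *              return NULL;            object already dead
 */
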
#ifndef arch_atomic64_add_unless
/**
 * arch_atomic64_add_unless - add unless the number is already a given value
 * @v: pointer of type atomic64_t
 * @a: the amount to add to v...
 * @u: ...unless v is equal to u.
 *
 * Atomically adds @a to @v, if @v was not already @u.
 * Returns true if the addition was done.
 */
static __always_inline bool
arch_atomic64_add_unless(atomic64_t *v, s64 a, s64 u)
{
        return arch_atomic64_fetch_add_unless(v, a, u) != u;
}
#define arch_atomic64_add_unless arch_atomic64_add_unless
#endif

#ifndef arch_atomic64_inc_not_zero
/**
 * arch_atomic64_inc_not_zero - increment unless the number is zero
 * @v: pointer of type atomic64_t
 *
 * Atomically increments @v by 1, if @v is non-zero.
 * Returns true if the increment was done.
 */
static __always_inline bool
arch_atomic64_inc_not_zero(atomic64_t *v)
{
        return arch_atomic64_add_unless(v, 1, 0);
}
#define arch_atomic64_inc_not_zero arch_atomic64_inc_not_zero
#endif

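/*
 * inc_not_zero is the classic "take a reference only if the object is
 * still live" primitive. A hedged sketch of a lookup that races with
 * teardown (names illustrative; real callers normally use the non-arch_
 * wrappers or the refcount API):
 *
 *      obj = lookup(key);
 *      if (obj && !arch_atomic64_inc_not_zero(&obj->refs))
 *              obj = NULL;             lost the race with teardown
 */
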
#ifndef arch_atomic64_inc_unless_negative
static __always_inline bool
arch_atomic64_inc_unless_negative(atomic64_t *v)
{
        s64 c = arch_atomic64_read(v);

        do {
                if (unlikely(c < 0))
                        return false;
        } while (!arch_atomic64_try_cmpxchg(v, &c, c + 1));

        return true;
}
#define arch_atomic64_inc_unless_negative arch_atomic64_inc_unless_negative
#endif

#ifndef arch_atomic64_dec_unless_positive
static __always_inline bool
arch_atomic64_dec_unless_positive(atomic64_t *v)
{
        s64 c = arch_atomic64_read(v);

        do {
                if (unlikely(c > 0))
                        return false;
        } while (!arch_atomic64_try_cmpxchg(v, &c, c - 1));

        return true;
}
#define arch_atomic64_dec_unless_positive arch_atomic64_dec_unless_positive
#endif

#ifndef arch_atomic64_dec_if_positive
static __always_inline s64
arch_atomic64_dec_if_positive(atomic64_t *v)
{
        s64 dec, c = arch_atomic64_read(v);

        do {
                dec = c - 1;
                if (unlikely(dec < 0))
                        break;
        } while (!arch_atomic64_try_cmpxchg(v, &c, dec));

        return dec;
}
#define arch_atomic64_dec_if_positive arch_atomic64_dec_if_positive
#endif

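/*
 * dec_if_positive returns the decremented value on success and a negative
 * value (the decrement it refused to store) when @v was already <= 0, so
 * it can back a semaphore-style trylock; a sketch with illustrative names:
 *
 *      if (arch_atomic64_dec_if_positive(&sem->count) < 0)
 *              return -EAGAIN;         no slots available
 */
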
#endif /* _LINUX_ATOMIC_FALLBACK_H */
