// SPDX-License-Identifier: GPL-2.0

// Generated by scripts/atomic/gen-atomic-long.sh
// DO NOT MODIFY THIS FILE DIRECTLY
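
/*
 * atomic_long_t operations map onto the atomic64_t API when 'long' is
 * 64-bit (CONFIG_64BIT) and onto the atomic_t API otherwise, so callers
 * get a single word-sized atomic interface on either kind of kernel.
 */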

#ifndef _LINUX_ATOMIC_LONG_H
#define _LINUX_ATOMIC_LONG_H

#include <linux/compiler.h>
#include <asm/types.h>

#ifdef CONFIG_64BIT
typedef atomic64_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)     ATOMIC64_INIT(i)
#define atomic_long_cond_read_acquire   atomic64_cond_read_acquire
#define atomic_long_cond_read_relaxed   atomic64_cond_read_relaxed
#else
typedef atomic_t atomic_long_t;
#define ATOMIC_LONG_INIT(i)     ATOMIC_INIT(i)
#define atomic_long_cond_read_acquire   atomic_cond_read_acquire
#define atomic_long_cond_read_relaxed   atomic_cond_read_relaxed
#endif
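
/*
 * Illustrative sketch (not part of the generated file): a counter
 * declared with these types is 64 bits wide on 64-bit kernels and
 * 32 bits wide on 32-bit kernels; 'nr_events' is a hypothetical name.
 *
 *     static atomic_long_t nr_events = ATOMIC_LONG_INIT(0);
 *
 *     atomic_long_inc(&nr_events);
 *     pr_info("events: %ld\n", atomic_long_read(&nr_events));
 */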

#ifdef CONFIG_64BIT

static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
    return arch_atomic64_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
    return arch_atomic64_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
    arch_atomic64_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
    arch_atomic64_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
    arch_atomic64_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
    return arch_atomic64_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
    return arch_atomic64_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
    arch_atomic64_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
    return arch_atomic64_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
    return arch_atomic64_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
    arch_atomic64_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
    return arch_atomic64_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
    return arch_atomic64_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
    return arch_atomic64_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
    return arch_atomic64_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
    return arch_atomic64_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
    return arch_atomic64_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
    return arch_atomic64_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
    return arch_atomic64_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
    arch_atomic64_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
    return arch_atomic64_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
    return arch_atomic64_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
    return arch_atomic64_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
    return arch_atomic64_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
    return arch_atomic64_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
    return arch_atomic64_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
    return arch_atomic64_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
    return arch_atomic64_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
    arch_atomic64_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
    arch_atomic64_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
    arch_atomic64_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
    arch_atomic64_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic64_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
    return arch_atomic64_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
    return arch_atomic64_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
    return arch_atomic64_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
    return arch_atomic64_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
    return arch_atomic64_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
    return arch_atomic64_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
    return arch_atomic64_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
    return arch_atomic64_cmpxchg_relaxed(v, old, new);
}
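
/*
 * The (s64 *) casts below are valid because CONFIG_64BIT guarantees
 * sizeof(long) == sizeof(s64); on failure, try_cmpxchg updates *old
 * with the value actually found in *v.
 */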
static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
    return arch_atomic64_try_cmpxchg(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
    return arch_atomic64_try_cmpxchg_acquire(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
    return arch_atomic64_try_cmpxchg_release(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
    return arch_atomic64_try_cmpxchg_relaxed(v, (s64 *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
    return arch_atomic64_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
    return arch_atomic64_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
    return arch_atomic64_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
    return arch_atomic64_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
    return arch_atomic64_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
    return arch_atomic64_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
    return arch_atomic64_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
    return arch_atomic64_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
    return arch_atomic64_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
    return arch_atomic64_dec_if_positive(v);
}

#else /* CONFIG_64BIT */

static __always_inline long
arch_atomic_long_read(const atomic_long_t *v)
{
    return arch_atomic_read(v);
}

static __always_inline long
arch_atomic_long_read_acquire(const atomic_long_t *v)
{
    return arch_atomic_read_acquire(v);
}

static __always_inline void
arch_atomic_long_set(atomic_long_t *v, long i)
{
    arch_atomic_set(v, i);
}

static __always_inline void
arch_atomic_long_set_release(atomic_long_t *v, long i)
{
    arch_atomic_set_release(v, i);
}

static __always_inline void
arch_atomic_long_add(long i, atomic_long_t *v)
{
    arch_atomic_add(i, v);
}

static __always_inline long
arch_atomic_long_add_return(long i, atomic_long_t *v)
{
    return arch_atomic_add_return(i, v);
}

static __always_inline long
arch_atomic_long_add_return_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_add_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_add_return_release(long i, atomic_long_t *v)
{
    return arch_atomic_add_return_release(i, v);
}

static __always_inline long
arch_atomic_long_add_return_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_add_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_add(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_add_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_add_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_add_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_sub(long i, atomic_long_t *v)
{
    arch_atomic_sub(i, v);
}

static __always_inline long
arch_atomic_long_sub_return(long i, atomic_long_t *v)
{
    return arch_atomic_sub_return(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_sub_return_acquire(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_release(long i, atomic_long_t *v)
{
    return arch_atomic_sub_return_release(i, v);
}

static __always_inline long
arch_atomic_long_sub_return_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_sub_return_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_sub(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_sub_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_sub_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_sub_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_sub_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_inc(atomic_long_t *v)
{
    arch_atomic_inc(v);
}

static __always_inline long
arch_atomic_long_inc_return(atomic_long_t *v)
{
    return arch_atomic_inc_return(v);
}

static __always_inline long
arch_atomic_long_inc_return_acquire(atomic_long_t *v)
{
    return arch_atomic_inc_return_acquire(v);
}

static __always_inline long
arch_atomic_long_inc_return_release(atomic_long_t *v)
{
    return arch_atomic_inc_return_release(v);
}

static __always_inline long
arch_atomic_long_inc_return_relaxed(atomic_long_t *v)
{
    return arch_atomic_inc_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_inc(atomic_long_t *v)
{
    return arch_atomic_fetch_inc(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_acquire(atomic_long_t *v)
{
    return arch_atomic_fetch_inc_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_release(atomic_long_t *v)
{
    return arch_atomic_fetch_inc_release(v);
}

static __always_inline long
arch_atomic_long_fetch_inc_relaxed(atomic_long_t *v)
{
    return arch_atomic_fetch_inc_relaxed(v);
}

static __always_inline void
arch_atomic_long_dec(atomic_long_t *v)
{
    arch_atomic_dec(v);
}

static __always_inline long
arch_atomic_long_dec_return(atomic_long_t *v)
{
    return arch_atomic_dec_return(v);
}

static __always_inline long
arch_atomic_long_dec_return_acquire(atomic_long_t *v)
{
    return arch_atomic_dec_return_acquire(v);
}

static __always_inline long
arch_atomic_long_dec_return_release(atomic_long_t *v)
{
    return arch_atomic_dec_return_release(v);
}

static __always_inline long
arch_atomic_long_dec_return_relaxed(atomic_long_t *v)
{
    return arch_atomic_dec_return_relaxed(v);
}

static __always_inline long
arch_atomic_long_fetch_dec(atomic_long_t *v)
{
    return arch_atomic_fetch_dec(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_acquire(atomic_long_t *v)
{
    return arch_atomic_fetch_dec_acquire(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_release(atomic_long_t *v)
{
    return arch_atomic_fetch_dec_release(v);
}

static __always_inline long
arch_atomic_long_fetch_dec_relaxed(atomic_long_t *v)
{
    return arch_atomic_fetch_dec_relaxed(v);
}

static __always_inline void
arch_atomic_long_and(long i, atomic_long_t *v)
{
    arch_atomic_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_and(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_and_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_and_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_and_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_and_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_andnot(long i, atomic_long_t *v)
{
    arch_atomic_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_andnot(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_andnot_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_andnot_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_andnot_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_andnot_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_or(long i, atomic_long_t *v)
{
    arch_atomic_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_or(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_or_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_or_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_or_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_or_relaxed(i, v);
}

static __always_inline void
arch_atomic_long_xor(long i, atomic_long_t *v)
{
    arch_atomic_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_xor(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_acquire(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_xor_acquire(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_release(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_xor_release(i, v);
}

static __always_inline long
arch_atomic_long_fetch_xor_relaxed(long i, atomic_long_t *v)
{
    return arch_atomic_fetch_xor_relaxed(i, v);
}

static __always_inline long
arch_atomic_long_xchg(atomic_long_t *v, long i)
{
    return arch_atomic_xchg(v, i);
}

static __always_inline long
arch_atomic_long_xchg_acquire(atomic_long_t *v, long i)
{
    return arch_atomic_xchg_acquire(v, i);
}

static __always_inline long
arch_atomic_long_xchg_release(atomic_long_t *v, long i)
{
    return arch_atomic_xchg_release(v, i);
}

static __always_inline long
arch_atomic_long_xchg_relaxed(atomic_long_t *v, long i)
{
    return arch_atomic_xchg_relaxed(v, i);
}

static __always_inline long
arch_atomic_long_cmpxchg(atomic_long_t *v, long old, long new)
{
    return arch_atomic_cmpxchg(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_acquire(atomic_long_t *v, long old, long new)
{
    return arch_atomic_cmpxchg_acquire(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_release(atomic_long_t *v, long old, long new)
{
    return arch_atomic_cmpxchg_release(v, old, new);
}

static __always_inline long
arch_atomic_long_cmpxchg_relaxed(atomic_long_t *v, long old, long new)
{
    return arch_atomic_cmpxchg_relaxed(v, old, new);
}
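
/*
 * Here the (int *) casts rely on the 32-bit ABI, where
 * sizeof(long) == sizeof(int), mirroring the (s64 *) casts in the
 * CONFIG_64BIT branch above.
 */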
static __always_inline bool
arch_atomic_long_try_cmpxchg(atomic_long_t *v, long *old, long new)
{
    return arch_atomic_try_cmpxchg(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_acquire(atomic_long_t *v, long *old, long new)
{
    return arch_atomic_try_cmpxchg_acquire(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_release(atomic_long_t *v, long *old, long new)
{
    return arch_atomic_try_cmpxchg_release(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_try_cmpxchg_relaxed(atomic_long_t *v, long *old, long new)
{
    return arch_atomic_try_cmpxchg_relaxed(v, (int *)old, new);
}

static __always_inline bool
arch_atomic_long_sub_and_test(long i, atomic_long_t *v)
{
    return arch_atomic_sub_and_test(i, v);
}

static __always_inline bool
arch_atomic_long_dec_and_test(atomic_long_t *v)
{
    return arch_atomic_dec_and_test(v);
}

static __always_inline bool
arch_atomic_long_inc_and_test(atomic_long_t *v)
{
    return arch_atomic_inc_and_test(v);
}

static __always_inline bool
arch_atomic_long_add_negative(long i, atomic_long_t *v)
{
    return arch_atomic_add_negative(i, v);
}

static __always_inline long
arch_atomic_long_fetch_add_unless(atomic_long_t *v, long a, long u)
{
    return arch_atomic_fetch_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_add_unless(atomic_long_t *v, long a, long u)
{
    return arch_atomic_add_unless(v, a, u);
}

static __always_inline bool
arch_atomic_long_inc_not_zero(atomic_long_t *v)
{
    return arch_atomic_inc_not_zero(v);
}

static __always_inline bool
arch_atomic_long_inc_unless_negative(atomic_long_t *v)
{
    return arch_atomic_inc_unless_negative(v);
}

static __always_inline bool
arch_atomic_long_dec_unless_positive(atomic_long_t *v)
{
    return arch_atomic_dec_unless_positive(v);
}

static __always_inline long
arch_atomic_long_dec_if_positive(atomic_long_t *v)
{
    return arch_atomic_dec_if_positive(v);
}

#endif /* CONFIG_64BIT */
#endif /* _LINUX_ATOMIC_LONG_H */
// e8f0e08ff072b74d180eabe2ad001282b38c2c88