/* SPDX-License-Identifier: GPL-2.0 */
#ifndef __ASM_LSE_H
#define __ASM_LSE_H

#include <asm/atomic_ll_sc.h>

#ifdef CONFIG_ARM64_LSE_ATOMICS

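/*
 * Prepended to LSE inline asm so the assembler accepts the ARMv8.1 LSE
 * instructions even when the kernel is built for a baseline architecture
 * that lacks them; the instructions only run once the CPU feature has
 * been confirmed at runtime.
 */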
#define __LSE_PREAMBLE	".arch_extension lse\n"

#include <linux/compiler_types.h>
#include <linux/export.h>
#include <linux/jump_label.h>
#include <linux/stringify.h>
#include <asm/alternative.h>
#include <asm/atomic_lse.h>
#include <asm/cpucaps.h>

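/*
 * cpu_hwcap_keys[] holds one static key per arm64 CPU capability; the
 * key for ARM64_HAS_LSE_ATOMICS is flipped during CPU feature detection,
 * so system_uses_lse_atomics() compiles down to a patched jump-label
 * branch rather than a memory load and test.
 */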
extern struct static_key_false cpu_hwcap_keys[ARM64_NCAPS];

static __always_inline bool system_uses_lse_atomics(void)
{
	return static_branch_likely(&cpu_hwcap_keys[ARM64_HAS_LSE_ATOMICS]);
}

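/*
 * Dispatch helper for the C atomics: expand to the LSE implementation
 * (__lse_<op>, from <asm/atomic_lse.h>) when the CPU supports it, or to
 * the load/store-exclusive fallback (__ll_sc_<op>, from
 * <asm/atomic_ll_sc.h>) otherwise.
 */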
#define __lse_ll_sc_body(op, ...)					\
({									\
	system_uses_lse_atomics() ?					\
		__lse_##op(__VA_ARGS__) :				\
		__ll_sc_##op(__VA_ARGS__);				\
})
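
/*
 * Illustrative sketch of a caller (not code from this header): the arm64
 * atomic wrappers dispatch through __lse_ll_sc_body(), roughly
 *
 *	static __always_inline void arch_atomic_add(int i, atomic_t *v)
 *	{
 *		__lse_ll_sc_body(atomic_add, i, v);
 *	}
 *
 * which resolves to __lse_atomic_add() on LSE-capable CPUs and to
 * __ll_sc_atomic_add() elsewhere.
 */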

/* In-line patching at runtime */
#define ARM64_LSE_ATOMIC_INSN(llsc, lse)				\
	ALTERNATIVE(llsc, __LSE_PREAMBLE lse, ARM64_HAS_LSE_ATOMICS)
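
/*
 * ARM64_LSE_ATOMIC_INSN() is meant for inline assembly callers: the
 * LL/SC sequence is emitted in place and the LSE sequence (with
 * __LSE_PREAMBLE prepended) is recorded for the alternatives framework,
 * which patches it in at boot once ARM64_HAS_LSE_ATOMICS has been
 * detected.
 */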

#else	/* CONFIG_ARM64_LSE_ATOMICS */

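/*
 * LSE support compiled out: report no LSE atomics and resolve everything
 * to the LL/SC implementations.
 */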
static inline bool system_uses_lse_atomics(void) { return false; }

#define __lse_ll_sc_body(op, ...)	__ll_sc_##op(__VA_ARGS__)

#define ARM64_LSE_ATOMIC_INSN(llsc, lse)	llsc

#endif	/* CONFIG_ARM64_LSE_ATOMICS */
#endif	/* __ASM_LSE_H */