![]() |
|
|||
/* SPDX-License-Identifier: GPL-2.0 */
/*
 * Copyright (C) 2019 Google LLC.
 */
#ifndef __ASM_RWONCE_H
#define __ASM_RWONCE_H

#ifdef CONFIG_SMP

#include <asm/barrier.h>

/*
 * Alpha is apparently daft enough to reorder address-dependent loads
 * on some CPU implementations. Knock some common sense into it with
 * a memory barrier in READ_ONCE().
 *
 * For the curious, more information about this unusual reordering is
 * available in chapter 15 of the "perfbook":
 *
 *  https://kernel.org/pub/linux/kernel/people/paulmck/perfbook/perfbook.html
 *
 */
/*
 * Alpha-specific override of the generic __READ_ONCE():
 *
 *  1. Perform the load through a volatile-qualified pointer so the
 *     compiler emits exactly one access and cannot tear, cache, or
 *     re-materialise it.
 *  2. Issue a full memory barrier (mb()) *after* the load, so that
 *     later address-dependent loads cannot be satisfied before this
 *     one on the reordering-happy Alpha implementations described
 *     above.
 *  3. Cast the result back to typeof(x) for the caller.
 *
 * __x is declared with __unqual_scalar_typeof(x) — presumably so the
 * local copy (and hence the macro's result) does not inherit x's
 * volatile/const qualifiers; the helper itself is defined elsewhere
 * (compiler_types.h) — confirm there.
 *
 * Note the whole thing is a GNU statement expression, so the macro
 * yields the value of its last expression, (typeof(x))__x.
 *
 * Only defined under CONFIG_SMP: on UP builds the cheaper generic
 * definition pulled in below is used instead, avoiding the barrier.
 */
#define __READ_ONCE(x)							\
({									\
	__unqual_scalar_typeof(x) __x =					\
		(*(volatile typeof(__x) *)(&(x)));			\
	mb();								\
	(typeof(x))__x;							\
})

#endif /* CONFIG_SMP */

#include <asm-generic/rwonce.h>

#endif /* __ASM_RWONCE_H */
[ Source navigation ] | [ Diff markup ] | [ Identifier search ] | [ general search ] |
This page was automatically generated by the 2.1.0 LXR engine. The LXR team |
![]() ![]() |