/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * Copyright (C) 2012 ARM Ltd.
 */
#ifndef __ASM_SPINLOCK_H
#define __ASM_SPINLOCK_H

#include <asm/qspinlock.h>
#include <asm/qrwlock.h>

/* See include/linux/spinlock.h */
#define smp_mb__after_spinlock()	smp_mb()

/*
 * Changing this will break osq_lock() thanks to the call inside
 * smp_cond_load_relaxed().
 *
 * See:
 * https://lore.kernel.org/lkml/20200110100612.GC2827@hirez.programming.kicks-ass.net
 */
#define vcpu_is_preempted	vcpu_is_preempted
static inline bool vcpu_is_preempted(int cpu)
{
	return false;
}

#endif /* __ASM_SPINLOCK_H */
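The comment above vcpu_is_preempted() points at the MCS/OSQ slow path in kernel/locking/osq_lock.c, which folds the check into the condition expression of smp_cond_load_relaxed(). The fragment below is a simplified sketch of that wait loop, not a verbatim quote of the mainline source: the wrapper name osq_wait_for_unlock() and its parameter list are invented for illustration, while struct optimistic_spin_node, need_resched(), node_cpu(), VAL and smp_cond_load_relaxed() are the real kernel identifiers involved. Because arm64's vcpu_is_preempted() is a constant false, the compiler can drop that term entirely and the cond-load only has to watch node->locked.

/*
 * Illustrative sketch (hypothetical helper, not mainline code) of the
 * osq_lock() wait loop referenced by the comment above.  "node" is this
 * CPU's MCS node, "prev" its predecessor in the queue.
 */
static bool osq_wait_for_unlock(struct optimistic_spin_node *node,
				struct optimistic_spin_node *prev)
{
	/*
	 * smp_cond_load_relaxed() re-reads node->locked until the condition
	 * expression becomes true, then returns the loaded value.  With the
	 * arm64 vcpu_is_preempted() above always returning false, that term
	 * vanishes at compile time, so the wait effectively depends only on
	 * node->locked changing (or need_resched() being raised).
	 */
	if (smp_cond_load_relaxed(&node->locked,
				  VAL || need_resched() ||
				  vcpu_is_preempted(node_cpu(prev))))
		return true;	/* predecessor handed us the lock */

	return false;		/* bailed out; the caller unqueues the node */
}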