0001
0002
0003
0004
0005
0006
0007
0008
0009
0010
0011
0012
0013 #ifndef __ARM64_KVM_NVHE_SPINLOCK_H__
0014 #define __ARM64_KVM_NVHE_SPINLOCK_H__
0015
0016 #include <asm/alternative.h>
0017 #include <asm/lse.h>
0018 #include <asm/rwonce.h>
0019
/*
 * Ticket-based spinlock usable at EL2 (nVHE hyp), where the generic
 * kernel spinlocks are unavailable.
 *
 * 'owner' is the ticket currently being served, 'next' is the next
 * ticket to hand out; the lock is free when owner == next.  The field
 * order flips with endianness so that 'next' always lands in the upper
 * half-word of __val — the half the lock asm bumps with #(1 << 16).
 */
typedef union hyp_spinlock {
	u32	__val;
	struct {
#ifdef __AARCH64EB__
		/* big-endian: first field maps to the high bytes of __val */
		u16 next, owner;
#else
		/* little-endian: last field maps to the high bytes of __val */
		u16 owner, next;
#endif
	};
} hyp_spinlock_t;
0030
/*
 * Put *l into the unlocked state.  Unlocked means owner == next, so
 * zeroing the whole word is sufficient.
 */
#define hyp_spin_lock_init(l)						\
do {									\
	(l)->__val = 0;							\
} while (0)
0035
/*
 * Acquire a hyp ticket lock: take the next ticket, then spin until the
 * owner half-word catches up to it.  Alternative-patched between an
 * LL/SC sequence and an LSE atomic depending on CPU support.
 */
static inline void hyp_spin_lock(hyp_spinlock_t *lock)
{
	u32 tmp;
	hyp_spinlock_t lockval, newval;

	asm volatile(
	/* Atomically increment the next ticket (the upper half-word). */
	ARM64_LSE_ATOMIC_INSN(
	/* LL/SC */
" prfm pstl1strm, %3\n"
"1: ldaxr %w0, %3\n"
" add %w1, %w0, #(1 << 16)\n"
" stxr %w2, %w1, %3\n"
" cbnz %w2, 1b\n",
	/* LSE atomics: single acquire fetch-add; nops pad to LL/SC size */
" mov %w2, #(1 << 16)\n"
" ldadda %w2, %w0, %3\n"
	__nops(3))

	/* Did we get the lock? owner == our ticket iff halves match. */
" eor %w1, %w0, %w0, ror #16\n"
" cbz %w1, 3f\n"
	/*
	 * No: spin on the owner half-word, waking on events.  The sevl
	 * primes the first wfe so we cannot sleep through an unlock that
	 * happened between the check above and entering the loop.
	 */
" sevl\n"
"2: wfe\n"
" ldaxrh %w2, %4\n"
" eor %w1, %w2, %w0, lsr #16\n"
" cbnz %w1, 2b\n"
	/* We got the lock. Critical section starts here. */
"3:"
	: "=&r" (lockval), "=&r" (newval), "=&r" (tmp), "+Q" (*lock)
	: "Q" (lock->owner)
	: "memory");
}
0073
/*
 * Release a hyp ticket lock by bumping the owner half-word with release
 * semantics (stlrh / staddlh), handing the lock to the next ticket
 * holder and generating the event the waiters' wfe loop sleeps on.
 */
static inline void hyp_spin_unlock(hyp_spinlock_t *lock)
{
	u64 tmp;

	asm volatile(
	ARM64_LSE_ATOMIC_INSN(
	/* LL/SC: plain load is fine — only the holder writes owner */
" ldrh %w1, %0\n"
" add %w1, %w1, #1\n"
" stlrh %w1, %0",
	/* LSE atomics: release add on the half-word; nop pads the size */
" mov %w1, #1\n"
" staddlh %w1, %0\n"
	__nops(1))
	: "=Q" (lock->owner), "=&r" (tmp)
	:
	: "memory");
}
0092
0093 static inline bool hyp_spin_is_locked(hyp_spinlock_t *lock)
0094 {
0095 hyp_spinlock_t lockval = READ_ONCE(*lock);
0096
0097 return lockval.owner != lockval.next;
0098 }
0099
#ifdef CONFIG_NVHE_EL2_DEBUG
/*
 * Debug assertion that @lock is held by somebody (ticket locks cannot
 * record *which* CPU holds them, so this is necessarily approximate).
 */
static inline void hyp_assert_lock_held(hyp_spinlock_t *lock)
{
	/*
	 * Only enforce the assertion once protected mode has fully
	 * initialized, as gated by the kvm_protected_mode_initialized
	 * static key.  NOTE(review): presumably early init touches
	 * lock-protected structures before the locking discipline is in
	 * force — confirm against the __pkvm_init path.
	 */
	if (static_branch_likely(&kvm_protected_mode_initialized))
		BUG_ON(!hyp_spin_is_locked(lock));
}
#else
/* Non-debug builds: assertion compiles away entirely. */
static inline void hyp_assert_lock_held(hyp_spinlock_t *lock) { }
#endif
0116
0117 #endif