/* SPDX-License-Identifier: GPL-2.0-only */
/*
 * A stand-alone ticket spinlock implementation for use by the non-VHE
 * KVM hypervisor code running at EL2.
 */

#ifndef __ARM64_KVM_NVHE_SPINLOCK_H__
#define __ARM64_KVM_NVHE_SPINLOCK_H__

#include <asm/alternative.h>
#include <asm/lse.h>
#include <asm/rwonce.h>
typedef union hyp_spinlock {
	u32	__val;
	struct {
#ifdef __AARCH64EB__
		u16 next, owner;
#else
		u16 owner, next;
#endif
	};
} hyp_spinlock_t;

#define __HYP_SPIN_LOCK_INITIALIZER \
	{ .__val = 0 }

#define __HYP_SPIN_LOCK_UNLOCKED \
	((hyp_spinlock_t) __HYP_SPIN_LOCK_INITIALIZER)

#define DEFINE_HYP_SPINLOCK(x)	hyp_spinlock_t x = __HYP_SPIN_LOCK_UNLOCKED

#define hyp_spin_lock_init(l)						\
do {									\
	*(l) = __HYP_SPIN_LOCK_UNLOCKED;				\
} while (0)
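/*
 * Illustrative sketch (editor's addition, not part of the original
 * header): how the definition and init helpers are typically used. The
 * names example_lock, struct example and example_init are hypothetical.
 *
 *	DEFINE_HYP_SPINLOCK(example_lock);	// statically allocated lock
 *
 *	struct example {
 *		hyp_spinlock_t lock;		// lock embedded in an object
 *	};
 *
 *	static void example_init(struct example *e)
 *	{
 *		hyp_spin_lock_init(&e->lock);	// reset to unlocked at runtime
 *	}
 */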
static inline void hyp_spin_lock(hyp_spinlock_t *lock)
{
	u32 tmp;
	hyp_spinlock_t lockval, newval;

	asm volatile(
	/* Atomically increment the next ticket. */
	ARM64_LSE_ATOMIC_INSN(
	/* LL/SC */
"	prfm	pstl1strm, %3\n"
"1:	ldaxr	%w0, %3\n"
"	add	%w1, %w0, #(1 << 16)\n"
"	stxr	%w2, %w1, %3\n"
"	cbnz	%w2, 1b\n",
	/* LSE atomics */
"	mov	%w2, #(1 << 16)\n"
"	ldadda	%w2, %w0, %3\n"
	__nops(3))

	/* Did we get the lock? */
"	eor	%w1, %w0, %w0, ror #16\n"
"	cbz	%w1, 3f\n"
	/*
	 * No: spin on the owner. Send a local event to avoid missing an
	 * unlock before the exclusive load.
	 */
"	sevl\n"
"2:	wfe\n"
"	ldaxrh	%w2, %4\n"
"	eor	%w1, %w2, %w0, lsr #16\n"
"	cbnz	%w1, 2b\n"
	/* We got the lock. Critical section starts here. */
"3:"
	: "=&r" (lockval), "=&r" (newval), "=&r" (tmp), "+Q" (*lock)
	: "Q" (lock->owner)
	: "memory");
}
static inline void hyp_spin_unlock(hyp_spinlock_t *lock)
{
	u64 tmp;

	asm volatile(
	ARM64_LSE_ATOMIC_INSN(
	/* LL/SC */
	"	ldrh	%w1, %0\n"
	"	add	%w1, %w1, #1\n"
	"	stlrh	%w1, %0",
	/* LSE atomics */
	"	mov	%w1, #1\n"
	"	staddlh	%w1, %0\n"
	__nops(1))
	: "=Q" (lock->owner), "=&r" (tmp)
	:
	: "memory");
}
static inline bool hyp_spin_is_locked(hyp_spinlock_t *lock)
{
	hyp_spinlock_t lockval = READ_ONCE(*lock);

	return lockval.owner != lockval.next;
}

#ifdef CONFIG_NVHE_EL2_DEBUG
static inline void hyp_assert_lock_held(hyp_spinlock_t *lock)
{
	/*
	 * The __pkvm_init() path accesses protected data-structures without
	 * holding locks as the other CPUs are guaranteed to not enter EL2
	 * concurrently at this point in time. The point by which EL2 is
	 * initialized on all CPUs is reflected in the pkvm static key, so
	 * wait until it is set before checking the lock state.
	 */
	if (static_branch_likely(&kvm_protected_mode_initialized))
		BUG_ON(!hyp_spin_is_locked(lock));
}
#else
static inline void hyp_assert_lock_held(hyp_spinlock_t *lock) { }
#endif
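/*
 * Illustrative sketch (editor's addition, not part of the original
 * header): a minimal critical section tying the API together. The names
 * example_lock, example_counter and example_inc are hypothetical.
 *
 *	DEFINE_HYP_SPINLOCK(example_lock);
 *	static u64 example_counter;
 *
 *	static void example_inc(void)
 *	{
 *		hyp_spin_lock(&example_lock);
 *		hyp_assert_lock_held(&example_lock);	// checked only with
 *							// CONFIG_NVHE_EL2_DEBUG
 *		example_counter++;	// serialized across EL2 CPUs
 *		hyp_spin_unlock(&example_lock);
 *	}
 */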
#endif /* __ARM64_KVM_NVHE_SPINLOCK_H__ */