xref: /kvm-unit-tests/lib/arm64/spinlock.c (revision d864b580863bc86c89f92936b115eb3303756f8a)
1 /*
2  * spinlocks
3  *
4  * Copyright (C) 2015, Red Hat Inc, Andrew Jones <drjones@redhat.com>
5  *
6  * This work is licensed under the terms of the GNU LGPL, version 2.
7  */
8 #include <libcflat.h>
9 #include <asm/spinlock.h>
10 #include <asm/barrier.h>
11 #include <asm/mmu.h>
12 
/*
 * Acquire @lock, spinning until it is free.
 *
 * MMU-off path: exclusive-monitor instructions are not usable before
 * the MMU is enabled, so fall back to a plain store of 1 plus a full
 * barrier.  NOTE(review): this fallback is not atomic — it presumably
 * relies on there being no real lock contention while the MMU is off
 * (early boot); confirm against callers.
 *
 * MMU-on path: classic arm64 acquire loop —
 *   ldaxr  load-acquire-exclusive of lock->v into %w0 ("val")
 *   cbnz   spin back to 1: while the value is non-zero (lock held)
 *   stxr   attempt to store 1; %w1 ("fail") is set non-zero if the
 *          exclusive monitor was lost, in which case the C do/while
 *          retries the whole sequence.
 * The trailing smp_mb() orders the critical section's accesses after
 * the lock acquisition.
 */
void spin_lock(struct spinlock *lock)
{
	u32 val, fail;

	if (!mmu_enabled()) {
		lock->v = 1;
		smp_mb();
		return;
	}

	do {
		asm volatile(
		"1:	ldaxr	%w0, [%2]\n"		/* load-acquire, set exclusive monitor */
		"	cbnz	%w0, 1b\n"		/* lock held -> keep spinning */
		"	mov	%w0, #1\n"
		"	stxr	%w1, %w0, [%2]\n"	/* try to claim; fail != 0 on lost monitor */
		: "=&r" (val), "=&r" (fail)
		: "r" (&lock->v)
		: "cc" );
	} while (fail);
	smp_mb();
}
35 
/*
 * Release @lock.
 *
 * smp_mb() first, so every access made inside the critical section is
 * ordered before the lock is observed as free.  With the MMU on, write
 * zero with stlrh (store-release, halfword) for release semantics;
 * with the MMU off, mirror spin_lock()'s plain-store fallback.
 */
void spin_unlock(struct spinlock *lock)
{
	smp_mb();
	if (mmu_enabled())
		asm volatile("stlrh wzr, [%0]" :: "r" (&lock->v));
	else
		lock->v = 0;
}
44