/*
 * spinlocks
 *
 * Copyright (C) 2015, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.
 */
#include <asm/spinlock.h>
#include <asm/barrier.h>
#include <asm/mmu.h>
11 
12 void spin_lock(struct spinlock *lock)
13 {
14 	u32 val, fail;
15 
16 	if (!mmu_enabled()) {
17 		lock->v = 1;
18 		smp_mb();
19 		return;
20 	}
21 
22 	do {
23 		asm volatile(
24 		"1:	ldaxr	%w0, [%2]\n"
25 		"	cbnz	%w0, 1b\n"
26 		"	mov	%0, #1\n"
27 		"	stxr	%w1, %w0, [%2]\n"
28 		: "=&r" (val), "=&r" (fail)
29 		: "r" (&lock->v)
30 		: "cc" );
31 	} while (fail);
32 	smp_mb();
33 }
34 
35 void spin_unlock(struct spinlock *lock)
36 {
37 	smp_mb();
38 	if (mmu_enabled())
39 		asm volatile("stlrh wzr, [%0]" :: "r" (&lock->v));
40 	else
41 		lock->v = 0;
42 }
43