xref: /kvm-unit-tests/arm/cstart64.S (revision db328a24b7030e9dd7e3012f25096a9188722144)
/*
 * Boot entry point and assembler functions for aarch64 tests.
 *
 * Copyright (C) 2014, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU LGPL, version 2.
 */
8#define __ASSEMBLY__
9#include <asm/asm-offsets.h>
10#include <asm/ptrace.h>
11#include <asm/processor.h>
12#include <asm/page.h>
13#include <asm/pgtable-hwdef.h>
14
15.section .init
16
17.globl start
18start:
19	/*
20	 * bootloader params are in x0-x3
21	 * The physical address of the dtb is in x0, x1-x3 are reserved
22	 * See the kernel doc Documentation/arm64/booting.txt
23	 */
24	adr     x4, stacktop
25	mov	sp, x4
26	stp	x0, x1, [sp, #-16]!
27
28	/* Enable FP/ASIMD */
29	mov	x0, #(3 << 20)
30	msr	cpacr_el1, x0
31
32	/* set up exception handling */
33	bl	exceptions_init
34
35	/* complete setup */
36	ldp	x0, x1, [sp], #16
37	bl	setup
38
39	/* run the test */
40	adr	x0, __argc
41	ldr	x0, [x0]
42	adr	x1, __argv
43	bl	main
44	bl	exit
45	b	halt
46
47exceptions_init:
48	adr	x0, vector_table
49	msr	vbar_el1, x0
50	isb
51	ret
52
53.text
54
55.globl halt
56halt:
571:	wfi
58	b	1b
59
/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */

/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 */
82#define MAIR(attr, mt) ((attr) << ((mt) * 8))
83
84.globl asm_mmu_enable
85asm_mmu_enable:
86	ic	iallu			// I+BTB cache invalidate
87	tlbi	vmalle1is		// invalidate I + D TLBs
88	dsb	ish
89
90	/* TCR */
91	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
92		     TCR_TG0_64K | TCR_TG1_64K |	\
93		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
94		     TCR_SHARED
95	mov	x2, #3			// 011 is 42 bits
96	bfi	x1, x2, #32, #3
97	msr	tcr_el1, x1
98
99	/* MAIR */
100	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
101		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
102		     MAIR(0x0c, MT_DEVICE_GRE) |	\
103		     MAIR(0x44, MT_NORMAL_NC) |		\
104		     MAIR(0xff, MT_NORMAL)
105	msr	mair_el1, x1
106
107	/* TTBR0 */
108	msr	ttbr0_el1, x0
109	isb
110
111	/* SCTLR */
112	mrs	x1, sctlr_el1
113	orr	x1, x1, SCTLR_EL1_C
114	orr	x1, x1, SCTLR_EL1_I
115	orr	x1, x1, SCTLR_EL1_M
116	msr	sctlr_el1, x1
117	isb
118
119	ret
120
121/*
122 * Vectors
123 * Adapted from arch/arm64/kernel/entry.S
124 */
125.macro vector_stub, name, vec
126\name:
127	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!
128	stp	 x2,  x3, [sp,  #16]
129	stp	 x4,  x5, [sp,  #32]
130	stp	 x6,  x7, [sp,  #48]
131	stp	 x8,  x9, [sp,  #64]
132	stp	x10, x11, [sp,  #80]
133	stp	x12, x13, [sp,  #96]
134	stp	x14, x15, [sp, #112]
135	stp	x16, x17, [sp, #128]
136	stp	x18, x19, [sp, #144]
137	stp	x20, x21, [sp, #160]
138	stp	x22, x23, [sp, #176]
139	stp	x24, x25, [sp, #192]
140	stp	x26, x27, [sp, #208]
141	stp	x28, x29, [sp, #224]
142
143	str	x30, [sp, #S_LR]
144
145	.if \vec >= 8
146	mrs	x1, sp_el0
147	.else
148	add	x1, sp, #S_FRAME_SIZE
149	.endif
150	str	x1, [sp, #S_SP]
151
152	mrs	x1, elr_el1
153	mrs	x2, spsr_el1
154	stp	x1, x2, [sp, #S_PC]
155
156	and	x2, x2, #PSR_MODE_MASK
157	cmp	x2, #PSR_MODE_EL0t
158	b.ne	1f
159	adr	x2, user_mode
160	str	xzr, [x2]		/* we're in kernel mode now */
161
1621:	mov	x0, \vec
163	mov	x1, sp
164	mrs	x2, esr_el1
165	bl	do_handle_exception
166
167	ldp	x1, x2, [sp, #S_PC]
168	msr	spsr_el1, x2
169	msr	elr_el1, x1
170
171	and	x2, x2, #PSR_MODE_MASK
172	cmp	x2, #PSR_MODE_EL0t
173	b.ne	1f
174	adr	x2, user_mode
175	mov	x1, #1
176	str	x1, [x2]		/* we're going back to user mode */
177
1781:
179	.if \vec >= 8
180	ldr	x1, [sp, #S_SP]
181	msr	sp_el0, x1
182	.endif
183
184	ldr	x30, [sp, #S_LR]
185
186	ldp	x28, x29, [sp, #224]
187	ldp	x26, x27, [sp, #208]
188	ldp	x24, x25, [sp, #192]
189	ldp	x22, x23, [sp, #176]
190	ldp	x20, x21, [sp, #160]
191	ldp	x18, x19, [sp, #144]
192	ldp	x16, x17, [sp, #128]
193	ldp	x14, x15, [sp, #112]
194	ldp	x12, x13, [sp,  #96]
195	ldp	x10, x11, [sp,  #80]
196	ldp	 x8,  x9, [sp,  #64]
197	ldp	 x6,  x7, [sp,  #48]
198	ldp	 x4,  x5, [sp,  #32]
199	ldp	 x2,  x3, [sp,  #16]
200	ldp	 x0,  x1, [sp], #S_FRAME_SIZE
201
202	eret
203.endm
204
205vector_stub	el1t_sync,     0
206vector_stub	el1t_irq,      1
207vector_stub	el1t_fiq,      2
208vector_stub	el1t_error,    3
209
210vector_stub	el1h_sync,     4
211vector_stub	el1h_irq,      5
212vector_stub	el1h_fiq,      6
213vector_stub	el1h_error,    7
214
215vector_stub	el0_sync_64,   8
216vector_stub	el0_irq_64,    9
217vector_stub	el0_fiq_64,   10
218vector_stub	el0_error_64, 11
219
220vector_stub	el0_sync_32,  12
221vector_stub	el0_irq_32,   13
222vector_stub	el0_fiq_32,   14
223vector_stub	el0_error_32, 15
224
225.section .text.ex
226
227.macro ventry, label
228.align 7
229	b	\label
230.endm
231
232.align 11
233vector_table:
234	ventry	el1t_sync			// Synchronous EL1t
235	ventry	el1t_irq			// IRQ EL1t
236	ventry	el1t_fiq			// FIQ EL1t
237	ventry	el1t_error			// Error EL1t
238
239	ventry	el1h_sync			// Synchronous EL1h
240	ventry	el1h_irq			// IRQ EL1h
241	ventry	el1h_fiq			// FIQ EL1h
242	ventry	el1h_error			// Error EL1h
243
244	ventry	el0_sync_64			// Synchronous 64-bit EL0
245	ventry	el0_irq_64			// IRQ 64-bit EL0
246	ventry	el0_fiq_64			// FIQ 64-bit EL0
247	ventry	el0_error_64			// Error 64-bit EL0
248
249	ventry	el0_sync_32			// Synchronous 32-bit EL0
250	ventry	el0_irq_32			// IRQ 32-bit EL0
251	ventry	el0_fiq_32			// FIQ 32-bit EL0
252	ventry	el0_error_32			// Error 32-bit EL0
253