/*
 * Boot entry point and assembler functions for aarch64 tests.
 *
 * Copyright (C) 2017, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 */
#define __ASSEMBLY__
#include <auxinfo.h>
#include <asm/asm-offsets.h>
#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/thread_info.h>
#include <asm/sysreg.h>

#ifdef CONFIG_EFI
#include "efi/crt0-efi-aarch64.S"
#else

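/*
 * zero_range zeroes [\tmp1, \tmp2) a pair of registers (16 bytes) at a
 * time, so both bounds are assumed to be 16-byte aligned; \tmp1 is
 * advanced until it reaches \tmp2.
 */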
.macro zero_range, tmp1, tmp2
9998:	cmp	\tmp1, \tmp2
	b.eq	9997f
	stp	xzr, xzr, [\tmp1], #16
	b	9998b
9997:
.endm

.section .init

/*
 * Bootloader params are in x0-x3. See kernel doc
 * Documentation/arm64/booting.txt
 */
.globl start
start:
	/* get our base address */
	adrp	x4, start
	add	x4, x4, :lo12:start

	/*
	 * Update all R_AARCH64_RELATIVE relocations using the table
	 * of Elf64_Rela entries between reloc_start/end. The build
	 * will not emit other relocation types, so r_info never needs
	 * to be inspected.
	 *
	 * struct Elf64_Rela {
	 * 	uint64_t r_offset;
	 * 	uint64_t r_info;
	 * 	int64_t  r_addend;
	 * }
	 */
	adrp	x5, reloc_start
	add	x5, x5, :lo12:reloc_start
	adrp	x6, reloc_end
	add	x6, x6, :lo12:reloc_end
1:
	cmp	x5, x6
	b.hs	1f
	ldr	x7, [x5]			// r_offset
	ldr	x8, [x5, #16]			// r_addend
	add	x8, x8, x4			// val = base + r_addend
	str	x8, [x4, x7]			// base[r_offset] = val
	add	x5, x5, #24			// sizeof(Elf64_Rela)
	b	1b

1:
	/* zero BSS */
	adrp	x4, bss
	add	x4, x4, :lo12:bss
	adrp	x5, ebss
	add	x5, x5, :lo12:ebss
	zero_range x4, x5

	/* zero and set up stack */
	adrp	x5, stacktop
	add	x5, x5, :lo12:stacktop
	sub	x4, x5, #THREAD_SIZE
	zero_range x4, x5

	/* set SCTLR_EL1 to a known value with the MMU off */
	ldr	x4, =INIT_SCTLR_EL1_MMU_OFF
	msr	sctlr_el1, x4
	isb

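	/* select SP_ELx (SPSel = 1), then point sp at the boot stack */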
	mov	x4, #1
	msr	spsel, x4
	adrp	x4, stackptr
	add	sp, x4, :lo12:stackptr

	/* enable FP/ASIMD (CPACR_EL1.FPEN = 0b11, i.e. no trapping) */
	mov	x4, #(3 << 20)
	msr	cpacr_el1, x4

	/* set up exception handling */
	bl	exceptions_init

	/* complete setup */
	adrp	x1, stacktop
	add	x1, x1, :lo12:stacktop		// x1 is the base of free memory
	bl	setup				// x0, untouched since entry, is the addr of the dtb

	/* run the test */
	adrp	x0, __argc
	ldr	w0, [x0, :lo12:__argc]
	adrp	x1, __argv
	add	x1, x1, :lo12:__argv
	adrp	x2, __environ
	add	x2, x2, :lo12:__environ
	bl	main
	bl	exit
	/* exit() does not return, but halt the cpu if it ever does */
	b	halt

#endif

.text

/*
 * psci_invoke_hvc / psci_invoke_smc
 *
 * Inputs:
 *   w0 -- function_id
 *   x1 -- arg0
 *   x2 -- arg1
 *   x3 -- arg2
 *
 * Outputs:
 *   x0 -- return code
 *
 * The arguments already sit in the registers the SMC Calling Convention
 * expects, so each function is just the conduit instruction and a return.
 */
.globl psci_invoke_hvc
psci_invoke_hvc:
	hvc	#0
	ret

.globl psci_invoke_smc
psci_invoke_smc:
	smc	#0
	ret

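/*
 * get_mmu_off returns, in x0, the AUXINFO_MMU_OFF bit of auxinfo.flags,
 * i.e. non-zero when the test asked to run with the MMU off.
 */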
get_mmu_off:
	adrp	x0, auxinfo
	ldr	x0, [x0, :lo12:auxinfo + 8]	// auxinfo.flags
	and	x0, x0, #AUXINFO_MMU_OFF
	ret

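/*
 * Secondary cpus enter here, e.g. via PSCI CPU_ON issued by the boot cpu,
 * with the MMU off; their stack is published in secondary_data.
 */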
.globl secondary_entry
secondary_entry:
	/* Enable FP/ASIMD */
	mov	x0, #(3 << 20)
	msr	cpacr_el1, x0

	/* set up exception handling */
	bl	exceptions_init

	/* enable the MMU unless requested off */
	bl	get_mmu_off
	cbnz	x0, 1f
	adrp	x0, mmu_idmap
	ldr	x0, [x0, :lo12:mmu_idmap]
	bl	asm_mmu_enable

1:
	/* set the stack, which the boot cpu published in secondary_data */
	adrp	x0, secondary_data
	ldr	x0, [x0, :lo12:secondary_data]
	mov	sp, x0

	/* finish init in C code */
	bl	secondary_cinit

	/* x0 is now the entry function, run it; park the cpu if it returns */
	blr	x0
	b	do_idle

.globl halt
halt:
1:	wfi
	b	1b

/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */

/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 *   NORMAL_WT          101     10111011
 *   DEVICE_nGRE        110     00001000
 */
#define MAIR(attr, mt) ((attr) << ((mt) * 8))
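
/*
 * e.g. MAIR(0xff, MT_NORMAL) places 0b11111111 (inner/outer write-back
 * Normal memory) in attribute byte MT_NORMAL (4) of MAIR_EL1.
 */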

/* select the TCR granule encodings matching the build-time PAGE_SIZE */
#if PAGE_SIZE == SZ_64K
#define TCR_TG_FLAGS	TCR_TG0_64K | TCR_TG1_64K
#elif PAGE_SIZE == SZ_16K
#define TCR_TG_FLAGS	TCR_TG0_16K | TCR_TG1_16K
#elif PAGE_SIZE == SZ_4K
#define TCR_TG_FLAGS	TCR_TG0_4K | TCR_TG1_4K
#endif

.globl asm_mmu_enable
asm_mmu_enable:
	tlbi	vmalle1			// invalidate I + D TLBs
	dsb	nsh

	/* TCR */
	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
		     TCR_TG_FLAGS  |			\
		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
		     TCR_SHARED |			\
		     TCR_EPD1			// disable TTBR1 walks
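	/*
	 * Copy ID_AA64MMFR0_EL1.PARange into TCR_EL1.IPS (bits [34:32]) so
	 * translation uses the physical address size the cpu actually
	 * implements.
	 */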
	mrs	x2, id_aa64mmfr0_el1
	bfi	x1, x2, #32, #3
	msr	tcr_el1, x1

	/* MAIR */
	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
		     MAIR(0x0c, MT_DEVICE_GRE) |	\
		     MAIR(0x44, MT_NORMAL_NC) |		\
		     MAIR(0xff, MT_NORMAL) |		\
		     MAIR(0xbb, MT_NORMAL_WT) |		\
		     MAIR(0x08, MT_DEVICE_nGRE)
	msr	mair_el1, x1

	/* TTBR0 */
	msr	ttbr0_el1, x0
	isb

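	/*
	 * The ISB above ensures the TCR/MAIR/TTBR0 writes are synchronized
	 * before the MMU is switched on below.
	 */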
	/* SCTLR */
	mrs	x1, sctlr_el1
	orr	x1, x1, SCTLR_EL1_C		// data cache
	orr	x1, x1, SCTLR_EL1_I		// instruction cache
	orr	x1, x1, SCTLR_EL1_M		// MMU on
	msr	sctlr_el1, x1
	isb

	ret

.globl asm_mmu_disable
asm_mmu_disable:
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_EL1_M
	msr	sctlr_el1, x0
	isb

	/*
	 * Clean + invalidate the entire memory, so that writes made while
	 * the caches were on reach the PoC before the cpu continues with
	 * non-cacheable accesses.
	 */
	adrp	x0, __phys_offset
	ldr	x0, [x0, :lo12:__phys_offset]
	adrp	x1, __phys_end
	ldr	x1, [x1, :lo12:__phys_end]
	sub	x1, x1, x0			// size = __phys_end - __phys_offset
	dcache_by_line_op civac, sy, x0, x1, x2, x3

	ret

/*
 * Vectors
 */

.globl exceptions_init
exceptions_init:
	adrp	x4, vector_table
	add	x4, x4, :lo12:vector_table
	msr	vbar_el1, x4
	isb
	ret

/*
 * Vector stubs
 * Adapted from arch/arm64/kernel/entry.S
 * Each stub saves a struct pt_regs frame on the stack (the S_* offsets
 * come from asm-offsets) and passes it to do_handle_exception.
 * Declare as weak to allow external tests to redefine and override a
 * vector_stub.
 */
.macro vector_stub, name, vec
.weak \name
\name:
	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!
	stp	 x2,  x3, [sp,  #16]
	stp	 x4,  x5, [sp,  #32]
	stp	 x6,  x7, [sp,  #48]
	stp	 x8,  x9, [sp,  #64]
	stp	x10, x11, [sp,  #80]
	stp	x12, x13, [sp,  #96]
	stp	x14, x15, [sp, #112]
	stp	x16, x17, [sp, #128]
	stp	x18, x19, [sp, #144]
	stp	x20, x21, [sp, #160]
	stp	x22, x23, [sp, #176]
	stp	x24, x25, [sp, #192]
	stp	x26, x27, [sp, #208]
	stp	x28, x29, [sp, #224]

	str	x30, [sp, #S_LR]

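	/*
	 * Vectors 8-15 are taken from EL0, where the interrupted code was
	 * running on SP_EL0; for the EL1 vectors the pre-exception sp is
	 * recovered by undoing the frame push.
	 */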
	.if \vec >= 8
	mrs	x1, sp_el0
	.else
	add	x1, sp, #S_FRAME_SIZE
	.endif
	str	x1, [sp, #S_SP]

	mrs	x1, elr_el1
	mrs	x2, spsr_el1
	stp	x1, x2, [sp, #S_PC]

	/*
	 * Save a frame record, using ELR as the link, to allow unwinding
	 * of exceptions.
	 */
	stp	x29, x1, [sp, #S_FP]
	add	x29, sp, #S_FP

	mov	x0, \vec
	mov	x1, sp
	mrs	x2, esr_el1
	bl	do_handle_exception

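	/*
	 * The handler may have modified the saved context (e.g. to step
	 * past a faulting instruction), so restore ELR and SPSR from the
	 * frame rather than assuming they are unchanged.
	 */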
	ldp	x1, x2, [sp, #S_PC]
	msr	spsr_el1, x2
	msr	elr_el1, x1

	.if \vec >= 8
	ldr	x1, [sp, #S_SP]
	msr	sp_el0, x1
	.endif

	ldr	x30, [sp, #S_LR]

	ldp	x28, x29, [sp, #224]
	ldp	x26, x27, [sp, #208]
	ldp	x24, x25, [sp, #192]
	ldp	x22, x23, [sp, #176]
	ldp	x20, x21, [sp, #160]
	ldp	x18, x19, [sp, #144]
	ldp	x16, x17, [sp, #128]
	ldp	x14, x15, [sp, #112]
	ldp	x12, x13, [sp,  #96]
	ldp	x10, x11, [sp,  #80]
	ldp	 x8,  x9, [sp,  #64]
	ldp	 x6,  x7, [sp,  #48]
	ldp	 x4,  x5, [sp,  #32]
	ldp	 x2,  x3, [sp,  #16]
	ldp	 x0,  x1, [sp], #S_FRAME_SIZE	// also pops the frame

	eret
.endm

.globl vector_stub_start
vector_stub_start:

vector_stub	el1t_sync,     0
vector_stub	el1t_irq,      1
vector_stub	el1t_fiq,      2
vector_stub	el1t_error,    3

vector_stub	el1h_sync,     4
vector_stub	el1h_irq,      5
vector_stub	el1h_fiq,      6
vector_stub	el1h_error,    7

vector_stub	el0_sync_64,   8
vector_stub	el0_irq_64,    9
vector_stub	el0_fiq_64,   10
vector_stub	el0_error_64, 11

vector_stub	el0_sync_32,  12
vector_stub	el0_irq_32,   13
vector_stub	el0_fiq_32,   14
vector_stub	el0_error_32, 15

.globl vector_stub_end
vector_stub_end:

.section .text.ex

.macro ventry, label
.align 7	/* each architectural vector slot is 0x80 bytes */
	b	\label
.endm

/*
 * Declare as weak to allow external tests to redefine and override the
 * default vector table. The .align 11 satisfies VBAR_EL1's requirement
 * that the table be 2KB aligned.
 */
.align 11
.weak vector_table
vector_table:
	ventry	el1t_sync			// Synchronous EL1t
	ventry	el1t_irq			// IRQ EL1t
	ventry	el1t_fiq			// FIQ EL1t
	ventry	el1t_error			// Error EL1t

	ventry	el1h_sync			// Synchronous EL1h
	ventry	el1h_irq			// IRQ EL1h
	ventry	el1h_fiq			// FIQ EL1h
	ventry	el1h_error			// Error EL1h

	ventry	el0_sync_64			// Synchronous 64-bit EL0
	ventry	el0_irq_64			// IRQ 64-bit EL0
	ventry	el0_fiq_64			// FIQ 64-bit EL0
	ventry	el0_error_64			// Error 64-bit EL0

	ventry	el0_sync_32			// Synchronous 32-bit EL0
	ventry	el0_irq_32			// IRQ 32-bit EL0
	ventry	el0_fiq_32			// FIQ 32-bit EL0
	ventry	el0_error_32			// Error 32-bit EL0