xref: /kvm-unit-tests/arm/cstart64.S (revision abdc5d02a7796a55802509ac9bb704c721f2a5f6)
/*
 * Boot entry point and assembler functions for aarch64 tests.
 *
 * Copyright (C) 2017, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 */
#include <auxinfo.h>
#include <asm/asm-offsets.h>
#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/processor.h>
#include <asm/thread_info.h>
#include <asm/sysreg.h>

#ifdef CONFIG_EFI
#include "efi/crt0-efi-aarch64.S"
#else

.macro zero_range, tmp1, tmp2
9998:	cmp	\tmp1, \tmp2
	b.eq	9997f
	stp	xzr, xzr, [\tmp1], #16
	b	9998b
9997:
.endm
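
/*
 * zero_range zeroes [\tmp1, \tmp2) in 16-byte stores; both addresses
 * must be 16-byte aligned. As a C sketch (illustrative only):
 *
 *	while (tmp1 != tmp2) {
 *		*(uint64_t *)tmp1 = 0;
 *		*(uint64_t *)(tmp1 + 8) = 0;
 *		tmp1 += 16;
 *	}
 */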

.section .init

/*
 * Bootloader params are in x0-x3. See the kernel doc
 * Documentation/arm64/booting.txt: x0 holds the physical address
 * of the device tree blob.
 */
.globl start
start:
	/* get our base address */
	adrp	x4, start
	add	x4, x4, :lo12:start

	/*
	 * Update all R_AARCH64_RELATIVE relocations using the table
	 * of Elf64_Rela entries between reloc_start/end. The build
	 * will not emit other relocation types.
	 *
	 * struct Elf64_Rela {
	 *	uint64_t r_offset;
	 *	uint64_t r_info;
	 *	int64_t  r_addend;
	 * }
	 */
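	/*
	 * The loop below is equivalent to this C sketch (illustrative
	 * only; "base" is the runtime address of start, held in x4):
	 *
	 *	struct Elf64_Rela *r;
	 *
	 *	for (r = reloc_start; r < reloc_end; r++)
	 *		*(uint64_t *)(base + r->r_offset) = base + r->r_addend;
	 */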
	adrp	x5, reloc_start
	add	x5, x5, :lo12:reloc_start
	adrp	x6, reloc_end
	add	x6, x6, :lo12:reloc_end
1:
	cmp	x5, x6
	b.hs	1f
	ldr	x7, [x5]			// r_offset
	ldr	x8, [x5, #16]			// r_addend
	add	x8, x8, x4			// val = base + r_addend
	str	x8, [x4, x7]			// base[r_offset] = val
	add	x5, x5, #24			// sizeof(struct Elf64_Rela)
	b	1b

1:
	/* zero BSS */
	adrp	x4, bss
	add	x4, x4, :lo12:bss
	adrp	x5, ebss
	add	x5, x5, :lo12:ebss
	zero_range x4, x5

	/* zero and set up stack */
	adrp	x5, stacktop
	add	x5, x5, :lo12:stacktop
	sub	x4, x5, #THREAD_SIZE
	zero_range x4, x5

	/* set SCTLR_EL1 to a known value */
	ldr	x4, =INIT_SCTLR_EL1_MMU_OFF
	msr	sctlr_el1, x4
	isb

	/* select SP_EL1 as the stack pointer register */
	mov	x4, #1
	msr	spsel, x4
	adrp	x4, stackptr
	add	sp, x4, :lo12:stackptr

	/* enable FP/ASIMD (CPACR_EL1.FPEN) and SVE (CPACR_EL1.ZEN) */
	mov	x4, #(3 << 20)
	orr	x4, x4, #(3 << 16)
	msr	cpacr_el1, x4

	/* set up exception handling */
	bl	exceptions_init

	/* complete setup */
	adrp	x1, stacktop
	add	x1, x1, :lo12:stacktop		// x1 is the base of free memory
	bl	setup				// x0 is the addr of the dtb

	/* run the test: exit(main(__argc, __argv, __environ)) */
	adrp	x0, __argc
	ldr	w0, [x0, :lo12:__argc]
	adrp	x1, __argv
	add	x1, x1, :lo12:__argv
	adrp	x2, __environ
	add	x2, x2, :lo12:__environ
	bl	main
	bl	exit
	b	halt

#endif

.text

/*
 * arm_smccc_hvc / arm_smccc_smc
 *
 * Inputs:
 *   w0 -- function_id
 *   x1 -- arg0
 *   x2 -- arg1
 *   x3 -- arg2
 *   x4 -- arg3
 *   x5 -- arg4
 *   x6 -- arg5
 *   x7 -- arg6
 *   sp -- { arg7, arg8, arg9, arg10, result }
 *
 * Outputs:
 *   x0 -- return code
 *
 * If the result pointer is not NULL:
 *   result.r0 -- return code
 *   result.r1 -- x1
 *   result.r2 -- x2
 *   result.r3 -- x3
 *   result.r4 -- x4
 *   result.r5 -- x5
 *   result.r6 -- x6
 *   result.r7 -- x7
 *   result.r8 -- x8
 *   result.r9 -- x9
 */
.macro do_smccc_call instr
	/* Save x8-x11 on stack */
	stp	x9, x8,   [sp, #-16]!
	stp	x11, x10, [sp, #-16]!
	/* Load arg7 - arg10 from the stack */
	ldp	x8, x9,   [sp, #32]
	ldp	x10, x11, [sp, #48]
	\instr	#0
	/* Get the result address */
	ldr	x10, [sp, #64]
	cmp	x10, xzr
	b.eq	1f
	stp	x0, x1, [x10, #0]
	stp	x2, x3, [x10, #16]
	stp	x4, x5, [x10, #32]
	stp	x6, x7, [x10, #48]
	stp	x8, x9, [x10, #64]
1:
	/* Restore x8-x11 from stack */
	ldp	x11, x10, [sp], #16
	ldp	x9, x8,   [sp], #16
	ret
.endm

.globl arm_smccc_hvc
arm_smccc_hvc:
	do_smccc_call hvc

.globl arm_smccc_smc
arm_smccc_smc:
	do_smccc_call smc
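
/*
 * A C-side declaration matching the convention above might look like
 * this sketch (names illustrative; the real declarations live in the
 * library headers):
 *
 *	struct smccc_result {
 *		unsigned long r0, r1, r2, r3, r4;
 *		unsigned long r5, r6, r7, r8, r9;
 *	};
 *
 *	int arm_smccc_hvc(unsigned int function_id,
 *			  unsigned long arg0, unsigned long arg1,
 *			  unsigned long arg2, unsigned long arg3,
 *			  unsigned long arg4, unsigned long arg5,
 *			  unsigned long arg6, unsigned long arg7,
 *			  unsigned long arg8, unsigned long arg9,
 *			  unsigned long arg10,
 *			  struct smccc_result *result);
 *
 * AAPCS64 passes the first eight arguments in w0/x1-x7 and the rest
 * (arg7-arg10 and result) on the stack, which is exactly where
 * do_smccc_call reads them.
 */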

get_mmu_off:
	adrp	x0, auxinfo
	ldr	x0, [x0, :lo12:auxinfo + 8]	// auxinfo.flags
	and	x0, x0, #AUXINFO_MMU_OFF
	ret

.globl secondary_entry
secondary_entry:
	/* enable FP/ASIMD and SVE */
	mov	x0, #(3 << 20)
	orr	x0, x0, #(3 << 16)
	msr	cpacr_el1, x0

	/* set up exception handling */
	bl	exceptions_init

	/* enable the MMU unless requested off */
	bl	get_mmu_off
	cbnz	x0, 1f
	adrp	x0, mmu_idmap
	ldr	x0, [x0, :lo12:mmu_idmap]
	bl	asm_mmu_enable

1:
	/* set the stack */
	adrp	x0, secondary_data
	ldr	x0, [x0, :lo12:secondary_data]
	mov	sp, x0

	/* finish init in C code */
	bl	secondary_cinit

	/* x0 is now the entry function, run it */
	blr	x0
	b	do_idle

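/*
 * The stack load above assumes secondary_data's first member is the
 * new CPU's stack top. A sketch of the layout the lib/arm smp code
 * uses (field names assumed):
 *
 *	struct secondary_data {
 *		void *stack;		// must remain the first member
 *		secondary_entry_fn entry;
 *	};
 *
 * secondary_cinit() completes per-cpu setup and returns the entry
 * function in x0.
 */
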
.globl halt
halt:
1:	wfi
	b	1b

/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */

/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 *   NORMAL_WT          101     10111011
 *   DEVICE_nGRE        110     00001000
 *   NORMAL_TAGGED      111     11110000
 */
#define MAIR(attr, mt) ((attr) << ((mt) * 8))
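
/*
 * Example: the table above gives MT_NORMAL index 4, so
 * MAIR(0xff, MT_NORMAL) == 0xffUL << 32, i.e. attribute 0xff (Normal
 * Write-Back) lands in byte 4 of MAIR_EL1 and is selected by PTEs
 * with AttrIndx == 4.
 */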

#if PAGE_SIZE == SZ_64K
#define TCR_TG_FLAGS	TCR_TG0_64K | TCR_TG1_64K
#elif PAGE_SIZE == SZ_16K
#define TCR_TG_FLAGS	TCR_TG0_16K | TCR_TG1_16K
#elif PAGE_SIZE == SZ_4K
#define TCR_TG_FLAGS	TCR_TG0_4K | TCR_TG1_4K
#endif

.globl asm_mmu_enable
asm_mmu_enable:
	tlbi	vmalle1			// invalidate I + D TLBs
	dsb	nsh

	/* TCR */
	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
		     TCR_TG_FLAGS  |			\
		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
		     TCR_SHARED |			\
		     TCR_EPD1
	mrs	x2, id_aa64mmfr0_el1
	bfi	x1, x2, #32, #3		// IPS = ID_AA64MMFR0_EL1.PARange
	msr	tcr_el1, x1

	/* MAIR */
	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
		     MAIR(0x0c, MT_DEVICE_GRE) |	\
		     MAIR(0x44, MT_NORMAL_NC) |		\
		     MAIR(0xff, MT_NORMAL) |		\
		     MAIR(0xbb, MT_NORMAL_WT) |		\
		     MAIR(0x08, MT_DEVICE_nGRE) |	\
		     MAIR(0xf0, MT_NORMAL_TAGGED)
	msr	mair_el1, x1

	/* TTBR0 */
	msr	ttbr0_el1, x0
	isb

	/* SCTLR */
	mrs	x1, sctlr_el1
	orr	x1, x1, SCTLR_EL1_C
	orr	x1, x1, SCTLR_EL1_I
	orr	x1, x1, SCTLR_EL1_M
	msr	sctlr_el1, x1
	isb

	ret
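
/*
 * As a C sketch (hypothetical helper names, shown only to make the
 * ordering explicit): the whole translation regime is programmed, and
 * made visible with an ISB, before SCTLR_EL1.M turns the MMU on.
 *
 *	void mmu_enable(unsigned long pgtable)
 *	{
 *		local_flush_tlb_all();
 *		write_sysreg(tcr, tcr_el1);
 *		write_sysreg(mair, mair_el1);
 *		write_sysreg(pgtable, ttbr0_el1);
 *		isb();
 *		write_sysreg(read_sysreg(sctlr_el1) | C | I | M, sctlr_el1);
 *		isb();
 *	}
 */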

.globl asm_mmu_disable
asm_mmu_disable:
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_EL1_M
	msr	sctlr_el1, x0
	isb

	/*
	 * Clean + invalidate the test's entire physical memory range.
	 * With the MMU off, data accesses are non-cacheable, so dirty
	 * lines must be written back now for uncached accesses to see
	 * the data, and stale lines must not linger in the caches.
	 */
	adrp	x0, __phys_offset
	ldr	x0, [x0, :lo12:__phys_offset]
	adrp	x1, __phys_end
	ldr	x1, [x1, :lo12:__phys_end]
	sub	x1, x1, x0			// x1 = size
	dcache_by_line_op civac, sy, x0, x1, x2, x3

	ret

/*
 * Vectors
 */

.globl exceptions_init
exceptions_init:
	adrp	x4, vector_table
	add	x4, x4, :lo12:vector_table
	msr	vbar_el1, x4
	isb
	ret

/*
 * Vector stubs
 * Adapted from arch/arm64/kernel/entry.S
 * Declare as weak to allow external tests to redefine and override a
 * vector_stub.
 */
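
/*
 * Each stub saves a frame shaped like the pt_regs the C handler sees.
 * A sketch, assuming the kernel-style layout behind the S_* offsets
 * from asm-offsets:
 *
 *	struct pt_regs {
 *		unsigned long regs[31];	// x0-x30; S_LR = &regs[30]
 *		unsigned long sp;	// S_SP
 *		unsigned long pc;	// S_PC, loaded from ELR_EL1
 *		unsigned long pstate;	// loaded from SPSR_EL1
 *	};
 *
 * S_FRAME_SIZE is the 16-byte-aligned size of this frame.
 */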
.macro vector_stub, name, vec
.weak \name
\name:
	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!
	stp	 x2,  x3, [sp,  #16]
	stp	 x4,  x5, [sp,  #32]
	stp	 x6,  x7, [sp,  #48]
	stp	 x8,  x9, [sp,  #64]
	stp	x10, x11, [sp,  #80]
	stp	x12, x13, [sp,  #96]
	stp	x14, x15, [sp, #112]
	stp	x16, x17, [sp, #128]
	stp	x18, x19, [sp, #144]
	stp	x20, x21, [sp, #160]
	stp	x22, x23, [sp, #176]
	stp	x24, x25, [sp, #192]
	stp	x26, x27, [sp, #208]
	stp	x28, x29, [sp, #224]

	str	x30, [sp, #S_LR]

	.if \vec >= 8
	mrs	x1, sp_el0
	.else
	add	x1, sp, #S_FRAME_SIZE
	.endif
	str	x1, [sp, #S_SP]

	mrs	x1, elr_el1
	mrs	x2, spsr_el1
	stp	x1, x2, [sp, #S_PC]

	/*
	 * Create a frame record (saved x29 and the exception's ELR as
	 * the link) so the stack can be unwound across the exception.
	 */
	stp	x29, x1, [sp, #S_FP]
	add	x29, sp, #S_FP

	mov	x0, \vec
	mov	x1, sp
	mrs	x2, esr_el1
	bl	do_handle_exception

	ldp	x1, x2, [sp, #S_PC]
	msr	spsr_el1, x2
	msr	elr_el1, x1

	.if \vec >= 8
	ldr	x1, [sp, #S_SP]
	msr	sp_el0, x1
	.endif

	ldr	x30, [sp, #S_LR]

	ldp	x28, x29, [sp, #224]
	ldp	x26, x27, [sp, #208]
	ldp	x24, x25, [sp, #192]
	ldp	x22, x23, [sp, #176]
	ldp	x20, x21, [sp, #160]
	ldp	x18, x19, [sp, #144]
	ldp	x16, x17, [sp, #128]
	ldp	x14, x15, [sp, #112]
	ldp	x12, x13, [sp,  #96]
	ldp	x10, x11, [sp,  #80]
	ldp	 x8,  x9, [sp,  #64]
	ldp	 x6,  x7, [sp,  #48]
	ldp	 x4,  x5, [sp,  #32]
	ldp	 x2,  x3, [sp,  #16]
	ldp	 x0,  x1, [sp], #S_FRAME_SIZE

	eret
.endm

.globl vector_stub_start
vector_stub_start:

vector_stub	el1t_sync,     0
vector_stub	el1t_irq,      1
vector_stub	el1t_fiq,      2
vector_stub	el1t_error,    3

vector_stub	el1h_sync,     4
vector_stub	el1h_irq,      5
vector_stub	el1h_fiq,      6
vector_stub	el1h_error,    7

vector_stub	el0_sync_64,   8
vector_stub	el0_irq_64,    9
vector_stub	el0_fiq_64,   10
vector_stub	el0_error_64, 11

vector_stub	el0_sync_32,  12
vector_stub	el0_irq_32,   13
vector_stub	el0_fiq_32,   14
vector_stub	el0_error_32, 15

.globl vector_stub_end
vector_stub_end:

.section .text.ex

.macro ventry, label
.align 7				// each vector slot is 128 bytes
	b	\label
.endm

/*
 * Declare as weak to allow external tests to redefine and override the
 * default vector table.
 */
.align 11				// VBAR_EL1 requires 2KB alignment
.weak vector_table
vector_table:
	ventry	el1t_sync			// Synchronous EL1t
	ventry	el1t_irq			// IRQ EL1t
	ventry	el1t_fiq			// FIQ EL1t
	ventry	el1t_error			// Error EL1t

	ventry	el1h_sync			// Synchronous EL1h
	ventry	el1h_irq			// IRQ EL1h
	ventry	el1h_fiq			// FIQ EL1h
	ventry	el1h_error			// Error EL1h

	ventry	el0_sync_64			// Synchronous 64-bit EL0
	ventry	el0_irq_64			// IRQ 64-bit EL0
	ventry	el0_fiq_64			// FIQ 64-bit EL0
	ventry	el0_error_64			// Error 64-bit EL0

	ventry	el0_sync_32			// Synchronous 32-bit EL0
	ventry	el0_irq_32			// IRQ 32-bit EL0
	ventry	el0_fiq_32			// FIQ 32-bit EL0
	ventry	el0_error_32			// Error 32-bit EL0