/* xref: /kvm-unit-tests/arm/cstart64.S (revision cddb18bc11b24b9effbc8a793ab582cbc366f9a5) */
/*
 * Boot entry point and assembler functions for aarch64 tests.
 *
 * Copyright (C) 2017, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 */
#define __ASSEMBLY__
#include <auxinfo.h>
#include <asm/asm-offsets.h>
#include <asm/assembler.h>
#include <asm/ptrace.h>
#include <asm/page.h>
#include <asm/pgtable-hwdef.h>
#include <asm/thread_info.h>
#include <asm/sysreg.h>

#ifdef CONFIG_EFI
#include "efi/crt0-efi-aarch64.S"
#else
21
/*
 * zero_range - zero memory in [\tmp1, \tmp2), 16 bytes per iteration.
 * The range size must be a multiple of 16 bytes (the loop exits only on
 * exact equality). \tmp1 is advanced to \tmp2 (clobbered); \tmp2 is
 * preserved.
 */
.macro zero_range, tmp1, tmp2
9998:	cmp	\tmp1, \tmp2
	b.eq	9997f
	stp	xzr, xzr, [\tmp1], #16		// store 16 zero bytes, post-increment
	b	9998b
9997:
.endm
29
.section .init

/*
 * Bootloader params are in x0-x3. See kernel doc
 * Documentation/arm64/booting.txt
 *
 * Self-relocates, zeroes BSS, installs a stack and exception vectors,
 * then runs setup()/main()/exit(). x0 (the DTB pointer) is preserved
 * untouched all the way into setup().
 */
.globl start
start:
	/* get our base address */
	adrp	x4, start
	add     x4, x4, :lo12:start

	/*
	 * Update all R_AARCH64_RELATIVE relocations using the table
	 * of Elf64_Rela entries between reloc_start/end. The build
	 * will not emit other relocation types.
	 *
	 * struct Elf64_Rela {
	 * 	uint64_t r_offset;
	 * 	uint64_t r_info;
	 * 	int64_t  r_addend;
	 * }
	 */
	adrp	x5, reloc_start
	add     x5, x5, :lo12:reloc_start
	adrp	x6, reloc_end
	add     x6, x6, :lo12:reloc_end
1:
	cmp	x5, x6
	b.hs	1f				// done once the table is exhausted
	ldr	x7, [x5]			// r_offset
	ldr	x8, [x5, #16]			// r_addend
	add	x8, x8, x4			// val = base + r_addend
	str	x8, [x4, x7]			// base[r_offset] = val
	add	x5, x5, #24			// sizeof(struct Elf64_Rela)
	b	1b

1:
	/* zero BSS */
	adrp	x4, bss
	add	x4, x4, :lo12:bss
	adrp    x5, ebss
	add     x5, x5, :lo12:ebss
	zero_range x4, x5

	/* zero and set up stack */
	adrp    x5, stacktop
	add     x5, x5, :lo12:stacktop
	sub	x4, x5, #THREAD_SIZE
	zero_range x4, x5

	/* set SCTLR_EL1 to a known value */
	ldr	x4, =INIT_SCTLR_EL1_MMU_OFF
	msr	sctlr_el1, x4
	isb

	/* select SP_ELx as the stack pointer register, then point it at the stack */
	mov	x4, #1
	msr	spsel, x4
	adrp    x4, stackptr
	add     sp, x4, :lo12:stackptr

	/* enable FP/ASIMD */
	mov	x4, #(3 << 20)			// CPACR_EL1.FPEN = 0b11 (no trapping)
	msr	cpacr_el1, x4

	/* set up exception handling */
	bl	exceptions_init

	/* complete setup */
	adrp	x1, stacktop
	add	x1, x1, :lo12:stacktop		// x1 is the base of free memory
	bl	setup				// x0 is the addr of the dtb

	/* run the test: main(__argc, __argv, __environ) */
	adrp	x0, __argc
	ldr	w0, [x0, :lo12:__argc]
	adrp	x1, __argv
	add	x1, x1, :lo12:__argv
	adrp	x2, __environ
	add	x2, x2, :lo12:__environ
	bl	main
	bl	exit
	b	halt				// park the CPU if exit ever returns

#endif
115
.text

/*
 * arm_smccc_hvc / arm_smccc_smc
 *
 * Inputs:
 *   w0 -- function_id
 *   x1 -- arg0
 *   x2 -- arg1
 *   x3 -- arg2
 *   x4 -- arg3
 *   x5 -- arg4
 *   x6 -- arg5
 *   x7 -- arg6
 *   sp -- { arg7, arg8, arg9, arg10, result }
 *
 * Outputs:
 *   x0 -- return code
 *
 * If result pointer is not NULL:
 *   result.r0 -- return code
 *   result.r1 -- x1
 *   result.r2 -- x2
 *   result.r3 -- x3
 *   result.r4 -- x4
 *   result.r5 -- x5
 *   result.r6 -- x6
 *   result.r7 -- x7
 *   result.r8 -- x8
 *   result.r9 -- x9
 */
.macro do_smccc_call instr
	/* Save x8-x11 on stack */
	stp	x9, x8,	  [sp, #-16]!
	stp	x11, x10, [sp, #-16]!
	/* Load arg7 - arg10 from the stack (caller's slots are now 32 bytes up) */
	ldp	x8, x9,   [sp, #32]
	ldp	x10, x11, [sp, #48]
	\instr	#0				// HVC/SMC with immediate 0
	/* Get the result address (caller's 5th stack slot; may be NULL) */
	ldr	x10, [sp, #64]
	cmp	x10, xzr
	b.eq	1f				// NULL: caller only wants x0
	/* Store x0-x9 into result.r0-result.r9 */
	stp	x0, x1, [x10, #0]
	stp	x2, x3, [x10, #16]
	stp	x4, x5, [x10, #32]
	stp	x6, x7, [x10, #48]
	stp	x8, x9, [x10, #64]
1:
	/* Restore x8-x11 from stack */
	ldp	x11, x10, [sp], #16
	ldp	x9, x8,   [sp], #16
	ret
.endm
170
/* SMCCC call using the HVC conduit; register interface described above */
.globl arm_smccc_hvc
arm_smccc_hvc:
	do_smccc_call hvc
174
/* SMCCC call using the SMC conduit; register interface described above */
.globl arm_smccc_smc
arm_smccc_smc:
	do_smccc_call smc
178
/*
 * get_mmu_off
 *   Returns non-zero in x0 iff AUXINFO_MMU_OFF is set in the 64-bit
 *   word at auxinfo + 8 (presumably the auxinfo flags field — see
 *   auxinfo.h). Clobbers only x0.
 */
get_mmu_off:
	adrp	x0, auxinfo
	ldr	x0, [x0, :lo12:auxinfo + 8]	// load the word at auxinfo + 8
	and	x0, x0, #AUXINFO_MMU_OFF
	ret
184
/*
 * secondary_entry
 *   Entry point for secondary CPUs: enable FP/ASIMD, install vectors,
 *   optionally enable the MMU, pick up the stack published in
 *   secondary_data, finish init in C, then jump to the entry function
 *   secondary_cinit returned in x0. Falls through to do_idle if that
 *   function ever returns.
 */
.globl secondary_entry
secondary_entry:
	/* Enable FP/ASIMD */
	mov	x0, #(3 << 20)			// CPACR_EL1.FPEN = 0b11 (no trapping)
	msr	cpacr_el1, x0

	/* set up exception handling */
	bl	exceptions_init

	/* enable the MMU unless requested off */
	bl	get_mmu_off
	cbnz	x0, 1f
	adrp	x0, mmu_idmap
	ldr	x0, [x0, :lo12:mmu_idmap]	// translation table base (set up elsewhere)
	bl	asm_mmu_enable

1:
	/* set the stack */
	adrp	x0, secondary_data
	ldr	x0, [x0, :lo12:secondary_data]	// this CPU's stack address (set up elsewhere)
	mov	sp, x0

	/* finish init in C code */
	bl	secondary_cinit

	/* x0 is now the entry function, run it */
	blr	x0
	b	do_idle
213
/*
 * halt - park the calling CPU forever.
 * Low-power wait-for-interrupt loop; never returns.
 */
.globl halt
halt:
.Lhalt_loop:
	wfi				// doze until an interrupt/wakeup event...
	b	.Lhalt_loop		// ...then go straight back to sleep
218
/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *   Clobbers x1, x2.
 *
 * Programs TCR_EL1, MAIR_EL1 and TTBR0_EL1, then turns on the MMU and
 * caches in SCTLR_EL1.
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */

/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 *   NORMAL_WT          101     10111011
 *   DEVICE_nGRE        110     00001000
 */
#define MAIR(attr, mt) ((attr) << ((mt) * 8))

/* Select the translation granule flags matching the build's PAGE_SIZE */
#if PAGE_SIZE == SZ_64K
#define TCR_TG_FLAGS	TCR_TG0_64K | TCR_TG1_64K
#elif PAGE_SIZE == SZ_16K
#define TCR_TG_FLAGS	TCR_TG0_16K | TCR_TG1_16K
#elif PAGE_SIZE == SZ_4K
#define TCR_TG_FLAGS	TCR_TG0_4K | TCR_TG1_4K
#endif

.globl asm_mmu_enable
asm_mmu_enable:
	tlbi	vmalle1			// invalidate I + D TLBs
	dsb	nsh

	/* TCR */
	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
		     TCR_TG_FLAGS  |			\
		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
		     TCR_SHARED |			\
		     TCR_EPD1
	mrs	x2, id_aa64mmfr0_el1
	bfi	x1, x2, #32, #3		// IPS[34:32] = ID_AA64MMFR0_EL1.PARange
	msr	tcr_el1, x1

	/* MAIR */
	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
		     MAIR(0x0c, MT_DEVICE_GRE) |	\
		     MAIR(0x44, MT_NORMAL_NC) |		\
		     MAIR(0xff, MT_NORMAL) |	        \
		     MAIR(0xbb, MT_NORMAL_WT) |         \
		     MAIR(0x08, MT_DEVICE_nGRE)
	msr	mair_el1, x1

	/* TTBR0 */
	msr	ttbr0_el1, x0
	isb

	/* SCTLR: enable data cache, icache and the MMU */
	mrs	x1, sctlr_el1
	orr	x1, x1, SCTLR_EL1_C
	orr	x1, x1, SCTLR_EL1_I
	orr	x1, x1, SCTLR_EL1_M
	msr	sctlr_el1, x1
	isb

	ret
291
/*
 * asm_mmu_disable
 *   Disable stage-1 translation at EL1, then clean+invalidate the data
 *   cache over [__phys_offset, __phys_end) so memory is consistent with
 *   the now-uncached view. Clobbers x0-x3.
 */
.globl asm_mmu_disable
asm_mmu_disable:
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_EL1_M		// clear SCTLR_EL1.M (MMU enable)
	msr	sctlr_el1, x0
	isb

	/* Clean + invalidate the entire memory */
	adrp	x0, __phys_offset
	ldr	x0, [x0, :lo12:__phys_offset]	// x0 = start of test memory
	adrp	x1, __phys_end
	ldr	x1, [x1, :lo12:__phys_end]
	sub	x1, x1, x0			// x1 = size in bytes
	dcache_by_line_op civac, sy, x0, x1, x2, x3

	ret
308
/*
 * Vectors
 */

/*
 * exceptions_init
 *   Point VBAR_EL1 at vector_table on the calling CPU. Clobbers x4.
 */
.globl exceptions_init
exceptions_init:
	adrp	x4, vector_table
	add	x4, x4, :lo12:vector_table
	msr	vbar_el1, x4
	isb
	ret
320
/*
 * Vector stubs
 * Adapted from arch/arm64/kernel/entry.S
 * Declare as weak to allow external tests to redefine and override a
 * vector_stub.
 *
 * Each stub builds a register frame (pt_regs layout per the S_* offsets
 * from asm-offsets) on the stack, calls
 * do_handle_exception(\vec, regs, esr), then restores the (possibly
 * handler-modified) frame and erets.
 */
.macro vector_stub, name, vec
.weak \name
\name:
	/* Push the frame and spill x0-x29 */
	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!
	stp	 x2,  x3, [sp,  #16]
	stp	 x4,  x5, [sp,  #32]
	stp	 x6,  x7, [sp,  #48]
	stp	 x8,  x9, [sp,  #64]
	stp	x10, x11, [sp,  #80]
	stp	x12, x13, [sp,  #96]
	stp	x14, x15, [sp, #112]
	stp	x16, x17, [sp, #128]
	stp	x18, x19, [sp, #144]
	stp	x20, x21, [sp, #160]
	stp	x22, x23, [sp, #176]
	stp	x24, x25, [sp, #192]
	stp	x26, x27, [sp, #208]
	stp	x28, x29, [sp, #224]

	str	x30, [sp, #S_LR]

	/*
	 * Record the pre-exception stack pointer: SP_EL0 for vectors
	 * taken from EL0 (vec >= 8), otherwise the frame base.
	 */
	.if \vec >= 8
	mrs	x1, sp_el0
	.else
	add	x1, sp, #S_FRAME_SIZE
	.endif
	str	x1, [sp, #S_SP]

	/* Save the exception return state */
	mrs	x1, elr_el1
	mrs	x2, spsr_el1
	stp	x1, x2, [sp, #S_PC]

	/*
	 * Save a frame pointer using the link to allow unwinding of
	 * exceptions.
	 */
	stp	x29, x1, [sp, #S_FP]
	add 	x29, sp, #S_FP

	/* do_handle_exception(vector, regs, esr) */
	mov	x0, \vec
	mov	x1, sp
	mrs	x2, esr_el1
	bl	do_handle_exception

	/* Reload return state, which the handler may have changed */
	ldp	x1, x2, [sp, #S_PC]
	msr	spsr_el1, x2
	msr	elr_el1, x1

	.if \vec >= 8
	/* Likewise propagate any change back to EL0's stack pointer */
	ldr	x1, [sp, #S_SP]
	msr	sp_el0, x1
	.endif

	ldr	x30, [sp, #S_LR]

	/* Restore x0-x29 and pop the frame */
	ldp	x28, x29, [sp, #224]
	ldp	x26, x27, [sp, #208]
	ldp	x24, x25, [sp, #192]
	ldp	x22, x23, [sp, #176]
	ldp	x20, x21, [sp, #160]
	ldp	x18, x19, [sp, #144]
	ldp	x16, x17, [sp, #128]
	ldp	x14, x15, [sp, #112]
	ldp	x12, x13, [sp,  #96]
	ldp	x10, x11, [sp,  #80]
	ldp	 x8,  x9, [sp,  #64]
	ldp	 x6,  x7, [sp,  #48]
	ldp	 x4,  x5, [sp,  #32]
	ldp	 x2,  x3, [sp,  #16]
	ldp	 x0,  x1, [sp], #S_FRAME_SIZE

	eret
.endm
400
/*
 * The 16 default stubs in vector-table order: Synchronous/IRQ/FIQ/
 * SError for each of EL1t, EL1h, 64-bit EL0 and 32-bit EL0.
 * vector_stub_start/vector_stub_end bracket the region occupied by
 * these weak stubs.
 */
.globl vector_stub_start
vector_stub_start:

vector_stub	el1t_sync,     0
vector_stub	el1t_irq,      1
vector_stub	el1t_fiq,      2
vector_stub	el1t_error,    3

vector_stub	el1h_sync,     4
vector_stub	el1h_irq,      5
vector_stub	el1h_fiq,      6
vector_stub	el1h_error,    7

vector_stub	el0_sync_64,   8
vector_stub	el0_irq_64,    9
vector_stub	el0_fiq_64,   10
vector_stub	el0_error_64, 11

vector_stub	el0_sync_32,  12
vector_stub	el0_irq_32,   13
vector_stub	el0_fiq_32,   14
vector_stub	el0_error_32, 15

.globl vector_stub_end
vector_stub_end:
426
.section .text.ex

/* Emit one vector table slot: each slot is 128 (2^7) bytes, holding a branch */
.macro ventry, label
.align 7
	b	\label
.endm
433
434
/*
 * Declare as weak to allow external tests to redefine and override the
 * default vector table.
 */
.align 11				// VBAR_EL1 requires 2KB (2^11) alignment
.weak vector_table
vector_table:
	ventry	el1t_sync			// Synchronous EL1t
	ventry	el1t_irq			// IRQ EL1t
	ventry	el1t_fiq			// FIQ EL1t
	ventry	el1t_error			// Error EL1t

	ventry	el1h_sync			// Synchronous EL1h
	ventry	el1h_irq			// IRQ EL1h
	ventry	el1h_fiq			// FIQ EL1h
	ventry	el1h_error			// Error EL1h

	ventry	el0_sync_64			// Synchronous 64-bit EL0
	ventry	el0_irq_64			// IRQ 64-bit EL0
	ventry	el0_fiq_64			// FIQ 64-bit EL0
	ventry	el0_error_64			// Error 64-bit EL0

	ventry	el0_sync_32			// Synchronous 32-bit EL0
	ventry	el0_irq_32			// IRQ 32-bit EL0
	ventry	el0_fiq_32			// FIQ 32-bit EL0
	ventry	el0_error_32			// Error 32-bit EL0
461