/*
 * xref: /kvm-unit-tests/arm/cstart64.S (revision bb4c17e3783ce4578065f8ea55b6227dc0f53ad8)
 *
 * Boot entry point and assembler functions for aarch64 tests.
 *
 * Copyright (C) 2017, Red Hat Inc, Andrew Jones <drjones@redhat.com>
 *
 * This work is licensed under the terms of the GNU GPL, version 2.
 */
8#define __ASSEMBLY__
9#include <auxinfo.h>
10#include <asm/asm-offsets.h>
11#include <asm/assembler.h>
12#include <asm/ptrace.h>
13#include <asm/page.h>
14#include <asm/pgtable-hwdef.h>
15#include <asm/thread_info.h>
16#include <asm/sysreg.h>
17
/*
 * zero_range - zero the range [\tmp1, \tmp2) with pair stores of xzr.
 * \tmp1 is advanced to \tmp2 on exit; \tmp2 is preserved.
 * The range must be 16-byte aligned and a multiple of 16 bytes long,
 * otherwise the equality check never fires and the loop overruns.
 * Numeric local labels 9998/9997 are used to avoid clashing with
 * labels at macro expansion sites.
 */
.macro zero_range, tmp1, tmp2
9998:	cmp	\tmp1, \tmp2
	b.eq	9997f
	stp	xzr, xzr, [\tmp1], #16
	b	9998b
9997:
.endm
25
.section .init

/*
 * Bootloader params are in x0-x3. See kernel doc
 * Documentation/arm64/booting.txt. x0 holds the address of the
 * device tree blob and is passed through untouched to setup().
 */
.globl start
start:
	/* get our base address */
	adrp	x4, start
	add     x4, x4, :lo12:start

	/*
	 * Update all R_AARCH64_RELATIVE relocations using the table
	 * of Elf64_Rela entries between reloc_start/end. The build
	 * will not emit other relocation types.
	 *
	 * struct Elf64_Rela {
	 * 	uint64_t r_offset;
	 * 	uint64_t r_info;
	 * 	int64_t  r_addend;
	 * }
	 */
	adrp	x5, reloc_start
	add     x5, x5, :lo12:reloc_start
	adrp	x6, reloc_end
	add     x6, x6, :lo12:reloc_end
1:
	cmp	x5, x6
	b.hs	1f				// done once past the last entry
	ldr	x7, [x5]			// r_offset
	ldr	x8, [x5, #16]			// r_addend (r_info is ignored, see above)
	add	x8, x8, x4			// val = base + r_addend
	str	x8, [x4, x7]			// base[r_offset] = val
	add	x5, x5, #24			// sizeof(Elf64_Rela)
	b	1b

1:
	/* zero BSS */
	adrp	x4, bss
	add	x4, x4, :lo12:bss
	adrp    x5, ebss
	add     x5, x5, :lo12:ebss
	zero_range x4, x5

	/* zero and set up stack (one THREAD_SIZE region below stacktop) */
	adrp    x5, stacktop
	add     x5, x5, :lo12:stacktop
	sub	x4, x5, #THREAD_SIZE
	zero_range x4, x5

	/* set SCTLR_EL1 to a known value */
	ldr	x4, =INIT_SCTLR_EL1_MMU_OFF
	msr	sctlr_el1, x4
	isb

	/* select SP_ELx (spsel = 1), then point sp at stackptr */
	mov	x4, #1
	msr	spsel, x4
	adrp    x4, stackptr
	add     sp, x4, :lo12:stackptr

	/* enable FP/ASIMD (clear the CPACR_EL1.FPEN trap bits) */
	mov	x4, #(3 << 20)
	msr	cpacr_el1, x4

	/* set up exception handling */
	bl	exceptions_init

	/* complete setup */
	adrp	x1, stacktop
	add	x1, x1, :lo12:stacktop		// x1 is the base of free memory
	bl	setup				// x0 is the addr of the dtb

	/* run the test: main(__argc, __argv, __environ) then exit() */
	adrp	x0, __argc
	ldr	w0, [x0, :lo12:__argc]
	adrp	x1, __argv
	add	x1, x1, :lo12:__argv
	adrp	x2, __environ
	add	x2, x2, :lo12:__environ
	bl	main
	bl	exit
	b	halt				// park the CPU if exit returns
109
.text

/*
 * psci_invoke_hvc / psci_invoke_smc
 *
 * Call into the PSCI implementation through the HVC or SMC conduit.
 * Arguments are already in the registers the calling convention
 * requires, so each stub is a single trapping instruction.
 *
 * Inputs:
 *   w0 -- function_id
 *   x1 -- arg0
 *   x2 -- arg1
 *   x3 -- arg2
 *
 * Outputs:
 *   x0 -- return code
 */
.globl psci_invoke_hvc
psci_invoke_hvc:
	hvc	#0
	ret

.globl psci_invoke_smc
psci_invoke_smc:
	smc	#0
	ret
133
/*
 * get_mmu_off - return in x0 the AUXINFO_MMU_OFF bit of the auxinfo
 * flags; nonzero means the test was asked to run with the MMU off.
 * Clobbers x0 only.
 */
get_mmu_off:
	adrp	x0, auxinfo
	ldr	x0, [x0, :lo12:auxinfo + 8]	// second 64-bit field of auxinfo (the flags)
	and	x0, x0, #AUXINFO_MMU_OFF
	ret
139
/*
 * secondary_entry - entry point for secondary CPUs.
 * Brings the CPU to the same state as the boot CPU (FP, vectors,
 * optionally MMU), switches to the stack published in secondary_data
 * (presumably prepared by the CPU that woke this one — see
 * secondary_cinit's caller), then jumps to the entry function that
 * secondary_cinit returns in x0.
 */
.globl secondary_entry
secondary_entry:
	/* Enable FP/ASIMD (clear the CPACR_EL1.FPEN trap bits) */
	mov	x0, #(3 << 20)
	msr	cpacr_el1, x0

	/* set up exception handling */
	bl	exceptions_init

	/* enable the MMU unless requested off */
	bl	get_mmu_off
	cbnz	x0, 1f
	adrp	x0, mmu_idmap
	ldr	x0, [x0, :lo12:mmu_idmap]	// translation table base set up elsewhere
	bl	asm_mmu_enable

1:
	/* set the stack */
	adrp	x0, secondary_data
	ldr	x0, [x0, :lo12:secondary_data]
	mov	sp, x0

	/* finish init in C code */
	bl	secondary_cinit

	/* x0 is now the entry function, run it */
	blr	x0
	b	do_idle				// idle if the entry function returns
168
/*
 * halt - park the CPU in a low-power wfi loop. Never returns;
 * any wakeup just re-enters wfi.
 */
.globl halt
halt:
1:	wfi
	b	1b
173
/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */
184
/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 *   NORMAL_WT          101     10111011
 *   DEVICE_nGRE        110     00001000
 */
#define MAIR(attr, mt) ((attr) << ((mt) * 8))

/* Translation granule selection, applied to both TTBR0 and TTBR1. */
#if PAGE_SIZE == SZ_64K
#define TCR_TG_FLAGS	TCR_TG0_64K | TCR_TG1_64K
#elif PAGE_SIZE == SZ_16K
#define TCR_TG_FLAGS	TCR_TG0_16K | TCR_TG1_16K
#elif PAGE_SIZE == SZ_4K
#define TCR_TG_FLAGS	TCR_TG0_4K | TCR_TG1_4K
#else
/* Fail the build instead of leaving TCR_TG_FLAGS silently undefined. */
#error "Unsupported PAGE_SIZE"
#endif
207
.globl asm_mmu_enable
asm_mmu_enable:
	tlbi	vmalle1			// invalidate I + D TLBs
	dsb	nsh

	/* TCR: VA size, granule, WB/WA cacheable shareable walks, TTBR1 walks off */
	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
		     TCR_TG_FLAGS  |			\
		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
		     TCR_SHARED |			\
		     TCR_EPD1
	mrs	x2, id_aa64mmfr0_el1
	bfi	x1, x2, #32, #3		// TCR_EL1.IPS = ID_AA64MMFR0_EL1.PARange
	msr	tcr_el1, x1

	/* MAIR: one attribute byte per MT_* index (encodings in the table above) */
	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
		     MAIR(0x0c, MT_DEVICE_GRE) |	\
		     MAIR(0x44, MT_NORMAL_NC) |		\
		     MAIR(0xff, MT_NORMAL) |	        \
		     MAIR(0xbb, MT_NORMAL_WT) |         \
		     MAIR(0x08, MT_DEVICE_nGRE)
	msr	mair_el1, x1

	/* TTBR0: the table whose base address was passed in x0 */
	msr	ttbr0_el1, x0
	isb

	/* SCTLR: turn on the MMU plus D-side and I-side caching */
	mrs	x1, sctlr_el1
	orr	x1, x1, SCTLR_EL1_C
	orr	x1, x1, SCTLR_EL1_I
	orr	x1, x1, SCTLR_EL1_M
	msr	sctlr_el1, x1
	isb				// ensure translation is live before returning

	ret
246
/*
 * asm_mmu_disable - turn the MMU off, then clean+invalidate all of
 * memory ([__phys_offset, __phys_end)) so that subsequent uncached
 * accesses observe everything written while caches were on.
 * Clobbers x0-x3.
 */
.globl asm_mmu_disable
asm_mmu_disable:
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_EL1_M		// clear only the MMU enable bit
	msr	sctlr_el1, x0
	isb

	/* Clean + invalidate the entire memory */
	adrp	x0, __phys_offset
	ldr	x0, [x0, :lo12:__phys_offset]
	adrp	x1, __phys_end
	ldr	x1, [x1, :lo12:__phys_end]
	sub	x1, x1, x0			// x1 = size in bytes
	dcache_by_line_op civac, sy, x0, x1, x2, x3

	ret
263
/*
 * Vectors
 */

/*
 * exceptions_init - install vector_table as the EL1 vector base.
 * The isb makes the new VBAR_EL1 visible before any subsequent
 * exception can be taken. Clobbers x4.
 */
exceptions_init:
	adrp	x4, vector_table
	add	x4, x4, :lo12:vector_table
	msr	vbar_el1, x4
	isb
	ret
274
/*
 * Vector stubs
 * Adapted from arch/arm64/kernel/entry.S
 * Declare as weak to allow external tests to redefine and override a
 * vector_stub.
 *
 * Each stub saves all general-purpose registers into a register frame
 * on the handler stack (the S_* offsets come from asm-offsets,
 * presumably matching struct pt_regs — see asm/asm-offsets.h), records
 * the interrupted SP, PC (ELR_EL1) and PSTATE (SPSR_EL1), and calls
 * do_handle_exception(\vec, regs, esr). On return it writes back the
 * possibly-modified PC/PSTATE (and SP_EL0 for EL0 vectors), restores
 * all registers and erets to the interrupted context.
 */
.macro vector_stub, name, vec
.weak \name
\name:
	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!	// allocate the frame, save x0/x1
	stp	 x2,  x3, [sp,  #16]
	stp	 x4,  x5, [sp,  #32]
	stp	 x6,  x7, [sp,  #48]
	stp	 x8,  x9, [sp,  #64]
	stp	x10, x11, [sp,  #80]
	stp	x12, x13, [sp,  #96]
	stp	x14, x15, [sp, #112]
	stp	x16, x17, [sp, #128]
	stp	x18, x19, [sp, #144]
	stp	x20, x21, [sp, #160]
	stp	x22, x23, [sp, #176]
	stp	x24, x25, [sp, #192]
	stp	x26, x27, [sp, #208]
	stp	x28, x29, [sp, #224]

	str	x30, [sp, #S_LR]

	/*
	 * Vectors 8-15 are taken from EL0, whose stack is SP_EL0; for
	 * EL1 vectors the interrupted SP is just the handler sp before
	 * the frame was pushed.
	 */
	.if \vec >= 8
	mrs	x1, sp_el0
	.else
	add	x1, sp, #S_FRAME_SIZE
	.endif
	str	x1, [sp, #S_SP]

	/* record return address and pstate of the interrupted context */
	mrs	x1, elr_el1
	mrs	x2, spsr_el1
	stp	x1, x2, [sp, #S_PC]

	mov	x0, \vec
	mov	x1, sp
	mrs	x2, esr_el1
	bl	do_handle_exception

	/* the handler may have edited the saved PC/PSTATE; write them back */
	ldp	x1, x2, [sp, #S_PC]
	msr	spsr_el1, x2
	msr	elr_el1, x1

	.if \vec >= 8
	ldr	x1, [sp, #S_SP]
	msr	sp_el0, x1
	.endif

	ldr	x30, [sp, #S_LR]

	ldp	x28, x29, [sp, #224]
	ldp	x26, x27, [sp, #208]
	ldp	x24, x25, [sp, #192]
	ldp	x22, x23, [sp, #176]
	ldp	x20, x21, [sp, #160]
	ldp	x18, x19, [sp, #144]
	ldp	x16, x17, [sp, #128]
	ldp	x14, x15, [sp, #112]
	ldp	x12, x13, [sp,  #96]
	ldp	x10, x11, [sp,  #80]
	ldp	 x8,  x9, [sp,  #64]
	ldp	 x6,  x7, [sp,  #48]
	ldp	 x4,  x5, [sp,  #32]
	ldp	 x2,  x3, [sp,  #16]
	ldp	 x0,  x1, [sp], #S_FRAME_SIZE	// restore x0/x1, free the frame

	eret
.endm
347
/* EL1 vectors taken while on SP_EL0 (EL1t) */
vector_stub	el1t_sync,     0
vector_stub	el1t_irq,      1
vector_stub	el1t_fiq,      2
vector_stub	el1t_error,    3

/* EL1 vectors taken while on SP_EL1 (EL1h) */
vector_stub	el1h_sync,     4
vector_stub	el1h_irq,      5
vector_stub	el1h_fiq,      6
vector_stub	el1h_error,    7

/* vectors taken from 64-bit EL0 */
vector_stub	el0_sync_64,   8
vector_stub	el0_irq_64,    9
vector_stub	el0_fiq_64,   10
vector_stub	el0_error_64, 11

/* vectors taken from 32-bit EL0 */
vector_stub	el0_sync_32,  12
vector_stub	el0_irq_32,   13
vector_stub	el0_fiq_32,   14
vector_stub	el0_error_32, 15
367
.section .text.ex

/*
 * ventry - emit one vector table slot: 128 bytes (.align 7)
 * containing a single branch to the handler stub.
 */
.macro ventry, label
.align 7
	b	\label
.endm
374
375
/*
 * Declare as weak to allow external tests to redefine and override the
 * default vector table.
 *
 * 16 slots of 128 bytes each, four per source state (EL1t, EL1h,
 * 64-bit EL0, 32-bit EL0); the table base is 2KB aligned (.align 11)
 * as required for VBAR_EL1.
 */
.align 11
.weak vector_table
vector_table:
	ventry	el1t_sync			// Synchronous EL1t
	ventry	el1t_irq			// IRQ EL1t
	ventry	el1t_fiq			// FIQ EL1t
	ventry	el1t_error			// Error EL1t

	ventry	el1h_sync			// Synchronous EL1h
	ventry	el1h_irq			// IRQ EL1h
	ventry	el1h_fiq			// FIQ EL1h
	ventry	el1h_error			// Error EL1h

	ventry	el0_sync_64			// Synchronous 64-bit EL0
	ventry	el0_irq_64			// IRQ 64-bit EL0
	ventry	el0_fiq_64			// FIQ 64-bit EL0
	ventry	el0_error_64			// Error 64-bit EL0

	ventry	el0_sync_32			// Synchronous 32-bit EL0
	ventry	el0_irq_32			// IRQ 32-bit EL0
	ventry	el0_fiq_32			// FIQ 32-bit EL0
	ventry	el0_error_32			// Error 32-bit EL0
402