xref: /kvm-unit-tests/arm/cstart.S (revision f583d9243296b7045a54f8980e3c00849e15ff8c)
1/*
2 * Boot entry point and assembler functions for armv7 tests.
3 *
4 * Copyright (C) 2014, Red Hat Inc, Andrew Jones <drjones@redhat.com>
5 *
6 * This work is licensed under the terms of the GNU LGPL, version 2.
7 */
8#define __ASSEMBLY__
9#include <auxinfo.h>
10#include <asm/assembler.h>
11#include <asm/thread_info.h>
12#include <asm/asm-offsets.h>
13#include <asm/pgtable-hwdef.h>
14#include <asm/ptrace.h>
15#include <asm/sysreg.h>
16
/*
 * Initial SP within a thread's stack region: leave room at the top for
 * 8 exception frames (one per mode frame handed out by set_mode_stack,
 * first one reserved for SVC) and keep the result 8-byte aligned.
 */
#define THREAD_START_SP ((THREAD_SIZE - S_FRAME_SIZE * 8) & ~7)
18
/*
 * zero_range - zero the region [\tmp1, \tmp2) in 8-byte strd stores.
 * \tmp1 is advanced to \tmp2; \tmp3 and \tmp4 are clobbered (set to 0).
 * Assumes the region size is a multiple of 8 and \tmp1 is suitably
 * aligned for strd — presumably guaranteed by the linker script symbols
 * this is used with (TODO confirm).
 */
.macro zero_range, tmp1, tmp2, tmp3, tmp4
	mov	\tmp3, #0
	mov	\tmp4, #0
9998:	cmp	\tmp1, \tmp2
	beq	9997f
	strd	\tmp3, \tmp4, [\tmp1], #8
	b	9998b
9997:
.endm
28
.arm

.section .init

/*
 * Primary CPU boot entry point.
 * Entered from the bootloader with r0 = 0, r1 = machine type,
 * r2 = dtb physical address (see Documentation/arm/Booting).
 * Never returns: runs main() and falls through exit() to halt.
 */
.globl start
start:
	/* zero BSS */
	ldr	r4, =bss
	ldr	r5, =ebss
	zero_range r4, r5, r6, r7

	/* zero stack */
	ldr	r5, =stacktop
	sub	r4, r5, #THREAD_SIZE
	zero_range r4, r5, r6, r7

	/*
	 * set stack, making room at top of stack for cpu0's
	 * exception stacks. Must start with stackptr, not
	 * stacktop, so the thread size masking (shifts) work.
	 */
	ldr	sp, =stackptr
	lsr	sp, #THREAD_SHIFT
	lsl	sp, #THREAD_SHIFT	@ sp = thread base
	add	sp, #THREAD_START_SP	@ skip the reserved exception frames

	/*
	 * save sp before pushing anything on the stack
	 * lr makes a good temp register right now
	 */
	mov	lr, sp

	/*
	 * bootloader params are in r0-r2
	 * See the kernel doc Documentation/arm/Booting
	 *   r0 = 0
	 *   r1 = machine type number
	 *   r2 = physical address of the dtb
	 *
	 * As we have no need for r0's nor r1's value, then
	 * put the dtb in r0. This allows setup to be consistent
	 * with arm64.
	 */
	mov	r0, r2
	push	{r0-r1}			@ preserve dtb (r0) across the init calls;
					@ r1 presumably included to keep the push
					@ doubleword-sized (TODO confirm)

	/* set up vector table, mode stacks, and enable the VFP */
	mov	r0, lr			@ lr is stack top (see above),
					@ which is the exception stacks base
	bl	exceptions_init
	bl	enable_vfp

	/* complete setup */
	pop	{r0-r1}			@ r0 = dtb address again
	bl	setup
	bl	get_mmu_off
	cmp	r0, #0
	bne	1f			@ skip VM setup when MMU-off was requested
	bl	setup_vm

1:
	/* run the test */
	ldr	r0, =__argc
	ldr	r0, [r0]		@ argc is stored by value, argv/environ by address
	ldr	r1, =__argv
	ldr	r2, =__environ
	bl	main
	bl	exit
	b	halt
98
99
/*
 * set_mode_stack - give a banked exception mode its own stack frame.
 * Advances \stack by one S_FRAME_SIZE, switches the CPU to \mode with
 * IRQ/FIQ masked, and loads that mode's banked sp with the new \stack.
 * Side effect: the CPU is left in \mode — the caller must restore the
 * original mode itself (exceptions_init does).
 */
.macro set_mode_stack mode, stack
	add	\stack, #S_FRAME_SIZE
	msr	cpsr_c, #(\mode | PSR_I_BIT | PSR_F_BIT)
	isb
	mov	sp, \stack
.endm
106
/*
 * exceptions_init - install the vector table and per-mode exception stacks.
 * In:       r0 = base of the exception stacks region (the caller's stack top)
 * Clobbers: r0 (advanced past four mode frames), r2
 * Returns in the caller's original mode (expected to be SVC, per the
 * restore comment below).
 */
exceptions_init:
	mrc	p15, 0, r2, c1, c0, 0	@ read SCTLR
	bic	r2, #CR_V		@ SCTLR.V := 0 (vectors at VBAR, not hivecs)
	mcr	p15, 0, r2, c1, c0, 0	@ write SCTLR
	ldr	r2, =vector_table
	mcr	p15, 0, r2, c12, c0, 0	@ write VBAR

	mrs	r2, cpsr		@ remember caller's mode/state

	/* first frame reserved for svc mode */
	set_mode_stack	UND_MODE, r0
	set_mode_stack	ABT_MODE, r0
	set_mode_stack	IRQ_MODE, r0
	set_mode_stack	FIQ_MODE, r0

	msr	cpsr_cxsf, r2		@ back to svc mode
	isb
	mov	pc, lr
125
/*
 * enable_vfp - enable Advanced SIMD/VFP for all privilege levels.
 * Writes CPACR wholesale (only the CP10/CP11 access fields set), then
 * sets FPEXC.EN. Clobbers r0.
 */
enable_vfp:
	/* Enable full access to CP10 and CP11: */
	mov	r0, #(3 << 22 | 3 << 20)
	mcr	p15, 0, r0, c1, c0, 2	@ write CPACR
	isb				@ ensure the access change is seen before vmsr
	/* Set the FPEXC.EN bit to enable Advanced SIMD and VFP: */
	mov	r0, #(1 << 30)
	vmsr	fpexc, r0
	mov	pc, lr
135
136.text
137
/*
 * get_mmu_off - query whether the test requested the MMU stay off.
 * Out: r0 = AUXINFO_MMU_OFF when the flag is set, 0 otherwise.
 * Clobbers only r0.
 */
.global get_mmu_off
get_mmu_off:
	ldr	r0, =auxinfo
	ldr	r0, [r0, #4]		@ auxinfo flags word; offset 4 presumably
					@ follows a 32-bit field — TODO confirm
					@ against auxinfo.h
	and	r0, #AUXINFO_MMU_OFF
	mov	pc, lr
144
/*
 * secondary_entry - entry point for secondary CPUs.
 * Enables the MMU (unless the test requested MMU off), sets this CPU's
 * stack and exception state, finishes initialization in C, then jumps
 * to the entry function secondary_cinit returns in r0.
 */
.global secondary_entry
secondary_entry:
	/* enable the MMU unless requested off */
	bl	get_mmu_off
	cmp	r0, #0
	bne	1f
	mov	r1, #0			@ r1:r0 form the 64-bit table address
					@ for asm_mmu_enable; hi word is 0
	ldr	r0, =mmu_idmap
	ldr	r0, [r0]
	bl	asm_mmu_enable

1:
	/*
	 * Set the stack, and set up vector table
	 * and exception stacks. Exception stacks
	 * space starts at stack top and grows up.
	 */
	ldr	r1, =secondary_data
	ldr	r0, [r1]		@ first word of secondary_data —
					@ presumably this CPU's stack top
					@ (TODO confirm against the C side)
	mov	sp, r0
	bl	exceptions_init
	bl	enable_vfp

	/* finish init in C code */
	bl	secondary_cinit

	/* r0 is now the entry function, run it */
	blx	r0
	b	do_idle
174
/*
 * halt - park this CPU forever: sleep, and re-enter the sleep on
 * every wakeup (wfi may return spuriously or on any interrupt).
 */
.globl halt
halt:
	wfi
	b	halt
179
/*
 * asm_mmu_enable
 *   Inputs:
 *     (r0 - lo, r1 - hi) is the base address of the translation table
 *   Outputs: none
 *   Clobbers: r2
 * Enables LPAE (TTBCR.EAE=1) translation with caches on.
 */
.equ	PRRR,	0xeeaa4400		@ MAIR0 (from Linux kernel)
.equ	NMRR,	0xff000004		@ MAIR1 (from Linux kernel)
.globl asm_mmu_enable
asm_mmu_enable:
	/* TLBIALL — the source register value is ignored by this op */
	mcr	p15, 0, r2, c8, c7, 0
	dsb	nsh			@ complete the invalidate before enabling

	/* TTBCR: long-descriptor format, WBWA inner/outer, shareable */
	ldr	r2, =(TTBCR_EAE | 				\
		      TTBCR_SH0_SHARED | 			\
		      TTBCR_IRGN0_WBWA | TTBCR_ORGN0_WBWA)
	mcr	p15, 0, r2, c2, c0, 2
	isb

	/* MAIR (PRRR/NMRR registers act as MAIR0/MAIR1 when TTBCR.EAE=1) */
	ldr	r2, =PRRR
	mcr	p15, 0, r2, c10, c2, 0
	ldr	r2, =NMRR
	mcr	p15, 0, r2, c10, c2, 1

	/* TTBR0 — 64-bit write of r1:r0 */
	mcrr	p15, 0, r0, r1, c2
	isb

	/* SCTLR: turn on D-cache, I-cache and the MMU */
	mrc	p15, 0, r2, c1, c0, 0
	orr	r2, #CR_C
	orr	r2, #CR_I
	orr	r2, #CR_M
	mcr	p15, 0, r2, c1, c0, 0
	isb

	mov     pc, lr
220
/*
 * asm_mmu_disable - turn the MMU off and flush the data cache.
 * Clears SCTLR.M, then clean+invalidates the D-cache over the whole
 * physical range [__phys_offset, __phys_end) so that subsequent
 * uncached accesses observe the data written while caches were on.
 * Clobbers r0-r3 (r2/r3 via dcache_by_line_op).
 */
.globl asm_mmu_disable
asm_mmu_disable:
	/* SCTLR */
	mrc	p15, 0, r0, c1, c0, 0
	bic	r0, #CR_M		@ MMU off
	mcr	p15, 0, r0, c1, c0, 0
	isb

	ldr	r0, =__phys_offset
	ldr	r0, [r0]		@ r0 = base of physical memory
	ldr	r1, =__phys_end
	ldr	r1, [r1]
	sub	r1, r1, r0		@ r1 = size of the range
	dcache_by_line_op dccimvac, sy, r0, r1, r2, r3

	mov     pc, lr
237
/*
 * Vector stubs
 * Simplified version of the Linux kernel implementation
 *   arch/arm/kernel/entry-armv.S
 *
 * Each mode has an S_FRAME_SIZE sized memory region,
 * and the mode's stack pointer has been initialized
 * to the base of that region in exceptions_init.
 *
 * \vec is the vector number handed to vector_common in r0.
 * \correction rewinds lr_<mode> to the preferred return address;
 * the amount depends on the exception type (see the ARM ARM).
 * \mode is documentation only — the stub executes in whatever
 * mode the exception banked us into, and that mode's sp is used.
 */
.macro vector_stub, name, vec, mode, correction=0
.align 5
vector_\name:
.if \correction
	sub	lr, lr, #\correction
.endif
	/*
	 * Save r0, r1, lr_<exception> (parent PC)
	 * and spsr_<exception> (parent CPSR)
	 */
	str	r0, [sp, #S_R0]
	str	r1, [sp, #S_R1]
	str	lr, [sp, #S_PC]
	mrs	r0, spsr
	str	r0, [sp, #S_PSR]

	/* Prepare for SVC32 mode. */
	mrs	r0, cpsr
	bic	r0, #MODE_MASK
	orr	r0, #SVC_MODE
	msr	spsr_cxsf, r0

	/* Branch to handler in SVC mode */
	mov	r0, #\vec
	mov	r1, sp			@ r1 = this mode's saved-state frame
	ldr	lr, =vector_common
	movs	pc, lr			@ exception return: copies spsr (set to
					@ SVC above) into cpsr while branching
.endm
275
/*
 * One stub per exception. The last argument is the lr correction:
 * pabt/irq/fiq return to lr-4, dabt to lr-8 (per the ARM ARM);
 * rst/und need none. svc is handled separately in vector_svc below.
 */
vector_stub 	rst,	0, UND_MODE
vector_stub	und,	1, UND_MODE
vector_stub	pabt,	3, ABT_MODE, 4
vector_stub	dabt,	4, ABT_MODE, 8
vector_stub	irq,	6, IRQ_MODE, 4
vector_stub	fiq,	7, FIQ_MODE, 4
282
.align 5
/*
 * vector_svc - svc exceptions stay in SVC mode, so there is no banked
 * sp pointing at a frame region. Instead, locate the thread's reserved
 * first exception frame by masking sp down to the thread base and
 * adding THREAD_START_SP (mirrors the sp setup in start).
 */
vector_svc:
	/*
	 * Save r0, r1, lr_<exception> (parent PC)
	 * and spsr_<exception> (parent CPSR)
	 */
	push	{ r1 }
	lsr	r1, sp, #THREAD_SHIFT
	lsl	r1, #THREAD_SHIFT	@ r1 = thread base
	add	r1, #THREAD_START_SP	@ r1 = reserved svc frame
	str	r0, [r1, #S_R0]
	pop	{ r0 }			@ r0 = interrupted context's r1
	str	r0, [r1, #S_R1]
	str	lr, [r1, #S_PC]
	mrs	r0, spsr
	str	r0, [r1, #S_PSR]

	/*
	 * Branch to handler, still in SVC mode.
	 * r0 := 2 is the svc vector number.
	 */
	mov	r0, #2
	ldr	lr, =vector_common
	mov	pc, lr
307
/*
 * vector_common - common exception path, entered in SVC mode.
 * In: r0 = vector number, r1 = saved-state frame written by the stub
 *     (interrupted r0, r1, PC and PSR).
 * Builds a full pt_regs on the SVC stack, calls
 * do_handle_exception(vector, regs), then restores the interrupted
 * context with an exception return.
 */
vector_common:
	/* make room for pt_regs */
	sub	sp, #S_FRAME_SIZE
	tst	sp, #4			@ check stack alignment; NOTE: the
					@ NE condition set here is still
					@ consumed by the addne below
	subne	sp, #4

	/* store registers r0-r12 */
	stmia	sp, { r0-r12 }		@ stored wrong r0 and r1, fix later

	/* get registers saved in the stub (no flag-setting instructions here) */
	ldr	r2, [r1, #S_R0]		@ r0
	ldr	r3, [r1, #S_R1]		@ r1
	ldr	r4, [r1, #S_PC] 	@ lr_<exception> (parent PC)
	ldr	r5, [r1, #S_PSR]	@ spsr_<exception> (parent CPSR)

	/* fix r0 and r1 */
	str	r2, [sp, #S_R0]
	str	r3, [sp, #S_R1]

	/* store sp_svc, if we were in usr mode we'll fix this later */
	add	r6, sp, #S_FRAME_SIZE
	addne	r6, #4			@ stack wasn't aligned (tst above)
	str	r6, [sp, #S_SP]

	str	lr, [sp, #S_LR]		@ store lr_svc, fix later for usr mode
	str	r4, [sp, #S_PC]		@ store lr_<exception>
	str	r5, [sp, #S_PSR]	@ store spsr_<exception>

	/* set ORIG_r0 */
	mov	r2, #-1
	str	r2, [sp, #S_OLD_R0]

	/* if we were in usr mode then we need sp_usr and lr_usr instead */
	and	r1, r5, #MODE_MASK
	cmp	r1, #USR_MODE
	bne	1f
	add	r1, sp, #S_SP
	stmia	r1, { sp,lr }^		@ '^': store the user-mode banked sp/lr

	/* Call the handler. r0 is the vector number, r1 := pt_regs */
1:	mov	r1, sp
	bl	do_handle_exception

	/*
	 * make sure we restore sp_svc on mode change. No need to
	 * worry about lr_svc though, as that gets clobbered on
	 * exception entry anyway.
	 * (r5/r6 survive the call above: r4-r8 are callee-saved
	 * under the AAPCS.)
	 */
	str	r6, [sp, #S_SP]

	/* return from exception */
	msr	spsr_cxsf, r5		@ parent CPSR back into spsr
	ldmia	sp, { r0-pc }^		@ '^' with pc in the list: restore all
					@ regs and copy spsr into cpsr
361
.align 5
/* Address exception: never expected — park here forever if it fires. */
vector_addrexcptn:
1:	b	1b
365
.section .text.ex
.align 5
/*
 * Exception vector table (installed into VBAR by exceptions_init).
 * One 4-byte branch per ARMv7 vector slot, starting at offset 0x00.
 */
vector_table:
	b	vector_rst		@ 0x00 reset
	b	vector_und		@ 0x04 undefined instruction
	b	vector_svc		@ 0x08 supervisor call
	b	vector_pabt		@ 0x0c prefetch abort
	b	vector_dabt		@ 0x10 data abort
	b	vector_addrexcptn	@ 0x14 should never happen
	b	vector_irq		@ 0x18 irq
	b	vector_fiq		@ 0x1c fiq
377