xref: /kvm-unit-tests/arm/cstart64.S (revision 4da0bc9a24a78cf61f046898b24c45577b645492)
1/*
2 * Boot entry point and assembler functions for aarch64 tests.
3 *
4 * Copyright (C) 2017, Red Hat Inc, Andrew Jones <drjones@redhat.com>
5 *
6 * This work is licensed under the terms of the GNU GPL, version 2.
7 */
8#define __ASSEMBLY__
9#include <asm/asm-offsets.h>
10#include <asm/ptrace.h>
11#include <asm/processor.h>
12#include <asm/page.h>
13#include <asm/pgtable-hwdef.h>
14
15.section .init
16
.globl start
start:
	/*
	 * Boot-CPU entry point, entered at EL1 from the bootloader.
	 * Bootloader params are in x0-x3:
	 *   x0 = physical address of the dtb, x1-x3 are reserved.
	 * See the kernel doc Documentation/arm64/booting.txt
	 */

	/* Select SP_ELx (SPSel = 1) before touching sp. */
	mov	x4, #1
	msr	spsel, x4
	isb
	adr     x4, stackptr
	mov	sp, x4
	/* Preserve the bootloader registers (dtb address) across the calls below. */
	stp	x0, x1, [sp, #-16]!

	/* Enable FP/ASIMD: CPACR_EL1.FPEN = 0b11 (no trapping) */
	mov	x0, #(3 << 20)
	msr	cpacr_el1, x0

	/* set up exception handling (install vector_table into VBAR_EL1) */
	bl	exceptions_init

	/* complete setup: pass the saved dtb address (x0) on to C code */
	ldp	x0, x1, [sp], #16
	bl	setup

	/* run the test: main(__argc, __argv, __environ) */
	adr	x0, __argc
	ldr	x0, [x0]		// __argc is a value, not a pointer: load it
	adr	x1, __argv
	adr	x2, __environ
	bl	main
	bl	exit			// exit() with main's return value (x0)
	b	halt			// exit() should not return; park the CPU if it does
50
/*
 * exceptions_init: point VBAR_EL1 at our exception vector table.
 * Clobbers x0.  The isb ensures the new vector base takes effect
 * before the caller can fault.
 */
exceptions_init:
	adr	x0, vector_table
	msr	vbar_el1, x0
	isb
	ret
56
57.text
58
/*
 * secondary_entry: entry point for secondary CPUs.
 * Mirrors the boot-CPU setup (FP/ASIMD, vectors), then enables the
 * MMU with the shared identity map before loading the stack pointer
 * published in secondary_data, and finishes initialization in C.
 */
.globl secondary_entry
secondary_entry:
	/* Enable FP/ASIMD: CPACR_EL1.FPEN = 0b11 (no trapping) */
	mov	x0, #(3 << 20)
	msr	cpacr_el1, x0

	/* set up exception handling (install vector_table into VBAR_EL1) */
	bl	exceptions_init

	/* enable the MMU: mmu_idmap holds the translation table base address */
	adr	x0, mmu_idmap
	ldr	x0, [x0]
	bl	asm_mmu_enable

	/*
	 * set the stack: the first field of secondary_data is the stack
	 * pointer prepared for this CPU (must be read after the MMU is on)
	 */
	adr	x1, secondary_data
	ldr	x0, [x1]
	mov	sp, x0

	/* finish init in C code */
	bl	secondary_cinit

	/* x0 is now the entry function, run it */
	blr	x0
	b	do_idle			// if the entry function returns, idle this CPU
84
/*
 * halt: park the calling CPU forever.
 * Waits for an interrupt and loops; never returns.
 */
.globl halt
halt:
	wfi
	b	halt
89
/*
 * asm_mmu_enable
 *   Inputs:
 *     x0 is the base address of the translation table
 *   Outputs: none
 *   Clobbers: x1, x2
 *
 * Adapted from
 *   arch/arm64/kernel/head.S
 *   arch/arm64/mm/proc.S
 */

/*
 * Memory region attributes for LPAE:
 *
 *   n = AttrIndx[2:0]
 *                      n       MAIR
 *   DEVICE_nGnRnE      000     00000000
 *   DEVICE_nGnRE       001     00000100
 *   DEVICE_GRE         010     00001100
 *   NORMAL_NC          011     01000100
 *   NORMAL             100     11111111
 *
 * MAIR(attr, mt) places attribute byte 'attr' into the MAIR_EL1 field
 * selected by memory-type index 'mt' (one byte per index).
 */
#define MAIR(attr, mt) ((attr) << ((mt) * 8))

.globl asm_mmu_enable
asm_mmu_enable:
	ic	iallu			// I+BTB cache invalidate
	tlbi	vmalle1is		// invalidate I + D TLBs
	dsb	ish			// complete the invalidations before proceeding

	/*
	 * TCR: VA_BITS of virtual address space, 64K granule for both
	 * TTBR0 and TTBR1, write-back write-allocate inner/outer
	 * cacheable and inner-shareable table walks.
	 */
	ldr	x1, =TCR_TxSZ(VA_BITS) |		\
		     TCR_TG0_64K | TCR_TG1_64K |	\
		     TCR_IRGN_WBWA | TCR_ORGN_WBWA |	\
		     TCR_SHARED
	mrs	x2, id_aa64mmfr0_el1
	/* Copy ID_AA64MMFR0_EL1.PARange into TCR_EL1.IPS (bits [34:32]). */
	bfi	x1, x2, #32, #3
	msr	tcr_el1, x1

	/* MAIR: attribute bytes must match the MT_* indices used above */
	ldr	x1, =MAIR(0x00, MT_DEVICE_nGnRnE) |	\
		     MAIR(0x04, MT_DEVICE_nGnRE) |	\
		     MAIR(0x0c, MT_DEVICE_GRE) |	\
		     MAIR(0x44, MT_NORMAL_NC) |		\
		     MAIR(0xff, MT_NORMAL)
	msr	mair_el1, x1

	/* TTBR0: install the caller's translation table */
	msr	ttbr0_el1, x0
	isb

	/* SCTLR: enable the D-cache (C), I-cache (I) and the MMU (M) */
	mrs	x1, sctlr_el1
	orr	x1, x1, SCTLR_EL1_C
	orr	x1, x1, SCTLR_EL1_I
	orr	x1, x1, SCTLR_EL1_M
	msr	sctlr_el1, x1
	isb				// ensure the MMU is on before returning

	ret
150
/*
 * asm_mmu_disable: turn the MMU off by clearing SCTLR_EL1.M.
 * Inputs: none.  Outputs: none.  Clobbers x0.
 * Cache enable bits (C, I) are left as they are.
 */
.globl asm_mmu_disable
asm_mmu_disable:
	mrs	x0, sctlr_el1
	bic	x0, x0, SCTLR_EL1_M
	msr	sctlr_el1, x0
	isb				// ensure the MMU is off before returning
	ret
158
159/*
160 * Vectors
161 * Adapted from arch/arm64/kernel/entry.S
162 */
/*
 * vector_stub: emit one exception-handler stub named \name for vector
 * number \vec (0-15, matching the order of entries in vector_table).
 *
 * The stub saves a full pt_regs frame on the current stack (offsets
 * S_* come from asm-offsets.h), calls
 *   do_handle_exception(\vec, regs, esr_el1)
 * and then restores the frame and erets.  Vectors >= 8 are EL0
 * exceptions, so for those the saved/restored stack pointer is
 * sp_el0 rather than the handler's own sp.
 */
.macro vector_stub, name, vec
\name:
	/* Push a full frame; x0/x1 go at offset 0 with the pre-decrement. */
	stp	 x0,  x1, [sp, #-S_FRAME_SIZE]!
	stp	 x2,  x3, [sp,  #16]
	stp	 x4,  x5, [sp,  #32]
	stp	 x6,  x7, [sp,  #48]
	stp	 x8,  x9, [sp,  #64]
	stp	x10, x11, [sp,  #80]
	stp	x12, x13, [sp,  #96]
	stp	x14, x15, [sp, #112]
	stp	x16, x17, [sp, #128]
	stp	x18, x19, [sp, #144]
	stp	x20, x21, [sp, #160]
	stp	x22, x23, [sp, #176]
	stp	x24, x25, [sp, #192]
	stp	x26, x27, [sp, #208]
	stp	x28, x29, [sp, #224]

	str	x30, [sp, #S_LR]

	/* Record the interrupted context's stack pointer. */
	.if \vec >= 8
	mrs	x1, sp_el0		// EL0 exception: EL0 stack
	.else
	add	x1, sp, #S_FRAME_SIZE	// EL1 exception: sp before the frame push
	.endif
	str	x1, [sp, #S_SP]

	/* Save return address and pstate as the S_PC pair. */
	mrs	x1, elr_el1
	mrs	x2, spsr_el1
	stp	x1, x2, [sp, #S_PC]

	/* do_handle_exception(vec, regs, esr) */
	mov	x0, \vec
	mov	x1, sp
	mrs	x2, esr_el1
	bl	do_handle_exception

	/* Restore pstate/return address (the handler may have changed them). */
	ldp	x1, x2, [sp, #S_PC]
	msr	spsr_el1, x2
	msr	elr_el1, x1

	.if \vec >= 8
	ldr	x1, [sp, #S_SP]		// restore the (possibly updated) EL0 sp
	msr	sp_el0, x1
	.endif

	ldr	x30, [sp, #S_LR]

	/* Pop the frame in reverse order; x0/x1 last, with post-increment. */
	ldp	x28, x29, [sp, #224]
	ldp	x26, x27, [sp, #208]
	ldp	x24, x25, [sp, #192]
	ldp	x22, x23, [sp, #176]
	ldp	x20, x21, [sp, #160]
	ldp	x18, x19, [sp, #144]
	ldp	x16, x17, [sp, #128]
	ldp	x14, x15, [sp, #112]
	ldp	x12, x13, [sp,  #96]
	ldp	x10, x11, [sp,  #80]
	ldp	 x8,  x9, [sp,  #64]
	ldp	 x6,  x7, [sp,  #48]
	ldp	 x4,  x5, [sp,  #32]
	ldp	 x2,  x3, [sp,  #16]
	ldp	 x0,  x1, [sp], #S_FRAME_SIZE

	eret
.endm
228
/*
 * Instantiate the 16 stubs; the vector numbers follow the
 * architectural vector table order used in vector_table below.
 */

/* Current EL with SP_EL0 (EL1t) */
vector_stub	el1t_sync,     0
vector_stub	el1t_irq,      1
vector_stub	el1t_fiq,      2
vector_stub	el1t_error,    3

/* Current EL with SP_ELx (EL1h) */
vector_stub	el1h_sync,     4
vector_stub	el1h_irq,      5
vector_stub	el1h_fiq,      6
vector_stub	el1h_error,    7

/* Lower EL using AArch64 */
vector_stub	el0_sync_64,   8
vector_stub	el0_irq_64,    9
vector_stub	el0_fiq_64,   10
vector_stub	el0_error_64, 11

/* Lower EL using AArch32 */
vector_stub	el0_sync_32,  12
vector_stub	el0_irq_32,   13
vector_stub	el0_fiq_32,   14
vector_stub	el0_error_32, 15
248
.section .text.ex

/*
 * ventry: one vector table entry — a branch to \label, padded to the
 * architectural 128-byte (2^7) slot size with .align 7.
 */
.macro ventry, label
.align 7
	b	\label
.endm
255
/*
 * The exception vector table, installed into VBAR_EL1 by
 * exceptions_init.  VBAR requires 2KB (2^11) alignment; the four
 * groups of four entries follow the architectural layout.
 */
.align 11
vector_table:
	ventry	el1t_sync			// Synchronous EL1t
	ventry	el1t_irq			// IRQ EL1t
	ventry	el1t_fiq			// FIQ EL1t
	ventry	el1t_error			// Error EL1t

	ventry	el1h_sync			// Synchronous EL1h
	ventry	el1h_irq			// IRQ EL1h
	ventry	el1h_fiq			// FIQ EL1h
	ventry	el1h_error			// Error EL1h

	ventry	el0_sync_64			// Synchronous 64-bit EL0
	ventry	el0_irq_64			// IRQ 64-bit EL0
	ventry	el0_fiq_64			// FIQ 64-bit EL0
	ventry	el0_error_64			// Error 64-bit EL0

	ventry	el0_sync_32			// Synchronous 32-bit EL0
	ventry	el0_irq_32			// IRQ 32-bit EL0
	ventry	el0_fiq_32			// FIQ 32-bit EL0
	ventry	el0_error_32			// Error 32-bit EL0
277