/*-
 * SPDX-License-Identifier: BSD-2-Clause
 *
 * Copyright (C) 2017 Alexandru Elisei <alexandru.elisei@gmail.com>
 * Copyright (c) 2021 Andrew Turner
 *
 * This software was developed by Alexandru Elisei under sponsorship
 * from the FreeBSD Foundation.
 *
 * Redistribution and use in source and binary forms, with or without
 * modification, are permitted provided that the following conditions
 * are met:
 * 1. Redistributions of source code must retain the above copyright
 *    notice, this list of conditions and the following disclaimer.
 * 2. Redistributions in binary form must reproduce the above copyright
 *    notice, this list of conditions and the following disclaimer in the
 *    documentation and/or other materials provided with the distribution.
 *
 * THIS SOFTWARE IS PROVIDED BY AUTHOR AND CONTRIBUTORS ``AS IS'' AND
 * ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
 * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
 * ARE DISCLAIMED.  IN NO EVENT SHALL AUTHOR OR CONTRIBUTORS BE LIABLE
 * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
 * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
 * OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
 * HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
 * LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
 * OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
 * SUCH DAMAGE.
 */


#include <machine/asm.h>
#include <machine/hypervisor.h>

#include "assym.inc"
#include "hyp.h"

.macro	save_host_registers
	/* TODO: Only store callee saved registers */
	sub	sp, sp, #(32 * 8)
	str	x30,      [sp, #(30 * 8)]
	stp	x28, x29, [sp, #(28 * 8)]
	stp	x26, x27, [sp, #(26 * 8)]
	stp	x24, x25, [sp, #(24 * 8)]
	stp	x22, x23, [sp, #(22 * 8)]
	stp	x20, x21, [sp, #(20 * 8)]
	stp	x18, x19, [sp, #(18 * 8)]
	stp	x16, x17, [sp, #(16 * 8)]
	stp	x14, x15, [sp, #(14 * 8)]
	stp	x12, x13, [sp, #(12 * 8)]
	stp	x10, x11, [sp, #(10 * 8)]
	stp	x8,  x9,  [sp, #(8  * 8)]
	stp	x6,  x7,  [sp, #(6  * 8)]
	stp	x4,  x5,  [sp, #(4  * 8)]
	stp	x2,  x3,  [sp, #(2  * 8)]
	stp	x0,  x1,  [sp, #(0  * 8)]
.endm

.macro	restore_host_registers
	/* TODO: Only restore callee saved registers */
	ldp	x0,  x1,  [sp, #(0  * 8)]
	ldp	x2,  x3,  [sp, #(2  * 8)]
	ldp	x4,  x5,  [sp, #(4  * 8)]
	ldp	x6,  x7,  [sp, #(6  * 8)]
	ldp	x8,  x9,  [sp, #(8  * 8)]
	ldp	x10, x11, [sp, #(10 * 8)]
	ldp	x12, x13, [sp, #(12 * 8)]
	ldp	x14, x15, [sp, #(14 * 8)]
	ldp	x16, x17, [sp, #(16 * 8)]
	ldp	x18, x19, [sp, #(18 * 8)]
	ldp	x20, x21, [sp, #(20 * 8)]
	ldp	x22, x23, [sp, #(22 * 8)]
	ldp	x24, x25, [sp, #(24 * 8)]
	ldp	x26, x27, [sp, #(26 * 8)]
	ldp	x28, x29, [sp, #(28 * 8)]
	ldr	x30,      [sp, #(30 * 8)]
	add	sp, sp, #(32 * 8)
.endm
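
/*
 * Both macros above use a 32 * 8 byte frame on the EL2 stack, indexed by
 * register number (x0 at offset 0, ..., x30 at offset 30 * 8).  The 32nd
 * slot is unused and only keeps the frame, and therefore sp, 16-byte
 * aligned.
 */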

.macro	save_guest_registers
	/* Stash the guest x0 & x1 so that x0 can be used as a temporary */
	stp	x0,  x1,  [sp, #-(2 * 8)]!

	/* Fetch the hypctx pointer stashed in tpidr_el2 by vmm_enter_guest */
	mrs	x0, tpidr_el2

	stp	x2,  x3,  [x0, #(TF_X + 2  * 8)]
	stp	x4,  x5,  [x0, #(TF_X + 4  * 8)]
	stp	x6,  x7,  [x0, #(TF_X + 6  * 8)]
	stp	x8,  x9,  [x0, #(TF_X + 8  * 8)]
	stp	x10, x11, [x0, #(TF_X + 10 * 8)]
	stp	x12, x13, [x0, #(TF_X + 12 * 8)]
	stp	x14, x15, [x0, #(TF_X + 14 * 8)]
	stp	x16, x17, [x0, #(TF_X + 16 * 8)]
	stp	x18, x19, [x0, #(TF_X + 18 * 8)]
	stp	x20, x21, [x0, #(TF_X + 20 * 8)]
	stp	x22, x23, [x0, #(TF_X + 22 * 8)]
	stp	x24, x25, [x0, #(TF_X + 24 * 8)]
	stp	x26, x27, [x0, #(TF_X + 26 * 8)]
	stp	x28, x29, [x0, #(TF_X + 28 * 8)]

	str	lr, [x0, #(TF_LR)]

	/* Pop the saved guest x0 & x1 and store them in the trapframe */
	ldp	x2,  x3,  [sp], #(2 * 8)
	stp	x2,  x3,  [x0, #(TF_X + 0  * 8)]
.endm
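
/*
 * The TF_X and TF_LR offsets used above come from assym.inc and point into
 * the trapframe embedded in struct hypctx.  A rough C sketch of the layout
 * assumed here (field order is illustrative; the authoritative definition
 * lives in the machine headers):
 *
 *	struct trapframe {
 *		uint64_t tf_sp;
 *		uint64_t tf_lr;		// saved/loaded via TF_LR
 *		uint64_t tf_elr;
 *		uint64_t tf_spsr;
 *		uint64_t tf_esr;
 *		uint64_t tf_far;
 *		uint64_t tf_x[30];	// x0..x29, accessed as TF_X + n * 8
 *	};
 */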

.macro	restore_guest_registers
	/*
	 * Copy the guest x0 and x1 to the stack so we can restore them
	 * after loading the other registers.
	 */
	ldp	x2,  x3,  [x0, #(TF_X + 0  * 8)]
	stp	x2,  x3,  [sp, #-(2 * 8)]!

	ldr	lr, [x0, #(TF_LR)]

	ldp	x28, x29, [x0, #(TF_X + 28 * 8)]
	ldp	x26, x27, [x0, #(TF_X + 26 * 8)]
	ldp	x24, x25, [x0, #(TF_X + 24 * 8)]
	ldp	x22, x23, [x0, #(TF_X + 22 * 8)]
	ldp	x20, x21, [x0, #(TF_X + 20 * 8)]
	ldp	x18, x19, [x0, #(TF_X + 18 * 8)]
	ldp	x16, x17, [x0, #(TF_X + 16 * 8)]
	ldp	x14, x15, [x0, #(TF_X + 14 * 8)]
	ldp	x12, x13, [x0, #(TF_X + 12 * 8)]
	ldp	x10, x11, [x0, #(TF_X + 10 * 8)]
	ldp	x8,  x9,  [x0, #(TF_X + 8  * 8)]
	ldp	x6,  x7,  [x0, #(TF_X + 6  * 8)]
	ldp	x4,  x5,  [x0, #(TF_X + 4  * 8)]
	ldp	x2,  x3,  [x0, #(TF_X + 2  * 8)]

	ldp	x0,  x1,  [sp], #(2 * 8)
.endm

.macro vempty
	.align 7
	1: b	1b
.endm

.macro vector name
	.align 7
	b	handle_\name
.endm
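
/*
 * Each vector table entry may hold at most 32 instructions: "vempty" parks
 * unexpected exceptions in a tight loop, while "vector" branches to the
 * named handler.  The .align 7 keeps every entry on a 128-byte boundary,
 * and the .align 11 on each table below provides the 2KB alignment
 * required for VBAR_EL2.
 */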

	.section ".vmm_vectors","ax"
	.align 11
hyp_init_vectors:
	vempty		/* Synchronous EL2t */
	vempty		/* IRQ EL2t */
	vempty		/* FIQ EL2t */
	vempty		/* Error EL2t */

	vempty		/* Synchronous EL2h */
	vempty		/* IRQ EL2h */
	vempty		/* FIQ EL2h */
	vempty		/* Error EL2h */

	vector hyp_init	/* Synchronous 64-bit EL1 */
	vempty		/* IRQ 64-bit EL1 */
	vempty		/* FIQ 64-bit EL1 */
	vempty		/* Error 64-bit EL1 */

	vempty		/* Synchronous 32-bit EL1 */
	vempty		/* IRQ 32-bit EL1 */
	vempty		/* FIQ 32-bit EL1 */
	vempty		/* Error 32-bit EL1 */
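
/*
 * Only the "Synchronous 64-bit EL1" slot of this initialisation table does
 * any work: the host kernel, still running at EL1, reaches handle_hyp_init
 * with an HVC while these vectors are installed.  Every other slot parks.
 */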

	.text
	.align 11
hyp_vectors:
	vempty			/* Synchronous EL2t */
	vempty			/* IRQ EL2t */
	vempty			/* FIQ EL2t */
	vempty			/* Error EL2t */

	vector el2_el2h_sync	/* Synchronous EL2h */
	vector el2_el2h_irq	/* IRQ EL2h */
	vector el2_el2h_fiq	/* FIQ EL2h */
	vector el2_el2h_error	/* Error EL2h */

	vector el2_el1_sync64	/* Synchronous 64-bit EL1 */
	vector el2_el1_irq64	/* IRQ 64-bit EL1 */
	vector el2_el1_fiq64	/* FIQ 64-bit EL1 */
	vector el2_el1_error64	/* Error 64-bit EL1 */

	vempty			/* Synchronous 32-bit EL1 */
	vempty			/* IRQ 32-bit EL1 */
	vempty			/* FIQ 32-bit EL1 */
	vempty			/* Error 32-bit EL1 */
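
/*
 * hyp_vectors is the run-time table installed by handle_hyp_init.  The
 * lower-EL AArch64 slots hand exceptions from EL1 (guest exits and host
 * hypercalls) to the handlers below; the AArch32 slots are left parked,
 * as 32-bit EL1 is not handled here.
 */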

/*
 * Initialize the hypervisor mode with a new exception vector table, translation
 * table and stack.
 *
 * Expecting:
 * x0 - translation tables physical address
 * x1 - stack top virtual address
 * x2 - TCR_EL2 value
 * x3 - SCTLR_EL2 value
 * x4 - VTCR_EL2 value
 */
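/*
 * A hedged sketch of the host-side call, assuming a small trampoline that
 * issues the HVC with the arguments still in x0-x4; the helper name and
 * argument expressions below are illustrative, not taken from this file:
 *
 *	vmm_call_hyp(vtophys(el2_l0_pagetable), el2_stack_top,
 *	    tcr_el2, sctlr_el2, vtcr_el2);
 *
 * On return the new vectors, stack and MMU configuration are live and the
 * handler ERETs back to the EL1 caller with 0 in x0.
 */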
LENTRY(handle_hyp_init)
	/* Install the new exception vectors */
	adrp	x6, hyp_vectors
	add	x6, x6, :lo12:hyp_vectors
	msr	vbar_el2, x6
	/* Set the stack top address */
	mov	sp, x1
	/* Use the host VTTBR_EL2 to tell the host and the guests apart */
	mov	x9, #VTTBR_HOST
	msr	vttbr_el2, x9
	/* Load the base address for the translation tables */
	msr	ttbr0_el2, x0
	/* Invalidate the TLB */
	dsb	ish
	tlbi	alle2
	dsb	ishst
	isb
	/* Use the same memory attributes as EL1 */
	mrs	x9, mair_el1
	msr	mair_el2, x9
	/* Configure address translation */
	msr	tcr_el2, x2
	isb
	/* Set the system control register for EL2 */
	msr	sctlr_el2, x3
	/* Set the Stage 2 translation control register */
	msr	vtcr_el2, x4
	/* Return success */
	mov	x0, #0
	/* MMU is up and running */
	ERET
LEND(handle_hyp_init)

.macro do_world_switch_to_host
	save_guest_registers
	restore_host_registers

	/* Restore host VTTBR */
	mov	x9, #VTTBR_HOST
	msr	vttbr_el2, x9
.endm
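
/*
 * After do_world_switch_to_host the guest's registers are parked in its
 * hypctx trapframe, the host's registers and EL2 stack pointer are back in
 * place, and VTTBR_EL2 once again identifies the host.  The "ret" that
 * follows in each handler therefore returns to whoever called
 * vmm_enter_guest, with the exception type in x0 as its return value.
 */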


.macro handle_el2_excp type
	/* Save registers before modifying so we can restore them */
	str	x9, [sp, #-16]!

	/* Test if the exception happened when the host was running */
	mrs	x9, vttbr_el2
	cmp	x9, #VTTBR_HOST
	beq	1f

	/* We got the exception while the guest was running */
	ldr	x9, [sp], #16
	do_world_switch_to_host
	mov	x0, \type
	ret

1:
	/* We got the exception while the host was running */
	ldr	x9, [sp], #16
	mov	x0, \type
	ERET
.endm


LENTRY(handle_el2_el2h_sync)
	handle_el2_excp #EXCP_TYPE_EL2_SYNC
LEND(handle_el2_el2h_sync)

LENTRY(handle_el2_el2h_irq)
	handle_el2_excp #EXCP_TYPE_EL2_IRQ
LEND(handle_el2_el2h_irq)

LENTRY(handle_el2_el2h_fiq)
	handle_el2_excp #EXCP_TYPE_EL2_FIQ
LEND(handle_el2_el2h_fiq)

LENTRY(handle_el2_el2h_error)
	handle_el2_excp #EXCP_TYPE_EL2_ERROR
LEND(handle_el2_el2h_error)


LENTRY(handle_el2_el1_sync64)
	/* Save registers before modifying so we can restore them */
	str	x9, [sp, #-16]!

	/* Check for host hypervisor call */
	mrs	x9, vttbr_el2
	cmp	x9, #VTTBR_HOST
	ldr	x9, [sp], #16 /* Restore the temp register */
	bne	1f

	/*
	 * Called from the host
	 */

	/* Check if this is a cleanup call and handle it in a controlled state */
	cmp	x0, #(HYP_CLEANUP)
	b.eq	vmm_cleanup

	str	lr, [sp, #-16]!
	bl	vmm_hyp_enter
	ldr	lr, [sp], #16
	ERET

1:	/* Guest exception taken to EL2 */
	do_world_switch_to_host
	mov	x0, #EXCP_TYPE_EL1_SYNC
	ret
LEND(handle_el2_el1_sync64)
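
/*
 * Rough shape of the host hypercall path handled above; the "hvc #0" and
 * the EL1-side details are assumptions, not taken from this file:
 *
 *	EL1: x0 = call id (e.g. HYP_CLEANUP), x1.. = arguments; hvc #0
 *	EL2: handle_el2_el1_sync64
 *	       x0 == HYP_CLEANUP -> vmm_cleanup (restores the stub vectors)
 *	       otherwise         -> vmm_hyp_enter(), result left in x0
 *	EL2: ERET
 *	EL1: execution resumes after the hvc with the result in x0
 */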

/*
 * IRQ, FIQ and SError exceptions are only trapped to EL2 while a guest is
 * running, so each of these handlers performs a world switch back to the
 * host and reports the exception type for the host to handle.
 */

LENTRY(handle_el2_el1_irq64)
	do_world_switch_to_host
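	/*
	 * Tell a GIC maintenance interrupt apart from an ordinary guest
	 * IRQ: ICH_MISR_EL2 is non-zero while a maintenance interrupt is
	 * asserted.
	 */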
	str	x9, [sp, #-16]!
	mrs	x9, ich_misr_el2
	cmp	x9, xzr
	beq	1f
	mov	x0, #EXCP_TYPE_MAINT_IRQ
	b	2f
1:
	mov	x0, #EXCP_TYPE_EL1_IRQ
2:
	ldr	x9, [sp], #16
	ret
LEND(handle_el2_el1_irq64)

LENTRY(handle_el2_el1_fiq64)
	do_world_switch_to_host
	mov	x0, #EXCP_TYPE_EL1_FIQ
	ret
LEND(handle_el2_el1_fiq64)

LENTRY(handle_el2_el1_error64)
	do_world_switch_to_host
	mov	x0, #EXCP_TYPE_EL1_ERROR
	ret
LEND(handle_el2_el1_error64)


/*
 * Usage:
 * uint64_t vmm_enter_guest(struct hypctx *hypctx)
 *
 * Expecting:
 * x0 - hypctx address
 */
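/*
 * Returns an EXCP_TYPE_* value (in x0) describing why the guest stopped
 * running.  The return itself happens in the exception handlers above,
 * once do_world_switch_to_host has put the host's stack and lr back.
 */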
ENTRY(vmm_enter_guest)
	/* Save hypctx address */
	msr	tpidr_el2, x0

	save_host_registers
	restore_guest_registers

	/* Enter guest */
	ERET
END(vmm_enter_guest)
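
/*
 * A hedged sketch of how the EL2 C code might drive the guest; apart from
 * vmm_enter_guest() itself, the names below are illustrative rather than
 * the actual FreeBSD helpers:
 *
 *	uint64_t excp;
 *
 *	for (;;) {
 *		excp = vmm_enter_guest(hypctx);
 *		if (!retry_in_el2(hypctx, excp))
 *			break;	// hand the exit reason back to EL1
 *	}
 */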

/*
 * Usage:
 * void vmm_cleanup(uint64_t handle, void *hyp_stub_vectors)
 *
 * Expecting:
 * x1 - physical address of hyp_stub_vectors
 */
LENTRY(vmm_cleanup)
	/* Restore the stub vectors */
	msr	vbar_el2, x1

	/* Disable the MMU */
	dsb	sy
	mrs	x2, sctlr_el2
	bic	x2, x2, #SCTLR_EL2_M
	msr	sctlr_el2, x2
	isb

	ERET
LEND(vmm_cleanup)
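
/*
 * After the ERET above EL2 is back on the stub vectors with its MMU off,
 * so the host can tear down the hypervisor stack and translation tables
 * it handed to handle_hyp_init.
 */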