/*
 * Copyright 2019 Haiku, Inc. All Rights Reserved.
 * Distributed under the terms of the MIT License.
 */
#include <arch/arm/arch_cpu.h>
#include <asm_defs.h>
#include "asm_offsets.h"
#include "syscall_numbers.h"

.text

FUNCTION(_thread_exit_syscall):
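	// the SVC immediate is recovered by the EL1 sync handler from the ISS
	// field of ESR_EL1; it encodes the syscall number and, presumably, the
	// argument count in the low bits (1 here)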
	svc #((SYSCALL_EXIT_THREAD << 5) | 1)
FUNCTION_END(_thread_exit_syscall)

.macro xchg_sp xt
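// add/sub swap (the add/sub analogue of the xor-swap trick): afterwards \xt
// holds the old sp and sp holds the old \xt, with no scratch register needed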
add	sp, sp, \xt
sub	\xt, sp, \xt
sub	sp, sp, \xt
.endm

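// Builds an iframe just below the interrupted stack (x19 after the swap) and
// fills it with the caller-saved GPRs, lr and the EL1 exception registers.
// \el selects whether the exception was taken from EL0 or from EL1 itself.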
.macro EXCEPTION_ENTRY el
	// interrupts are automatically disabled by hardware

	// avoid using sp in case it is misaligned
	// swap sp with x19 and use it instead
	xchg_sp x19

	// x19 is now the stack top, make room for IFRAME
	sub x19, x19, #(IFRAME_sizeof)

	stp	    x0,  x1, [x19, #(IFRAME_x + 0 * 8)]
	stp	    x2,  x3, [x19, #(IFRAME_x + 2 * 8)]
	stp	    x4,  x5, [x19, #(IFRAME_x + 4 * 8)]
	stp	    x6,  x7, [x19, #(IFRAME_x + 6 * 8)]
	stp	    x8,  x9, [x19, #(IFRAME_x + 8 * 8)]
	stp	   x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	stp	   x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	stp	   x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	stp	   x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	mov    x0,   sp  // original x19 that we swapped with sp
	stp	   x18,  x0, [x19, #(IFRAME_x + 18 * 8)]

	// x20-x29 won't be clobbered
	// thus we don't really need to store these

	str	   x30,      [x19, #(IFRAME_lr)]

.if \el == 0
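	// taken from EL0: the interrupted stack pointer is banked in SP_EL0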
	mrs x0, SP_EL0
.else
	// add sizeof back here to store original sp
	add x0, x19, #(IFRAME_sizeof)
.endif

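	// ELR_EL1: preferred return address, SPSR_EL1: saved PSTATE,
	// ESR_EL1: exception syndrome, FAR_EL1: faulting address (if any)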
	mrs x1, ELR_EL1
	mrs x2, SPSR_EL1
	mrs x3, ESR_EL1
	mrs x4, FAR_EL1

	str x0, [x19, #(IFRAME_sp)]
	str x1, [x19, #(IFRAME_elr)]
	str x2, [x19, #(IFRAME_spsr)]
	str x3, [x19, #(IFRAME_esr)]
	str x4, [x19, #(IFRAME_far)]
.endm

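// Restores the state captured in the iframe that x19 still points to and
// returns from the exception with eret.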
.macro EXCEPTION_RETURN el
	// x19 is callee-saved so it still points to IFRAME
	// x0, x1, x18, x19 will be restored at the very end

	ldr x0,  [x19, #(IFRAME_elr)]
	ldr x1,  [x19, #(IFRAME_spsr)]
	ldr x18, [x19, #(IFRAME_sp)]

	// x0 and x1 will be restored later
	ldp	    x2,  x3, [x19, #(IFRAME_x + 2 * 8)]
	ldp	    x4,  x5, [x19, #(IFRAME_x + 4 * 8)]
	ldp	    x6,  x7, [x19, #(IFRAME_x + 6 * 8)]
	ldp	    x8,  x9, [x19, #(IFRAME_x + 8 * 8)]
	ldp	   x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	ldp	   x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	ldp	   x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	ldp	   x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	// x18 and x19 will be restored later
	ldr	   x30, [x19, #(IFRAME_lr)]

	// disable interrupts before restoring ELR/SPSR/sp
	msr DAIFSet, #0xf

	msr ELR_EL1, x0
	msr SPSR_EL1, x1

.if \el == 0
	// load stack pointer for EL0 from IFRAME
	msr SP_EL0, x18

	// unwind our own stack pointer
	add sp, x19, #(IFRAME_sizeof)
.else
	// for EL1, IFRAME_sp already holds the original sp (IFRAME_sizeof was
	// added back when the iframe was saved), so restore it directly
	mov sp, x18
.endif

	// finally restore remaining registers
	ldp x0,   x1, [x19, #(IFRAME_x + 0 * 8)]
	ldp x18, x19, [x19, #(IFRAME_x + 18 * 8)]

	eret
.endm

.macro EXCEPTION_HANDLER el name func
	FUNCTION(handle_\name):
		EXCEPTION_ENTRY \el

		// prepare aligned sp for C function
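		// (AAPCS64 requires sp to be 16-byte aligned; masking the low four
		// bits of the iframe address also keeps sp at or below the iframe)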
		and sp, x19, #0xfffffffffffffff0

		// call C handler, passing IFRAME in x0
		// handler can enable interrupts if it wants to
		mov x0, x19
		bl \func

		EXCEPTION_RETURN \el
	FUNCTION_END(handle_\name)
.endm

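// Each entry of the AArch64 vector table is 0x80 bytes, hence ".align 7".
// vempty fills slots that should never be reached: brk raises a synchronous
// exception (which should end up in the debugger or a panic), then spins.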
.macro	vector	name
	.align 7
	b	handle_\name
.endm

.macro	vempty
	.align 7
	brk	0xfff
	1: b	1b
.endm

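// VBAR_EL1 requires the vector table to be 2 KB (2^11) aligned. The four
// groups of four entries are: current EL with SP_EL0 (EL1t), current EL with
// SP_ELx (EL1h), lower EL in AArch64, and lower EL in AArch32.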
.align 11
.globl _exception_vectors
_exception_vectors:
	vempty             /* Synchronous EL1t */
	vempty             /* IRQ EL1t */
	vempty             /* FIQ EL1t */
	vempty             /* Error EL1t */

	vector el1h_sync   /* Synchronous EL1h */
	vector el1h_irq    /* IRQ EL1h */
	vector el1h_fiq    /* FIQ EL1h */
	vector el1h_error  /* Error EL1h */

	vector el0_sync    /* Synchronous 64-bit EL0 */
	vector el0_irq     /* IRQ 64-bit EL0 */
	vector el0_fiq     /* FIQ 64-bit EL0 */
	vector el0_error   /* Error 64-bit EL0 */

	vempty             /* Synchronous 32-bit EL0 */
	vempty             /* IRQ 32-bit EL0 */
	vempty             /* FIQ 32-bit EL0 */
	vempty             /* Error 32-bit EL0 */

EXCEPTION_HANDLER 1 el1h_sync do_sync_handler
EXCEPTION_HANDLER 1 el1h_irq do_irq_handler
EXCEPTION_HANDLER 1 el1h_fiq do_fiq_handler
EXCEPTION_HANDLER 1 el1h_error do_error_handler

EXCEPTION_HANDLER 0 el0_sync do_sync_handler
EXCEPTION_HANDLER 0 el0_irq do_irq_handler
EXCEPTION_HANDLER 0 el0_fiq do_fiq_handler
EXCEPTION_HANDLER 0 el0_error do_error_handler

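// Enters the context described by the iframe passed in x0 (presumably the
// first switch of a thread into userland); the callee-saved registers are
// zeroed first so no kernel values leak across the eret.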
FUNCTION(_eret_with_iframe):
	mov x20, xzr
	mov x21, xzr
	mov x22, xzr
	mov x23, xzr
	mov x24, xzr
	mov x25, xzr
	mov x26, xzr
	mov x27, xzr
	mov x28, xzr
	mov x29, xzr

	mov x19, x0
	EXCEPTION_RETURN 0
FUNCTION_END(_eret_with_iframe)

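// _fp_save(buffer in x0): stores the full FP/SIMD state, q0-q31 followed by
// FPSR and FPCR (32 * 16 + 2 * 8 = 528 bytes), advancing x0 as it goes.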
FUNCTION(_fp_save):
	stp q0, q1, [x0], #32
	stp q2, q3, [x0], #32
	stp q4, q5, [x0], #32
	stp q6, q7, [x0], #32
	stp q8, q9, [x0], #32
	stp q10, q11, [x0], #32
	stp q12, q13, [x0], #32
	stp q14, q15, [x0], #32
	stp q16, q17, [x0], #32
	stp q18, q19, [x0], #32
	stp q20, q21, [x0], #32
	stp q22, q23, [x0], #32
	stp q24, q25, [x0], #32
	stp q26, q27, [x0], #32
	stp q28, q29, [x0], #32
	stp q30, q31, [x0], #32
	mrs x1, FPSR
	mrs x2, FPCR
	str x1, [x0], #8
	str x2, [x0], #8
	ret
FUNCTION_END(_fp_save)

FUNCTION(_fp_restore):
	ldp q0, q1, [x0], #32
	ldp q2, q3, [x0], #32
	ldp q4, q5, [x0], #32
	ldp q6, q7, [x0], #32
	ldp q8, q9, [x0], #32
	ldp q10, q11, [x0], #32
	ldp q12, q13, [x0], #32
	ldp q14, q15, [x0], #32
	ldp q16, q17, [x0], #32
	ldp q18, q19, [x0], #32
	ldp q20, q21, [x0], #32
	ldp q22, q23, [x0], #32
	ldp q24, q25, [x0], #32
	ldp q26, q27, [x0], #32
	ldp q28, q29, [x0], #32
	ldp q30, q31, [x0], #32

	ldr x1, [x0], #8
	msr FPSR, x1

	// avoid restoring FPCR if it hasn't changed
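	// (a write to FPCR may be serializing on some implementations, so the
	// compare-and-skip is cheaper in the common unchanged case)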
	ldr x2, [x0], #8
	mrs x3, FPCR
	cmp x3, x2
	beq 1f
	msr FPCR, x2
1:
	ret
FUNCTION_END(_fp_restore)

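// _arch_context_swap(from in x0, to in x1): saves the callee-saved GPRs
// (x19-x30) plus sp and the callee-saved FP registers (d8-d15) into *from,
// then loads the same set from *to and resumes on the new stack.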
FUNCTION(_arch_context_swap):
	// save
	stp x19, x20, [x0], #16
	stp x21, x22, [x0], #16
	stp x23, x24, [x0], #16
	stp x25, x26, [x0], #16
	stp x27, x28, [x0], #16
	stp x29, x30, [x0], #16

	mov x2, sp
	str x2, [x0], #8

	stp  d8,  d9, [x0], #16
	stp d10, d11, [x0], #16
	stp d12, d13, [x0], #16
	stp d14, d15, [x0], #16

	// restore
	ldp x19, x20, [x1], #16
	ldp x21, x22, [x1], #16
	ldp x23, x24, [x1], #16
	ldp x25, x26, [x1], #16
	ldp x27, x28, [x1], #16
	ldp x29, x30, [x1], #16

	ldr x2, [x1], #8
	mov sp, x2

	ldp  d8,  d9, [x1], #16
	ldp d10, d11, [x1], #16
	ldp d12, d13, [x1], #16
	ldp d14, d15, [x1], #16

	// pass x29 as argument to thread entry function
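	// (for a newly created thread the saved x29 presumably holds the entry
	// function's argument rather than a frame pointer; already-running
	// threads simply ignore x0 after the switch)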
	mov x0, x29
	ret
FUNCTION_END(_arch_context_swap)

/*!	\fn void arch_debug_call_with_fault_handler(cpu_ent* cpu,
		jmp_buf jumpBuffer, void (*function)(void*), void* parameter)

	Called by debug_call_with_fault_handler() to do the dirty work of setting
	the fault handler and calling the function. If the function causes a page
	fault, arch_debug_call_with_fault_handler() calls longjmp() with the given
	\a jumpBuffer. Otherwise it returns normally.

	debug_call_with_fault_handler() has already saved the CPU's fault_handler
	and fault_handler_stack_pointer and will reset them later, so
	arch_debug_call_with_fault_handler() doesn't need to care about them.

	\param cpu The \c cpu_ent for the current CPU.
	\param jumpBuffer Buffer to be used for longjmp().
	\param function The function to be called.
	\param parameter The parameter to be passed to the function to be called.
*/
FUNCTION(arch_debug_call_with_fault_handler):
	ldr x4, =fault
	str x4, [x0, #CPU_ENT_fault_handler]
	str x1, [x0, #CPU_ENT_fault_handler_stack_pointer]

	mov x0, x3
	br x2

fault:
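	// the generic fault path is expected to switch sp to
	// fault_handler_stack_pointer, i.e. to the jumpBuffer stored above, so
	// sp itself is the longjmp() argument here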
	mov x0, sp
	mov x1, #1
	b longjmp
FUNCTION_END(arch_debug_call_with_fault_handler)