xref: /haiku/src/system/kernel/arch/ppc/arch_exceptions.S (revision 6c678c57c736f4688c9bab047bed3fe00fd27a13)
1/*
2 * Copyright 2006, Ingo Weinhold <bonefish@cs.tu-berlin.de>.
3 * All rights reserved. Distributed under the terms of the MIT License.
4 *
5 * Copyright 2003, Travis Geiselbrecht. All rights reserved.
6 * Distributed under the terms of the NewOS License.
7 */
/* Helper macros for declaring assembly symbols: FUNCTION() exports the
   symbol and marks it as a function in the ELF symbol table,
   LOCAL_FUNCTION() only marks it. Both end with the bare symbol name --
   the user supplies the trailing colon (cf. DEFINE_VECTOR below). */
#define FUNCTION(x) .global x; .type x,@function; x
#define LOCAL_FUNCTION(x) .type x,@function; x
10
11/*	General exception handling concept:
12
13	The PPC architecture specifies entry point offsets for the various
14	exceptions in the first two physical pages. We put a short piece of code
15	(VEC_ENTRY()) into each exception vector. It calls exception_vector_common,
16	which is defined in the unused space at the beginning of the first physical
17	page. It re-enables address translation and calls ppc_exception_tail which
18	lies in the kernel. It dumps an iframe and invokes ppc_exception_entry()
19	(arch_int.cpp), which handles the exception and returns eventually.
20	The registers are restored from the iframe and we return from the
21	interrupt.
22
23	algorithm overview:
24
25	* VEC_ENTRY
26	* exception_vector_common
27	* ppc_exception_tail
28		- dump iframe
29		- ppc_exception_entry()
30		- restore registers and return from interrupt
31
32	Here we use the following SPRG registers, which are at the disposal of the
33	operating system:
34	* SPRG0: Physical address pointer to a struct cpu_exception_context
35			 for the current CPU. The structure contains helpful pointers
36			 as well as some scratch memory for temporarily saving registers.
37	* SPRG1: Scratch.
38
39	struct cpu_exception_context (defined in arch_int.h):
40	offset 0:  virtual address of the exception handler routine in the kernel
41	offset 4:  virtual address of the exception context
42	offset 8:  kernel stack for the current thread
43	offset 12: start of scratch memory for saving registers etc.
44
45	algorithm in detail:
46
47	* VEC_ENTRY
48		- save r1 in SPRG1 and load cpu_exception_context into r1
49		- save r0, save LR in r0
50	* exception_vector_common
51		- params:
52			. r0: old LR
53			. r1: exception context (physical address)
54			. SPRG1: original r1
55		- save r0-3
56		- load virtual exception context address to r1
57		- turn on BAT for exception vector code
58		- turn on address translation
59		- get exception vector offset from LR
60	* ppc_exception_tail
61		- params:
62			. r1: exception context (virtual address)
63			. r3: exception vector offset
64			. SPRG1: original r1
65		- turn off BAT
66		- get kernel stack pointer
67		- dump iframe
68		- ppc_exception_entry()
69		- restore registers and return from interrupt
70 */
71
72
/* exception vector definitions */

/* Code placed in each exception vector. Exactly 5 instructions (20 bytes):
   exception_vector_common relies on this count to recover the vector
   offset from LR (see the "subi %r3, %r3, 20" there). Runs with address
   translation off; it may only use SPRG1 and the scratch area of the
   per-CPU ppc_cpu_exception_context (SPRG0) for saving state. */
#define VEC_ENTRY() \
	mtsprg1	%r1					; /* temporarily save r1 in SPRG1 */		\
	mfsprg0	%r1					; /* ppc_cpu_exception_context* -> r1 */	\
	stw		%r0, 16(%r1)		; /* save r0 */								\
	mflr	%r0					; /* save LR in r0 */						\
	bl		exception_vector_common	; /* continue with the common part */

/* Defines an exception vector at the architecturally fixed offset,
   relative to __irqvec_start. The .skip pads from the current location,
   so vectors must be defined in ascending offset order. */
#define DEFINE_VECTOR(offset, name) 	\
.skip	offset - (. - __irqvec_start);	\
FUNCTION(name):							\
	VEC_ENTRY()
88
89
/* Start of the exception vector code; all DEFINE_VECTOR() offsets are
   computed relative to this label. NOTE(review): presumably this region
   is copied/mapped to the physical exception vector area at boot (cf. the
   header comment about the first two physical pages) -- confirm against
   the arch init code. */
.global __irqvec_start
__irqvec_start:
	.long	0
93
/* Called by the exception vector code (the "bl" in VEC_ENTRY()).
 * Address translation is still turned off at this point.
 * LR:    Points to the end of the exception vector code we're coming from.
 * r0:    original LR
 * r1:    ppc_cpu_exception_context* (physical address)
 * SPRG1: original r1
 */
exception_vector_common:
	stw		%r0, 20(%r1)			/* save original LR */
	stw		%r2, 24(%r1)			/* save r2 */
	stw		%r3, 28(%r1)			/* save r3 */

	/* load the virtual address of the ppc_cpu_exception_context for this CPU */
	lwz		%r1, 4(%r1)

	/* Address translation is turned off. We map this code via BAT, turn on
	   address translation, and continue in the kernel proper. */
	li		%r0, 0x10|0x2			/* BATL_MC | BATL_PP_RW */
	mtibatl	0, %r0					/* load lower word of the instruction BAT */
	li		%r0, 0x2				/* BEPI = 0, BL = 0 (128 KB), BATU_VS */
	mtibatu	0, %r0					/* load upper word of the instruction BAT */
	isync
	sync

	/* turn on address translation: re-enable the MSR[IR]/MSR[DR] bits the
	   interrupted context had (they were copied into SRR1 on exception) */
	mfsrr1	%r0						/* load saved msr */
	rlwinm	%r0, %r0, 28, 30, 31	/* extract mmu bits */
	mfmsr	%r3						/* load the current msr */
	rlwimi  %r3, %r0, 4, 26, 27		/* merge the mmu bits with the current msr */
	mtmsr	%r3						/* load new msr (turning the mmu back on) */
	isync

	/* Get LR -- it points to the end of the exception vector code. We adjust it
	   to point to the beginning and can use it to identify the vector later. */
	mflr	%r3
	subi	%r3, %r3, 20		/* 5 instructions (VEC_ENTRY() size) */

	/* jump to kernel code (ppc_exception_tail) */
	lwz		%r2, 0(%r1)			/* handler address: context offset 0 */
	mtlr	%r2
	blr
134
135
/* The architecturally defined exception vectors. Offsets are fixed by the
   PPC architecture and must appear in ascending order, since
   DEFINE_VECTOR() pads with .skip relative to __irqvec_start. */
DEFINE_VECTOR(0x100, system_reset_exception)
DEFINE_VECTOR(0x200, machine_check_exception)
DEFINE_VECTOR(0x300, DSI_exception)
DEFINE_VECTOR(0x400, ISI_exception)
DEFINE_VECTOR(0x500, external_interrupt_exception)
DEFINE_VECTOR(0x600, alignment_exception)
DEFINE_VECTOR(0x700, program_exception)
DEFINE_VECTOR(0x800, FP_unavailable_exception)
DEFINE_VECTOR(0x900, decrementer_exception)
DEFINE_VECTOR(0xc00, system_call_exception)
DEFINE_VECTOR(0xd00, trace_exception)
DEFINE_VECTOR(0xe00, FP_assist_exception)
DEFINE_VECTOR(0xf00, perf_monitor_exception)
DEFINE_VECTOR(0xf20, altivec_unavailable_exception)
DEFINE_VECTOR(0x1000, ITLB_miss_exception)
DEFINE_VECTOR(0x1100, DTLB_miss_on_load_exception)
DEFINE_VECTOR(0x1200, DTLB_miss_on_store_exception)
DEFINE_VECTOR(0x1300, instruction_address_breakpoint_exception)
DEFINE_VECTOR(0x1400, system_management_exception)
DEFINE_VECTOR(0x1600, altivec_assist_exception)
DEFINE_VECTOR(0x1700, thermal_management_exception)

/* end of the exception vector code */
.global __irqvec_end
__irqvec_end:
160
161
/* This is where exception_vector_common continues. We're in the kernel here,
   with address translation enabled again.
   r1:    ppc_cpu_exception_context* (virtual address)
   r3:    exception vector offset
   SPRG1: original r1
 */
FUNCTION(ppc_exception_tail):
	/* turn off the BAT that exception_vector_common installed for itself */
	li		%r2, 0
	mtibatu	0, %r2
	mtibatl	0, %r2
	isync
	sync

	/* save CR -- the compare below clobbers it */
	mfcr	%r0

	mfsrr1	%r2					/* load saved msr */
	andi.	%r2, %r2, (1 << 14)	/* test MSR[PR]: 0 = supervisor, 1 = user */
	beq		.kernel				/* PR clear -> we came from kernel mode */

	/* We come from userland. Load the kernel stack top address for the current
	   userland thread (cpu_exception_context offset 8). */
	mr		%r2, %r1
	lwz		%r1, 8(%r1)
	b		.restore_stack_end

.kernel:
	/* we came from kernel mode: continue on the interrupted kernel stack */
	mr		%r2, %r1
	mfsprg1	%r1

.restore_stack_end:
	/* now r2 points to the ppc_cpu_exception_context, r1 to the kernel stack */
	/* restore the CR, it was messed up in the previous compare */
	mtcrf	0xff, %r0

	/* save the registers (builds the iframe on the stack) */
	bl		__save_regs

	/* iframe pointer to r4 and a backup to r20 -- r20 is nonvolatile in the
	   PPC SysV ABI, so it survives the C call below */
	mr		%r4, %r1
	mr		%r20, %r1

	/* adjust the stack pointer for ABI compatibility */
	subi	%r1, %r1, 8				/* make sure there's space for the previous
									   frame pointer and the return address */
	li		%r2, 0xfffffff0
	and		%r1, %r1, %r2			/* 16 byte align the stack pointer */
	li		%r0, 0
	stw		%r0, 0(%r1)				/* previous frame pointer: NULL */
		/* 4(%r1) is room for the return address to be filled in by the
		   called function. */

	/* r3: exception vector offset
	   r4: iframe pointer */
	bl 		ppc_exception_entry

	/* move the iframe to r1 */
	mr		%r1, %r20

	b		__restore_regs_and_rfi
222
223
/* called by ppc_exception_tail
 * Pushes the interrupted context onto the stack (building the iframe,
 * downwards in memory): r0..r31, then CTR, XER, CR, LR, DSISR, DAR, SRR1,
 * SRR0, and finally the exception vector offset, which ends up at the
 * lowest address. On return r1 points to the vector offset field.
 * NOTE(review): this layout must mirror struct iframe in the arch headers
 * (not visible in this file) -- keep the two in sync.
 * register expectations:
 *  r1:        stack
 *  r2:        ppc_cpu_exception_context*
 *  SPRG1:     original r1
 *  r0,r3, LR: scrambled, but saved in scratch memory
 * all other regs should have been unmodified by the exception handler,
 * and ready to be saved
 */
__save_regs:
	lwz		%r0, 16(%r2)		/* original r0 */
	stwu	%r0, -4(%r1)		/* push r0 */
	mfsprg1	%r0					/* original r1 */
	stwu	%r0, -4(%r1)		/* push r1 */
	lwz		%r0, 24(%r2)		/* original r2 */
	stwu	%r0, -4(%r1)		/* push r2 */
	lwz		%r0, 28(%r2)		/* original r3 */
	stwu	%r0, -4(%r1)		/* push r3 */

	/* push r4-r31 (still untouched since the exception) */
	stwu	%r4, -4(%r1)
	stwu	%r5, -4(%r1)
	stwu	%r6, -4(%r1)
	stwu	%r7, -4(%r1)
	stwu	%r8, -4(%r1)
	stwu	%r9, -4(%r1)
	stwu	%r10, -4(%r1)
	stwu	%r11, -4(%r1)
	stwu	%r12, -4(%r1)
	stwu	%r13, -4(%r1)
	stwu	%r14, -4(%r1)
	stwu	%r15, -4(%r1)
	stwu	%r16, -4(%r1)
	stwu	%r17, -4(%r1)
	stwu	%r18, -4(%r1)
	stwu	%r19, -4(%r1)
	stwu	%r20, -4(%r1)
	stwu	%r21, -4(%r1)
	stwu	%r22, -4(%r1)
	stwu	%r23, -4(%r1)
	stwu	%r24, -4(%r1)
	stwu	%r25, -4(%r1)
	stwu	%r26, -4(%r1)
	stwu	%r27, -4(%r1)
	stwu	%r28, -4(%r1)
	stwu	%r29, -4(%r1)
	stwu	%r30, -4(%r1)
	stwu	%r31, -4(%r1)

	/* save the special purpose and status registers */
	mfctr	%r0
	stwu	%r0, -4(%r1)		/* push CTR */
	mfxer	%r0
	stwu	%r0, -4(%r1)		/* push XER */
	mfcr	%r0
	stwu	%r0, -4(%r1)		/* push CR */
	lwz		%r0, 20(%r2)		/* original LR */
	stwu	%r0, -4(%r1)		/* push LR */
	mfspr	%r0, %dsisr
	stwu	%r0, -4(%r1)		/* push DSISR */
	mfspr	%r0, %dar
	stwu	%r0, -4(%r1)		/* push DAR */
	mfspr	%r0, %srr1
	stwu	%r0, -4(%r1)		/* push SRR1 (interrupted MSR) */
	mfspr	%r0, %srr0
	stwu	%r0, -4(%r1)		/* push SRR0 (interrupted PC) */

	stwu	%r3, -4(%r1)		/* exception vector offset */

	blr
294
295
/* called at the tail end of each of the exceptions
 * Counterpart to __save_regs: pops the iframe fields in reverse push order
 * and returns from the interrupt via rfi (which restores PC from SRR0 and
 * MSR from SRR1).
 * r1: iframe pointer (pointing at the vector offset, the lowest field)
 */
__restore_regs_and_rfi:
	lwzu	%r0, 4(%r1)		/* SRR0 (skip vector offset) */
	mtspr	%srr0, %r0
	lwzu	%r0, 4(%r1)		/* SRR1 */
	mtspr	%srr1, %r0
	lwzu	%r0, 4(%r1)		/* DAR */
	mtspr	%dar, %r0
	lwzu	%r0, 4(%r1)		/* DSISR */
	mtspr	%dsisr, %r0
	lwzu	%r0, 4(%r1)		/* LR */
	mtlr	%r0
	lwzu	%r0, 4(%r1)		/* CR */
	mtcr	%r0
	lwzu	%r0, 4(%r1)		/* XER */
	mtxer	%r0
	lwzu	%r0, 4(%r1)		/* CTR */
	mtctr	%r0

	/* pop r31..r2; each lwzu advances r1 to the next slot */
	lwzu	%r31, 4(%r1)
	lwzu	%r30, 4(%r1)
	lwzu	%r29, 4(%r1)
	lwzu	%r28, 4(%r1)
	lwzu	%r27, 4(%r1)
	lwzu	%r26, 4(%r1)
	lwzu	%r25, 4(%r1)
	lwzu	%r24, 4(%r1)
	lwzu	%r23, 4(%r1)
	lwzu	%r22, 4(%r1)
	lwzu	%r21, 4(%r1)
	lwzu	%r20, 4(%r1)
	lwzu	%r19, 4(%r1)
	lwzu	%r18, 4(%r1)
	lwzu	%r17, 4(%r1)
	lwzu	%r16, 4(%r1)
	lwzu	%r15, 4(%r1)
	lwzu	%r14, 4(%r1)
	lwzu	%r13, 4(%r1)
	lwzu	%r12, 4(%r1)
	lwzu	%r11, 4(%r1)
	lwzu	%r10, 4(%r1)
	lwzu	%r9, 4(%r1)
	lwzu	%r8, 4(%r1)
	lwzu	%r7, 4(%r1)
	lwzu	%r6, 4(%r1)
	lwzu	%r5, 4(%r1)
	lwzu	%r4, 4(%r1)
	lwzu	%r3, 4(%r1)
	lwzu	%r2, 4(%r1)
	/* r1 now points at the saved r2 slot: the r1 slot is at +4, the r0
	   slot at +8. r0 must be loaded before r1, since loading r1 destroys
	   our pointer into the iframe. */
	lwz		%r0, 8(%r1)
	lwz		%r1, 4(%r1)

	/* return from interrupt */
	rfi
352