/* xref: /haiku/src/system/kernel/arch/m68k/arch_exceptions.S
   (revision b06a48ab8f30b45916a9c157b992827779182163) */
/*
 * Copyright 2006, Ingo Weinhold <bonefish@cs.tu-berlin.de>.
 * All rights reserved. Distributed under the terms of the MIT License.
 *
 * Copyright 2003, Travis Geiselbrecht. All rights reserved.
 * Distributed under the terms of the NewOS License.
 */

#include "asm_offsets.h"

/* FUNCTION(x): declare an exported function symbol with ELF @function type;
   the expansion ends in the bare symbol name so the caller writes
   FUNCTION(name): to place the label.
   LOCAL_FUNCTION(x): same, but the symbol stays file-local (no .global). */
#define FUNCTION(x) .global x; .type x,@function; x
#define LOCAL_FUNCTION(x) .type x,@function; x



/*	General exception handling concept:

	Starting with 68020 the vector offset (=vector number * 4) is part
	of all the exception frame types, so we can easily have a common
	asm stub for all of them, which calls m68k_exception_entry in
	arch_int.cpp.
	Also, m68k doesn't disable the mmu on interrupt as ppc does,
	which makes things simpler.

	----ppc

	The PPC architecture specifies entry point offsets for the various
	exceptions in the first two physical pages. We put a short piece of code
	(VEC_ENTRY()) into each exception vector. It calls exception_vector_common,
	which is defined in the unused space at the beginning of the first physical
	page. It re-enables address translation and calls ppc_exception_tail which
	lies in the kernel. It dumps an iframe and invokes ppc_exception_entry()
	(arch_int.cpp), which handles the exception and returns eventually.
	The registers are restored from the iframe and we return from the
	interrupt.

	algorithm overview:

	* VEC_ENTRY
	* exception_vector_common
	* ppc_exception_tail
		- dump iframe
		- ppc_exception_entry()
		- restore registers and return from interrupt

	Here we use the following SPRG registers, which are at the disposal of the
	operating system:
	* SPRG0: Physical address pointer to a struct cpu_exception_context
			 for the current CPU. The structure contains helpful pointers
			 as well as some scratch memory for temporarily saving registers.
	* SPRG1: Scratch.

	struct cpu_exception_context (defined in arch_int.h):
	offset 0:  virtual address of the exception handler routine in the kernel
	offset 4:  virtual address of the exception context
	offset 8:  kernel stack for the current thread
	offset 12: start of scratch memory for saving registers etc.

	algorithm in detail:

	* VEC_ENTRY
		- save r1 in SPRG1 and load cpu_exception_context into r1
		- save r0, save LR in r0
	* exception_vector_common
		- params:
			. r0: old LR
			. r1: exception context (physical address)
			. SPRG1: original r1
		- save r0-3
		- load virtual exception context address to r1
		- turn on BAT for exception vector code
		- turn on address translation
		- get exception vector offset from LR
	* ppc_exception_tail
		- params:
			. r1: exception context (virtual address)
			. r3: exception vector offset
			. SPRG1: original r1
		- turn off BAT
		- get kernel stack pointer
		- dump iframe
		- ppc_exception_entry()
		- restore registers and return from interrupt
 */


/* exception vector definitions */

/* this one just returns */
FUNCTION(__m68k_exception_noop):
	rte

/* see arch_asm.S for ctx switch */

/* Common entry stub installed in the m68k exception vectors.
 * Builds an iframe on the current stack (integer registers, then the FPU
 * frame state and — if the FPU state is non-null — the FP data/control
 * registers), calls the C handler m68k_exception_entry(iframe*), then
 * restores everything and returns from the exception with rte.
 * The CPU-pushed exception frame (with the vector offset, 68020+) already
 * sits on the stack above what we save here.
 */
FUNCTION(__m68k_exception_common):
	/* save regs */
	movem.l		%d0-%d7/%a0-%a6,-(%sp)	/* push data and address registers */
	/* save fp */
	sub.l		#FPU_STATE_sizeof,%sp
	fsave		(%sp)
	tst.b		(%sp)			/* check for a null state */
	beq		null_sav_1		/* yes */

	/* non-null FPU state: save control and data registers too */
	fmovem		%fpcr/%fpsr/%fpiar,-(%sp)
	fmovem		%fp0-%fp7,-(%sp)
	bra		null_sav_2
null_sav_1:
	/* null state: just reserve the slots so the iframe layout is fixed */
	sub.l		#IFRAME_fpu-IFRAME_fp,%sp
null_sav_2:

	move.l		%sp,-(%sp)		/* push address of iframe */
	bsr		m68k_exception_entry	/* call C entry */
	add.l		#4,%sp

	/* restore fp */
	tst.b		IFRAME_fpu-IFRAME_fp(%sp) /* check for a null state */
	beq		null_res_1		/* yes */
	fmovem		(%sp)+,%fp0-%fp7
	fmovem		(%sp)+,%fpcr/%fpsr/%fpiar
	bra		null_res_2
null_res_1:
	add.l		#IFRAME_fpu-IFRAME_fp,%sp
null_res_2:
	/* restore fp */
	frestore	(%sp)
	add.l		#FPU_STATE_sizeof,%sp
	/* restore regs */
	movem.l		(%sp)+,%d0-%d7/%a0-%a6

	rte



#if 0 /* PPC */

/* code in each exception vector */
#define VEC_ENTRY() \
	mtsprg1	%r1					; /* temporarily save r1 in SPRG1 */		\
	mfsprg0	%r1					; /* ppc_cpu_exception_context* -> r1 */	\
	stw		%r0, 16(%r1)		; /* save r0 */								\
	mflr	%r0					; /* save LR in r0 */						\
	bl		exception_vector_common	; /* continue with the common part */

/* defines an exception vector */
#define DEFINE_VECTOR(offset, name) 	\
.skip	offset - (. - __irqvec_start);	\
FUNCTION(name):							\
	VEC_ENTRY()


.global __irqvec_start
__irqvec_start:
	.long	0

/* Called by the exception vector code.
 * LR:    Points to the end of the exception vector code we're coming from.
 * r0:    original LR
 * r1:    ppc_cpu_exception_context* (physical address)
 * SPRG1: original r1
 */
exception_vector_common:
	stw		%r0, 20(%r1)			/* save original LR */
	stw		%r2, 24(%r1)			/* save r2 */
	stw		%r3, 28(%r1)			/* save r3 */

	/* load the virtual address of the ppc_cpu_exception_context for this CPU */
	lwz		%r1, 4(%r1)

	/* Address translation is turned off. We map this code via BAT, turn on
	   address translation, and continue in the kernel proper. */
	li		%r0, 0x10|0x2			/* BATL_MC | BATL_PP_RW */
	mtibatl	0, %r0					/* load lower word of the instruction BAT */
	li		%r0, 0x2				/* BEPI = 0, BL = 0 (128 KB), BATU_VS */
	mtibatu	0, %r0					/* load upper word of the instruction BAT */
	isync
	sync

	/* turn on address translation */
	mfsrr1	%r0						/* load saved msr */
	rlwinm	%r0, %r0, 28, 30, 31	/* extract mmu bits */
	mfmsr	%r3						/* load the current msr */
	rlwimi  %r3, %r0, 4, 26, 27		/* merge the mmu bits with the current msr */
	li		%r0, 1
	rlwimi  %r3, %r0, 13, 18, 18	/* turn on FPU, too */
	mtmsr	%r3						/* load new msr (turning the mmu back on) */
	isync

	/* Get LR -- it points to the end of the exception vector code. We adjust it
	   to point to the beginning and can use it to identify the vector later. */
	mflr	%r3
	subi	%r3, %r3, 20		/* 5 instructions */

	/* jump to kernel code (ppc_exception_tail) */
	lwz		%r2, 0(%r1)
	mtlr	%r2
	blr


DEFINE_VECTOR(0x100, system_reset_exception)
DEFINE_VECTOR(0x200, machine_check_exception)
DEFINE_VECTOR(0x300, DSI_exception)
DEFINE_VECTOR(0x400, ISI_exception)
DEFINE_VECTOR(0x500, external_interrupt_exception)
DEFINE_VECTOR(0x600, alignment_exception)
DEFINE_VECTOR(0x700, program_exception)
DEFINE_VECTOR(0x800, FP_unavailable_exception)
DEFINE_VECTOR(0x900, decrementer_exception)
DEFINE_VECTOR(0xc00, system_call_exception)
DEFINE_VECTOR(0xd00, trace_exception)
DEFINE_VECTOR(0xe00, FP_assist_exception)
DEFINE_VECTOR(0xf00, perf_monitor_exception)
DEFINE_VECTOR(0xf20, altivec_unavailable_exception)
DEFINE_VECTOR(0x1000, ITLB_miss_exception)
DEFINE_VECTOR(0x1100, DTLB_miss_on_load_exception)
DEFINE_VECTOR(0x1200, DTLB_miss_on_store_exception)
DEFINE_VECTOR(0x1300, instruction_address_breakpoint_exception)
DEFINE_VECTOR(0x1400, system_management_exception)
DEFINE_VECTOR(0x1600, altivec_assist_exception)
DEFINE_VECTOR(0x1700, thermal_management_exception)

.global __irqvec_end
__irqvec_end:


/* This is where exception_vector_common continues. We're in the kernel here.
   r1:    ppc_cpu_exception_context* (virtual address)
   r3:    exception vector offset
   SPRG1: original r1
 */
FUNCTION(ppc_exception_tail):
	/* turn off BAT */
	li		%r2, 0
	mtibatu	0, %r2
	mtibatl	0, %r2
	isync
	sync

	/* save CR */
	mfcr	%r0

	mfsrr1	%r2					/* load saved msr */
	andi.	%r2, %r2, (1 << 14)	/* see if it was in kernel mode */
	beq		.kernel				/* yep */

	/* We come from userland. Load the kernel stack top address for the current
	   userland thread. */
	mr		%r2, %r1
	lwz		%r1, 8(%r1)
	b		.restore_stack_end

.kernel:
	mr		%r2, %r1
	mfsprg1	%r1

.restore_stack_end:
	/* now r2 points to the ppc_cpu_exception_context, r1 to the kernel stack */
	/* restore the CR, it was messed up in the previous compare */
	mtcrf	0xff, %r0

	/* align r1 to 8 bytes, so the iframe will be aligned too */
	rlwinm	%r1, %r1, 0, 0, 28

	/* save the registers */
	bl		__save_regs

	/* iframe pointer to r4 and a backup to r20 */
	mr		%r4, %r1
	mr		%r20, %r1

	/* adjust the stack pointer for ABI compatibility */
	subi	%r1, %r1, 8				/* make sure there's space for the previous
									   frame pointer and the return address */
	rlwinm	%r1, %r1, 0, 0, 27		/* 16 byte align the stack pointer */
	li		%r0, 0
	stw		%r0, 0(%r1)				/* previous frame pointer: NULL */
		/* 4(%r1) is room for the return address to be filled in by the
		   called function. */

	/* r3: exception vector offset
	   r4: iframe pointer */
	bl 		ppc_exception_entry

	/* move the iframe to r1 */
	mr		%r1, %r20

	b		__restore_regs_and_rfi


/* called by ppc_exception_tail
 * register expectations:
 *  r1:        stack
 *  r2:        ppc_cpu_exception_context*
 *  SPRG1:     original r1
 *  r0,r3, LR: scrambled, but saved in scratch memory
 * all other regs should have been unmodified by the exception handler,
 * and ready to be saved
 */
__save_regs:
	/* Note: The iframe must be 8 byte aligned. The stack pointer we are passed
	   in r1 is aligned. So we store the floating point registers first and
	   need to take care that an even number of 4 byte registers is stored,
	   or insert padding respectively. */

	/* push f0-f31 */
	stfdu	%f0, -8(%r1)
	stfdu	%f1, -8(%r1)
	stfdu	%f2, -8(%r1)
	stfdu	%f3, -8(%r1)
	stfdu	%f4, -8(%r1)
	stfdu	%f5, -8(%r1)
	stfdu	%f6, -8(%r1)
	stfdu	%f7, -8(%r1)
	stfdu	%f8, -8(%r1)
	stfdu	%f9, -8(%r1)
	stfdu	%f10, -8(%r1)
	stfdu	%f11, -8(%r1)
	stfdu	%f12, -8(%r1)
	stfdu	%f13, -8(%r1)
	stfdu	%f14, -8(%r1)
	stfdu	%f15, -8(%r1)
	stfdu	%f16, -8(%r1)
	stfdu	%f17, -8(%r1)
	stfdu	%f18, -8(%r1)
	stfdu	%f19, -8(%r1)
	stfdu	%f20, -8(%r1)
	stfdu	%f21, -8(%r1)
	stfdu	%f22, -8(%r1)
	stfdu	%f23, -8(%r1)
	stfdu	%f24, -8(%r1)
	stfdu	%f25, -8(%r1)
	stfdu	%f26, -8(%r1)
	stfdu	%f27, -8(%r1)
	stfdu	%f28, -8(%r1)
	stfdu	%f29, -8(%r1)
	stfdu	%f30, -8(%r1)
	stfdu	%f31, -8(%r1)

	/* push r0-r3 */
	lwz		%r0, 16(%r2)		/* original r0 */
	stwu	%r0, -4(%r1)		/* push r0 */
	mfsprg1	%r0					/* original r1 */
	stwu	%r0, -4(%r1)		/* push r1 */
	lwz		%r0, 24(%r2)		/* original r2 */
	stwu	%r0, -4(%r1)		/* push r2 */
	lwz		%r0, 28(%r2)		/* original r3 */
	stwu	%r0, -4(%r1)		/* push r3 */

	/* push r4-r31 */
	stwu	%r4, -4(%r1)
	stwu	%r5, -4(%r1)
	stwu	%r6, -4(%r1)
	stwu	%r7, -4(%r1)
	stwu	%r8, -4(%r1)
	stwu	%r9, -4(%r1)
	stwu	%r10, -4(%r1)
	stwu	%r11, -4(%r1)
	stwu	%r12, -4(%r1)
	stwu	%r13, -4(%r1)
	stwu	%r14, -4(%r1)
	stwu	%r15, -4(%r1)
	stwu	%r16, -4(%r1)
	stwu	%r17, -4(%r1)
	stwu	%r18, -4(%r1)
	stwu	%r19, -4(%r1)
	stwu	%r20, -4(%r1)
	stwu	%r21, -4(%r1)
	stwu	%r22, -4(%r1)
	stwu	%r23, -4(%r1)
	stwu	%r24, -4(%r1)
	stwu	%r25, -4(%r1)
	stwu	%r26, -4(%r1)
	stwu	%r27, -4(%r1)
	stwu	%r28, -4(%r1)
	stwu	%r29, -4(%r1)
	stwu	%r30, -4(%r1)
	stwu	%r31, -4(%r1)

	/* save some of the other regs */
	mffs	%f0
	stfsu	%f0, -4(%r1)		/* push FPSCR */
	mfctr	%r0
	stwu	%r0, -4(%r1)		/* push CTR */
	mfxer	%r0
	stwu	%r0, -4(%r1)		/* push XER */
	mfcr	%r0
	stwu	%r0, -4(%r1)		/* push CR */
	lwz		%r0, 20(%r2)		/* original LR */
	stwu	%r0, -4(%r1)		/* push LR */
	mfspr	%r0, %dsisr
	stwu	%r0, -4(%r1)		/* push DSISR */
	mfspr	%r0, %dar
	stwu	%r0, -4(%r1)		/* push DAR */
	mfspr	%r0, %srr1
	stwu	%r0, -4(%r1)		/* push SRR1 */
	mfspr	%r0, %srr0
	stwu	%r0, -4(%r1)		/* push SRR0 */

	stwu	%r3, -4(%r1)		/* exception vector offset */

	blr


/* called at the tail end of each of the exceptions
 * r1: iframe pointer
 */
__restore_regs_and_rfi:
	lwzu	%r0, 4(%r1)		/* SRR0 (skip vector offset) */
	mtspr	%srr0, %r0
	lwzu	%r0, 4(%r1)		/* SRR1 */
	mtspr	%srr1, %r0
	lwzu	%r0, 4(%r1)		/* DAR */
	mtspr	%dar, %r0
	lwzu	%r0, 4(%r1)		/* DSISR */
	mtspr	%dsisr, %r0
	lwzu	%r0, 4(%r1)		/* LR */
	mtlr	%r0
	lwzu	%r0, 4(%r1)		/* CR */
	mtcr	%r0
	lwzu	%r0, 4(%r1)		/* XER */
	mtxer	%r0
	lwzu	%r0, 4(%r1)		/* CTR */
	mtctr	%r0
	lfsu	%f0, 4(%r1)		/* FPSCR */
	mtfsf	0xff, %f0

	lwzu	%r31, 4(%r1)
	lwzu	%r30, 4(%r1)
	lwzu	%r29, 4(%r1)
	lwzu	%r28, 4(%r1)
	lwzu	%r27, 4(%r1)
	lwzu	%r26, 4(%r1)
	lwzu	%r25, 4(%r1)
	lwzu	%r24, 4(%r1)
	lwzu	%r23, 4(%r1)
	lwzu	%r22, 4(%r1)
	lwzu	%r21, 4(%r1)
	lwzu	%r20, 4(%r1)
	lwzu	%r19, 4(%r1)
	lwzu	%r18, 4(%r1)
	lwzu	%r17, 4(%r1)
	lwzu	%r16, 4(%r1)
	lwzu	%r15, 4(%r1)
	lwzu	%r14, 4(%r1)
	lwzu	%r13, 4(%r1)
	lwzu	%r12, 4(%r1)
	lwzu	%r11, 4(%r1)
	lwzu	%r10, 4(%r1)
	lwzu	%r9, 4(%r1)
	lwzu	%r8, 4(%r1)
	lwzu	%r7, 4(%r1)
	lwzu	%r6, 4(%r1)
	lwzu	%r5, 4(%r1)
	lwzu	%r4, 4(%r1)
	lwzu	%r3, 4(%r1)

	/* Stop here, before we overwrite r1, and continue with the floating point
	   registers first. */
	addi	%r2, %r1, 16		/* skip r3-r0 */

	/* f31-f0 */
	lfd		%f31, 0(%r2)
	lfdu	%f30, 8(%r2)
	lfdu	%f29, 8(%r2)
	lfdu	%f28, 8(%r2)
	lfdu	%f27, 8(%r2)
	lfdu	%f26, 8(%r2)
	lfdu	%f25, 8(%r2)
	lfdu	%f24, 8(%r2)
	lfdu	%f23, 8(%r2)
	lfdu	%f22, 8(%r2)
	lfdu	%f21, 8(%r2)
	lfdu	%f20, 8(%r2)
	lfdu	%f19, 8(%r2)
	lfdu	%f18, 8(%r2)
	lfdu	%f17, 8(%r2)
	lfdu	%f16, 8(%r2)
	lfdu	%f15, 8(%r2)
	lfdu	%f14, 8(%r2)
	lfdu	%f13, 8(%r2)
	lfdu	%f12, 8(%r2)
	lfdu	%f11, 8(%r2)
	lfdu	%f10, 8(%r2)
	lfdu	%f9, 8(%r2)
	lfdu	%f8, 8(%r2)
	lfdu	%f7, 8(%r2)
	lfdu	%f6, 8(%r2)
	lfdu	%f5, 8(%r2)
	lfdu	%f4, 8(%r2)
	lfdu	%f3, 8(%r2)
	lfdu	%f2, 8(%r2)
	lfdu	%f1, 8(%r2)
	lfd		%f0, 8(%r2)

	/* r2-r0 */
	lwzu	%r2, 4(%r1)
	lwz		%r0, 8(%r1)
	lwz		%r1, 4(%r1)

	/* return from interrupt */
	rfi
#endif /* PPC */
