/*
 * Copyright 2019-2022 Haiku, Inc. All Rights Reserved.
 * Distributed under the terms of the MIT License.
 */
#include <arch/arm/arch_cpu.h>
#include <asm_defs.h>
#include "asm_offsets.h"
#include "syscall_numbers.h"

.text

FUNCTION(_thread_exit_syscall):
	svc #((SYSCALL_EXIT_THREAD << 5) | 1)
FUNCTION_END(_thread_exit_syscall)

// swap sp and \xt using only additions (a += b; b = a - b; a -= b),
// leaving every other register untouched
.macro xchg_sp xt
add sp, sp, \xt
sub \xt, sp, \xt
sub sp, sp, \xt
.endm

.macro EXCEPTION_ENTRY el
	// interrupts are automatically disabled by hardware

	// avoid using sp in case it is misaligned
	// swap sp with x19 and use it instead
	xchg_sp x19

	// x19 is now the stack top, make room for IFRAME
	sub x19, x19, #(IFRAME_sizeof)

	stp x0, x1, [x19, #(IFRAME_x + 0 * 8)]
	stp x2, x3, [x19, #(IFRAME_x + 2 * 8)]
	stp x4, x5, [x19, #(IFRAME_x + 4 * 8)]
	stp x6, x7, [x19, #(IFRAME_x + 6 * 8)]
	stp x8, x9, [x19, #(IFRAME_x + 8 * 8)]
	stp x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	stp x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	stp x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	stp x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	mov x0, sp // original x19 that we swapped with sp
	stp x18, x0, [x19, #(IFRAME_x + 18 * 8)]

	// x20-x28 won't be clobbered
	// thus we don't really need to store these

	str x29, [x19, #(IFRAME_fp)]
	str x30, [x19, #(IFRAME_lr)]

.if \el == 0
	mrs x0, SP_EL0
.else
	// add sizeof back here to store original sp
	add x0, x19, #(IFRAME_sizeof)
.endif

	mrs x1, ELR_EL1
	mrs x2, SPSR_EL1
	mrs x3, ESR_EL1
	mrs x4, FAR_EL1

	str x0, [x19, #(IFRAME_sp)]
	str x1, [x19, #(IFRAME_elr)]
	str x2, [x19, #(IFRAME_spsr)]
	str x3, [x19, #(IFRAME_esr)]
	str x4, [x19, #(IFRAME_far)]
.endm

.macro EXCEPTION_RETURN el
	// x19 is callee-saved so it still points to IFRAME
	// x0, x1, x18, x19 will be restored at the very end

	ldr x0, [x19, #(IFRAME_elr)]
	ldr x1, [x19, #(IFRAME_spsr)]
	ldr x18, [x19, #(IFRAME_sp)]

	// x0 and x1 will be restored later
	ldp x2, x3, [x19, #(IFRAME_x + 2 * 8)]
	ldp x4, x5, [x19, #(IFRAME_x + 4 * 8)]
	ldp x6, x7, [x19, #(IFRAME_x + 6 * 8)]
	ldp x8, x9, [x19, #(IFRAME_x + 8 * 8)]
	ldp x10, x11, [x19, #(IFRAME_x + 10 * 8)]
	ldp x12, x13, [x19, #(IFRAME_x + 12 * 8)]
	ldp x14, x15, [x19, #(IFRAME_x + 14 * 8)]
	ldp x16, x17, [x19, #(IFRAME_x + 16 * 8)]
	// x18 and x19 will be restored later
	ldr x29, [x19, #(IFRAME_fp)]
	ldr x30, [x19, #(IFRAME_lr)]

	// disable interrupts before restoring ELR/SPSR/sp
	msr DAIFSet, #0xf

	msr ELR_EL1, x0
	msr SPSR_EL1, x1

.if \el == 0
	// load stack pointer for EL0 from IFRAME
	msr SP_EL0, x18

	// unwind our own stack pointer
	add sp, x19, #(IFRAME_sizeof)
.else
	// IFRAME_sp holds the original EL1 sp, so no extra unwinding is needed
	mov sp, x18
.endif

	// finally restore remaining registers
	ldp x0, x1, [x19, #(IFRAME_x + 0 * 8)]
	ldp x18, x19, [x19, #(IFRAME_x + 18 * 8)]

	eret
.endm

.macro EXCEPTION_HANDLER el name func
	FUNCTION(handle_\name):
		EXCEPTION_ENTRY \el

		// prepare aligned sp for C function
		and sp, x19, #0xfffffffffffffff0

		// call C handler, passing IFRAME in x0
		// handler can enable interrupts if it wants to
		mov x0, x19
		mov x29, x0
		bl \func

		EXCEPTION_RETURN \el
	FUNCTION_END(handle_\name)
.endm

.macro vector name
	.align 7
	b handle_\name
.endm

.macro vempty
	.align 7
	brk 0xfff
1:	b 1b
.endm
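
/*
 * The vector table below is what VBAR_EL1 is expected to point at: ARMv8-A
 * requires 2KB alignment (hence .align 11) and sixteen 128-byte slots
 * (.align 7 in the macros above), covering four exception types
 * (Synchronous, IRQ, FIQ, SError) for each of four origins (current EL
 * with SP_EL0, current EL with SP_ELx, lower EL in AArch64, lower EL in
 * AArch32). A minimal sketch of installing the table, assuming early-init
 * code elsewhere in the port does something equivalent:
 *
 *	adrp x0, _exception_vectors
 *	add x0, x0, :lo12:_exception_vectors
 *	msr VBAR_EL1, x0
 *	isb
 */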
.align 11
.globl _exception_vectors
_exception_vectors:
	vempty /* Synchronous EL1t */
	vempty /* IRQ EL1t */
	vempty /* FIQ EL1t */
	vempty /* Error EL1t */

	vector el1h_sync /* Synchronous EL1h */
	vector el1h_irq /* IRQ EL1h */
	vector el1h_fiq /* FIQ EL1h */
	vector el1h_error /* Error EL1h */

	vector el0_sync /* Synchronous 64-bit EL0 */
	vector el0_irq /* IRQ 64-bit EL0 */
	vector el0_fiq /* FIQ 64-bit EL0 */
	vector el0_error /* Error 64-bit EL0 */

	vempty /* Synchronous 32-bit EL0 */
	vempty /* IRQ 32-bit EL0 */
	vempty /* FIQ 32-bit EL0 */
	vempty /* Error 32-bit EL0 */

EXCEPTION_HANDLER 1 el1h_sync do_sync_handler
EXCEPTION_HANDLER 1 el1h_irq do_irq_handler
EXCEPTION_HANDLER 1 el1h_fiq do_fiq_handler
EXCEPTION_HANDLER 1 el1h_error do_error_handler

EXCEPTION_HANDLER 0 el0_sync do_sync_handler
EXCEPTION_HANDLER 0 el0_irq do_irq_handler
EXCEPTION_HANDLER 0 el0_fiq do_fiq_handler
EXCEPTION_HANDLER 0 el0_error do_error_handler

FUNCTION(_eret_with_iframe):
	// EXCEPTION_ENTRY never stores x20-x28, so a synthesized iframe has
	// nothing to restore them from; clear them (and x29) instead of
	// leaking stale kernel values to EL0
	mov x20, xzr
	mov x21, xzr
	mov x22, xzr
	mov x23, xzr
	mov x24, xzr
	mov x25, xzr
	mov x26, xzr
	mov x27, xzr
	mov x28, xzr
	mov x29, xzr

	mov x19, x0
	EXCEPTION_RETURN 0
FUNCTION_END(_eret_with_iframe)

FUNCTION(_fp_save):
	stp q0, q1, [x0], #32
	stp q2, q3, [x0], #32
	stp q4, q5, [x0], #32
	stp q6, q7, [x0], #32
	stp q8, q9, [x0], #32
	stp q10, q11, [x0], #32
	stp q12, q13, [x0], #32
	stp q14, q15, [x0], #32
	stp q16, q17, [x0], #32
	stp q18, q19, [x0], #32
	stp q20, q21, [x0], #32
	stp q22, q23, [x0], #32
	stp q24, q25, [x0], #32
	stp q26, q27, [x0], #32
	stp q28, q29, [x0], #32
	stp q30, q31, [x0], #32
	mrs x1, FPSR
	mrs x2, FPCR
	str x1, [x0], #8
	str x2, [x0], #8
	ret
FUNCTION_END(_fp_save)

FUNCTION(_fp_restore):
	ldp q0, q1, [x0], #32
	ldp q2, q3, [x0], #32
	ldp q4, q5, [x0], #32
	ldp q6, q7, [x0], #32
	ldp q8, q9, [x0], #32
	ldp q10, q11, [x0], #32
	ldp q12, q13, [x0], #32
	ldp q14, q15, [x0], #32
	ldp q16, q17, [x0], #32
	ldp q18, q19, [x0], #32
	ldp q20, q21, [x0], #32
	ldp q22, q23, [x0], #32
	ldp q24, q25, [x0], #32
	ldp q26, q27, [x0], #32
	ldp q28, q29, [x0], #32
	ldp q30, q31, [x0], #32

	ldr x1, [x0], #8
	msr FPSR, x1

	// avoid restoring FPCR if it hasn't changed
	ldr x2, [x0], #8
	mrs x3, FPCR
	cmp x3, x2
	beq 1f
	msr FPCR, x2
1:
	ret
FUNCTION_END(_fp_restore)

FUNCTION(_arch_context_swap):
	// save
	stp x19, x20, [x0], #16
	stp x21, x22, [x0], #16
	stp x23, x24, [x0], #16
	stp x25, x26, [x0], #16
	stp x27, x28, [x0], #16
	stp x29, x30, [x0], #16

	mov x2, sp
	str x2, [x0], #8

	stp d8, d9, [x0], #16
	stp d10, d11, [x0], #16
	stp d12, d13, [x0], #16
	stp d14, d15, [x0], #16

	// restore
	ldp x19, x20, [x1], #16
	ldp x21, x22, [x1], #16
	ldp x23, x24, [x1], #16
	ldp x25, x26, [x1], #16
	ldp x27, x28, [x1], #16
	ldp x29, x30, [x1], #16

	ldr x2, [x1], #8
	mov sp, x2

	ldp d8, d9, [x1], #16
	ldp d10, d11, [x1], #16
	ldp d12, d13, [x1], #16
	ldp d14, d15, [x1], #16

	// pass x29 as argument to thread entry function
	mov x0, x29
	ret
FUNCTION_END(_arch_context_swap)
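
/*
 * The store/load sequence in _arch_context_swap implies a per-thread
 * context layout equivalent to this sketch (illustrative only; the field
 * names here are hypothetical, not the port's actual definition):
 *
 *	struct arch_context {
 *		uint64 x[12];	// x19-x28, x29 (fp), x30 (lr)
 *		uint64 sp;
 *		uint64 d[8];	// d8-d15: callee-saved halves of v8-v15
 *	};
 *
 * Only callee-saved state is switched, because AAPCS64 makes every other
 * register caller-saved and the scheduler invokes the swap as an ordinary
 * function call. For a freshly created thread, x29 can be pre-seeded so
 * the final "mov x0, x29" delivers the entry function's argument in x0.
 */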
/*!	\fn void arch_debug_call_with_fault_handler(cpu_ent* cpu,
		jmp_buf jumpBuffer, void (*function)(void*), void* parameter)

	Called by debug_call_with_fault_handler() to do the dirty work of setting
	the fault handler and calling the function. If the function causes a page
	fault, arch_debug_call_with_fault_handler() calls longjmp() with the
	given \a jumpBuffer. Otherwise it returns normally.

	debug_call_with_fault_handler() has already saved the CPU's fault_handler
	and fault_handler_stack_pointer and will reset them later, so
	arch_debug_call_with_fault_handler() doesn't need to care about them.

	\param cpu The \c cpu_ent for the current CPU.
	\param jumpBuffer Buffer to be used for longjmp().
	\param function The function to be called.
	\param parameter The parameter to be passed to the function to be called.
*/
FUNCTION(arch_debug_call_with_fault_handler):
	ldr x4, =fault
	str x4, [x0, #CPU_ENT_fault_handler]
	str x1, [x0, #CPU_ENT_fault_handler_stack_pointer]

	mov x0, x3
	br x2

fault:
	// the fault path is expected to restore sp from
	// fault_handler_stack_pointer before branching here,
	// so sp holds the jumpBuffer
	mov x0, sp
	mov x1, #1
	b longjmp
FUNCTION_END(arch_debug_call_with_fault_handler)


/* addr_t arm64_get_fp(void) */
FUNCTION(arm64_get_fp):
	mov x0, x29
	ret
FUNCTION_END(arm64_get_fp)
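
/*
 * For reference, the expected pairing on the generic side of
 * arch_debug_call_with_fault_handler() (a sketch of the contract described
 * in the comment above, not the actual debug_call_with_fault_handler()
 * implementation):
 *
 *	if (setjmp(jumpBuffer) == 0)
 *		arch_debug_call_with_fault_handler(cpu, jumpBuffer,
 *			function, parameter);
 *	// a fault inside "function" longjmp()s back here with value 1,
 *	// matching the "mov x1, #1" in the fault path above
 */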