xref: /haiku/headers/private/kernel/arch/arm64/arch_cpu.h (revision 9f3bdf3d039430b5172c424def20ce5d9f7367d4)
1 /*
2  * Copyright 2018, Jaroslaw Pelczar <jarek@jpelczar.com>
3  * Distributed under the terms of the MIT License.
4  */
5 #ifndef _KERNEL_ARCH_ARM64_ARCH_CPU_H_
6 #define _KERNEL_ARCH_ARM64_ARCH_CPU_H_
7 
8 
// Maximum number of cache levels this port will describe.
#define CPU_MAX_CACHE_LEVEL 	8
// Assumed cache line size in bytes.
#define CACHE_LINE_SIZE 		64

// No-ops on ARM64; presumably mirror the x86 set_ac()/clear_ac() hooks
// used around user-memory access — TODO confirm against callers.
#define set_ac()
#define clear_ac()
14 
15 #include <kernel/arch/arm64/arm_registers.h>
16 
17 #ifndef _ASSEMBLER
18 
19 #include <arch/arm64/arch_thread_types.h>
20 #include <kernel.h>
21 
/*
 * Wrappers for the common AArch64 hint and barrier instructions.  All
 * carry a "memory" clobber so the compiler will not cache memory values
 * in registers or reorder accesses across them.
 */
#define arm64_sev()  		__asm__ __volatile__("sev" : : : "memory")
#define arm64_wfe()  		__asm__ __volatile__("wfe" : : : "memory")
// DSB and DMB require a barrier-option operand in A64 assembly (a bare
// "dsb"/"dmb" is rejected by the assembler); "sy" — full system, reads
// and writes — is the strongest option and matches the intent of a
// plain barrier here.
#define arm64_dsb()  		__asm__ __volatile__("dsb sy" : : : "memory")
#define arm64_dmb()  		__asm__ __volatile__("dmb sy" : : : "memory")
#define arm64_isb()  		__asm__ __volatile__("isb" : : : "memory")
#define arm64_nop()  		__asm__ __volatile__("nop" : : : "memory")
#define arm64_wfi()  		__asm__ __volatile__("wfi" : : : "memory")
#define arm64_yield() 	__asm__ __volatile__("yield" : : : "memory")
30 
/* Extract CPU affinity levels 0-3 */
/* MPIDR_EL1 packs four 8-bit affinity fields: Aff0 at bits [7:0],
 * Aff1 [15:8], Aff2 [23:16] and Aff3 [39:32] — note the gap: Aff3 sits
 * above bit 31, hence the 64-bit shift below. */
#define	CPU_AFF0(mpidr)	(u_int)(((mpidr) >> 0) & 0xff)
#define	CPU_AFF1(mpidr)	(u_int)(((mpidr) >> 8) & 0xff)
#define	CPU_AFF2(mpidr)	(u_int)(((mpidr) >> 16) & 0xff)
#define	CPU_AFF3(mpidr)	(u_int)(((mpidr) >> 32) & 0xff)
/* In-place masks for the individual affinity fields. */
#define	CPU_AFF0_MASK	0xffUL
#define	CPU_AFF1_MASK	0xff00UL
#define	CPU_AFF2_MASK	0xff0000UL
#define	CPU_AFF3_MASK	0xff00000000UL
#define	CPU_AFF_MASK	(CPU_AFF0_MASK | CPU_AFF1_MASK | \
    CPU_AFF2_MASK| CPU_AFF3_MASK)	/* Mask affinity fields in MPIDR_EL1 */
42 
43 
/* MIDR implementer codes (extracted by CPU_IMPL() below, bits [31:24]). */
#define	CPU_IMPL_ARM		0x41
#define	CPU_IMPL_BROADCOM	0x42
#define	CPU_IMPL_CAVIUM		0x43
#define	CPU_IMPL_DEC		0x44
#define	CPU_IMPL_INFINEON	0x49
#define	CPU_IMPL_FREESCALE	0x4D
#define	CPU_IMPL_NVIDIA		0x4E
#define	CPU_IMPL_APM		0x50
#define	CPU_IMPL_QUALCOMM	0x51
#define	CPU_IMPL_MARVELL	0x56
#define	CPU_IMPL_INTEL		0x69
55 
/* MIDR primary part numbers (extracted by CPU_PART() below, bits [15:4]). */
#define	CPU_PART_THUNDER	0x0A1
#define	CPU_PART_FOUNDATION	0xD00
#define	CPU_PART_CORTEX_A35	0xD04
#define	CPU_PART_CORTEX_A53	0xD03
#define	CPU_PART_CORTEX_A55	0xD05
#define	CPU_PART_CORTEX_A57	0xD07
#define	CPU_PART_CORTEX_A72	0xD08
#define	CPU_PART_CORTEX_A73	0xD09
#define	CPU_PART_CORTEX_A75	0xD0A

/* Revision codes for Cavium ThunderX (matched against CPU_REV()). */
#define	CPU_REV_THUNDER_1_0	0x00
#define	CPU_REV_THUNDER_1_1	0x01
68 
/*
 * Decompose a MIDR value into its implementer / part number / variant /
 * revision fields.
 */
#define	CPU_IMPL(midr)	(((midr) >> 24) & 0xff)
#define	CPU_PART(midr)	(((midr) >> 4) & 0xfff)
#define	CPU_VAR(midr)	(((midr) >> 20) & 0xf)
#define	CPU_REV(midr)	(((midr) >> 0) & 0xf)

/*
 * Compose the fields back into register form.  Unsigned constants are
 * used so that shifting the implementer byte into bits [31:24] cannot
 * left-shift into the sign bit of a signed int, which is undefined
 * behavior in C.
 */
#define	CPU_IMPL_TO_MIDR(val)	(((val) & 0xffU) << 24)
#define	CPU_PART_TO_MIDR(val)	(((val) & 0xfffU) << 4)
#define	CPU_VAR_TO_MIDR(val)	(((val) & 0xfU) << 20)
#define	CPU_REV_TO_MIDR(val)	(((val) & 0xfU) << 0)

/* Field masks in register position (unsigned for the same reason). */
#define	CPU_IMPL_MASK	(0xffU << 24)
#define	CPU_PART_MASK	(0xfffU << 4)
#define	CPU_VAR_MASK	(0xfU << 20)
#define	CPU_REV_MASK	(0xfU << 0)
83 
/* Build a raw MIDR value from its four fields. */
#define	CPU_ID_RAW(impl, part, var, rev)		\
    (CPU_IMPL_TO_MIDR((impl)) |				\
    CPU_PART_TO_MIDR((part)) | CPU_VAR_TO_MIDR((var)) |	\
    CPU_REV_TO_MIDR((rev)))

/* Compare the current CPU's MIDR against an expected id, considering
 * only the bits selected by `mask`.
 * NOTE(review): PCPU_GET() is not defined in this header — it is a
 * FreeBSD per-CPU accessor idiom; confirm an equivalent exists before
 * these two macros are used. */
#define	CPU_MATCH(mask, impl, part, var, rev)		\
    (((mask) & PCPU_GET(midr)) ==			\
    ((mask) & CPU_ID_RAW((impl), (part), (var), (rev))))

#define	CPU_MATCH_RAW(mask, devid)			\
    (((mask) & PCPU_GET(midr)) == ((mask) & (devid)))
95 
96 static inline uint64 arm64_get_cyclecount(void)
97 {
98 	return READ_SPECIALREG(cntvct_el0);
99 }
100 
/*
 * ADDRESS_TRANSLATE_FUNC(stage) expands to a static inline helper,
 * arm64_address_translate_<stage>(addr), which executes the AT (address
 * translate) instruction for the given translation regime and returns
 * the raw result read back from PAR_EL1.  Callers decode PAR_EL1
 * themselves (see the PAR_EL1 format in the Arm ARM for the
 * success/fault encoding).
 */
#define	ADDRESS_TRANSLATE_FUNC(stage)					\
static inline uint64									\
arm64_address_translate_ ##stage (uint64 addr)		\
{														\
	uint64 ret;											\
														\
	__asm __volatile(									\
	    "at " __ARMREG_STRING(stage) ", %1 \n"					\
	    "mrs %0, par_el1" : "=r"(ret) : "r"(addr));		\
														\
	return (ret);										\
}


/* Instantiate translators: s1e0r/w = stage 1, EL0, read/write;
 * s1e1r/w = stage 1, EL1, read/write. */
ADDRESS_TRANSLATE_FUNC(s1e0r)
ADDRESS_TRANSLATE_FUNC(s1e0w)
ADDRESS_TRANSLATE_FUNC(s1e1r)
ADDRESS_TRANSLATE_FUNC(s1e1w)
119 
120 
// Saved SIMD/floating-point unit state.
struct aarch64_fpu_state
{
	// Register file storage: 32 * 2 uint64 slots — presumably the 32
	// 128-bit registers v0-v31 stored as 64-bit halves; TODO confirm
	// against the save/restore code.
	uint64 regs[32 * 2];
	uint64 fpsr;	// floating-point status register
	uint64 fpcr;	// floating-point control register
};
127 
128 
/* raw exception frames */
// Register state captured by the exception entry path and handed to the
// C-level exception handlers.
struct iframe {
	// return info
	uint64 elr;		// saved exception link register (return address)
	uint64 spsr;	// saved program status
	// General purpose registers x0-x19.
	// NOTE(review): x20-x28 have no slots here while lr, sp and fp are
	// kept separately — confirm this matches what the assembly
	// exception stubs actually save/restore.
	uint64 x[20];
	uint64 lr;
	uint64 sp;
	uint64 fp;

	// exception info
	uint64 esr;		// exception syndrome
	uint64 far;		// faulting address (validity depends on the syndrome)

	// fpu
	struct aarch64_fpu_state fpu;
};
146 
147 
#ifdef __cplusplus
namespace BKernel {
	struct Thread;
}  // namespace BKernel


// Per-CPU architecture-specific bookkeeping.
typedef struct arch_cpu_info {
	// Affinity value identifying this CPU (from MPIDR_EL1).
	// NOTE(review): declared uint32, but CPU_AFF3() above extracts
	// bits [39:32] — Aff3 would be truncated if stored here; confirm
	// whether this needs to be 64-bit.
	uint32						mpidr;
	// Last thread that used the VFP/SIMD unit on this CPU — presumably
	// for lazy FPU context switching; TODO confirm against users.
	BKernel::Thread*			last_vfp_user;
} arch_cpu_info;
#endif
159 
160 
161 #ifdef __cplusplus
162 extern "C" {
163 #endif
164 
165 
// Busy-wait hint: issues a YIELD so the hardware knows this core is in
// a spin loop and may divert resources elsewhere.
static inline void arch_cpu_pause(void)
{
	arm64_yield();
}
170 
171 
// Idle the CPU: wait-for-interrupt halts the core until the next
// interrupt (or other wake event) arrives.
static inline void arch_cpu_idle(void)
{
	arm64_wfi();
}
176 
177 
178 extern addr_t arm64_get_fp(void);
179 
180 
181 #ifdef __cplusplus
182 }
183 #endif
184 
185 #endif
186 
187 
188 #endif /* _KERNEL_ARCH_ARM64_ARCH_CPU_H_ */
189