/*
 * Copyright 2014, Paweł Dziepak, pdziepak@quarnos.org.
 * Distributed under the terms of the MIT License.
 */
#ifndef _KERNEL_ARCH_X86_64_CPU_H
#define _KERNEL_ARCH_X86_64_CPU_H


#include <arch_thread_types.h>


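// Default x87 control word and SSE MXCSR values; they are reloaded after
// every context switch (see x86_context_switch() below).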
extern uint16 gFPUControlDefault;
extern uint32 gFPUMXCSRDefault;


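// Read the 64-bit model-specific register selected by `msr`. RDMSR returns
// the value split across EDX:EAX, so the two halves are recombined here.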
static inline uint64_t
x86_read_msr(uint32_t msr)
{
	uint64_t high, low;
	asm volatile("rdmsr" : "=a" (low), "=d" (high) : "c" (msr));
	return (high << 32) | low;
}


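// Write `value` to the model-specific register selected by `msr`. WRMSR takes
// the low half of the value in EAX and the high half in EDX.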
static inline void
x86_write_msr(uint32_t msr, uint64_t value)
{
	asm volatile("wrmsr" : : "a" (value), "d" (value >> 32), "c" (msr));
}


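// Switch from oldState to newState: the current stack pointer and a resume
// address (label "1:" below) are saved into oldState, then newState's stack
// pointer and resume address are loaded and jumped to. Registers that must
// survive the switch are either pushed explicitly (rbp) or listed as clobbers
// so the compiler saves them; once a thread is resumed here, its SSE/x87
// control state is reset to the kernel defaults.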
static inline void
x86_context_switch(arch_thread* oldState, arch_thread* newState)
{
	asm volatile(
		"pushq %%rbp;"				// save the frame pointer on the old stack
		"movq $1f, %c[rip](%0);"	// record the resume address in oldState
		"movq %%rsp, %c[rsp](%0);"	// save the stack pointer in oldState
		"movq %c[rsp](%1), %%rsp;"	// load newState's stack pointer
		"jmp *%c[rip](%1);"			// continue where newState left off
		"1:"						// this thread resumes here when switched back to
		"popq %%rbp;"				// restore the frame pointer
		:
		: "a" (oldState), "d" (newState),
			[rsp] "i" (offsetof(arch_thread, current_stack)),
			[rip] "i" (offsetof(arch_thread, instruction_pointer))
		: "rbx", "rcx", "rdi", "rsi", "r8", "r9", "r10", "r11", "r12", "r13",
			"r14", "r15", "xmm0", "xmm1", "xmm2", "xmm3", "xmm4", "xmm5",
			"xmm6", "xmm7", "xmm8", "xmm9", "xmm10", "xmm11", "xmm12", "xmm13",
			"xmm14", "xmm15", "memory");
	// Reset the SSE and x87 control registers to the defaults after resuming.
	asm volatile("ldmxcsr %0" : : "m" (gFPUMXCSRDefault));
	asm volatile("fldcw %0" : : "m" (gFPUControlDefault));
}


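// Load a new page directory root (physical address) into CR3, switching the
// active address space and flushing non-global TLB entries.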
static inline void
x86_swap_pgdir(uintptr_t root)
{
	asm volatile("movq %0, %%cr3" : : "r" (root));
}


#endif	// _KERNEL_ARCH_X86_64_CPU_H