/*
 * Copyright 2014, Paweł Dziepak, pdziepak@quarnos.org.
 * Distributed under the terms of the MIT License.
 */
#ifndef _KERNEL_ARCH_X86_64_ATOMIC_H
#define _KERNEL_ARCH_X86_64_ATOMIC_H


// Memory barriers, mapped directly onto the x86 fence instructions:
// lfence orders loads, sfence orders stores, mfence orders both.
static inline void
memory_read_barrier_inline(void)
{
	asm volatile("lfence" : : : "memory");
}


static inline void
memory_write_barrier_inline(void)
{
	asm volatile("sfence" : : : "memory");
}


static inline void
memory_full_barrier_inline(void)
{
	asm volatile("mfence" : : : "memory");
}


#define memory_read_barrier		memory_read_barrier_inline
#define memory_write_barrier	memory_write_barrier_inline
#define memory_full_barrier		memory_full_barrier_inline
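

/*
 * Usage sketch (illustrative, not part of this header): a write barrier on
 * the producer side pairs with a read barrier on the consumer side. The
 * sData/sReady variables below are hypothetical.
 *
 *	static int32 sData;
 *	static int32 sReady;
 *
 *	// producer
 *	sData = 42;
 *	memory_write_barrier();	// publish sData before the flag
 *	sReady = 1;
 *
 *	// consumer
 *	if (sReady == 1) {
 *		memory_read_barrier();	// read the flag before reading sData
 *		int32 data = sData;
 *	}
 */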


// Release store: the write barrier keeps earlier writes from being
// reordered past the volatile store.
static inline void
atomic_set_inline(int32* value, int32 newValue)
{
	memory_write_barrier();
	*(volatile int32*)value = newValue;
}


// Unconditional atomic exchange; xchg with a memory operand is implicitly
// locked, so no explicit lock prefix is needed. Returns the previous value.
static inline int32
atomic_get_and_set_inline(int32* value, int32 newValue)
{
	asm volatile("xchg %0, (%1)"
		: "+r" (newValue)
		: "r" (value)
		: "memory");
	return newValue;
}
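

/*
 * Usage sketch (illustrative, not part of this header): a minimal spinlock
 * built on the unconditional exchange above. example_lock()/example_unlock()
 * are hypothetical helpers, not Haiku API.
 *
 *	static int32 sLock = 0;
 *
 *	static void
 *	example_lock(void)
 *	{
 *		// Atomically take the lock; spin while the previous value was 1.
 *		while (atomic_get_and_set(&sLock, 1) != 0)
 *			;
 *	}
 *
 *	static void
 *	example_unlock(void)
 *	{
 *		atomic_set(&sLock, 0);	// release store, see atomic_set_inline()
 *	}
 */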


// Compare-and-swap: stores newValue only if *value equals testAgainst.
// cmpxchg leaves the previous contents of *value in %eax either way, and
// that is what gets returned.
static inline int32
atomic_test_and_set_inline(int32* value, int32 newValue, int32 testAgainst)
{
	asm volatile("lock; cmpxchgl %2, (%3)"
		: "=a" (newValue)
		: "0" (testAgainst), "r" (newValue), "r" (value)
		: "memory");
	return newValue;
}
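

/*
 * Usage sketch (illustrative, not part of this header): the usual
 * compare-and-swap retry loop, here used to atomically set the lowest bit.
 * example_set_bit() is a hypothetical helper.
 *
 *	static void
 *	example_set_bit(int32* value)
 *	{
 *		int32 oldValue = atomic_get(value);
 *		while (true) {
 *			int32 current = atomic_test_and_set(value, oldValue | 1,
 *				oldValue);
 *			if (current == oldValue)
 *				break;	// the swap happened
 *			oldValue = current;	// someone else changed *value; retry
 *		}
 *	}
 */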


// Fetch-and-add via lock xadd; returns the value before the addition.
static inline int32
atomic_add_inline(int32* value, int32 newValue)
{
	asm volatile("lock; xaddl %0, (%1)"
		: "+r" (newValue)
		: "r" (value)
		: "memory");
	return newValue;
}
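

/*
 * Usage sketch (illustrative, not part of this header): because xadd hands
 * back the previous value, atomic_add() can drive a reference count. The
 * sRefCount variable and the helpers below are hypothetical.
 *
 *	static int32 sRefCount = 1;
 *
 *	static void
 *	example_release(void)
 *	{
 *		// atomic_add() returns the value before the addition, so the
 *		// thread that saw 1 dropped the last reference.
 *		if (atomic_add(&sRefCount, -1) == 1)
 *			example_free_object();	// hypothetical cleanup
 *	}
 */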


// Acquire load: the read barrier keeps later reads from being reordered
// before the volatile load.
static inline int32
atomic_get_inline(int32* value)
{
	int32 newValue = *(volatile int32*)value;
	memory_read_barrier();
	return newValue;
}


// 64-bit variants of the operations above.
static inline void
atomic_set64_inline(int64* value, int64 newValue)
{
	memory_write_barrier();
	*(volatile int64*)value = newValue;
}


static inline int64
atomic_get_and_set64_inline(int64* value, int64 newValue)
{
	asm volatile("xchgq %0, (%1)"
		: "+r" (newValue)
		: "r" (value)
		: "memory");
	return newValue;
}


static inline int64
atomic_test_and_set64_inline(int64* value, int64 newValue, int64 testAgainst)
{
	asm volatile("lock; cmpxchgq %2, (%3)"
		: "=a" (newValue)
		: "0" (testAgainst), "r" (newValue), "r" (value)
		: "memory");
	return newValue;
}


static inline int64
atomic_add64_inline(int64* value, int64 newValue)
{
	asm volatile("lock; xaddq %0, (%1)"
		: "+r" (newValue)
		: "r" (value)
		: "memory");
	return newValue;
}


static inline int64
atomic_get64_inline(int64* value)
{
	int64 newValue = *(volatile int64*)value;
	memory_read_barrier();
	return newValue;
}


// Map the generic names onto the inline implementations above. The
// atomic_test_and_set and atomic_add names are guarded so that an earlier
// definition takes precedence.
#define atomic_set				atomic_set_inline
#define atomic_get_and_set		atomic_get_and_set_inline
#ifndef atomic_test_and_set
#	define atomic_test_and_set	atomic_test_and_set_inline
#endif
#ifndef atomic_add
#	define atomic_add			atomic_add_inline
#endif
#define atomic_get				atomic_get_inline

#define atomic_set64			atomic_set64_inline
#define atomic_get_and_set64	atomic_get_and_set64_inline
#define atomic_test_and_set64	atomic_test_and_set64_inline
#define atomic_add64			atomic_add64_inline
#define atomic_get64			atomic_get64_inline


#endif	// _KERNEL_ARCH_X86_64_ATOMIC_H