/*
 * Copyright 2010, Ingo Weinhold <ingo_weinhold@gmx.de>.
 * Distributed under the terms of the MIT License.
 */
#ifndef MEMORY_MANAGER_H
#define MEMORY_MANAGER_H


#include <KernelExport.h>

#include <condition_variable.h>
#include <kernel.h>
#include <lock.h>
#include <util/DoublyLinkedList.h>
#include <util/OpenHashTable.h>


struct kernel_args;
struct ObjectCache;
struct VMArea;


#define SLAB_CHUNK_SIZE_SMALL	B_PAGE_SIZE
#define SLAB_CHUNK_SIZE_MEDIUM	(16 * B_PAGE_SIZE)
#define SLAB_CHUNK_SIZE_LARGE	(128 * B_PAGE_SIZE)
#define SLAB_AREA_SIZE			(2048 * B_PAGE_SIZE)
	// TODO: These sizes have been chosen with 4 KB pages in mind.
#define SLAB_AREA_STRUCT_OFFSET	B_PAGE_SIZE
	// The offset from the start of the area to the Area structure. This space
	// is not mapped and will trip code writing beyond the previous area's
	// bounds.

#define SLAB_META_CHUNKS_PER_AREA	(SLAB_AREA_SIZE / SLAB_CHUNK_SIZE_LARGE)
#define SLAB_SMALL_CHUNKS_PER_META_CHUNK	\
	(SLAB_CHUNK_SIZE_LARGE / SLAB_CHUNK_SIZE_SMALL)
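	// With the 4 KB pages assumed above, these evaluate to 4 KiB small,
	// 64 KiB medium, and 512 KiB large chunks in 8 MiB areas, i.e. 16 meta
	// chunks per area and 128 small chunks per meta chunk.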


class MemoryManager {
public:
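	// The slab allocator's page-level backend: kernel address space is
	// reserved in SLAB_AREA_SIZE areas, each subdivided into meta chunks that
	// in turn hand out equally sized chunks. Typical call pattern (sketch):
	// an ObjectCache obtains the backing pages for a new slab via Allocate()
	// and returns them with Free(); raw allocations that bypass an object
	// cache go through AllocateRaw() and FreeRawOrReturnCache() instead.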
	static	void				Init(kernel_args* args);
	static	void				InitPostArea();

	static	status_t			Allocate(ObjectCache* cache, uint32 flags,
									void*& _pages);
	static	void				Free(void* pages, uint32 flags);

	static	status_t			AllocateRaw(size_t size, uint32 flags,
									void*& _pages);
	static	ObjectCache*		FreeRawOrReturnCache(void* pages,
									uint32 flags);

	static	size_t				AcceptableChunkSize(size_t size);
	static	ObjectCache*		GetAllocationInfo(void* address,
									size_t& _size);
	static	ObjectCache*		CacheForAddress(void* address);

	static	bool				MaintenanceNeeded();
	static	void				PerformMaintenance();

private:
			struct Tracing;

			struct Area;

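			// The unit of allocation handed out to callers. While free, a
			// chunk is linked into its meta chunk's free list via `next`;
			// while allocated, `reference` stores a back reference to its
			// owner (see MemoryManager.cpp for the exact encoding).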
			struct Chunk {
				union {
					Chunk*		next;
					addr_t		reference;
				};
			};

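			// A SLAB_CHUNK_SIZE_LARGE sized slice of an area that serves
			// chunks of one uniform size: small, medium, or the whole meta
			// chunk as a single large chunk.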
			struct MetaChunk : DoublyLinkedListLinkImpl<MetaChunk> {
				size_t			chunkSize;
				addr_t			chunkBase;
				size_t			totalSize;
				uint16			chunkCount;
				uint16			usedChunkCount;
				uint16			firstFreeChunk;	// *some* free range
				uint16			lastFreeChunk;	// inclusive
				Chunk			chunks[SLAB_SMALL_CHUNKS_PER_META_CHUNK];
				Chunk*			freeChunks;

				Area*			GetArea() const;
			};

			friend class MetaChunk;
			typedef DoublyLinkedList<MetaChunk> MetaChunkList;

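			// Covers SLAB_AREA_SIZE bytes of kernel address space, split into
			// SLAB_META_CHUNKS_PER_AREA meta chunks. The management structure
			// itself lives SLAB_AREA_STRUCT_OFFSET bytes past the area's base
			// address (cf. BaseAddress() and _AreaForAddress()).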
			struct Area : DoublyLinkedListLinkImpl<Area> {
				Area*			next;
				VMArea*			vmArea;
				size_t			reserved_memory_for_mapping;
				uint16			usedMetaChunkCount;
				bool			fullyMapped;
				MetaChunk		metaChunks[SLAB_META_CHUNKS_PER_AREA];

				addr_t BaseAddress() const
				{
					return (addr_t)this - SLAB_AREA_STRUCT_OFFSET;
				}
			};

			typedef DoublyLinkedList<Area> AreaList;

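			// Hash table definition keyed by an area's base address. Since
			// areas are SLAB_AREA_SIZE aligned, dividing the address by
			// SLAB_AREA_SIZE yields a suitable hash value.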
			struct AreaHashDefinition {
				typedef addr_t		KeyType;
				typedef	Area		ValueType;

				size_t HashKey(addr_t key) const
				{
					return key / SLAB_AREA_SIZE;
				}

				size_t Hash(const Area* value) const
				{
					return HashKey(value->BaseAddress());
				}

				bool Compare(addr_t key, const Area* value) const
				{
					return key == value->BaseAddress();
				}

				Area*& GetLink(Area* value) const
				{
					return value->next;
				}
			};

			typedef BOpenHashTable<AreaHashDefinition> AreaTable;

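			// Presumably tracks a thread that is currently allocating a new
			// area, so that other allocation requests can wait on `condition`
			// rather than allocating areas concurrently (cf.
			// sAllocationEntryCanWait/sAllocationEntryDontWait and
			// MemoryManager.cpp).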
			struct AllocationEntry {
				ConditionVariable	condition;
				thread_id			thread;
			};

private:
	static	status_t			_AllocateChunks(size_t chunkSize,
									uint32 chunkCount, uint32 flags,
									MetaChunk*& _metaChunk, Chunk*& _chunk);
	static	bool				_GetChunks(MetaChunkList* metaChunkList,
									size_t chunkSize, uint32 chunkCount,
									MetaChunk*& _metaChunk, Chunk*& _chunk);
	static	bool				_GetChunk(MetaChunkList* metaChunkList,
									size_t chunkSize, MetaChunk*& _metaChunk,
									Chunk*& _chunk);
	static	void				_FreeChunk(Area* area, MetaChunk* metaChunk,
									Chunk* chunk, addr_t chunkAddress,
									bool alreadyUnmapped, uint32 flags);

	static	void				_PrepareMetaChunk(MetaChunk* metaChunk,
									size_t chunkSize);

	static	void				_AddArea(Area* area);
	static	status_t			_AllocateArea(uint32 flags, Area*& _area);
	static	void				_FreeArea(Area* area, bool areaRemoved,
									uint32 flags);

	static	status_t			_MapChunk(VMArea* vmArea, addr_t address,
									size_t size, size_t reserveAdditionalMemory,
									uint32 flags);
	static	status_t			_UnmapChunk(VMArea* vmArea, addr_t address,
									size_t size, uint32 flags);

	static	void				_UnmapFreeChunksEarly(Area* area);
	static	void				_ConvertEarlyArea(Area* area);

	static	void				_RequestMaintenance();

	static	addr_t				_AreaBaseAddressForAddress(addr_t address);
	static	Area*				_AreaForAddress(addr_t address);
	static	uint32				_ChunkIndexForAddress(
									const MetaChunk* metaChunk, addr_t address);
	static	addr_t				_ChunkAddress(const MetaChunk* metaChunk,
									const Chunk* chunk);
	static	bool				_IsChunkFree(const MetaChunk* metaChunk,
									const Chunk* chunk);
	static	bool				_IsChunkInFreeList(const MetaChunk* metaChunk,
									const Chunk* chunk);
	static	void				_CheckMetaChunk(MetaChunk* metaChunk);

	static	int					_DumpRawAllocations(int argc, char** argv);
	static	void				_PrintMetaChunkTableHeader(bool printChunks);
	static	void				_DumpMetaChunk(MetaChunk* metaChunk,
									bool printChunks, bool printHeader);
	static	int					_DumpMetaChunk(int argc, char** argv);
	static	void				_DumpMetaChunks(const char* name,
									MetaChunkList& metaChunkList,
									bool printChunks);
	static	int					_DumpMetaChunks(int argc, char** argv);
	static	int					_DumpArea(int argc, char** argv);
	static	int					_DumpAreas(int argc, char** argv);

private:
	static	const size_t		kAreaAdminSize
									= ROUNDUP(sizeof(Area), B_PAGE_SIZE);

	static	mutex				sLock;
	static	rw_lock				sAreaTableLock;
	static	kernel_args*		sKernelArgs;
	static	AreaTable			sAreaTable;
	static	Area*				sFreeAreas;
	static	int					sFreeAreaCount;
	static	MetaChunkList		sFreeCompleteMetaChunks;
	static	MetaChunkList		sFreeShortMetaChunks;
	static	MetaChunkList		sPartialMetaChunksSmall;
	static	MetaChunkList		sPartialMetaChunksMedium;
	static	AllocationEntry*	sAllocationEntryCanWait;
	static	AllocationEntry*	sAllocationEntryDontWait;
	static	bool				sMaintenanceNeeded;
};


/*static*/ inline bool
MemoryManager::MaintenanceNeeded()
{
	return sMaintenanceNeeded;
}


/*static*/ inline addr_t
MemoryManager::_AreaBaseAddressForAddress(addr_t address)
{
	return ROUNDDOWN((addr_t)address, SLAB_AREA_SIZE);
}


/*static*/ inline MemoryManager::Area*
MemoryManager::_AreaForAddress(addr_t address)
{
	return (Area*)(_AreaBaseAddressForAddress(address)
		+ SLAB_AREA_STRUCT_OFFSET);
}


/*static*/ inline uint32
MemoryManager::_ChunkIndexForAddress(const MetaChunk* metaChunk, addr_t address)
{
	return (address - metaChunk->chunkBase) / metaChunk->chunkSize;
}


/*static*/ inline addr_t
MemoryManager::_ChunkAddress(const MetaChunk* metaChunk, const Chunk* chunk)
{
	return metaChunk->chunkBase
		+ (chunk - metaChunk->chunks) * metaChunk->chunkSize;
}

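// A chunk is considered free when its `next` field either terminates the free
// list (NULL) or points at another Chunk within the same meta chunk, i.e. it
// is linked into the free list; allocated chunks store a `reference` value
// outside that range.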
/*static*/ inline bool
MemoryManager::_IsChunkFree(const MetaChunk* metaChunk, const Chunk* chunk)
{
	return chunk->next == NULL
		|| (chunk->next >= metaChunk->chunks
			&& chunk->next < metaChunk->chunks + metaChunk->chunkCount);
}


inline MemoryManager::Area*
MemoryManager::MetaChunk::GetArea() const
{
	return _AreaForAddress((addr_t)this);
}


#endif	// MEMORY_MANAGER_H