xref: /haiku/src/system/runtime_loader/heap.cpp (revision 2cad94c1c30b6223ad8c08710b26e071d32e9979)
1 /*
2  * Copyright 2003-2013, Axel Dörfler, axeld@pinc-software.de.
3  * Distributed under the terms of the MIT License.
4  */
5 
6 
7 #include "runtime_loader_private.h"
8 
9 #include <syscalls.h>
10 
11 #ifdef HEAP_TEST
12 #	include <stdio.h>
13 #endif
14 #include <stdlib.h>
15 #include <string.h>
16 
17 #include <algorithm>
18 
19 #include <util/SplayTree.h>
20 
21 
/*!	This is a very simple malloc()/free() implementation - it only
	manages a free list.
	After heap_init() is called, all free memory is contained in one
	big chunk, the only entry in the free list (which is a singly
	linked list).
	When memory is allocated, the smallest free chunk that contains
	the requested size is split (or taken as a whole if it can't be
	split any further), and its lower half will be removed from the
	free list.
	The free list is ordered by size, starting with the smallest
	free chunk available. When a chunk is freed, it will be joined
	with its predecessor or successor, if possible.
	To ease list handling, the list anchor itself is a free chunk with
	size 0 that can't be allocated.
*/
const static size_t kAlignment = 8;
	// all memory chunks will be a multiple of this

const static size_t kInitialHeapSize = 64 * 1024;
	// size of the first area, created by heap_init()
const static size_t kHeapGrowthAlignment = 32 * 1024;
	// granularity in which the heap is enlarged by grow_heap()
42 
43 
// Header that precedes every chunk, free or allocated. The union pads
// the header to kAlignment bytes, so the payload that starts right
// behind it stays kAlignment-aligned as well.
class Chunk {
public:
	// Total size of the chunk in bytes, including this header.
	size_t CompleteSize() const
	{
		return fSize;
	}

protected:
	union {
		size_t	fSize;
			// total chunk size, header included
		char	fAlignment[kAlignment];
			// unused; only forces sizeof(Chunk) == kAlignment
	};
};
57 
58 
59 class FreeChunk;
60 
61 
// Link data kept in the (otherwise unused) payload of a free chunk:
// the splay tree link plus a singly linked list pointer that the
// iteratable tree maintains (cf. FreeChunkTreeDefinition::GetListLink()).
struct FreeChunkData : SplayTreeLink<FreeChunk> {

	// Returns the next free chunk in the tree's list, or NULL.
	FreeChunk* Next() const
	{
		return fNext;
	}

	// Gives the tree write access to the list link.
	FreeChunk** NextLink()
	{
		return &fNext;
	}

protected:
	FreeChunk*	fNext;
};
77 
78 
// A free chunk: the Chunk header plus, stored in the unused payload,
// the tree/list links inherited from FreeChunkData.
class FreeChunk : public Chunk, public FreeChunkData {
public:
			// Initializes the chunk to cover `size' bytes, unlinked.
			void				SetTo(size_t size);

			// Bytes that can be handed out from this chunk, i.e.
			// CompleteSize() minus the header.
			size_t				Size() const;

			// Splits off and returns the part above `splitSize'
			// payload bytes; this chunk keeps the lower part.
			FreeChunk*			Split(size_t splitSize);
			// Whether `link' immediately precedes or follows this chunk.
			bool				IsTouching(FreeChunk* link);
			// Merges this chunk with the adjacent `link'; returns the
			// lower (surviving) chunk.
			FreeChunk*			Join(FreeChunk* link);

			// The address handed out to the user for this chunk.
			void*				AllocatedAddress() const;
			// Maps an address returned by AllocatedAddress() back to
			// its chunk.
	static	FreeChunk*			SetToAllocated(void* allocated);
};
92 
93 
94 struct FreeChunkKey {
95 	FreeChunkKey(size_t size)
96 		:
97 		fSize(size),
98 		fChunk(NULL)
99 	{
100 	}
101 
102 	FreeChunkKey(const FreeChunk* chunk)
103 		:
104 		fSize(chunk->Size()),
105 		fChunk(chunk)
106 	{
107 	}
108 
109 	int Compare(const FreeChunk* chunk) const
110 	{
111 		size_t chunkSize = chunk->Size();
112 		if (chunkSize != fSize)
113 			return fSize < chunkSize ? -1 : 1;
114 
115 		if (fChunk == chunk)
116 			return 0;
117 		return fChunk < chunk ? -1 : 1;
118 	}
119 
120 private:
121 	size_t				fSize;
122 	const FreeChunk*	fChunk;
123 };
124 
125 
// Glue between FreeChunk and the generic (Iteratable)SplayTree
// template: key extraction, link access, and key comparison.
struct FreeChunkTreeDefinition {
	typedef FreeChunkKey	KeyType;
	typedef FreeChunk		NodeType;

	static FreeChunkKey GetKey(const FreeChunk* node)
	{
		return FreeChunkKey(node);
	}

	static SplayTreeLink<FreeChunk>* GetLink(FreeChunk* node)
	{
		// the SplayTreeLink base is inherited via FreeChunkData
		return node;
	}

	static int Compare(const FreeChunkKey& key, const FreeChunk* node)
	{
		return key.Compare(node);
	}

	static FreeChunk** GetListLink(FreeChunk* node)
	{
		// used by the iteratable tree to maintain its linked list
		return node->NextLink();
	}
};
150 
151 
152 typedef IteratableSplayTree<FreeChunkTreeDefinition> FreeChunkTree;
153 
154 
// Sum of Size() over all free chunks, i.e. allocatable bytes
// (chunk headers excluded).
static size_t sAvailable;
// All free chunks, ordered by (size, address).
static FreeChunkTree sFreeChunkTree;
157 
158 
159 static inline size_t
160 align(size_t size, size_t alignment = kAlignment)
161 {
162 	return (size + alignment - 1) & ~(alignment - 1);
163 }
164 
165 
166 void
167 FreeChunk::SetTo(size_t size)
168 {
169 	fSize = size;
170 	fNext = NULL;
171 }
172 
173 
174 /*!	Returns the amount of bytes that can be allocated
175 	in this chunk.
176 */
177 size_t
178 FreeChunk::Size() const
179 {
180 	return (addr_t)this + fSize - (addr_t)AllocatedAddress();
181 }
182 
183 
184 /*!	Splits the upper half at the requested location and returns it. This chunk
185 	will no longer be a valid FreeChunk object; only its fSize will be valid.
186  */
187 FreeChunk*
188 FreeChunk::Split(size_t splitSize)
189 {
190 	splitSize = align(splitSize);
191 
192 	FreeChunk* chunk = (FreeChunk*)((addr_t)AllocatedAddress() + splitSize);
193 	size_t newSize = (addr_t)chunk - (addr_t)this;
194 	chunk->fSize = fSize - newSize;
195 	chunk->fNext = NULL;
196 
197 	fSize = newSize;
198 
199 	return chunk;
200 }
201 
202 
203 /*!	Checks if the specified chunk touches this chunk, so
204 	that they could be joined.
205 */
206 bool
207 FreeChunk::IsTouching(FreeChunk* chunk)
208 {
209 	return chunk
210 		&& (((uint8*)this + fSize == (uint8*)chunk)
211 			|| (uint8*)chunk + chunk->fSize == (uint8*)this);
212 }
213 
214 
215 /*!	Joins the chunk to this chunk and returns the pointer
216 	to the new chunk - which will either be one of the
217 	two chunks.
218 	Note, the chunks must be joinable, or else this method
219 	doesn't work correctly. Use FreeChunk::IsTouching()
220 	to check if this method can be applied.
221 */
222 FreeChunk*
223 FreeChunk::Join(FreeChunk* chunk)
224 {
225 	if (chunk < this) {
226 		chunk->fSize += fSize;
227 		chunk->fNext = fNext;
228 
229 		return chunk;
230 	}
231 
232 	fSize += chunk->fSize;
233 	fNext = chunk->fNext;
234 
235 	return this;
236 }
237 
238 
239 void*
240 FreeChunk::AllocatedAddress() const
241 {
242 	return (void*)static_cast<const FreeChunkData*>(this);
243 }
244 
245 
246 FreeChunk*
247 FreeChunk::SetToAllocated(void* allocated)
248 {
249 	return static_cast<FreeChunk*>((FreeChunkData*)allocated);
250 }
251 
252 
253 //	#pragma mark -
254 
255 
256 static status_t
257 add_area(size_t size)
258 {
259 	void* base;
260 	area_id area = _kern_create_area("rld heap", &base,
261 		B_RANDOMIZED_ANY_ADDRESS, size, B_NO_LOCK, B_READ_AREA | B_WRITE_AREA);
262 	if (area < 0)
263 		return area;
264 
265 	// declare the whole area as one chunk, and add it to the free tree
266 	FreeChunk* chunk = (FreeChunk*)base;
267 	chunk->SetTo(size);
268 	sFreeChunkTree.Insert(chunk);
269 
270 	sAvailable += chunk->Size();
271 	return B_OK;
272 }
273 
274 
275 static status_t
276 grow_heap(size_t bytes)
277 {
278 	return add_area(align(align(sizeof(Chunk)) + bytes, kHeapGrowthAlignment));
279 }
280 
281 
282 //	#pragma mark -
283 
284 
/*!	Creates the initial heap area. Must be called before the first
	malloc()/realloc().
*/
status_t
heap_init(void)
{
	return add_area(kInitialHeapSize);
}
290 
291 
#ifdef HEAP_TEST
/*!	Dumps all free chunks (in ascending size order) to stdout.
	Test-build only.
*/
void
dump_chunks(void)
{
	FreeChunk* chunk = sFreeChunkTree.FindMin();
	while (chunk != NULL) {
		// %zu is the correct format for size_t; the previous %ld was a
		// type mismatch on targets where size_t is not long
		printf("\t%p: chunk size = %zu, end = %p, next = %p\n", chunk,
			chunk->Size(), (uint8*)chunk + chunk->CompleteSize(),
			chunk->Next());
		chunk = chunk->Next();
	}
}
#endif
305 
306 
/*!	Allocates \a size bytes from the free chunk tree, growing the heap
	with a new area when necessary. Returns NULL for size 0 or when no
	memory could be obtained.
*/
void*
malloc(size_t size)
{
	if (size == 0)
		return NULL;

	// align the size requirement to a kAlignment bytes boundary
	// (a chunk's payload must at least hold the FreeChunkData links
	// once it is freed again)
	if (size < sizeof(FreeChunkData))
		size = sizeof(FreeChunkData);
	size = align(size);

	if (size > sAvailable) {
		// try to enlarge heap
		if (grow_heap(size) != B_OK)
			return NULL;
	}

	// look up a fitting chunk -- presumably the smallest chunk with
	// Size() >= size (NOTE(review): confirm FindClosest(key, true, true)
	// semantics against util/SplayTree.h)
	FreeChunkKey key(size);
	FreeChunk* chunk = sFreeChunkTree.FindClosest(key, true, true);
	if (chunk == NULL) {
		// could not find a free chunk as large as needed
		// (sAvailable was large enough, but only spread over smaller
		// chunks)
		if (grow_heap(size) != B_OK)
			return NULL;

		chunk = sFreeChunkTree.FindClosest(key, true, true);
		if (chunk == NULL) {
			TRACE(("no allocation chunk found after growing the heap\n"));
			return NULL;
		}
	}

	sFreeChunkTree.Remove(chunk);
	sAvailable -= chunk->Size();

	void* allocatedAddress = chunk->AllocatedAddress();

	// If this chunk is bigger than the requested size and there's enough space
	// left over for a new chunk, we split it.
	if (chunk->Size() >= size + align(sizeof(FreeChunk))) {
		FreeChunk* freeChunk = chunk->Split(size);
		sFreeChunkTree.Insert(freeChunk);
		sAvailable += freeChunk->Size();
	}

	TRACE(("malloc(%lu) -> %p\n", size, allocatedAddress));
	return allocatedAddress;
}
354 
355 
356 void*
357 realloc(void* oldBuffer, size_t newSize)
358 {
359 	if (newSize == 0) {
360 		TRACE(("realloc(%p, %lu) -> NULL\n", oldBuffer, newSize));
361 		free(oldBuffer);
362 		return NULL;
363 	}
364 
365 	size_t oldSize = 0;
366 	if (oldBuffer != NULL) {
367 		FreeChunk* oldChunk = FreeChunk::SetToAllocated(oldBuffer);
368 		oldSize = oldChunk->Size();
369 
370 		// Check if the old buffer still fits, and if it makes sense to keep it.
371 		if (oldSize >= newSize
372 			&& (oldSize < 128 || newSize > oldSize / 3)) {
373 			TRACE(("realloc(%p, %lu) old buffer is large enough\n",
374 				oldBuffer, newSize));
375 			return oldBuffer;
376 		}
377 	}
378 
379 	void* newBuffer = malloc(newSize);
380 	if (newBuffer == NULL)
381 		return NULL;
382 
383 	if (oldBuffer != NULL) {
384 		memcpy(newBuffer, oldBuffer, std::min(oldSize, newSize));
385 		free(oldBuffer);
386 	}
387 
388 	TRACE(("realloc(%p, %lu) -> %p\n", oldBuffer, newSize, newBuffer));
389 	return newBuffer;
390 }
391 
392 
/*!	Returns \a allocated to the free chunk tree, merging it with up to
	two adjacent free chunks (its direct predecessor and successor in
	memory). free(NULL) is a no-op.
*/
void
free(void* allocated)
{
	if (allocated == NULL)
		return;

	TRACE(("free(%p)\n", allocated));


	FreeChunk* freedChunk = FreeChunk::SetToAllocated(allocated);

	// try to join the new free chunk with an existing one
	// it may be joined with up to two chunks

	// walk all free chunks via the tree's linked list, smallest first
	FreeChunk* chunk = sFreeChunkTree.FindMin();
	int32 joinCount = 0;

	while (chunk) {
		// grab the successor first -- Remove() below presumably
		// invalidates this chunk's list link (NOTE(review): confirm
		// against util/SplayTree.h)
		FreeChunk* nextChunk = chunk->Next();

		if (chunk->IsTouching(freedChunk)) {
			// temporarily take the neighbor out of the tree; its size
			// (and thus its key) changes by the join
			sFreeChunkTree.Remove(chunk);
			sAvailable -= chunk->Size();

			freedChunk = chunk->Join(freedChunk);

			// at most one predecessor and one successor can touch
			if (++joinCount == 2)
				break;
		}

		chunk = nextChunk;
	}

	sFreeChunkTree.Insert(freedChunk);
	sAvailable += freedChunk->Size();
}
429