xref: /haiku/src/system/boot/loader/heap.cpp (revision 545ea51c99141d5093cb44e48055fef2c9c0bbc8)
1 /*
2  * Copyright 2003-2013, Axel Dörfler, axeld@pinc-software.de.
3  * Distributed under the terms of the MIT License.
4  */
5 
6 
7 #include <boot/heap.h>
8 
9 #ifdef HEAP_TEST
10 #	include <stdio.h>
11 #endif
12 #include <stdlib.h>
13 #include <string.h>
14 
15 #include <algorithm>
16 
17 #include <boot/platform.h>
18 #include <util/OpenHashTable.h>
19 #include <util/SplayTree.h>
20 
21 
22 //#define TRACE_HEAP
23 #ifdef TRACE_HEAP
24 #	define TRACE(format...)	dprintf(format)
25 #else
26 #	define TRACE(format...)	do { } while (false)
27 #endif
28 
29 
/*!	This is a very simple malloc()/free() implementation - it only
	manages a free list.
	After heap_init() is called, all free memory is contained in one
	big chunk, the only entry in the free list (which is a singly
	linked list).
	When memory is allocated, the smallest free chunk that contains
	the requested size is split (or taken as a whole if it can't be
	split any further), and its lower half will be removed from the
	free list.
	The free list is ordered by size, starting with the smallest
	free chunk available. When a chunk is freed, it will be joined
	with its predecessor or successor, if possible.
	To ease list handling, the list anchor itself is a free chunk with
	size 0 that can't be allocated.
*/
45 
#define DEBUG_ALLOCATIONS
	// if defined, free() performs sanity checks (clobbered chunk headers,
	// double frees) on every freed chunk
#define DEBUG_MAX_HEAP_USAGE
	// if defined, the maximum heap usage is determined and printed before
	// entering the kernel


const static size_t kAlignment = 8;
	// all memory chunks will be a multiple of this

const static size_t kDefaultHeapSize = (1024 + 512) * 1024;
	// default initial heap size, unless overridden by platform loader
const static size_t kLargeAllocationThreshold = 128 * 1024;
	// allocations of this size or larger are allocated separately
60 
61 
/*!	Header that precedes every heap chunk, free or allocated.
	The union with fAlignment pads the header to kAlignment bytes, so the
	payload that follows it stays properly aligned.
*/
class Chunk {
public:
	// Returns the complete chunk size in bytes, header included.
	size_t CompleteSize() const
	{
		return fSize;
	}

protected:
	union {
		size_t	fSize;
			// complete size of the chunk, including this header
		char	fAlignment[kAlignment];
			// pads the header to the allocation alignment
	};
};
75 
76 
77 class FreeChunk;
78 
79 
/*!	Link data embedded in a free chunk: the splay tree link plus a "next"
	pointer that the iteratable tree uses for in-order traversal.
	While a chunk is allocated this area overlaps the user payload.
*/
struct FreeChunkData : SplayTreeLink<FreeChunk> {

	// Next free chunk in the tree's iteration order, or NULL.
	FreeChunk* Next() const
	{
		return fNext;
	}

	// Address of the next pointer; hooked up by the iteratable tree.
	FreeChunk** NextLink()
	{
		return &fNext;
	}

protected:
	FreeChunk*	fNext;
};
95 
96 
/*!	A chunk in the free chunk tree.
	Only the Chunk header stays valid while the chunk is allocated; the
	FreeChunkData part is reused as payload space (which is why malloc()
	never hands out chunks smaller than sizeof(FreeChunkData)).
*/
class FreeChunk : public Chunk, public FreeChunkData {
public:
			void				SetTo(size_t size);

			size_t				Size() const;

			FreeChunk*			Split(size_t splitSize);
			bool				IsTouching(FreeChunk* link);
			FreeChunk*			Join(FreeChunk* link);

			void*				AllocatedAddress() const;
	static	FreeChunk*			SetToAllocated(void* allocated);
};
110 
111 
112 struct FreeChunkKey {
FreeChunkKeyFreeChunkKey113 	FreeChunkKey(size_t size)
114 		:
115 		fSize(size),
116 		fChunk(NULL)
117 	{
118 	}
119 
FreeChunkKeyFreeChunkKey120 	FreeChunkKey(const FreeChunk* chunk)
121 		:
122 		fSize(chunk->Size()),
123 		fChunk(chunk)
124 	{
125 	}
126 
CompareFreeChunkKey127 	int Compare(const FreeChunk* chunk) const
128 	{
129 		size_t chunkSize = chunk->Size();
130 		if (chunkSize != fSize)
131 			return fSize < chunkSize ? -1 : 1;
132 
133 		if (fChunk == chunk)
134 			return 0;
135 		return fChunk < chunk ? -1 : 1;
136 	}
137 
138 private:
139 	size_t				fSize;
140 	const FreeChunk*	fChunk;
141 };
142 
143 
/*!	Glue that maps FreeChunk nodes onto the IteratableSplayTree template:
	key extraction, link access, and comparison.
*/
struct FreeChunkTreeDefinition {
	typedef FreeChunkKey	KeyType;
	typedef FreeChunk		NodeType;

	static FreeChunkKey GetKey(const FreeChunk* node)
	{
		return FreeChunkKey(node);
	}

	static SplayTreeLink<FreeChunk>* GetLink(FreeChunk* node)
	{
		// FreeChunk derives from SplayTreeLink via FreeChunkData
		return node;
	}

	static int Compare(const FreeChunkKey& key, const FreeChunk* node)
	{
		return key.Compare(node);
	}

	static FreeChunk** GetListLink(FreeChunk* node)
	{
		// used by the iteratable tree to maintain its in-order list
		return node->NextLink();
	}
};
168 
169 
// All free chunks, sorted by (usable size, address), with in-order iteration.
typedef IteratableSplayTree<FreeChunkTreeDefinition> FreeChunkTree;
171 
172 
/*!	Bookkeeping for an allocation that is served by its own dedicated
	platform memory region rather than from the heap proper (see
	kLargeAllocationThreshold).
*/
struct LargeAllocation {
	LargeAllocation()
	{
		fAddress = NULL;
		fSize = 0;
	}

	// Allocates a dedicated region of at least \a size bytes.
	// Returns B_OK, or the negative error code from the platform call.
	status_t Allocate(size_t size)
	{
		ssize_t actualSize = platform_allocate_heap_region(size, &fAddress);
		if (actualSize < 0)
			return actualSize;

		// the platform may have rounded the size up
		fSize = actualSize;
		return B_OK;
	}

	// Returns the region to the platform; does not delete this object.
	void Free()
	{
		platform_free_heap_region(fAddress, fSize);
	}

	void* Address() const
	{
		return fAddress;
	}

	size_t Size() const
	{
		return fSize;
	}

	// Intrusive hash table link (see LargeAllocationHashDefinition).
	LargeAllocation*& HashNext()
	{
		return fHashNext;
	}

private:
	void*				fAddress;
	size_t				fSize;
	LargeAllocation*	fHashNext;
};
215 
216 
/*!	Hash table glue for looking up LargeAllocations by start address.
*/
struct LargeAllocationHashDefinition {
	typedef void*			KeyType;
	typedef	LargeAllocation	ValueType;

	size_t HashKey(void* key) const
	{
		// scale by kAlignment; allocation addresses are presumably at
		// least kAlignment-aligned, so this drops always-zero bits
		return size_t(key) / kAlignment;
	}

	size_t Hash(LargeAllocation* value) const
	{
		return HashKey(value->Address());
	}

	bool Compare(void* key, LargeAllocation* value) const
	{
		return value->Address() == key;
	}

	LargeAllocation*& GetLink(LargeAllocation* value) const
	{
		return value->HashNext();
	}
};
241 
242 
typedef BOpenHashTable<LargeAllocationHashDefinition> LargeAllocationHashTable;


static void* sHeapBase;
	// start of the heap region; NULL while the heap is uninitialized
static void* sHeapEnd;
	// end (exclusive) of the heap region
static size_t sMaxHeapSize, sAvailable, sMaxHeapUsage;
	// total heap size, bytes currently free, maximum bytes ever in use
static FreeChunkTree sFreeChunkTree;
	// all free chunks, sorted by size and address

static LargeAllocationHashTable sLargeAllocations;
	// allocations served by dedicated platform regions, keyed by address
252 
253 
254 static inline size_t
align(size_t size)255 align(size_t size)
256 {
257 	return (size + kAlignment - 1) & ~(kAlignment - 1);
258 }
259 
260 
261 static void*
malloc_large(size_t size)262 malloc_large(size_t size)
263 {
264 	LargeAllocation* allocation = new(std::nothrow) LargeAllocation;
265 	if (allocation == NULL) {
266 		dprintf("malloc_large(): Out of memory!\n");
267 		return NULL;
268 	}
269 
270 	if (allocation->Allocate(size) != B_OK) {
271 		dprintf("malloc_large(): Out of memory!\n");
272 		delete allocation;
273 		return NULL;
274 	}
275 
276 	sLargeAllocations.InsertUnchecked(allocation);
277 	TRACE("malloc_large(%lu) -> %p\n", size, allocation->Address());
278 	return allocation->Address();
279 }
280 
281 
/*!	Frees an allocation made by malloc_large().
	Panics if \a address is not a registered large allocation.
*/
static void
free_large(void* address)
{
	LargeAllocation* allocation = sLargeAllocations.Lookup(address);
	if (allocation == NULL)
		panic("free_large(%p): unknown allocation!\n", address);

	sLargeAllocations.RemoveUnchecked(allocation);
	allocation->Free();
	delete allocation;
}
293 
294 
// Initializes this chunk to span \a size bytes (header included) and
// clears its iteration link.
void
FreeChunk::SetTo(size_t size)
{
	fSize = size;
	fNext = NULL;
}
301 
302 
/*!	Returns the amount of bytes that can be allocated
	in this chunk, i.e. the complete size minus the Chunk header that
	precedes the payload.
*/
size_t
FreeChunk::Size() const
{
	return (addr_t)this + fSize - (addr_t)AllocatedAddress();
}
311 
312 
/*!	Splits the upper half at the requested location and returns it. This chunk
	will no longer be a valid FreeChunk object; only its fSize will be valid.
	\param splitSize number of payload bytes to keep in this chunk
		(rounded up to kAlignment).
 */
FreeChunk*
FreeChunk::Split(size_t splitSize)
{
	splitSize = align(splitSize);

	// the new chunk begins right after this chunk's first splitSize
	// payload bytes
	FreeChunk* chunk = (FreeChunk*)((addr_t)AllocatedAddress() + splitSize);
	size_t newSize = (addr_t)chunk - (addr_t)this;
	chunk->fSize = fSize - newSize;
	chunk->fNext = NULL;

	fSize = newSize;

	return chunk;
}
330 
331 
332 /*!	Checks if the specified chunk touches this chunk, so
333 	that they could be joined.
334 */
335 bool
IsTouching(FreeChunk * chunk)336 FreeChunk::IsTouching(FreeChunk* chunk)
337 {
338 	return chunk
339 		&& (((uint8*)this + fSize == (uint8*)chunk)
340 			|| (uint8*)chunk + chunk->fSize == (uint8*)this);
341 }
342 
343 
344 /*!	Joins the chunk to this chunk and returns the pointer
345 	to the new chunk - which will either be one of the
346 	two chunks.
347 	Note, the chunks must be joinable, or else this method
348 	doesn't work correctly. Use FreeChunk::IsTouching()
349 	to check if this method can be applied.
350 */
351 FreeChunk*
Join(FreeChunk * chunk)352 FreeChunk::Join(FreeChunk* chunk)
353 {
354 	if (chunk < this) {
355 		chunk->fSize += fSize;
356 		chunk->fNext = fNext;
357 
358 		return chunk;
359 	}
360 
361 	fSize += chunk->fSize;
362 	fNext = chunk->fNext;
363 
364 	return this;
365 }
366 
367 
// Returns the address handed out to the user for this chunk: the first
// byte after the Chunk header, where the FreeChunkData part begins.
void*
FreeChunk::AllocatedAddress() const
{
	return (void*)static_cast<const FreeChunkData*>(this);
}
373 
374 
// Recovers the FreeChunk from a user address previously returned by
// AllocatedAddress() (the inverse mapping).
FreeChunk*
FreeChunk::SetToAllocated(void* allocated)
{
	return static_cast<FreeChunk*>((FreeChunkData*)allocated);
}
380 
381 
382 //	#pragma mark -
383 
384 
/*!	Releases everything the boot loader heap still holds: all large
	allocation regions and the heap region itself. Called right before
	control is handed to the kernel.
	\param args unused.
*/
void
heap_release(stage2_args* args)
{
	heap_print_statistics();

	// Clear(true) unlinks all table entries and returns them as a
	// singly linked list. The LargeAllocation objects are not deleted
	// here — NOTE(review): presumably they live on the loader heap that
	// is freed wholesale below; verify before changing this.
	LargeAllocation* allocation = sLargeAllocations.Clear(true);
	while (allocation != NULL) {
		LargeAllocation* next = allocation->HashNext();
		allocation->Free();
		allocation = next;
	}

	platform_free_heap_region(sHeapBase, (addr_t)sHeapEnd - (addr_t)sHeapBase);

	// reset the globals so accidental later use fails predictably
	sHeapBase = sHeapEnd = NULL;
	memset((void*)&sFreeChunkTree, 0, sizeof(sFreeChunkTree));
	memset((void*)&sLargeAllocations, 0, sizeof(sLargeAllocations));
}
403 
404 
/*!	Prints the usage statistics gathered under DEBUG_MAX_HEAP_USAGE;
	a no-op when that macro is not defined.
*/
void
heap_print_statistics()
{
#ifdef DEBUG_MAX_HEAP_USAGE
	dprintf("maximum boot loader heap usage: %zu, currently used: %zu\n",
		sMaxHeapUsage, sMaxHeapSize - sAvailable);
#endif
}
413 
414 
/*!	Sets up the boot loader heap: allocates the heap region from the
	platform and seeds the free chunk tree with one chunk spanning the
	whole region.
	\param args If args->heap_size is 0, it is first set to
		kDefaultHeapSize.
	\return B_OK on success, B_ERROR if the platform could not provide a
		heap region, B_NO_MEMORY if the large allocation table could not
		be initialized.
*/
status_t
heap_init(stage2_args* args)
{
	if (args->heap_size == 0)
		args->heap_size = kDefaultHeapSize;

	void* base;
	ssize_t size = platform_allocate_heap_region(args->heap_size, &base);
	if (size < 0)
		return B_ERROR;

	sHeapBase = base;
	sHeapEnd = (void*)((addr_t)base + size);
	sMaxHeapSize = (uint8*)sHeapEnd - (uint8*)sHeapBase;
		// the platform may have granted more (or less) than requested

	// declare the whole heap as one chunk, and add it
	// to the free list
	FreeChunk* chunk = (FreeChunk*)base;
	chunk->SetTo(sMaxHeapSize);
	sFreeChunkTree.Insert(chunk);

	sAvailable = chunk->Size();
#ifdef DEBUG_MAX_HEAP_USAGE
	sMaxHeapUsage = sMaxHeapSize - sAvailable;
#endif

	if (sLargeAllocations.Init(64) != B_OK)
		return B_NO_MEMORY;

	return B_OK;
}
446 
447 
#ifdef HEAP_TEST
/*!	Debug helper (HEAP_TEST builds only): prints every chunk in the free
	chunk tree in iteration order.
*/
void
dump_chunks(void)
{
	FreeChunk* chunk = sFreeChunkTree.FindMin();
	while (chunk != NULL) {
		// %zu: Size()/CompleteSize() are size_t; %ld was undefined
		// behavior on targets where long and size_t differ
		printf("\t%p: chunk size = %zu, end = %p, next = %p\n", chunk,
			chunk->Size(), (uint8*)chunk + chunk->CompleteSize(),
			chunk->Next());
		chunk = chunk->Next();
	}
}
#endif
461 
// Returns the number of free bytes in the heap proper (large allocations
// are not counted). Note the value is truncated to uint32 — the declared
// return type in the public header.
uint32
heap_available(void)
{
	return (uint32)sAvailable;
}
467 
468 
469 void*
malloc(size_t size)470 malloc(size_t size)
471 {
472 	if (sHeapBase == NULL || size == 0)
473 		return NULL;
474 
475 	// align the size requirement to a kAlignment bytes boundary
476 	if (size < sizeof(FreeChunkData))
477 		size = sizeof(FreeChunkData);
478 	size = align(size);
479 
480 	if (size >= kLargeAllocationThreshold)
481 		return malloc_large(size);
482 
483 	if (size > sAvailable) {
484 		dprintf("malloc(): Out of memory allocating a block of %ld bytes, "
485 			"only %ld left!\n", size, sAvailable);
486 		return NULL;
487 	}
488 
489 	FreeChunk* chunk = sFreeChunkTree.FindClosest(FreeChunkKey(size), true,
490 		true);
491 
492 	if (chunk == NULL) {
493 		// could not find a free chunk as large as needed
494 		dprintf("malloc(): Out of memory allocating a block of %ld bytes, "
495 			"no free chunks!\n", size);
496 		return NULL;
497 	}
498 
499 	sFreeChunkTree.Remove(chunk);
500 	sAvailable -= chunk->Size();
501 
502 	void* allocatedAddress = chunk->AllocatedAddress();
503 
504 	// If this chunk is bigger than the requested size and there's enough space
505 	// left over for a new chunk, we split it.
506 	if (chunk->Size() >= size + align(sizeof(FreeChunk))) {
507 		FreeChunk* freeChunk = chunk->Split(size);
508 		sFreeChunkTree.Insert(freeChunk);
509 		sAvailable += freeChunk->Size();
510 	}
511 
512 #ifdef DEBUG_MAX_HEAP_USAGE
513 	sMaxHeapUsage = std::max(sMaxHeapUsage, sMaxHeapSize - sAvailable);
514 #endif
515 
516 	TRACE("malloc(%lu) -> %p\n", size, allocatedAddress);
517 	return allocatedAddress;
518 }
519 
520 
/*!	Resizes the allocation at \a oldBuffer to \a newSize bytes.
	realloc(NULL, size) behaves like malloc(size); realloc(ptr, 0) frees
	\a ptr and returns NULL. If the old buffer is still large enough and
	not wastefully oversized, it is returned unchanged; otherwise the
	contents are copied into a new allocation and the old one is freed.
	Panics if \a oldBuffer is neither a heap chunk nor a registered large
	allocation.
*/
void*
realloc(void* oldBuffer, size_t newSize)
{
	if (newSize == 0) {
		TRACE("realloc(%p, %lu) -> NULL\n", oldBuffer, newSize);
		free(oldBuffer);
		return NULL;
	}

	// determine the usable size of the old allocation
	size_t oldSize = 0;
	if (oldBuffer != NULL) {
		if (oldBuffer >= sHeapBase && oldBuffer < sHeapEnd) {
			// address inside the heap range: a regular chunk
			FreeChunk* oldChunk = FreeChunk::SetToAllocated(oldBuffer);
			oldSize = oldChunk->Size();
		} else {
			// otherwise it must be a registered large allocation
			LargeAllocation* allocation = sLargeAllocations.Lookup(oldBuffer);
			if (allocation == NULL) {
				panic("realloc(%p, %zu): unknown allocation!\n", oldBuffer,
					newSize);
			}

			oldSize = allocation->Size();
		}

		// Check if the old buffer still fits, and if it makes sense to keep it.
		// (Reallocate when shrinking to less than a third of a >= 128 byte
		// buffer, to give the excess back.)
		if (oldSize >= newSize
			&& (oldSize < 128 || newSize > oldSize / 3)) {
			TRACE("realloc(%p, %lu) old buffer is large enough\n",
				oldBuffer, newSize);
			return oldBuffer;
		}
	}

	void* newBuffer = malloc(newSize);
	if (newBuffer == NULL)
		return NULL;

	if (oldBuffer != NULL) {
		// copy over the surviving part of the contents
		memcpy(newBuffer, oldBuffer, std::min(oldSize, newSize));
		free(oldBuffer);
	}

	TRACE("realloc(%p, %lu) -> %p\n", oldBuffer, newSize, newBuffer);
	return newBuffer;
}
566 
567 
568 void*
calloc(size_t numElements,size_t size)569 calloc(size_t numElements, size_t size)
570 {
571 	void* address = malloc(numElements * size);
572 	if (address != NULL)
573 		memset(address, 0, numElements * size);
574 
575 	return address;
576 }
577 
578 
/*!	Returns \a allocated to the heap, or to the platform if it was a
	large allocation. Passing NULL is a no-op.
	The freed chunk is joined with directly adjacent free chunks (at most
	one on each side) before being reinserted into the free chunk tree.
*/
void
free(void* allocated)
{
	if (allocated == NULL)
		return;

	TRACE("free(%p)\n", allocated);

	// anything outside the heap range was served by malloc_large()
	if (allocated < sHeapBase || allocated >= sHeapEnd) {
		free_large(allocated);
		return;
	}

	FreeChunk* freedChunk = FreeChunk::SetToAllocated(allocated);

#ifdef DEBUG_ALLOCATIONS
	// sanity check: the chunk cannot be larger than the total amount
	// currently allocated — otherwise its header was clobbered
	if (freedChunk->Size() > sMaxHeapSize - sAvailable) {
		panic("freed chunk %p clobbered (%#zx)!\n", freedChunk,
			freedChunk->Size());
	}
	{
		// walk all free chunks to detect corruption and double frees
		FreeChunk* chunk = sFreeChunkTree.FindMin();
		while (chunk) {
			if (chunk->Size() > sAvailable || freedChunk == chunk)
				panic("invalid chunk in free list (%p (%zu)), or double free\n",
					chunk, chunk->Size());
			chunk = chunk->Next();
		}
	}
#endif

	// try to join the new free chunk with an existing one
	// it may be joined with up to two chunks (one on each side)

	FreeChunk* chunk = sFreeChunkTree.FindMin();
	int32 joinCount = 0;

	while (chunk) {
		// remember the successor: chunk is unlinked if it gets joined
		FreeChunk* nextChunk = chunk->Next();

		if (chunk->IsTouching(freedChunk)) {
			// remove the neighbor before merging — joining changes its
			// size and therefore its position in the tree
			sFreeChunkTree.Remove(chunk);
			sAvailable -= chunk->Size();

			freedChunk = chunk->Join(freedChunk);

			if (++joinCount == 2)
				break;
		}

		chunk = nextChunk;
	}

	sFreeChunkTree.Insert(freedChunk);
	sAvailable += freedChunk->Size();
#ifdef DEBUG_MAX_HEAP_USAGE
	sMaxHeapUsage = std::max(sMaxHeapUsage, sMaxHeapSize - sAvailable);
#endif
}
638 
639