// Allocator implementations: arena allocator, RB-tree-backed global allocator,
// and a mutex-protected free-list allocator.
// ::Allocator::Globals::

// Global free-list allocator instance backing FLMemAlloc/FLMemFree.
FLAlloc FL_ALLOC = {0};

// Global RB-tree/hash-table allocator (see ::Allocator::GlobalAlloc::).
Allocator ALLOC = {0};

// Shared sentinel terminating every free list (read-only after zero-init).
read_only FLNode FL_NIL_NODE = {0};

// Initial capacity used when the global free list is lazily initialized.
constexpr usize FL_GLOBAL_SIZE = MB(32);

// Set once GlobalFreeListInit has run; checked in FreeListAllocAlign.
static b32 FL_GLOBAL_INIT = false;
|
// ::Allocator::Util::Header::
|
|
|
|
// Padding needed at `ptr` so that a `header_size` header fits in front of an
// address aligned to `alignment`. `alignment` must be a power of two.
static inline usize CalcPaddingWithHeader(uintptr ptr, uintptr alignment, usize header_size)
{
    Assert(IsPow2(alignment), "Alignment provided to CalcPaddingWithHeader is not a power of two");

    uintptr pad = CalcPadding(ptr, alignment);
    uintptr header = (uintptr)header_size;

    // If the alignment slack cannot hold the header, widen the padding by
    // whole alignment steps until the remaining header bytes fit.
    if (pad < header)
    {
        uintptr deficit = header - pad;
        uintptr steps = deficit / alignment;

        if ((deficit & (alignment - 1)) != 0)
            steps += 1;

        pad += steps * alignment;
    }

    return (usize)pad;
}
|
|
|
|
// Distance from `ptr` up to the next multiple of `alignment` (0 when already
// aligned). `alignment` must be a power of two.
static inline usize CalcPadding(uintptr ptr, uintptr alignment)
{
    Assert(IsPow2(alignment), "CalcPadding failure: IsPow2 failed");

    uintptr mask = alignment - 1;
    uintptr remainder = ptr & mask;

    return (usize)(remainder == 0 ? 0 : alignment - remainder);
}
|
|
|
|
// ::Allocator::Arena::Start::
|
|
|
|
// Carve an Arena out of `buffer` (`size` bytes total): the Arena header sits
// at the start and the usable region follows it.
static Arena *ArenaInit(rawptr buffer, usize size)
{
    Arena *arena = (Arena *)buffer;
    buffer = PtrAdd(buffer, ARENA_HEADER_SIZE);

    arena->buffer = buffer;
    // BUGFIX: usable capacity must exclude the header. Previously length was
    // set to the full `size`, letting ArenaAllocAlign hand out addresses up to
    // ARENA_HEADER_SIZE bytes past the end of the underlying allocation.
    arena->length = size - ARENA_HEADER_SIZE;
    arena->pos = 0;

    return arena;
}
|
|
|
|
// Allocate zeroed backing memory and initialize an arena inside it.
static Arena *ArenaCreate(usize size)
{
    rawptr backing = MemAllocZeroed(size);

    return ArenaInit(backing, size);
}
|
|
|
|
// Same as ArenaCreate, but records the caller's line number for OOM reporting.
static Arena *ArenaCreateDebug(usize size, u32 init_line_no)
{
    rawptr backing = MemAllocZeroed(size);

    return ArenaInitDebug(backing, size, init_line_no);
}
|
|
|
|
// Bump-allocate `size` bytes from the arena, aligned to `align`.
// Asserts (fatal) when the arena cannot satisfy the request.
static rawptr ArenaAllocAlign(Arena *arena, usize size, usize align)
{
    rawptr ptr = NULL;

    // Align the current cursor address, then convert back to an offset
    // relative to the start of the arena's buffer.
    uintptr curr_ptr = (uintptr)arena->buffer + (uintptr)arena->pos;
    uintptr offset = AlignPow2(curr_ptr, align);
    offset -= (uintptr)arena->buffer;

    if (offset+size <= arena->length)
    {
        ptr = &arena->buffer[offset];
        arena->pos = offset+size;
    }
    else
    {
        // NOTE(review): init_line_no is only populated by the Debug variants
        // (ArenaInitDebug/ArenaCreateDebug); for other arenas this prints 0.
        Printfln("Out of memory: %d", arena->init_line_no);
        Assert(0, "Memory Failure");
    }

    return ptr;
}
|
|
|
|
// Convenience wrapper: arena allocation with the default alignment.
static rawptr ArenaAlloc(Arena *arena, usize size)
{
    rawptr mem = ArenaAllocAlign(arena, size, DEFAULT_ALIGNMENT);
    return mem;
}
|
|
|
|
// Reset the arena cursor; all previously handed-out memory becomes reusable.
// Does not release or zero the backing allocation.
static void ArenaFree(Arena *arena)
{
    arena->pos = 0;
}
|
|
|
|
// Scrub only the bytes that were actually handed out, then reset the cursor.
static void ArenaFreeZeroed(Arena *arena)
{
    MemZero(arena->buffer, arena->pos);
    arena->pos = 0;
}
|
|
|
|
// Release the arena's backing allocation (header and buffer share one block).
// NOTE(review): arena->length is passed as the allocation size — confirm this
// matches what was originally allocated (ArenaCreate allocates `size` bytes
// total, header included).
static void DeallocArena(Arena *arena)
{
    MemFree(arena, arena->length);
}
|
|
|
|
// Standard arena init plus the caller's line number, reported on OOM.
static Arena * ArenaInitDebug(rawptr buffer, usize size, u32 init_line_no)
{
    Arena *result = ArenaInit(buffer, size);
    result->init_line_no = init_line_no;

    return result;
}
|
|
|
|
// ::Allocator::Arena::End::
|
|
|
|
|
|
|
|
// ::Allocator::GlobalAlloc::Start::
|
|
|
|
// One-time setup of the global RB-tree allocator: reserves the initial heap,
// then builds the size->free-block tree and the ptr->size hash table.
static void InitAllocator(usize init_size)
{
    ALLOC.grow_size = init_size;
    ALLOC.buffer = MemAllocZeroed(init_size);
    ALLOC.size = init_size;
    ALLOC.free_size = init_size;

    // Bookkeeping structures live in the free-list allocator, not in ALLOC's
    // own heap.
    ALLOC.hash_table = FLMemAlloc(sizeof(HashTable));
    HashTableInit(ALLOC.hash_table, 32);

    ALLOC.tree = FLMemAlloc(sizeof(RBTree));
    RBTreeInit(ALLOC.tree);

    // The whole buffer starts out as a single free block.
    RBTreeInsert(ALLOC.tree, init_size, ALLOC.buffer);
}
|
|
|
|
// Release the global allocator's heap.
// NOTE(review): ALLOC.hash_table and ALLOC.tree (FLMemAlloc'd in
// InitAllocator) are not freed here — confirm whether that is intentional.
static void DeinitAlloc()
{
    MemFree(ALLOC.buffer, ALLOC.size);
}
|
|
|
|
// Default-aligned allocation from the global allocator.
static rawptr Alloc(usize size)
{
    rawptr mem = AllocAlign(size, DEFAULT_ALIGNMENT);
    return mem;
}
|
|
|
|
// Allocate `size` bytes aligned to `alignment` from the global allocator by
// splitting the best-fitting free block tracked in ALLOC.tree.
static rawptr AllocAlign(usize size, usize alignment)
{
    if (size == 0) return NULL;

    RBNode *node = P_RB_NIL;

    // Search for a free block big enough for the request plus worst-case
    // alignment padding; grow the heap and retry when none exists.
    if (!RBTreeSearchNearest(ALLOC.tree, size + alignment, &node))
    {
        // BUGFIX: grow by the padded request, so the retry is guaranteed to
        // find a block; previously growing by `size` alone could leave the
        // second search failing with `node` still NIL.
        AllocGrow(size + alignment);
        if (!RBTreeSearchNearest(ALLOC.tree, size + alignment, &node))
            Assert(0, "AllocAlign: no free block after grow");
    }

    u64 alloc_size = node->key;
    rawptr free_alloc = node->bucket.last->data;
    RBTreeDelete(ALLOC.tree, alloc_size, free_alloc);

    // Split the block: [free_alloc, free_alloc + padding + size) is handed
    // out, the remainder goes back into the tree.
    usize padding = CalcPadding(uintptr(free_alloc), alignment);
    uintptr new_addr = uintptr(free_alloc) + size + padding;
    RBTreeInsert(ALLOC.tree, alloc_size - size - padding, rawptr(new_addr));

    // Record block start and total footprint for a future Free().
    // TODO: Free() must map the aligned pointer back to free_alloc.
    HashTablePushRawptrU64(ALLOC.hash_table, free_alloc, size + padding);
    ALLOC.free_size -= size + padding; // keep bookkeeping in sync

    // BUGFIX: previously returned free_alloc, which ignores the computed
    // padding and is therefore not guaranteed to satisfy `alignment`.
    return rawptr(uintptr(free_alloc) + padding);
}
|
|
|
|
// TODO: finish allocator

// need an idea

// Stub: returning memory to the global allocator is not implemented yet.
// Intended shape: look `ptr` up in ALLOC.hash_table to recover the block's
// footprint, then reinsert the block into ALLOC.tree (coalescing neighbours).
static void Free(rawptr ptr)
{
    if (ptr == NULL) return;

}
|
|
|
|
// Extend the global heap by at least `size` bytes and register the new
// region as one free block in the tree.
static void AllocGrow(usize size)
{
    // Grow by the configured step, or step + request when the request is large.
    usize grow_size = size < ALLOC.grow_size ? ALLOC.grow_size : ALLOC.grow_size + size;
    // NOTE(review): MemRealloc's result is discarded. If it can move the
    // allocation, ALLOC.buffer and every pointer stored in ALLOC.tree /
    // ALLOC.hash_table become dangling — confirm MemRealloc grows in place.
    MemRealloc(ALLOC.buffer, ALLOC.size, ALLOC.size + grow_size);

    RBTreeInsert(ALLOC.tree, grow_size, ALLOC.buffer + ALLOC.size); // TODO: check this if things fuck up it could be wrong

    ALLOC.size += grow_size;
    ALLOC.free_size += grow_size;
}
|
|
|
|
// ::Allocator::GlobalAlloc::End::
|
|
|
|
|
|
|
|
// ::Allocator::FreeList::Start::
|
|
|
|
// Bring up the process-wide free list, then publish readiness.
static void GlobalFreeListInit(usize size)
{
    FreeListInit(&FL_ALLOC, size);

    FL_GLOBAL_INIT = true;
}
|
|
|
|
// Allocate from the global free list with the default alignment.
static rawptr FLMemAlloc(usize size)
{
    rawptr mem = FreeListAlloc(&FL_ALLOC, size);
    return mem;
}
|
|
|
|
// Allocate from the global free list and clear the block before returning it.
static rawptr FLMemAllocZeroed(usize size)
{
    rawptr mem = FreeListAlloc(&FL_ALLOC, size);
    MemZero(mem, size);

    return mem;
}
|
|
|
|
// Return a block to the global free list.
static void FLMemFree(rawptr ptr)
{
    FreeListFree(&FL_ALLOC, ptr);
}
|
|
|
|
// Initialize a free-list allocator: a 16-slot list table with only slot 0
// populated, covering `size` bytes.
static void FreeListInit(FLAlloc *alloc, usize size)
{
    alloc->list_capacity = 16;
    alloc->list_count = 1;
    alloc->lists = MemAllocZeroed(sizeof(FreeList *) * 16);
    alloc->nil = &FL_NIL_NODE;
    alloc->grow_size = size;

    _FreeListInit(&alloc->lists[0], size);

    // Builds the initial single-node free list over the fresh region.
    FreeListFreeAll(alloc);
}
|
|
|
|
// Allocate one region that holds both the FreeList header and its data area.
static void _FreeListInit(FreeList **alloc, usize size)
{
    FreeList *fl = (FreeList *)MemAllocZeroed(size);

    fl->data = (rawptr)((uintptr)fl + (uintptr)sizeof(FreeList));
    fl->size = size;              // total allocation size, header included
    fl->used = sizeof(FreeList);  // the header itself counts as used

    *alloc = fl;
}
|
|
|
|
// Reset the allocator: release every overflow list and rebuild lists[0] as a
// single free node spanning its data region.
static void FreeListFreeAll(FLAlloc *alloc)
{
    TicketMutLock(&alloc->mut);

    if (alloc->list_count > 1)
    {
        for (u32 i = 1; i < alloc->list_count; i++)
        {
            MemFree(alloc->lists[i], alloc->lists[i]->size);
            alloc->lists[i] = NULL;
        }

        alloc->list_count = 1;
    }

    FLNode *node = (FLNode *)alloc->lists[0]->data;
    // BUGFIX: the usable region is the allocation minus the FreeList header
    // (data starts sizeof(FreeList) bytes in). Previously node->size was the
    // full allocation size, letting allocations run past the buffer's end.
    node->size = alloc->lists[0]->size - sizeof(FreeList);
    node->next = &FL_NIL_NODE;

    alloc->lists[0]->head = node;
    alloc->lists[0]->used = sizeof(FreeList);

    TicketMutUnlock(&alloc->mut);
}
|
|
|
|
// First-fit scan: return the first free node that can hold `size` bytes plus
// the padding needed to place a header in front of an aligned payload.
// Outputs the padding used and the node preceding the hit (FL_NIL_NODE when
// the hit is the head). Returns &FL_NIL_NODE when nothing fits.
static FLNode *FreeListSearch(FreeList *alloc, usize size, usize alignment, usize *out_padding, FLNode **prev_node)
{
    FLNode *node = alloc->head;
    FLNode *prev = &FL_NIL_NODE;

    usize padding = 0;

    while (node != &FL_NIL_NODE)
    {
        // NOTE(review): padding is computed for a sizeof(FLNode) header here,
        // but _FreeListAllocAlign writes an FLAllocHeader — confirm the two
        // structs have the same size, otherwise the padding math diverges.
        padding = CalcPaddingWithHeader((uintptr)node, (uintptr)alignment, sizeof(FLNode));
        usize required_size = size + padding;
        if (node->size >= required_size)
            break;

        prev = node;
        node = node->next;
    }

    if (out_padding)
        *out_padding = padding;

    if (prev_node)
        *prev_node = prev;

    return node;
}
|
|
|
|
/*
|
|
* NOT SAFE TO CALL OUTSIDE OF FreeListAlloc
|
|
*/
|
|
// Add one more FreeList region big enough for `alloc_size`, expanding the
// pointer table when needed. Caller must hold alloc->mut (see comment above).
static void FreeListGrow(FLAlloc *alloc, usize alloc_size)
{
    // BUGFIX: take the new list's index BEFORE bumping the count. Previously
    // the count was incremented first and then used as the index, skipping a
    // slot and leaving lists[list_count-1] == NULL for the caller
    // (FreeListAllocAlign) to dereference.
    u32 i = alloc->list_count;
    alloc->list_count += 1;

    if (i >= alloc->list_capacity)
    {
        u32 old_capacity = alloc->list_capacity;
        alloc->list_capacity += 16;
        rawptr new_mem = MemAlloc(sizeof(FreeList *) * alloc->list_capacity);
        // BUGFIX: copy the pointer table in bytes (previously only `i` bytes
        // were copied, truncating the table) and free the old table using the
        // size it was actually allocated with.
        MemCpy(new_mem, alloc->lists, sizeof(FreeList *) * i);
        MemFree(alloc->lists, sizeof(FreeList *) * old_capacity);
        alloc->lists = new_mem;
    }

    // Grow by the configured step, extended when the request is larger.
    usize grow_size = alloc->grow_size;
    if (alloc_size > grow_size)
        grow_size += alloc_size;

    _FreeListInit(&alloc->lists[i], grow_size);

    // The new region starts as one free node covering its whole data area.
    FLNode *node = (FLNode *)alloc->lists[i]->data;
    // BUGFIX: usable size excludes the FreeList header that precedes data.
    node->size = alloc->lists[i]->size - sizeof(FreeList);
    node->next = alloc->nil;

    alloc->lists[i]->head = node;
}
|
|
|
|
// Core allocation path for a single free list: first-fit search, optional
// block split, header write. Returns a pointer just past the FLAllocHeader.
// Asserts (fatal) when the list cannot satisfy the request.
static rawptr _FreeListAllocAlign(FreeList *alloc, usize size, usize alignment)
{
    if (size == 0) return NULL;

    usize padding = 0;
    FLNode *prev_node = &FL_NIL_NODE;

    FLAllocHeader *header;

    // A freed block must be able to hold an FLNode, so never allocate less.
    if (size < sizeof(FLNode))
        size = sizeof(FLNode);

    if (alignment < 8)
        alignment = 8;

    FLNode *node = FreeListSearch(alloc, size, alignment, &padding, &prev_node);

    if (node == &FL_NIL_NODE)
        Assert(0, "FreeListAllocAlign failed to allocate, oom");

    // `padding` includes room for the header; the bytes before the header are
    // pure alignment slack.
    // NOTE(review): FreeListSearch computed padding for sizeof(FLNode), not
    // sizeof(FLAllocHeader) — confirm both structs have the same size.
    usize alignment_padding = padding - sizeof(FLAllocHeader);
    usize required_space = size + padding;
    usize remaining = node->size - required_space;

    // Split the tail of the block off as a new free node.
    // NOTE(review): a remainder smaller than sizeof(FLNode) still has an
    // FLNode written into it, which can touch bytes past the block — verify.
    if (remaining > 0)
    {
        FLNode *new_node = (FLNode *)(((c8 *)node) + required_space);
        new_node->size = remaining;
        FreeListInsert(&alloc->head, node, new_node);
    }

    FreeListRemove(&alloc->head, prev_node, node);

    header = (FLAllocHeader *)(((c8 *)node) + alignment_padding);
    header->size = required_space;
    header->padding = alignment_padding;

    alloc->used += required_space;

    return (rawptr)(((c8 *)header) + sizeof(FLAllocHeader));
}
|
|
|
|
// Default-aligned free-list allocation.
static rawptr FreeListAlloc(FLAlloc *alloc, usize size)
{
    rawptr mem = FreeListAllocAlign(alloc, size, DEFAULT_ALIGNMENT);
    return mem;
}
|
|
|
|
// Mutex-protected allocation across all lists owned by `alloc`: lazily boots
// the global free list, picks the first list with enough slack, and grows
// when none qualifies.
static rawptr FreeListAllocAlign(FLAlloc *alloc, usize size, usize alignment)
{
    if (!FL_GLOBAL_INIT)
    {
        GlobalFreeListInit(FL_GLOBAL_SIZE);
    }

    TicketMutLock(&alloc->mut);

    // Pick the first list whose unused space can hold the request.
    // NOTE(review): this check ignores header and alignment padding, so the
    // chosen list can still fail inside _FreeListAllocAlign (which asserts).
    FreeList *fl = NULL;
    for (u32 i = 0; i < alloc->list_count; i++)
    {
        usize remaining = alloc->lists[i]->size - alloc->lists[i]->used;
        if (size < remaining)
        {
            fl = alloc->lists[i];
            break;
        }
    }

    // No list has room: add a new region sized for this request.
    if (fl == NULL)
    {
        FreeListGrow(alloc, size);
        fl = alloc->lists[alloc->list_count-1];
    }

    rawptr ptr = _FreeListAllocAlign(fl, size, alignment);

    TicketMutUnlock(&alloc->mut);

    return ptr;
}
|
|
|
|
// Return `ptr` (a pointer previously produced by _FreeListAllocAlign) to this
// list, keeping the free list sorted by address and coalescing neighbours.
static void _FreeListFree(FreeList *alloc, rawptr ptr)
{
    // Recover the header immediately before the payload and turn the block
    // back into a free node.
    // NOTE(review): when header->padding != 0 the node starts at the header,
    // not the original block start, and the size includes the padding twice —
    // confirm against _FreeListAllocAlign's layout.
    FLAllocHeader *header = cast(FLAllocHeader *, u8ptr(ptr) - sizeof(FLAllocHeader));
    FLNode *free_node = cast(FLNode *,header);
    free_node->size = header->size + header->padding;
    free_node->next = &FL_NIL_NODE;

    // Walk to the first node at a higher address so the list stays sorted.
    FLNode *prev_node = &FL_NIL_NODE;
    FLNode *node = alloc->head;
    while (node != &FL_NIL_NODE)
    {
        if (ptr < (rawptr)node)
            break;

        prev_node = node;
        node = node->next;
    }

    // BUGFIX: always insert the freed node. Previously the insert happened
    // only inside the loop, so freeing into an empty list (or at an address
    // above every existing node) silently leaked the block.
    FreeListInsert(&alloc->head, prev_node, free_node);

    alloc->used -= free_node->size;

    FreeListCoalescence(alloc, prev_node, free_node);
}
|
|
|
|
// Mutex-protected free: find which list owns the address and hand off to it.
// Pointers outside every list are silently ignored.
static void FreeListFree(FLAlloc *alloc, rawptr ptr)
{
    if (ptr == NULL) return;

    TicketMutLock(&alloc->mut);

    uintptr addr = uintptr(ptr);
    for (u32 i = 0; i < alloc->list_count; i++)
    {
        uintptr lo = uintptr(alloc->lists[i]->data);
        uintptr hi = lo + uintptr(alloc->lists[i]->size);

        if (addr >= lo && addr < hi)
        {
            _FreeListFree(alloc->lists[i], ptr);
            break;
        }
    }

    TicketMutUnlock(&alloc->mut);
}
|
|
|
|
// Merge `free_node` with its physical neighbours when they are adjacent in
// memory, shrinking the free list.
static void FreeListCoalescence(FreeList *alloc, FLNode *prev_node, FLNode *free_node)
{
    // Forward merge: free_node runs right up to its successor.
    if (free_node->next != &FL_NIL_NODE && (rawptr)(((c8 *)free_node) + free_node->size) == free_node->next)
    {
        free_node->size += free_node->next->size;
        FreeListRemove(&alloc->head, free_node, free_node->next);
    }

    // Backward merge: the preceding node runs right up to free_node.
    // BUGFIX: guard on prev_node itself being real (previously checked
    // prev_node->next, dereferencing the NIL sentinel's fields), and absorb
    // free_node->size (previously free_node->next->size, which added the size
    // of an unrelated node).
    if (prev_node != &FL_NIL_NODE && (rawptr)(((c8 *)prev_node) + prev_node->size) == free_node)
    {
        prev_node->size += free_node->size;
        FreeListRemove(&alloc->head, prev_node, free_node);
    }
}
|
|
|
|
// Unlink del_node; the NIL sentinel as prev_node means del_node is the head.
static void FreeListRemove(FLNode **head, FLNode *prev_node, FLNode *del_node)
{
    FLNode **link = (prev_node == &FL_NIL_NODE) ? head : &prev_node->next;
    *link = del_node->next;
}
|
|
|
|
// Insert new_node after prev_node; the NIL sentinel means "at the head".
static void FreeListInsert(FLNode **head, FLNode *prev_node, FLNode *new_node)
{
    if (prev_node == &FL_NIL_NODE)
    {
        // BUGFIX: when the list was non-empty, the old code chained new_node
        // to the head but never updated *head, losing the insertion entirely.
        // (With an empty list, *head is the NIL sentinel, so this also leaves
        // new_node correctly terminated.)
        new_node->next = *head;
        *head = new_node;
    }
    else
    {
        new_node->next = prev_node->next;
        prev_node->next = new_node;
    }
}
|
|
|
|
// ::Allocator::FreeList::End::
|