Updated memory allocator. Fix in printf printing pointers. Added aligned_alloc to libc module. Renamed MemoryArena -> ArenaAllocator. New temp allocator. @pool(), @scoped, @tscoped macros. Bump to 0.3.2.

This commit is contained in:
Christoffer Lerno
2022-08-01 15:25:26 +02:00
parent 272f134e78
commit 550bca79e9
15 changed files with 337 additions and 153 deletions

View File

@@ -6,7 +6,7 @@ module std::core::mem::allocator;
*/
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
{
MemoryArena* arena = (MemoryArena*)data;
ArenaAllocator* arena = (ArenaAllocator*)data;
switch (kind)
{
case CALLOC:
@@ -16,18 +16,18 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
alignment = alignment_for_allocation(alignment);
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
if (kind == AllocationKind.CALLOC) mem::set(mem, 0, size);
if (kind == AllocationKind.CALLOC) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
assert((uptr)old_pointer >= (uptr)arena.memory, "Pointer originates from a different allocator.");
if (size > arena.total) return AllocationFailure.OUT_OF_MEMORY!;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
if (size > arena.data.len) return AllocationFailure.CHUNK_TOO_LARGE!;
alignment = alignment_for_allocation(alignment);
usize* old_size_ptr = (usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
usize old_size = *old_size_ptr;
// Do last allocation and alignment match?
if (arena.memory + arena.used == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
if (&arena.data[arena.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
@@ -36,7 +36,7 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
return old_pointer;
}
usize new_used = arena.used + size - old_size;
if (new_used > arena.total) return AllocationFailure.OUT_OF_MEMORY!;
if (new_used > arena.data.len) return AllocationFailure.OUT_OF_MEMORY!;
arena.used = new_used;
*old_size_ptr = size;
return old_pointer;
@@ -44,19 +44,21 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
// Otherwise just allocate new memory.
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
mem::copy(mem, old_pointer, old_size);
mem::memcpy(mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return mem;
case FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)arena.memory, "Pointer originates from a different allocator.");
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == arena.memory + arena.used)
if (old_pointer + old_size == &arena.data[arena.used])
{
arena.used -= old_size;
}
return null;
case MARK:
return (void*)(uptr)arena.used;
case RESET:
arena.used = 0;
arena.used = size;
return null;
}
unreachable();
@@ -69,14 +71,14 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require this != null
**/
private fn void*! MemoryArena.alloc(MemoryArena* this, usize size, usize alignment, usize prefixed_bytes = 0)
private fn void*! ArenaAllocator.alloc(ArenaAllocator* this, usize size, usize alignment, usize prefixed_bytes = 0)
{
void* start_mem = this.memory;
void* start_mem = this.data.ptr;
void* unaligned_pointer = start_mem + this.used + prefixed_bytes;
if ((uptr)unaligned_pointer < (uptr)start_mem) return AllocationFailure.OUT_OF_MEMORY!;
usize offset_start = mem::aligned_offset((usize)(uptr)unaligned_pointer, alignment) - (usize)(uptr)start_mem;
usize end = offset_start + size;
if (end > this.total || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
if (end > this.data.len || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
this.used = end;
return start_mem + offset_start;
}

View File

@@ -54,7 +54,7 @@ private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, voi
return old_pointer;
}
void* new_mem = this.alloc(size, alignment)?;
mem::copy(new_mem, old_pointer, old_size);
mem::memcpy(new_mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -154,7 +154,7 @@ private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size,
assert(!old_pointer, "Unexpected no old pointer for calloc.");
if (!size) return null;
void* mem = allocator.alloc(size, alignment)?;
mem::set(mem, 0, size);
mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALLOC:
assert(!old_pointer, "Unexpected no old pointer for alloc.");
@@ -174,6 +174,8 @@ private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size,
if (!old_pointer) return null;
allocator.free(old_pointer);
return null;
case MARK:
unreachable("Tried to mark a dynamic arena");
case RESET:
allocator.reset();
return null;

View File

@@ -28,7 +28,8 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
case ALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::malloc(bytes + alignment), alignment);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
}
else
{
@@ -39,23 +40,36 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
case CALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::calloc(bytes + alignment, 1), alignment);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
mem::memset(data, 0, bytes, false, DEFAULT_MEM_ALIGNMENT);
}
else
{
data = libc::malloc(bytes);
data = libc::calloc(bytes, 1);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case REALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::realloc(old_pointer, bytes + alignment), alignment);
}
else
{
data = libc::realloc(old_pointer, bytes);
}
if (!bytes) nextcase FREE;
if (!old_pointer) nextcase CALLOC;
$if (libc::HAS_MALLOC_SIZE):
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
usize size = libc::malloc_size(old_pointer);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
if (bytes > size)
{
mem::memset(data + size, 0, bytes - size, false, DEFAULT_MEM_ALIGNMENT);
}
mem::memcpy(data, old_pointer, size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
libc::free(old_pointer);
return data;
}
$endif;
data = libc::realloc(old_pointer, bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case RESET:

View File

@@ -0,0 +1,197 @@
module std::core::mem::allocator;
import std::io;
// Header prepended to every allocation served from the inline buffer.
// NOTE(review): when the `size` field reads as ~(usize)0 the memory is
// actually a TempAllocatorPage (its `ident` field overlays this slot) —
// see the detection in TempAllocator._realloc.
private struct TempAllocatorChunk
{
    usize size;   // Byte size of the allocation that follows.
    char[*] data; // Flexible member: the payload handed to the caller.
}
// Bump allocator over an inline buffer; requests that do not fit are
// forwarded to `backing_allocator` and tracked as a linked list of
// TempAllocatorPage headers so they can be released on reset.
struct TempAllocator
{
    inline Allocator allocator;   // Embedded allocator interface (dispatch function).
    Allocator* backing_allocator; // Source of the allocator itself and of overflow pages.
    TempAllocatorPage* last_page; // Most recent overflow page; linked via prev_page.
    usize used;                   // Bytes consumed from `data`; doubles as the MARK value.
    usize capacity;               // Total byte size of `data`.
    char[*] data;                 // Flexible member: the inline allocation buffer.
}
// Header for an allocation that spilled to the backing allocator.
// Lives immediately before the payload inside the backing allocation.
struct TempAllocatorPage
{
    TempAllocatorPage* prev_page; // Previously spilled page, or null.
    usize mark;                   // `used` at allocation time; compared against the mark in _reset.
    void* start;                  // Raw pointer returned by the backing allocator (what gets freed).
    usize size;                   // Payload size in bytes.
    usize ident;                  // Always ~(usize)0 — distinguishes a page from an in-buffer chunk.
    char[*] data;                 // Flexible member: the payload handed to the caller.
}
/**
 * Create a temp allocator whose inline buffer holds `size` bytes.
 * Both the allocator struct and any overflow pages come from
 * `backing_allocator`.
 *
 * @require size >= 16
 **/
fn TempAllocator*! new_temp(usize size, Allocator* backing_allocator)
{
    // One backing allocation covers the struct header plus the buffer.
    TempAllocator* temp = backing_allocator.alloc(size + TempAllocator.sizeof)?;
    temp.function = &temp_allocator_function;
    temp.backing_allocator = backing_allocator;
    temp.capacity = size;
    temp.used = 0;
    temp.last_page = null;
    return temp;
}
/**
 * Allocator-interface entry point for TempAllocator: dispatches each
 * AllocationKind to the matching private method.
 *
 * @require !alignment || math::is_power_of_2(alignment)
 * @require data `unexpectedly missing the allocator`
 */
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
{
    TempAllocator* arena = (TempAllocator*)data;
    switch (kind)
    {
        case CALLOC:
        case ALLOC:
            assert(!old_pointer, "Unexpected old pointer for alloc.");
            if (!size) return null;
            // CALLOC additionally zero-fills the returned memory.
            return arena._alloc(size, alignment_for_allocation(alignment), kind == AllocationKind.CALLOC);
        case REALLOC:
            // Degenerate reallocs become a free / a plain alloc.
            if (!size) nextcase FREE;
            if (!old_pointer) nextcase ALLOC;
            return arena._realloc(old_pointer, size, alignment_for_allocation(alignment));
        case FREE:
            if (!old_pointer) return null;
            // NOTE(review): this path assumes the pointer lives in the inline
            // buffer; a pointer from an overflow page would read its ~0 ident
            // as the size here — confirm pages are only released via RESET.
            assert((uptr)old_pointer >= (uptr)&arena.data, "Pointer originates from a different allocator.");
            usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
            // Only the most recent allocation can actually be reclaimed.
            if (old_pointer + old_size == &arena.data[arena.used])
            {
                arena.used -= old_size;
            }
            return null;
        case MARK:
            // Current usage doubles as an opaque mark for a later RESET.
            return (void*)(uptr)arena.used;
        case RESET:
            // `size` carries the mark to roll back to (0 = full reset).
            arena._reset(size)?;
            return null;
    }
    unreachable();
}
/**
 * Roll the allocator back to `mark`: free every overflow page created
 * at or after the mark, then rewind the inline buffer usage.
 **/
private fn void! TempAllocator._reset(TempAllocator* this, usize mark)
{
    TempAllocatorPage* page = this.last_page;
    for (;;)
    {
        // Stop at the first page that predates the mark.
        if (!page || page.mark < mark) break;
        void* allocation = page.start;
        page = page.prev_page;
        this.backing_allocator.free(allocation)?;
    }
    this.used = mark;
    this.last_page = page;
}
/**
 * Reallocate a temp allocation to `size` bytes. Handles both in-buffer
 * chunks and overflow pages (detected via the ~0 ident overlaying the
 * chunk size field).
 *
 * Fixes vs previous version: removed leftover debug io::printf calls
 * (one used %s for a usize), and clamped both copies to
 * min(old size, new size) — the old code always copied the old size,
 * overrunning the new allocation when shrinking.
 **/
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment)
{
    TempAllocatorChunk *chunk = pointer - TempAllocatorChunk.sizeof;
    if (chunk.size == ~(usize)0)
    {
        // ~0 "size" means this is an overflow page, not an in-buffer chunk.
        assert(this.last_page, "Realloc of non temp pointer");
        // First grab the page
        TempAllocatorPage *page = pointer - TempAllocatorPage.sizeof;
        // Then the actual start pointer:
        void* real_pointer = page.start;
        // Walk backwards to find the pointer to this page.
        TempAllocatorPage **pointer_to_prev = &this.last_page;
        // Remove the page from the list
        while (*pointer_to_prev != page)
        {
            pointer_to_prev = &((*pointer_to_prev).prev_page);
        }
        *pointer_to_prev = page.prev_page;
        // Find the new header size
        usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
        // And the old one.
        uptrdiff prev_header_size = pointer - real_pointer;
        // If the new header size needs to move, we're out of luck.
        // We need to make a copy
        if (actual_header_size > prev_header_size)
        {
            // Clear only if the new size is bigger than the old one.
            void* data = this._alloc(size, alignment, size > page.size)?;
            // Copy only what fits in the new allocation.
            mem::memcpy(data, pointer, size < page.size ? size : page.size);
            this.backing_allocator.free(real_pointer)?;
            return data;
        }
        void* new_start = this.backing_allocator.realloc(real_pointer, prev_header_size + size, alignment)?;
        // Re-link the (possibly moved) page as the most recent one.
        page = new_start + prev_header_size - TempAllocatorPage.sizeof;
        page.mark = this.used;
        page.prev_page = this.last_page;
        this.last_page = page;
        page.size = size;
        page.start = new_start;
        return &page.data;
    }
    assert(pointer < &this.data + this.capacity && pointer >= &this.data, "This is not a temp allocated pointer.");
    assert(pointer < &this.data + this.used, "This is a stale temp pointer.");
    // TODO optimize last allocation
    TempAllocatorChunk* data = this._alloc(size, alignment, size > chunk.size)?;
    // Copy only what fits in the new allocation.
    mem::memcpy(data, pointer, size < chunk.size ? size : chunk.size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
    return data;
}
/**
 * Allocate `size` bytes at `alignment` from the inline buffer, spilling
 * to a backing-allocator page when the buffer cannot hold the request.
 *
 * Fixes vs previous version: the overflow-page header is a
 * TempAllocatorPage, so size it with TempAllocatorPage.sizeof — the old
 * code used TempAllocator.sizeof, which disagreed with _realloc's
 * header math and with the trailing `&page.data == mem` assert.
 *
 * @require alignment > 0 `alignment must be non zero`
 * @require math::is_power_of_2(alignment)
 * @require size > 0
 * @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
 * @require this != null
 **/
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, bool clear)
{
    void* start_mem = &this.data;
    uptr starting_ptr = (uptr)start_mem + this.used;
    // Reserve room for the chunk header, then align the payload after it.
    uptr aligned_header_start = mem::aligned_offset(starting_ptr, $alignof(TempAllocatorChunk));
    uptr unaligned_data_start = aligned_header_start + TempAllocatorChunk.sizeof;
    usize data_start = mem::aligned_offset(unaligned_data_start, alignment);
    usize new_usage = data_start + size - (uptr)start_mem;
    // Fallback to backing allocator
    if (new_usage > this.capacity)
    {
        // Enlarge the header if needed
        usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
        usize total_alloc_size = actual_header_size + size;
        // Rethrow from either branch — the old ternary tied `?` to only
        // one side of the ?: expression.
        void* start;
        if (clear)
        {
            start = this.backing_allocator.calloc(total_alloc_size, alignment)?;
        }
        else
        {
            start = this.backing_allocator.alloc(total_alloc_size, alignment)?;
        }
        // Move forward to the memory
        void* mem = start + actual_header_size;
        TempAllocatorPage* page = mem - TempAllocatorPage.sizeof;
        page.start = start;
        page.ident = ~(usize)0;
        page.mark = this.used;
        page.size = size;
        page.prev_page = this.last_page;
        this.last_page = page;
        assert(&page.data == mem, "Expected match");
        return mem;
    }
    // In-buffer path: write the chunk header and bump usage.
    TempAllocatorChunk* chunk_start = (TempAllocatorChunk*)(data_start - TempAllocatorChunk.sizeof);
    chunk_start.size = size;
    this.used = new_usage;
    void* mem = &chunk_start.data;
    if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
    return mem;
}