Allocators separated into aligned and non-aligned operations.

This commit is contained in:
Christoffer Lerno
2022-08-03 20:45:30 +02:00
parent 550bca79e9
commit cc8884d3d1
15 changed files with 572 additions and 269 deletions

View File

@@ -1,58 +1,47 @@
module std::core::mem::allocator;
// Bookkeeping header stored immediately before each arena allocation,
// so that realloc/free can recover the allocation's size.
struct ArenaAllocatorHeader
{
	// Size of the user allocation that follows this header.
	usize size;
	// Flexible array member marking the start of the user data.
	char[*] data;
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
ArenaAllocator* arena = (ArenaAllocator*)data;
bool clear = false;
switch (kind)
{
case CALLOC:
case ALIGNED_CALLOC:
clear = true;
nextcase;
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
alignment = alignment_for_allocation(alignment);
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
if (kind == AllocationKind.CALLOC) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
void* mem = arena._alloc(size, alignment, offset)?;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALIGNED_REALLOC:
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
if (size > arena.data.len) return AllocationFailure.CHUNK_TOO_LARGE!;
alignment = alignment_for_allocation(alignment);
usize* old_size_ptr = (usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
usize old_size = *old_size_ptr;
// Do last allocation and alignment match?
if (&arena.data[arena.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
*old_size_ptr = size;
arena.used -= old_size - size;
return old_pointer;
}
usize new_used = arena.used + size - old_size;
if (new_used > arena.data.len) return AllocationFailure.OUT_OF_MEMORY!;
arena.used = new_used;
*old_size_ptr = size;
return old_pointer;
}
// Otherwise just allocate new memory.
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
mem::memcpy(mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return mem;
return arena._realloc(old_pointer, size, alignment, offset)?;
case ALIGNED_FREE:
case FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == &arena.data[arena.used])
ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
// Reclaim memory if it's the last element.
if (old_pointer + header.size == &arena.data[arena.used])
{
arena.used -= old_size;
arena.used -= header.size + ArenaAllocatorHeader.sizeof;
}
return null;
case MARK:
@@ -69,16 +58,62 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, $alignof(ArenaAllocatorHeader)) == offset
* @require this != null
**/
private fn void*! ArenaAllocator.alloc(ArenaAllocator* this, usize size, usize alignment, usize prefixed_bytes = 0)
private fn void*! ArenaAllocator._alloc(ArenaAllocator* this, usize size, usize alignment, usize offset)
{
void* start_mem = this.data.ptr;
void* unaligned_pointer = start_mem + this.used + prefixed_bytes;
if ((uptr)unaligned_pointer < (uptr)start_mem) return AllocationFailure.OUT_OF_MEMORY!;
usize offset_start = mem::aligned_offset((usize)(uptr)unaligned_pointer, alignment) - (usize)(uptr)start_mem;
usize end = offset_start + size;
if (end > this.data.len || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
usize total_len = this.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE!;
void* start_mem = this.data.ptr;
void* unaligned_pointer_to_offset = start_mem + this.used + ArenaAllocatorHeader.sizeof + offset;
void* aligned_pointer_to_offset = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usize end = (usize)(aligned_pointer_to_offset - this.data.ptr) + size - offset;
if (end > total_len) return AllocationFailure.OUT_OF_MEMORY!;
this.used = end;
return start_mem + offset_start;
void *mem = aligned_pointer_to_offset - offset;
ArenaAllocatorHeader* header = mem - ArenaAllocatorHeader.sizeof;
header.size = size;
return mem;
}
/**
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, $alignof(ArenaAllocatorHeader)) == offset
* @require this != null
**/
/**
 * Reallocate an arena allocation. If the pointer is the most recent
 * allocation and the alignment already matches, the arena tail is grown
 * or shrunk in place; otherwise new memory is allocated and the data
 * copied over (the old memory is leaked until reset, as usual for arenas).
 **/
private fn void*! ArenaAllocator._realloc(ArenaAllocator* this, void *old_pointer, usize size, usize alignment, usize offset)
{
	assert(old_pointer >= this.data.ptr, "Pointer originates from a different allocator.");
	usize total_len = this.data.len;
	if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE!;
	ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
	usize old_size = header.size;
	// Do last allocation and alignment match? Then resize in place.
	if (&this.data[this.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer + offset, alignment))
	{
		if (old_size >= size)
		{
			this.used -= old_size - size;
		}
		else
		{
			usize new_used = this.used + size - old_size;
			if (new_used > total_len) return AllocationFailure.OUT_OF_MEMORY!;
			this.used = new_used;
		}
		header.size = size;
		return old_pointer;
	}
	// Otherwise just allocate new memory.
	void* mem = this._alloc(size, alignment, offset)?;
	// Copy only what fits: copying old_size bytes when shrinking would
	// write past the end of the new (smaller) allocation.
	usize copy_size = old_size < size ? old_size : size;
	mem::memcpy(mem, old_pointer, copy_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	return mem;
}

View File

@@ -9,6 +9,10 @@ private struct DynamicArenaPage
void* last_ptr;
}
// Per-allocation header stored immediately before each chunk handed out
// by the dynamic arena; records the user-visible size of the chunk.
private struct DynamicArenaChunk
{
	// Size of the user allocation that follows this header.
	usize size;
}
/**
* @require ptr && this
@@ -28,7 +32,7 @@ private fn void DynamicArenaAllocator.free(DynamicArenaAllocator* this, void* pt
* @require old_pointer && size > 0
* @require this.page `tried to realloc pointer on invalid allocator`
*/
private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, void* old_pointer, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._realloc(DynamicArenaAllocator* this, void* old_pointer, usize size, usize alignment, usize offset)
{
DynamicArenaPage* current_page = this.page;
alignment = alignment_for_allocation(alignment);
@@ -53,7 +57,7 @@ private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, voi
current_page.used += add_size;
return old_pointer;
}
void* new_mem = this.alloc(size, alignment)?;
void* new_mem = this._alloc(size, alignment, offset)?;
mem::memcpy(new_mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -78,9 +82,12 @@ private fn void DynamicArenaAllocator.reset(DynamicArenaAllocator* this)
* @require math::is_power_of_2(alignment)
* @require size > 0
*/
private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._alloc_new(DynamicArenaAllocator* this, usize size, usize alignment, usize offset)
{
usize page_size = max(this.page_size, size + DEFAULT_SIZE_PREFIX + alignment);
// First, make sure that we can align it, extending the page size if needed.
usize page_size = max(this.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof + offset, alignment) - offset);
// Grab the page without alignment (we do it ourselves)
void* mem = this.backing_allocator.alloc(page_size)?;
DynamicArenaPage*! page = this.backing_allocator.alloc(DynamicArenaPage.sizeof);
if (catch err = page)
@@ -89,15 +96,16 @@ private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, u
return err!;
}
page.memory = mem;
usize offset = mem::aligned_offset((usize)mem + DEFAULT_SIZE_PREFIX, alignment) - (usize)mem;
usize* size_ptr = mem + offset - DEFAULT_SIZE_PREFIX;
*size_ptr = size;
void* mem_start = mem::aligned_pointer(mem + offset + DynamicArenaChunk.sizeof, alignment) - offset;
assert(mem_start + DynamicArenaChunk.sizeof + size < mem + page_size);
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem_start - 1;
chunk.size = size;
page.prev_arena = this.page;
page.total = page_size;
page.used = size + offset;
this.page = page;
return page.last_ptr = page.memory + offset;
page.last_ptr = mem_start;
return mem_start;
}
/**
@@ -105,7 +113,7 @@ private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, u
* @require size > 0
* @require this
*/
private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._alloc(DynamicArenaAllocator* this, usize size, usize alignment, usize offset)
{
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = this.page;
@@ -115,15 +123,15 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
this.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page) return this.alloc_new(size, alignment);
usize start = mem::aligned_offset((uptr)page.memory + page.used + DEFAULT_SIZE_PREFIX, alignment) - (usize)page.memory;
usize new_used = start + size;
if (!page) return this._alloc_new(size, alignment, offset);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
usize new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = this.unused_page))
{
start = mem::aligned_offset((uptr)page.memory + DEFAULT_SIZE_PREFIX, alignment) - (usize)page.memory;
new_used = start + size;
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
new_used = start + size - page.memory;
if (page.total >= new_used)
{
this.unused_page = page.prev_arena;
@@ -132,12 +140,13 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
break ALLOCATE_NEW;
}
}
return this.alloc_new(size, alignment);
return this._alloc_new(size, alignment, offset);
}
page.used = new_used;
void* mem = page.memory + start;
usize* size_offset = mem - DEFAULT_SIZE_PREFIX;
*size_offset = size;
void* mem = start;
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem - 1;
chunk.size = size;
return mem;
}
@@ -145,31 +154,35 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
DynamicArenaAllocator* allocator = (DynamicArenaAllocator*)data;
switch (kind)
{
case CALLOC:
case ALIGNED_CALLOC:
assert(!old_pointer, "Unexpected no old pointer for calloc.");
if (!size) return null;
void* mem = allocator.alloc(size, alignment)?;
void* mem = allocator._alloc(size, alignment, offset)?;
mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected no old pointer for alloc.");
if (!size) return null;
return allocator.alloc(size, alignment);
return allocator._alloc(size, alignment, offset);
case REALLOC:
case ALIGNED_REALLOC:
if (!size)
{
if (!old_pointer) return null;
allocator.free(old_pointer);
return null;
}
if (!old_pointer) return allocator.alloc(size, alignment);
void* mem = allocator.realloc(old_pointer, size, alignment)?;
if (!old_pointer) return allocator._alloc(size, alignment, offset);
void* mem = allocator._realloc(old_pointer, size, alignment, offset)?;
return mem;
case ALIGNED_FREE:
case FREE:
if (!old_pointer) return null;
allocator.free(old_pointer);

View File

@@ -4,20 +4,80 @@ import libc;
private const Allocator _NULL_ALLOCATOR = { &null_allocator_fn };
private const Allocator _SYSTEM_ALLOCATOR = { &libc_allocator_fn };
private fn void*! null_allocator_fn(Allocator* this, usize bytes, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! null_allocator_fn(Allocator* this, usize bytes, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
switch (kind)
{
case ALLOC:
case CALLOC:
case REALLOC:
case ALIGNED_ALLOC:
case ALIGNED_REALLOC:
case ALIGNED_CALLOC:
return AllocationFailure.OUT_OF_MEMORY!;
default:
return null;
}
}
fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, void* old_pointer, AllocationKind kind) @inline
// Header stored immediately before an aligned libc allocation so that
// free/realloc can recover the raw malloc'ed pointer and the user size.
private struct AlignedBlock
{
	// User-visible size of the aligned allocation.
	usize len;
	// The raw pointer returned by malloc/calloc — this is what gets freed.
	void* start;
}
/**
* @require bytes > 0
* @require alignment > 0
**/
/**
 * Allocate `bytes` with (pointer + offset) aligned to `alignment`, using
 * plain malloc plus an AlignedBlock header stored just before the returned
 * pointer. Returns null on allocation failure (the caller maps null to
 * AllocationFailure.OUT_OF_MEMORY).
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_alloc(usize bytes, usize alignment, usize offset) @inline
{
	// Worst-case space for the header plus alignment padding: malloc only
	// guarantees default alignment, so reserve a full extra `alignment`.
	usize header = mem::aligned_offset(AlignedBlock.sizeof + offset + alignment, alignment) - offset;
	void* data = libc::malloc(header + bytes);
	// Propagate failure instead of writing a header through a near-null pointer.
	if (!data) return null;
	// Reserve room for the header BEFORE aligning, so the AlignedBlock
	// never lands in front of the allocation.
	void* mem = mem::aligned_pointer(data + AlignedBlock.sizeof + offset, alignment) - offset;
	assert(mem >= data + AlignedBlock.sizeof);
	AlignedBlock* desc = (AlignedBlock*)mem - 1;
	*desc = { bytes, data };
	return mem;
}
/**
* @require bytes > 0
* @require alignment > 0
**/
/**
 * Zero-initialized variant of _libc_aligned_alloc. Returns null on
 * allocation failure (the caller maps null to OUT_OF_MEMORY).
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_calloc(usize bytes, usize alignment, usize offset) @inline
{
	// Worst-case space for the header plus alignment padding: calloc only
	// guarantees default alignment, so reserve a full extra `alignment`.
	usize header = mem::aligned_offset(AlignedBlock.sizeof + offset + alignment, alignment) - offset;
	void* data = libc::calloc(header + bytes, 1);
	// Propagate failure instead of writing a header through a near-null pointer.
	if (!data) return null;
	// Reserve room for the header BEFORE aligning, so the AlignedBlock
	// never lands in front of the allocation.
	void* mem = mem::aligned_pointer(data + AlignedBlock.sizeof + offset, alignment) - offset;
	assert(mem >= data + AlignedBlock.sizeof);
	AlignedBlock* desc = (AlignedBlock*)mem - 1;
	*desc = { bytes, data };
	return mem;
}
/**
* @require bytes > 0
* @require alignment > 0
**/
/**
 * Reallocate an aligned libc allocation: allocate a fresh zeroed aligned
 * block, copy the overlapping part of the old data, then free the old
 * block. On failure returns null and leaves the old allocation intact
 * (standard realloc semantics).
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_realloc(void* old_pointer, usize bytes, usize alignment, usize offset) @inline
{
	AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
	void* data_start = desc.start;
	void* new_data = _libc_aligned_calloc(bytes, alignment, offset);
	if (!new_data) return null;
	// Copy the SMALLER of old and new size: copying the larger one reads
	// past the old allocation (when growing) or writes past the new one
	// (when shrinking).
	usize copy_len = desc.len < bytes ? desc.len : bytes;
	mem::memcpy(new_data, old_pointer, copy_len, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	libc::free(data_start);
	return new_data;
}
// Free an allocation produced by the _libc_aligned_* functions: the
// AlignedBlock header directly before the pointer holds the raw
// malloc'ed address, which is what must actually be freed.
private fn void _libc_aligned_free(void* old_pointer) @inline
{
	AlignedBlock* header = (AlignedBlock*)old_pointer - 1;
	void* raw = header.start;
	libc::free(raw);
}
fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, usize offset, void* old_pointer, AllocationKind kind) @inline
{
if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
assert(math::is_power_of_2(alignment), "Alignment was not a power of 2");
@@ -25,58 +85,35 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
void* data;
switch (kind)
{
case ALIGNED_ALLOC:
if (alignment <= DEFAULT_MEM_ALIGNMENT) nextcase ALLOC;
data = _libc_aligned_alloc(bytes, alignment, offset);
case ALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
}
else
{
data = libc::malloc(bytes);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case CALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
mem::memset(data, 0, bytes, false, DEFAULT_MEM_ALIGNMENT);
}
else
{
data = libc::calloc(bytes, 1);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
data = libc::malloc(bytes);
case ALIGNED_CALLOC:
if (alignment <= DEFAULT_MEM_ALIGNMENT) nextcase CALLOC;
data = _libc_aligned_calloc(bytes, alignment, offset);
case CALLOC:
data = libc::calloc(bytes, 1);
case ALIGNED_REALLOC:
if (!bytes) nextcase ALIGNED_FREE;
if (!old_pointer) nextcase ALIGNED_CALLOC;
data = _libc_aligned_realloc(old_pointer, bytes, alignment, offset);
case REALLOC:
if (!bytes) nextcase FREE;
if (!old_pointer) nextcase CALLOC;
$if (libc::HAS_MALLOC_SIZE):
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
usize size = libc::malloc_size(old_pointer);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
if (bytes > size)
{
mem::memset(data + size, 0, bytes - size, false, DEFAULT_MEM_ALIGNMENT);
}
mem::memcpy(data, old_pointer, size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
libc::free(old_pointer);
return data;
}
$endif;
data = libc::realloc(old_pointer, bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case RESET:
return AllocationFailure.UNSUPPORTED_OPERATION!;
case ALIGNED_FREE:
_libc_aligned_free(old_pointer);
return null;
case FREE:
libc::free(old_pointer);
return null;
default:
unreachable();
}
unreachable();
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
}

View File

@@ -17,16 +17,23 @@ struct TempAllocator
char[*] data;
}
private const usize PAGE_IS_ALIGNED = (usize)isize.max + 1;
struct TempAllocatorPage
{
TempAllocatorPage* prev_page;
usize mark;
void* start;
usize mark;
usize size;
usize ident;
char[*] data;
}
// User-visible size of the page, with the PAGE_IS_ALIGNED flag bit masked off.
macro usize TempAllocatorPage.pagesize(TempAllocatorPage* page) { return page.size & ~PAGE_IS_ALIGNED; }
// True when the page was allocated through the aligned allocation path;
// the top bit of `size` (PAGE_IS_ALIGNED) is used as the flag.
macro bool TempAllocatorPage.is_aligned(TempAllocatorPage* page) { return page.size & PAGE_IS_ALIGNED == PAGE_IS_ALIGNED; }
/**
* @require size >= 16
**/
@@ -45,28 +52,31 @@ fn TempAllocator*! new_temp(usize size, Allocator* backing_allocator)
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
TempAllocator* arena = (TempAllocator*)data;
switch (kind)
{
case CALLOC:
case ALLOC:
case ALIGNED_CALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
return arena._alloc(size, alignment_for_allocation(alignment), kind == AllocationKind.CALLOC);
return arena._alloc(size, alignment, offset, true);
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
return arena._alloc(size, alignment_for_allocation(alignment), offset, false);
case ALIGNED_REALLOC:
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
return arena._realloc(old_pointer, size, alignment_for_allocation(alignment));
return arena._realloc(old_pointer, size, alignment_for_allocation(alignment), offset);
case FREE:
case ALIGNED_FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)&arena.data, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == &arena.data[arena.used])
{
arena.used -= old_size;
}
io::println("Freeing stuff\n");
arena._free(old_pointer)?;
return null;
case MARK:
return (void*)(uptr)arena.used;
@@ -77,121 +87,172 @@ private fn void*! temp_allocator_function(Allocator* data, usize size, usize ali
unreachable();
}
// Best-effort free for the temp arena: memory is only reclaimed when the
// pointer is the most recent allocation; anything else is simply retained
// until the arena is reset.
private fn void! TempAllocator._free(TempAllocator* this, void* old_pointer)
{
	// TODO fix free
	// NOTE(review): this reads the old DEFAULT_SIZE_PREFIX size header,
	// while _alloc now writes a TempAllocatorChunk header — confirm the two
	// layouts agree (they do only if the chunk is a single usize).
	assert((uptr)old_pointer >= (uptr)&this.data, "Pointer originates from a different allocator.");
	usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
	// Roll back the arena tail only if this was the last allocation.
	if (old_pointer + old_size == &this.data[this.used])
	{
		this.used -= old_size;
	}
}
private fn void! TempAllocator._reset(TempAllocator* this, usize mark)
{
TempAllocatorPage *last_page = this.last_page;
while (last_page)
while (last_page && last_page.mark < mark)
{
if (last_page.mark < mark) break;
void* mem = last_page.start;
TempAllocatorPage *to_free = last_page;
last_page = last_page.prev_page;
this.backing_allocator.free(mem)?;
this._free_page(to_free)?;
}
this.last_page = last_page;
this.used = mark;
}
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment)
// Release a backing page through the same path it was allocated with:
// pages created via the aligned allocation path must go through
// free_aligned, plain pages through the normal free.
private fn void! TempAllocator._free_page(TempAllocator* this, TempAllocatorPage* page) @inline
{
	void* start = page.start;
	if (page.is_aligned())
	{
		return this.backing_allocator.free_aligned(start);
	}
	return this.backing_allocator.free(start);
}
/**
 * Reallocate a backing-allocator page: unlink it from the page list,
 * allocate replacement memory through the temp allocator, copy the
 * overlapping data, then free the old page through the matching
 * (aligned / unaligned) free path.
 **/
private fn void*! TempAllocator._realloc_page(TempAllocator* this, TempAllocatorPage* page, usize size, usize alignment, usize offset) @inline
{
	// The raw backing pointer for the whole page:
	void* real_pointer = page.start;
	// Unlink the page by walking the singly linked list backwards until we
	// find the pointer that refers to it.
	TempAllocatorPage **pointer_to_prev = &this.last_page;
	while (*pointer_to_prev != page)
	{
		pointer_to_prev = &((*pointer_to_prev).prev_page);
	}
	*pointer_to_prev = page.prev_page;
	usize page_size = page.pagesize();
	void* data = this._alloc(size, alignment, offset, false)?;
	// Copy only what the new allocation can hold: copying the full old page
	// when shrinking would write past the end of `data`.
	usize copy_size = page_size < size ? page_size : size;
	mem::memcpy(data, &page.data[0], copy_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	if (page.is_aligned())
	{
		this.backing_allocator.free_aligned(real_pointer)?;
	}
	else
	{
		this.backing_allocator.free(real_pointer)?;
	}
	return data;
}
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment, usize offset) @inline
{
TempAllocatorChunk *chunk = pointer - TempAllocatorChunk.sizeof;
if (chunk.size == ~(usize)0)
if (chunk.size == (usize)-1)
{
assert(this.last_page, "Realloc of non temp pointer");
// First grab the page
TempAllocatorPage *page = pointer - TempAllocatorPage.sizeof;
// Then the actual start pointer:
void* real_pointer = page.start;
// Walk backwards to find the pointer to this page.
TempAllocatorPage **pointer_to_prev = &this.last_page;
// Remove the page from the list
while (*pointer_to_prev != page)
{
pointer_to_prev = &((*pointer_to_prev).prev_page);
}
*pointer_to_prev = page.prev_page;
// Find the new header size
usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
// And the old one.
uptrdiff prev_header_size = pointer - real_pointer;
// If the new header size needs to move, we're out of luck.
// We need to make a copy
if (actual_header_size > prev_header_size)
{
// Clear only if the new size is bigger than the old one.
void* data = this._alloc(size, alignment, size > page.size)?;
mem::memcpy(data, pointer, page.size);
this.backing_allocator.free(real_pointer)?;
return data;
}
void* new_start = this.backing_allocator.realloc(real_pointer, prev_header_size + size, alignment)?;
page = new_start + prev_header_size - TempAllocatorPage.sizeof;
page.mark = this.used;
page.prev_page = this.last_page;
this.last_page = page;
io::printf("Size: %d\n", size);
page.size = size;
page.start = new_start;
return &page.data;
return this._realloc_page(page, size, alignment, offset);
}
assert(pointer < &this.data + this.capacity && pointer >= &this.data, "This is not a temp allocated pointer.");
assert(pointer < &this.data + this.used, "This is a stale temp pointer.");
io::printf("realloc normal %s\n", size);
// TODO optimize last allocation
TempAllocatorChunk* data = this._alloc(size, alignment, size > chunk.size)?;
TempAllocatorChunk* data = this._alloc(size, alignment, offset, size > chunk.size)?;
mem::memcpy(data, pointer, chunk.size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return data;
}
/**
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require this != null
**/
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, bool clear)
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, usize offset, bool clear)
{
void* start_mem = &this.data;
uptr starting_ptr = (uptr)start_mem + this.used;
uptr aligned_header_start = mem::aligned_offset(starting_ptr, $alignof(TempAllocatorChunk));
uptr unaligned_data_start = aligned_header_start + TempAllocatorChunk.sizeof;
usize data_start = mem::aligned_offset(unaligned_data_start, alignment);
usize new_usage = data_start + size - (uptr)start_mem;
// Fallback to backing allocator
if (new_usage > this.capacity)
void* starting_ptr = start_mem + this.used;
void* aligned_header_start = mem::aligned_pointer(starting_ptr, $alignof(TempAllocatorChunk));
void* mem = aligned_header_start + TempAllocatorChunk.sizeof;
if (alignment > $alignof(TempAllocatorChunk))
{
// Enlarge the header if needed
usize actual_header_size = mem::aligned_offset(TempAllocator.sizeof, alignment);
usize total_alloc_size = actual_header_size + size;
void* start = clear ? this.backing_allocator.calloc(total_alloc_size, alignment) : this.backing_allocator.alloc(total_alloc_size, alignment)?;
mem = mem::aligned_pointer(mem + offset, alignment) - offset;
}
usize new_usage = (usize)(mem - start_mem) + size;
// Move forward to the memory
void* mem = start + actual_header_size;
TempAllocatorPage* page = mem - TempAllocator.sizeof;
page.start = start;
page.ident = ~(usize)0;
page.mark = this.used;
page.size = size;
page.prev_page = this.last_page;
this.last_page = page;
assert(&page.data == mem, "Expected match");
return mem;
// Arena alignment, simple!
if (new_usage <= this.capacity)
{
TempAllocatorChunk* chunk_start = mem - TempAllocatorChunk.sizeof;
chunk_start.size = size;
this.used = new_usage;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
}
TempAllocatorChunk* chunk_start = (TempAllocatorChunk*)(data_start - TempAllocatorChunk.sizeof);
chunk_start.size = size;
this.used = new_usage;
void* mem = &chunk_start.data;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
}
// Fallback to backing allocator
TempAllocatorPage* page;
// We have something we need to align.
if (alignment > DEFAULT_MEM_ALIGNMENT || offset)
{
// This is actually simpler, since it will create the offset for us.
usize total_alloc_size = TempAllocatorPage.sizeof + size;
if (clear)
{
page = this.backing_allocator.calloc_aligned(total_alloc_size, alignment, TempAllocatorPage.sizeof + offset)?;
}
else
{
page = this.backing_allocator.alloc_aligned(total_alloc_size, alignment, TempAllocatorPage.sizeof + offset)?;
}
page.start = page;
page.size = size | PAGE_IS_ALIGNED;
}
else
{
// Here we might need to pad
usize padded_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, DEFAULT_MEM_ALIGNMENT);
usize total_alloc_size = padded_header_size + size;
void* alloc = clear ? this.backing_allocator.calloc(total_alloc_size) : this.backing_allocator.alloc(total_alloc_size)?;
// Find the page.
page = alloc + padded_header_size - TempAllocatorPage.sizeof;
assert(mem::ptr_is_aligned(page, $alignof(TempAllocator)));
assert(mem::ptr_is_aligned(&page.data[0], DEFAULT_MEM_ALIGNMENT));
page.start = alloc;
page.size = size;
}
// Mark it as a page
page.ident = ~(usize)0;
// Store when it was created
page.mark = this.used;
// Hook up the page.
page.prev_page = this.last_page;
this.last_page = page;
return &page.data[0];
}
// Debug helper: print every backing page (size, mark, data address and
// whether it came from the aligned allocation path) to `f`.
fn void TempAllocator.print_pages(TempAllocator* this, File f)
{
	TempAllocatorPage *page = this.last_page;
	if (!page)
	{
		f.printf("No pages.\n");
		return;
	}
	f.printf("---Pages----\n");
	uint index = 0;
	while (page)
	{
		// Use the page accessors instead of a hard-coded 1u64 << 63 mask:
		// PAGE_IS_ALIGNED is derived from isize.max, so this stays correct
		// if usize is not 64 bits and keeps the flag defined in one place.
		f.printf("%d. Alloc: %d %d at %p%s\n", ++index,
			page.pagesize(), page.mark, &page.data[0], page.is_aligned() ? " [aligned]" : "");
		page = page.prev_page;
	}
}

View File

@@ -21,6 +21,11 @@ fn usize aligned_offset(usize offset, usize alignment)
return alignment * ((offset + alignment - 1) / alignment);
}
// Round `ptr` up to the next multiple of `alignment` (pointer form of
// aligned_offset); returns `ptr` unchanged when it is already aligned.
macro void* aligned_pointer(void* ptr, usize alignment)
{
	return (void*)(uptr)aligned_offset((uptr)ptr, alignment);
}
/**
* @require math::is_power_of_2(alignment)
@@ -50,8 +55,6 @@ macro void memset(void* dst, char val, usize bytes, bool $is_volatile = false, u
$$memset(dst, val, bytes, $is_volatile, $dst_align);
}
macro @clone(&value) @builtin
{
$typeof(value)* x = malloc($typeof(value));
@@ -66,56 +69,62 @@ macro malloc($Type) @builtin
fn char[] alloc_bytes(usize bytes) @inline
{
return ((char*)thread_allocator.alloc(bytes, 1))[0..bytes - 1]!!;
return ((char*)thread_allocator.alloc(bytes))[:bytes]!!;
}
// Allocate `size` bytes from the thread allocator; panics on failure.
fn void* alloc(usize size)
{
	return thread_allocator.alloc(size)!!;
}
// Allocate `size` bytes from the thread allocator, returning a failure
// instead of panicking when allocation is not possible.
fn void*! alloc_checked(usize size)
{
	return thread_allocator.alloc(size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void* alloc(usize size, usize alignment = 0)
fn void*! alloc_aligned(usize size, usize alignment)
{
return thread_allocator.alloc(size, alignment)!!;
return thread_allocator.alloc_aligned(size, alignment);
}
// Allocate `size` zero-initialized bytes from the thread allocator;
// panics on failure.
fn void* calloc(usize size)
{
	return thread_allocator.calloc(size)!!;
}
// Allocate `size` zero-initialized bytes from the thread allocator,
// returning a failure instead of panicking when allocation fails.
fn void*! calloc_checked(usize size)
{
	return thread_allocator.calloc(size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! alloc_checked(usize size, usize alignment = 0)
fn void*! calloc_aligned(usize size, usize alignment)
{
return thread_allocator.alloc(size, alignment);
return thread_allocator.calloc_aligned(size, alignment);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void* calloc(usize size, usize alignment = 0)
fn void* realloc(void *ptr, usize new_size)
{
return thread_allocator.calloc(size, alignment)!!;
return thread_allocator.realloc(ptr, new_size)!!;
}
fn void*! realloc_checked(void *ptr, usize new_size)
{
return thread_allocator.realloc(ptr, new_size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! calloc_checked(usize size, usize alignment = 0)
fn void*! realloc_aligned(void *ptr, usize new_size, usize alignment)
{
return thread_allocator.calloc(size, alignment);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void* realloc(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment)!!;
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! realloc_checked(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment);
return thread_allocator.realloc_aligned(ptr, new_size, alignment);
}
fn void free(void* ptr) @builtin
@@ -123,6 +132,11 @@ fn void free(void* ptr) @builtin
return thread_allocator.free(ptr)!!;
}
// Free memory obtained from the *_aligned allocation functions; panics
// if the underlying allocator reports a failure.
fn void free_aligned(void* ptr)
{
	return thread_allocator.free_aligned(ptr)!!;
}
/**
* Run with a specific allocator inside of the macro body.
**/
@@ -147,17 +161,17 @@ macro void @tscoped(;@body())
fn void* talloc(usize size, usize alignment = 0)
{
return temp_allocator().alloc(size, alignment)!!;
return temp_allocator().alloc_aligned(size, alignment)!!;
}
fn void* tcalloc(usize size, usize alignment = 0)
{
return temp_allocator().calloc(size, alignment)!!;
return temp_allocator().calloc_aligned(size, alignment)!!;
}
fn void* trealloc(void* ptr, usize size, usize alignment = 0)
{
return temp_allocator().realloc(ptr, size, alignment)!!;
return temp_allocator().realloc_aligned(ptr, size, alignment)!!;
}
macro void @pool(;@body) @builtin

View File

@@ -8,7 +8,7 @@ const DEFAULT_SIZE_PREFIX_ALIGNMENT = $alignof(usize);
const Allocator* NULL_ALLOCATOR = &_NULL_ALLOCATOR;
const Allocator* LIBC_ALLOCATOR = &_SYSTEM_ALLOCATOR;
define AllocatorFunction = fn void*!(Allocator* allocator, usize new_size, usize alignment, void* old_pointer, AllocationKind kind);
define AllocatorFunction = fn void*!(Allocator* allocator, usize new_size, usize alignment, usize offset, void* old_pointer, AllocationKind kind);
struct Allocator
{
@@ -21,6 +21,10 @@ enum AllocationKind
CALLOC,
REALLOC,
FREE,
ALIGNED_ALLOC,
ALIGNED_CALLOC,
ALIGNED_REALLOC,
ALIGNED_FREE,
RESET,
MARK,
}
@@ -34,50 +38,71 @@ fault AllocationFailure
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! Allocator.alloc(Allocator* allocator, usize size, usize alignment = 0) @inline
fn void*! Allocator.alloc(Allocator* allocator, usize size) @inline
{
return allocator.function(allocator, size, alignment, null, ALLOC);
return allocator.function(allocator, size, 0, 0, null, ALLOC);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.realloc(Allocator* allocator, void* old_pointer, usize size, usize alignment = 0) @inline
fn void*! Allocator.alloc_aligned(Allocator* allocator, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, old_pointer, REALLOC);
return allocator.function(allocator, size, alignment, offset, null, ALIGNED_ALLOC);
}
fn void*! Allocator.realloc(Allocator* allocator, void* old_pointer, usize size) @inline
{
return allocator.function(allocator, size, 0, 0, old_pointer, REALLOC);
}
/**
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.realloc_aligned(Allocator* allocator, void* old_pointer, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, offset, old_pointer, ALIGNED_REALLOC);
}
fn usize! Allocator.mark(Allocator* allocator) @inline
{
return (usize)(uptr)allocator.function(allocator, 0, 0, null, MARK);
return (usize)(uptr)allocator.function(allocator, 0, 0, 0, null, MARK);
}
fn void*! Allocator.calloc(Allocator* allocator, usize size) @inline
{
return allocator.function(allocator, size, 0, 0, null, CALLOC);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.calloc(Allocator* allocator, usize size, usize alignment = 0) @inline
fn void*! Allocator.calloc_aligned(Allocator* allocator, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, null, CALLOC);
return allocator.function(allocator, size, alignment, offset, null, ALIGNED_CALLOC);
}
fn void! Allocator.free(Allocator* allocator, void* old_pointer) @inline
{
allocator.function(allocator, 0, 0, old_pointer, FREE)?;
allocator.function(allocator, 0, 0, 0, old_pointer, FREE)?;
}
fn void! Allocator.free_aligned(Allocator* allocator, void* old_pointer) @inline
{
allocator.function(allocator, 0, 0, 0, old_pointer, ALIGNED_FREE)?;
}
fn void Allocator.reset(Allocator* allocator, usize mark = 0)
{
allocator.function(allocator, mark, 0, null, RESET)!!;
allocator.function(allocator, mark, 0, 0, null, RESET)!!;
}
private fn usize alignment_for_allocation(usize alignment) @inline
{
if (alignment < DEFAULT_MEM_ALIGNMENT)
{
alignment = DEFAULT_SIZE_PREFIX_ALIGNMENT;
alignment = DEFAULT_MEM_ALIGNMENT;
}
return alignment;
}
@@ -95,7 +120,7 @@ struct DynamicArenaAllocator
* @require page_size >= 128
* @require this != null
**/
fn void DynamicArenaAllocator.init(DynamicArenaAllocator* this, usize page_size, Allocator* backing_allocator = mem::allocator())
fn void DynamicArenaAllocator.init(DynamicArenaAllocator* this, usize page_size, Allocator* backing_allocator = mem::current_allocator())
{
this.function = &dynamic_arena_allocator_function;
this.page = null;

View File

@@ -29,7 +29,6 @@ fn String join(char[][] s, char[] joiner)
return res;
}
fn ZString copy_zstring(char[] s)
{
usize len = s.len;

View File

@@ -158,7 +158,7 @@ fn ZString String.copy_zstr(String* str, Allocator* allocator = mem::current_all
usize str_len = str.len();
if (!str_len)
{
return (ZString)allocator.calloc(1, 1)!!;
return (ZString)allocator.calloc(1)!!;
}
char* zstr = allocator.alloc(str_len + 1)!!;
StringData* data = str.data();

View File

@@ -166,6 +166,15 @@ fn usize! File.println(File* file, char[] string)
return len + 1;
}
/**
* @param [&in] file
* @require file.file `File must be initialized`
*/
fn void File.flush(File* file)
{
libc::fflush(file.file);
}
fn File stdout()
{
return { libc::stdout() };

View File

@@ -717,14 +717,36 @@ fn usize! printf(char[] format, args...) @maydiscard
return vsnprintf(&out_putchar_fn, null, format, args);
}
fn usize! printfln(char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_putchar_fn, null, format, args)?;
putchar('\n');
return size + 1;
}
fn usize! String.printf(String* str, char[] format, args...) @maydiscard
{
return vsnprintf(&out_string_append_fn, str, format, args);
}
fn usize! String.printfln(String* str, char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_string_append_fn, str, format, args)?;
str.append('\n');
return size + 1;
}
fn usize! File.printf(File file, char[] format, args...) @maydiscard
{
return vsnprintf(&out_putchar_fn, &file, format, args);
return vsnprintf(&out_putchar_fn, &file, format, args);
}
fn usize! File.printfln(File file, char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_putchar_fn, &file, format, args)?;
file.putc('\n')?;
file.flush();
return size + 1;
}
private fn void! PrintParam.left_adjust(PrintParam* param, usize len)

View File

@@ -87,7 +87,6 @@ extern fn usize strxfrm(char* dest, char* src, usize n);
// malloc
extern fn void* malloc(usize size);
extern fn void* aligned_alloc(usize align, usize size);
extern fn void* calloc(usize count, usize size);
extern fn void* free(void*);
extern fn void* realloc(void* ptr, usize size);
@@ -104,6 +103,7 @@ $case OsType.LINUX:
extern CFile __stderr @extname("stderr");
extern fn usize malloc_usable_size(void* ptr);
macro usize malloc_size(void* ptr) { return malloc_usable_size(ptr); }
extern fn void* aligned_alloc(usize align, usize size);
macro CFile stdin() { return __stdin; }
macro CFile stdout() { return __stdout; }
macro CFile stderr() { return __stderr; }
@@ -112,7 +112,8 @@ $case OsType.MACOSX:
extern CFile __stdoutp;
extern CFile __stderrp;
extern fn usize malloc_size(void* ptr);
macro CFile stdin() { return __stdinp; }
extern fn void* aligned_alloc(usize align, usize size);
macro CFile stdin() { return __stdinp; }
macro CFile stdout() { return __stdoutp; }
macro CFile stderr() { return __stderrp; }
$case OsType.WIN32: