Updated malloc/calloc/realloc/free; deprecated the old helper functions. Added checks to prevent incorrect alignment of types when using malloc. Better errors from $assert. Added @deprecated. Fixed an issue with using named arguments after varargs.

This commit is contained in:
Christoffer Lerno
2023-02-27 14:51:35 +01:00
committed by Christoffer Lerno
parent 8ad8af861e
commit dd4edfb747
28 changed files with 705 additions and 343 deletions

View File

@@ -52,7 +52,7 @@ fn void*! arena_allocator_function(Allocator* data, usz size, usz alignment, usz
if (!size) return null;
alignment = alignment_for_allocation(alignment);
void* mem = arena._alloc(size, alignment, offset)?;
if (clear) mem::clear(mem, size, DEFAULT_MEM_ALIGNMENT);
if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
case ALIGNED_REALLOC:
case REALLOC:
@@ -84,8 +84,8 @@ fn void*! arena_allocator_function(Allocator* data, usz size, usz alignment, usz
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require this != null
@@ -110,8 +110,8 @@ fn void*! ArenaAllocator._alloc(ArenaAllocator* this, usz size, usz alignment, u
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= mem::MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, ArenaAllocatorHeader.alignof) == offset
* @require this != null
@@ -141,6 +141,6 @@ fn void*! ArenaAllocator._realloc(ArenaAllocator* this, void *old_pointer, usz s
}
// Otherwise just allocate new memory.
void* mem = this._alloc(size, alignment, offset)?;
mem::copy(mem, old_pointer, old_size, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
mem::copy(mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
}

View File

@@ -31,14 +31,14 @@ fn void DynamicArenaAllocator.destroy(DynamicArenaAllocator* this)
while (page)
{
DynamicArenaPage* next_page = page.prev_arena;
this.backing_allocator.free(page)!!;
free(page, .using = this.backing_allocator);
page = next_page;
}
page = this.unused_page;
while (page)
{
DynamicArenaPage* next_page = page.prev_arena;
this.backing_allocator.free(page)!!;
free(page, .using = this.backing_allocator);
page = next_page;
}
this.page = null;
@@ -103,7 +103,7 @@ fn void*! DynamicArenaAllocator._realloc(DynamicArenaAllocator* this, void* old_
return old_pointer;
}
void* new_mem = this._alloc(size, alignment, offset)?;
mem::copy(new_mem, old_pointer, old_size, DEFAULT_MEM_ALIGNMENT);
mem::copy(new_mem, old_pointer, old_size, mem::DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -134,10 +134,10 @@ fn void*! DynamicArenaAllocator._alloc_new(DynamicArenaAllocator* this, usz size
// Grab the page without alignment (we do it ourselves)
void* mem = this.backing_allocator.alloc(page_size)?;
DynamicArenaPage*! page = this.backing_allocator.alloc(DynamicArenaPage.sizeof);
DynamicArenaPage*! page = malloc(DynamicArenaPage, .using = this.backing_allocator);
if (catch err = page)
{
this.backing_allocator.free(mem)?;
free(mem, .using = this.backing_allocator);
return err!;
}
page.memory = mem;
@@ -210,7 +210,7 @@ fn void*! dynamic_arena_allocator_function(Allocator* data, usz size, usz alignm
assert(!old_pointer, "Unexpected no old pointer for calloc.");
if (!size) return null;
void* mem = allocator._alloc(size, alignment, offset)?;
mem::clear(mem, size, DEFAULT_MEM_ALIGNMENT);
mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
case ALLOC:
case ALIGNED_ALLOC:

View File

@@ -77,13 +77,13 @@ fn void*! SimpleHeapAllocator._realloc(SimpleHeapAllocator* this, void* old_poin
fn void*! SimpleHeapAllocator._calloc(SimpleHeapAllocator* this, usz bytes) @local
{
void* data = this._alloc(bytes)?;
mem::clear(data, bytes, DEFAULT_MEM_ALIGNMENT);
mem::clear(data, bytes, mem::DEFAULT_MEM_ALIGNMENT);
return data;
}
fn void*! SimpleHeapAllocator._alloc(SimpleHeapAllocator* this, usz bytes) @local
{
usz aligned_bytes = mem::aligned_offset(bytes, DEFAULT_MEM_ALIGNMENT);
usz aligned_bytes = mem::aligned_offset(bytes, mem::DEFAULT_MEM_ALIGNMENT);
if (!this.free_list)
{
this.add_block(aligned_bytes)?;
@@ -132,7 +132,7 @@ fn void*! SimpleHeapAllocator._alloc(SimpleHeapAllocator* this, usz bytes) @loca
fn void! SimpleHeapAllocator.add_block(SimpleHeapAllocator* this, usz aligned_bytes) @local
{
assert(mem::aligned_offset(aligned_bytes, DEFAULT_MEM_ALIGNMENT) == aligned_bytes);
assert(mem::aligned_offset(aligned_bytes, mem::DEFAULT_MEM_ALIGNMENT) == aligned_bytes);
char[] result = this.alloc_fn(aligned_bytes + Header.sizeof)?;
Header* new_block = (Header*)result.ptr;
new_block.size = result.len - Header.sizeof;

View File

@@ -73,7 +73,7 @@ macro void*! @aligned_realloc(#calloc_fn, #free_fn, void* old_pointer, usz bytes
AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
void* data_start = desc.start;
void* new_data = @aligned_calloc(#calloc_fn, bytes, alignment, offset)?;
mem::copy(new_data, old_pointer, desc.len > bytes ? desc.len : bytes, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
mem::copy(new_data, old_pointer, desc.len > bytes ? desc.len : bytes, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
$if ($checks(#free_fn(data_start)?)):
#free_fn(data_start)?;
$else:
@@ -94,7 +94,7 @@ macro void! @aligned_free(#free_fn, void* old_pointer)
fn void*! libc_allocator_fn(Allocator* unused, usz bytes, usz alignment, usz offset, void* old_pointer, AllocationKind kind) @inline
{
if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
if (!alignment) alignment = mem::DEFAULT_MEM_ALIGNMENT;
assert(math::is_power_of_2(alignment), "Alignment was not a power of 2");
void* data;

View File

@@ -39,7 +39,7 @@ macro bool TempAllocatorPage.is_aligned(TempAllocatorPage* page) => page.size &
**/
fn TempAllocator*! new_temp(usz size, Allocator* backing_allocator)
{
TempAllocator* allocator = backing_allocator.alloc(size + TempAllocator.sizeof)?;
TempAllocator* allocator = malloc_checked(TempAllocator, .using = backing_allocator, .end_padding = size)?;
allocator.last_page = null;
allocator.function = &temp_allocator_function;
allocator.backing_allocator = backing_allocator;
@@ -132,7 +132,7 @@ fn void*! TempAllocator._realloc_page(TempAllocator* this, TempAllocatorPage* pa
usz page_size = page.pagesize();
// Clear on size > original size.
void* data = this._alloc(size, alignment, offset, false)?;
mem::copy(data, &page.data[0], page_size, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
mem::copy(data, &page.data[0], page_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
if (page.is_aligned())
{
this.backing_allocator.free_aligned(real_pointer)?;
@@ -159,7 +159,7 @@ fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usz size, u
// TODO optimize last allocation
TempAllocatorChunk* data = this._alloc(size, alignment, offset, size > chunk.size)?;
mem::copy(data, pointer, chunk.size, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
mem::copy(data, pointer, chunk.size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
return data;
}
@@ -167,7 +167,7 @@ fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usz size, u
/**
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require alignment <= mem::MAX_MEMORY_ALIGNMENT `alignment too big`
* @require this != null
**/
fn void*! TempAllocator._alloc(TempAllocator* this, usz size, usz alignment, usz offset, bool clear) @local
@@ -188,7 +188,7 @@ fn void*! TempAllocator._alloc(TempAllocator* this, usz size, usz alignment, usz
TempAllocatorChunk* chunk_start = mem - TempAllocatorChunk.sizeof;
chunk_start.size = size;
this.used = new_usage;
if (clear) mem::clear(mem, size, DEFAULT_MEM_ALIGNMENT);
if (clear) mem::clear(mem, size, mem::DEFAULT_MEM_ALIGNMENT);
return mem;
}
@@ -196,7 +196,7 @@ fn void*! TempAllocator._alloc(TempAllocator* this, usz size, usz alignment, usz
TempAllocatorPage* page;
// We have something we need to align.
if (alignment > DEFAULT_MEM_ALIGNMENT || offset)
if (alignment > mem::DEFAULT_MEM_ALIGNMENT || offset)
{
// This is actually simpler, since it will create the offset for us.
usz total_alloc_size = TempAllocatorPage.sizeof + size;
@@ -214,14 +214,14 @@ fn void*! TempAllocator._alloc(TempAllocator* this, usz size, usz alignment, usz
else
{
// Here we might need to pad
usz padded_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, DEFAULT_MEM_ALIGNMENT);
usz padded_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, mem::DEFAULT_MEM_ALIGNMENT);
usz total_alloc_size = padded_header_size + size;
void* alloc = (clear ? this.backing_allocator.calloc(total_alloc_size) : this.backing_allocator.alloc(total_alloc_size))?;
// Find the page.
page = alloc + padded_header_size - TempAllocatorPage.sizeof;
assert(mem::ptr_is_aligned(page, TempAllocator.alignof));
assert(mem::ptr_is_aligned(&page.data[0], DEFAULT_MEM_ALIGNMENT));
assert(mem::ptr_is_aligned(&page.data[0], mem::DEFAULT_MEM_ALIGNMENT));
page.start = alloc;
page.size = size;
}