Allocators separated into aligned and non-aligned operations.

This commit is contained in:
Christoffer Lerno
2022-08-03 20:45:30 +02:00
parent 550bca79e9
commit cc8884d3d1
15 changed files with 572 additions and 269 deletions

View File

@@ -1,58 +1,47 @@
module std::core::mem::allocator;
struct ArenaAllocatorHeader
{
usize size;
char[*] data;
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
ArenaAllocator* arena = (ArenaAllocator*)data;
bool clear = false;
switch (kind)
{
case CALLOC:
case ALIGNED_CALLOC:
clear = true;
nextcase;
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
alignment = alignment_for_allocation(alignment);
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
if (kind == AllocationKind.CALLOC) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
void* mem = arena._alloc(size, alignment, offset)?;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALIGNED_REALLOC:
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
if (size > arena.data.len) return AllocationFailure.CHUNK_TOO_LARGE!;
alignment = alignment_for_allocation(alignment);
usize* old_size_ptr = (usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
usize old_size = *old_size_ptr;
// Do last allocation and alignment match?
if (&arena.data[arena.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
*old_size_ptr = size;
arena.used -= old_size - size;
return old_pointer;
}
usize new_used = arena.used + size - old_size;
if (new_used > arena.data.len) return AllocationFailure.OUT_OF_MEMORY!;
arena.used = new_used;
*old_size_ptr = size;
return old_pointer;
}
// Otherwise just allocate new memory.
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
mem::memcpy(mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return mem;
return arena._realloc(old_pointer, size, alignment, offset)?;
case ALIGNED_FREE:
case FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == &arena.data[arena.used])
ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
// Reclaim memory if it's the last element.
if (old_pointer + header.size == &arena.data[arena.used])
{
arena.used -= old_size;
arena.used -= header.size + ArenaAllocatorHeader.sizeof;
}
return null;
case MARK:
@@ -69,16 +58,62 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, $alignof(ArenaAllocatorHeader)) == offset
* @require this != null
**/
private fn void*! ArenaAllocator.alloc(ArenaAllocator* this, usize size, usize alignment, usize prefixed_bytes = 0)
private fn void*! ArenaAllocator._alloc(ArenaAllocator* this, usize size, usize alignment, usize offset)
{
void* start_mem = this.data.ptr;
void* unaligned_pointer = start_mem + this.used + prefixed_bytes;
if ((uptr)unaligned_pointer < (uptr)start_mem) return AllocationFailure.OUT_OF_MEMORY!;
usize offset_start = mem::aligned_offset((usize)(uptr)unaligned_pointer, alignment) - (usize)(uptr)start_mem;
usize end = offset_start + size;
if (end > this.data.len || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
usize total_len = this.data.len;
if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE!;
void* start_mem = this.data.ptr;
void* unaligned_pointer_to_offset = start_mem + this.used + ArenaAllocatorHeader.sizeof + offset;
void* aligned_pointer_to_offset = mem::aligned_pointer(unaligned_pointer_to_offset, alignment);
usize end = (usize)(aligned_pointer_to_offset - this.data.ptr) + size - offset;
if (end > total_len) return AllocationFailure.OUT_OF_MEMORY!;
this.used = end;
return start_mem + offset_start;
void *mem = aligned_pointer_to_offset - offset;
ArenaAllocatorHeader* header = mem - ArenaAllocatorHeader.sizeof;
header.size = size;
return mem;
}
/**
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require offset <= MAX_MEMORY_ALIGNMENT `offset too big`
* @require offset <= size && offset >= 0
* @require mem::aligned_offset(offset, $alignof(ArenaAllocatorHeader)) == offset
* @require this != null
**/
private fn void*! ArenaAllocator._realloc(ArenaAllocator* this, void *old_pointer, usize size, usize alignment, usize offset)
{
	assert(old_pointer >= this.data.ptr, "Pointer originates from a different allocator.");
	usize total_len = this.data.len;
	if (size > total_len) return AllocationFailure.CHUNK_TOO_LARGE!;
	// Each allocation is preceded by a header recording its size.
	ArenaAllocatorHeader* header = old_pointer - ArenaAllocatorHeader.sizeof;
	usize old_size = header.size;
	// If this was the most recent allocation and the alignment still holds,
	// grow or shrink it in place by adjusting `used`.
	if (&this.data[this.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer + offset, alignment))
	{
		if (old_size >= size)
		{
			// Shrinking: give the tail back to the arena.
			this.used -= old_size - size;
		}
		else
		{
			// Growing: make sure the extra bytes fit in the arena.
			usize new_used = this.used + size - old_size;
			if (new_used > total_len) return AllocationFailure.OUT_OF_MEMORY!;
			this.used = new_used;
		}
		header.size = size;
		return old_pointer;
	}
	// Otherwise allocate new memory and copy the old contents over.
	void* mem = this._alloc(size, alignment, offset)?;
	// Copy only what fits: when shrinking off-place, copying the full
	// old_size would write past the end of the new allocation.
	usize to_copy = old_size < size ? old_size : size;
	mem::memcpy(mem, old_pointer, to_copy, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	return mem;
}

View File

@@ -9,6 +9,10 @@ private struct DynamicArenaPage
void* last_ptr;
}
// Header stored immediately before each allocation in a dynamic arena
// page, recording the size of that allocation.
private struct DynamicArenaChunk
{
	usize size;
}
/**
* @require ptr && this
@@ -28,7 +32,7 @@ private fn void DynamicArenaAllocator.free(DynamicArenaAllocator* this, void* pt
* @require old_pointer && size > 0
* @require this.page `tried to realloc pointer on invalid allocator`
*/
private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, void* old_pointer, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._realloc(DynamicArenaAllocator* this, void* old_pointer, usize size, usize alignment, usize offset)
{
DynamicArenaPage* current_page = this.page;
alignment = alignment_for_allocation(alignment);
@@ -53,7 +57,7 @@ private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, voi
current_page.used += add_size;
return old_pointer;
}
void* new_mem = this.alloc(size, alignment)?;
void* new_mem = this._alloc(size, alignment, offset)?;
mem::memcpy(new_mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -78,9 +82,12 @@ private fn void DynamicArenaAllocator.reset(DynamicArenaAllocator* this)
* @require math::is_power_of_2(alignment)
* @require size > 0
*/
private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._alloc_new(DynamicArenaAllocator* this, usize size, usize alignment, usize offset)
{
usize page_size = max(this.page_size, size + DEFAULT_SIZE_PREFIX + alignment);
// First, make sure that we can align it, extending the page size if needed.
usize page_size = max(this.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof + offset, alignment) - offset);
// Grab the page without alignment (we do it ourselves)
void* mem = this.backing_allocator.alloc(page_size)?;
DynamicArenaPage*! page = this.backing_allocator.alloc(DynamicArenaPage.sizeof);
if (catch err = page)
@@ -89,15 +96,16 @@ private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, u
return err!;
}
page.memory = mem;
usize offset = mem::aligned_offset((usize)mem + DEFAULT_SIZE_PREFIX, alignment) - (usize)mem;
usize* size_ptr = mem + offset - DEFAULT_SIZE_PREFIX;
*size_ptr = size;
void* mem_start = mem::aligned_pointer(mem + offset + DynamicArenaChunk.sizeof, alignment) - offset;
assert(mem_start + DynamicArenaChunk.sizeof + size < mem + page_size);
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem_start - 1;
chunk.size = size;
page.prev_arena = this.page;
page.total = page_size;
page.used = size + offset;
this.page = page;
return page.last_ptr = page.memory + offset;
page.last_ptr = mem_start;
return mem_start;
}
/**
@@ -105,7 +113,7 @@ private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, u
* @require size > 0
* @require this
*/
private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize size, usize alignment)
private fn void*! DynamicArenaAllocator._alloc(DynamicArenaAllocator* this, usize size, usize alignment, usize offset)
{
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = this.page;
@@ -115,15 +123,15 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
this.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page) return this.alloc_new(size, alignment);
usize start = mem::aligned_offset((uptr)page.memory + page.used + DEFAULT_SIZE_PREFIX, alignment) - (usize)page.memory;
usize new_used = start + size;
if (!page) return this._alloc_new(size, alignment, offset);
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
usize new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = this.unused_page))
{
start = mem::aligned_offset((uptr)page.memory + DEFAULT_SIZE_PREFIX, alignment) - (usize)page.memory;
new_used = start + size;
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof + offset, alignment) - offset;
new_used = start + size - page.memory;
if (page.total >= new_used)
{
this.unused_page = page.prev_arena;
@@ -132,12 +140,13 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
break ALLOCATE_NEW;
}
}
return this.alloc_new(size, alignment);
return this._alloc_new(size, alignment, offset);
}
page.used = new_used;
void* mem = page.memory + start;
usize* size_offset = mem - DEFAULT_SIZE_PREFIX;
*size_offset = size;
void* mem = start;
DynamicArenaChunk* chunk = (DynamicArenaChunk*)mem - 1;
chunk.size = size;
return mem;
}
@@ -145,31 +154,35 @@ private fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
DynamicArenaAllocator* allocator = (DynamicArenaAllocator*)data;
switch (kind)
{
case CALLOC:
case ALIGNED_CALLOC:
assert(!old_pointer, "Unexpected no old pointer for calloc.");
if (!size) return null;
void* mem = allocator.alloc(size, alignment)?;
void* mem = allocator._alloc(size, alignment, offset)?;
mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected no old pointer for alloc.");
if (!size) return null;
return allocator.alloc(size, alignment);
return allocator._alloc(size, alignment, offset);
case REALLOC:
case ALIGNED_REALLOC:
if (!size)
{
if (!old_pointer) return null;
allocator.free(old_pointer);
return null;
}
if (!old_pointer) return allocator.alloc(size, alignment);
void* mem = allocator.realloc(old_pointer, size, alignment)?;
if (!old_pointer) return allocator._alloc(size, alignment, offset);
void* mem = allocator._realloc(old_pointer, size, alignment, offset)?;
return mem;
case ALIGNED_FREE:
case FREE:
if (!old_pointer) return null;
allocator.free(old_pointer);

View File

@@ -4,20 +4,80 @@ import libc;
private const Allocator _NULL_ALLOCATOR = { &null_allocator_fn };
private const Allocator _SYSTEM_ALLOCATOR = { &libc_allocator_fn };
private fn void*! null_allocator_fn(Allocator* this, usize bytes, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! null_allocator_fn(Allocator* this, usize bytes, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
switch (kind)
{
case ALLOC:
case CALLOC:
case REALLOC:
case ALIGNED_ALLOC:
case ALIGNED_REALLOC:
case ALIGNED_CALLOC:
return AllocationFailure.OUT_OF_MEMORY!;
default:
return null;
}
}
fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, void* old_pointer, AllocationKind kind) @inline
// Descriptor stored immediately before an aligned libc allocation so
// that free/realloc can recover the original (unaligned) malloc pointer.
private struct AlignedBlock
{
	usize len;   // Requested size of the user allocation in bytes.
	void* start; // Start of the underlying (unaligned) allocation.
}
/**
 * Allocate memory such that (pointer + offset) is aligned to `alignment`,
 * placing an AlignedBlock descriptor immediately before the returned pointer.
 * Returns null on allocation failure (the caller maps null to OUT_OF_MEMORY).
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_alloc(usize bytes, usize alignment, usize offset) @inline
{
	// Reserve room for the descriptor plus worst-case alignment padding.
	usize header = mem::aligned_offset(AlignedBlock.sizeof + offset, alignment) - offset;
	void* data = libc::malloc(header + bytes);
	// Bail out before writing the descriptor through a near-null pointer.
	if (!data) return null;
	void* mem = mem::aligned_pointer(data + offset, alignment) - offset;
	assert(mem > data);
	AlignedBlock* desc = (AlignedBlock*)mem - 1;
	*desc = { bytes, data };
	return mem;
}
/**
 * Allocate zeroed memory such that (pointer + offset) is aligned to
 * `alignment`, placing an AlignedBlock descriptor immediately before the
 * returned pointer. Returns null on allocation failure (the caller maps
 * null to OUT_OF_MEMORY).
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_calloc(usize bytes, usize alignment, usize offset) @inline
{
	// Reserve room for the descriptor plus worst-case alignment padding.
	usize header = mem::aligned_offset(AlignedBlock.sizeof + offset, alignment) - offset;
	void* data = libc::calloc(header + bytes, 1);
	// Bail out before writing the descriptor through a near-null pointer.
	if (!data) return null;
	void* mem = mem::aligned_pointer(data + offset, alignment) - offset;
	AlignedBlock* desc = (AlignedBlock*)mem - 1;
	assert(mem > data);
	*desc = { bytes, data };
	return mem;
}
/**
 * Reallocate a block previously returned by _libc_aligned_alloc/_libc_aligned_calloc,
 * preserving the (alignment, offset) placement. The old block is freed only on
 * success; returns null on allocation failure.
 *
 * @require bytes > 0
 * @require alignment > 0
 **/
private fn void* _libc_aligned_realloc(void* old_pointer, usize bytes, usize alignment, usize offset) @inline
{
	AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
	void* data_start = desc.start;
	void* new_data = _libc_aligned_calloc(bytes, alignment, offset);
	// Keep the old block intact if the new allocation failed (realloc semantics).
	if (!new_data) return null;
	// Copy the smaller of the two sizes: the previous max() copy read past
	// the old block when growing and wrote past the new one when shrinking.
	usize to_copy = desc.len < bytes ? desc.len : bytes;
	mem::memcpy(new_data, old_pointer, to_copy, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	libc::free(data_start);
	return new_data;
}
// Free a block returned by the _libc_aligned_* helpers: the descriptor
// stored just before the pointer remembers the real allocation start.
private fn void _libc_aligned_free(void* old_pointer) @inline
{
	AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
	libc::free(desc.start);
}
fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, usize offset, void* old_pointer, AllocationKind kind) @inline
{
if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
assert(math::is_power_of_2(alignment), "Alignment was not a power of 2");
@@ -25,58 +85,35 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
void* data;
switch (kind)
{
case ALIGNED_ALLOC:
if (alignment <= DEFAULT_MEM_ALIGNMENT) nextcase ALLOC;
data = _libc_aligned_alloc(bytes, alignment, offset);
case ALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
}
else
{
data = libc::malloc(bytes);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case CALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
mem::memset(data, 0, bytes, false, DEFAULT_MEM_ALIGNMENT);
}
else
{
data = libc::calloc(bytes, 1);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
data = libc::malloc(bytes);
case ALIGNED_CALLOC:
if (alignment <= DEFAULT_MEM_ALIGNMENT) nextcase CALLOC;
data = _libc_aligned_calloc(bytes, alignment, offset);
case CALLOC:
data = libc::calloc(bytes, 1);
case ALIGNED_REALLOC:
if (!bytes) nextcase ALIGNED_FREE;
if (!old_pointer) nextcase ALIGNED_CALLOC;
data = _libc_aligned_realloc(old_pointer, bytes, alignment, offset);
case REALLOC:
if (!bytes) nextcase FREE;
if (!old_pointer) nextcase CALLOC;
$if (libc::HAS_MALLOC_SIZE):
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
usize size = libc::malloc_size(old_pointer);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(bytes, alignment);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
if (bytes > size)
{
mem::memset(data + size, 0, bytes - size, false, DEFAULT_MEM_ALIGNMENT);
}
mem::memcpy(data, old_pointer, size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
libc::free(old_pointer);
return data;
}
$endif;
data = libc::realloc(old_pointer, bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case RESET:
return AllocationFailure.UNSUPPORTED_OPERATION!;
case ALIGNED_FREE:
_libc_aligned_free(old_pointer);
return null;
case FREE:
libc::free(old_pointer);
return null;
default:
unreachable();
}
unreachable();
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
}

View File

@@ -17,16 +17,23 @@ struct TempAllocator
char[*] data;
}
private const usize PAGE_IS_ALIGNED = (usize)isize.max + 1;
struct TempAllocatorPage
{
TempAllocatorPage* prev_page;
usize mark;
void* start;
usize mark;
usize size;
usize ident;
char[*] data;
}
// The allocation size lives in the low bits of `size`; the top bit
// (PAGE_IS_ALIGNED) tags pages allocated through the aligned path.
macro usize TempAllocatorPage.pagesize(TempAllocatorPage* page) { return page.size & ~PAGE_IS_ALIGNED; }
// NOTE(review): relies on '&' binding tighter than '==' here — confirm C3 precedence.
macro bool TempAllocatorPage.is_aligned(TempAllocatorPage* page) { return page.size & PAGE_IS_ALIGNED == PAGE_IS_ALIGNED; }
/**
* @require size >= 16
**/
@@ -45,28 +52,31 @@ fn TempAllocator*! new_temp(usize size, Allocator* backing_allocator)
* @require !alignment || math::is_power_of_2(alignment)
* @require data `unexpectedly missing the allocator`
*/
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, usize offset, void* old_pointer, AllocationKind kind)
{
TempAllocator* arena = (TempAllocator*)data;
switch (kind)
{
case CALLOC:
case ALLOC:
case ALIGNED_CALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
return arena._alloc(size, alignment_for_allocation(alignment), kind == AllocationKind.CALLOC);
return arena._alloc(size, alignment, offset, true);
case ALLOC:
case ALIGNED_ALLOC:
assert(!old_pointer, "Unexpected old pointer for alloc.");
if (!size) return null;
return arena._alloc(size, alignment_for_allocation(alignment), offset, false);
case ALIGNED_REALLOC:
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
return arena._realloc(old_pointer, size, alignment_for_allocation(alignment));
return arena._realloc(old_pointer, size, alignment_for_allocation(alignment), offset);
case FREE:
case ALIGNED_FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)&arena.data, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == &arena.data[arena.used])
{
arena.used -= old_size;
}
io::println("Freeing stuff\n");
arena._free(old_pointer)?;
return null;
case MARK:
return (void*)(uptr)arena.used;
@@ -77,121 +87,172 @@ private fn void*! temp_allocator_function(Allocator* data, usize size, usize ali
unreachable();
}
// Best-effort free: only the most recent allocation is actually
// reclaimed (by rolling back `used`); anything else is a no-op until
// the allocator is reset.
private fn void! TempAllocator._free(TempAllocator* this, void* old_pointer)
{
	// TODO fix free
	assert((uptr)old_pointer >= (uptr)&this.data, "Pointer originates from a different allocator.");
	// The allocation size is stored just before the returned pointer.
	usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
	if (old_pointer + old_size == &this.data[this.used])
	{
		this.used -= old_size;
	}
}
private fn void! TempAllocator._reset(TempAllocator* this, usize mark)
{
TempAllocatorPage *last_page = this.last_page;
while (last_page)
while (last_page && last_page.mark < mark)
{
if (last_page.mark < mark) break;
void* mem = last_page.start;
TempAllocatorPage *to_free = last_page;
last_page = last_page.prev_page;
this.backing_allocator.free(mem)?;
this._free_page(to_free)?;
}
this.last_page = last_page;
this.used = mark;
}
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment)
// Release a backing page through whichever allocator path created it:
// aligned pages must be returned via free_aligned.
private fn void! TempAllocator._free_page(TempAllocator* this, TempAllocatorPage* page) @inline
{
	void* mem = page.start;
	if (page.is_aligned()) return this.backing_allocator.free_aligned(mem);
	return this.backing_allocator.free(mem);
}
/**
 * Reallocate an out-of-band page allocation: unlink the page, allocate the
 * new size through the temp allocator, copy the contents over and release
 * the old page via its original backing path.
 **/
private fn void*! TempAllocator._realloc_page(TempAllocator* this, TempAllocatorPage* page, usize size, usize alignment, usize offset) @inline
{
	// The actual start of the backing allocation.
	void* real_pointer = page.start;
	// Walk the singly linked page list to find the link that points at
	// this page, then unlink it.
	TempAllocatorPage **pointer_to_prev = &this.last_page;
	while (*pointer_to_prev != page)
	{
		pointer_to_prev = &((*pointer_to_prev).prev_page);
	}
	*pointer_to_prev = page.prev_page;
	usize page_size = page.pagesize();
	void* data = this._alloc(size, alignment, offset, false)?;
	// Copy only what fits: copying the full old page into a smaller new
	// allocation would overrun the new block.
	usize to_copy = page_size < size ? page_size : size;
	mem::memcpy(data, &page.data[0], to_copy, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	if (page.is_aligned())
	{
		this.backing_allocator.free_aligned(real_pointer)?;
	}
	else
	{
		this.backing_allocator.free(real_pointer)?;
	}
	return data;
}
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment, usize offset) @inline
{
TempAllocatorChunk *chunk = pointer - TempAllocatorChunk.sizeof;
if (chunk.size == ~(usize)0)
if (chunk.size == (usize)-1)
{
assert(this.last_page, "Realloc of non temp pointer");
// First grab the page
TempAllocatorPage *page = pointer - TempAllocatorPage.sizeof;
// Then the actual start pointer:
void* real_pointer = page.start;
// Walk backwards to find the pointer to this page.
TempAllocatorPage **pointer_to_prev = &this.last_page;
// Remove the page from the list
while (*pointer_to_prev != page)
{
pointer_to_prev = &((*pointer_to_prev).prev_page);
}
*pointer_to_prev = page.prev_page;
// Find the new header size
usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
// And the old one.
uptrdiff prev_header_size = pointer - real_pointer;
// If the new header size needs to move, we're out of luck.
// We need to make a copy
if (actual_header_size > prev_header_size)
{
// Clear only if the new size is bigger than the old one.
void* data = this._alloc(size, alignment, size > page.size)?;
mem::memcpy(data, pointer, page.size);
this.backing_allocator.free(real_pointer)?;
return data;
}
void* new_start = this.backing_allocator.realloc(real_pointer, prev_header_size + size, alignment)?;
page = new_start + prev_header_size - TempAllocatorPage.sizeof;
page.mark = this.used;
page.prev_page = this.last_page;
this.last_page = page;
io::printf("Size: %d\n", size);
page.size = size;
page.start = new_start;
return &page.data;
return this._realloc_page(page, size, alignment, offset);
}
assert(pointer < &this.data + this.capacity && pointer >= &this.data, "This is not a temp allocated pointer.");
assert(pointer < &this.data + this.used, "This is a stale temp pointer.");
io::printf("realloc normal %s\n", size);
// TODO optimize last allocation
TempAllocatorChunk* data = this._alloc(size, alignment, size > chunk.size)?;
TempAllocatorChunk* data = this._alloc(size, alignment, offset, size > chunk.size)?;
mem::memcpy(data, pointer, chunk.size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return data;
}
/**
* @require alignment > 0 `alignment must be non zero`
* @require math::is_power_of_2(alignment)
* @require size > 0
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require this != null
**/
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, bool clear)
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, usize offset, bool clear)
{
void* start_mem = &this.data;
uptr starting_ptr = (uptr)start_mem + this.used;
uptr aligned_header_start = mem::aligned_offset(starting_ptr, $alignof(TempAllocatorChunk));
uptr unaligned_data_start = aligned_header_start + TempAllocatorChunk.sizeof;
usize data_start = mem::aligned_offset(unaligned_data_start, alignment);
usize new_usage = data_start + size - (uptr)start_mem;
// Fallback to backing allocator
if (new_usage > this.capacity)
void* starting_ptr = start_mem + this.used;
void* aligned_header_start = mem::aligned_pointer(starting_ptr, $alignof(TempAllocatorChunk));
void* mem = aligned_header_start + TempAllocatorChunk.sizeof;
if (alignment > $alignof(TempAllocatorChunk))
{
// Enlarge the header if needed
usize actual_header_size = mem::aligned_offset(TempAllocator.sizeof, alignment);
usize total_alloc_size = actual_header_size + size;
void* start = clear ? this.backing_allocator.calloc(total_alloc_size, alignment) : this.backing_allocator.alloc(total_alloc_size, alignment)?;
mem = mem::aligned_pointer(mem + offset, alignment) - offset;
}
usize new_usage = (usize)(mem - start_mem) + size;
// Move forward to the memory
void* mem = start + actual_header_size;
TempAllocatorPage* page = mem - TempAllocator.sizeof;
page.start = start;
page.ident = ~(usize)0;
page.mark = this.used;
page.size = size;
page.prev_page = this.last_page;
this.last_page = page;
assert(&page.data == mem, "Expected match");
return mem;
// Arena alignment, simple!
if (new_usage <= this.capacity)
{
TempAllocatorChunk* chunk_start = mem - TempAllocatorChunk.sizeof;
chunk_start.size = size;
this.used = new_usage;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
}
TempAllocatorChunk* chunk_start = (TempAllocatorChunk*)(data_start - TempAllocatorChunk.sizeof);
chunk_start.size = size;
this.used = new_usage;
void* mem = &chunk_start.data;
if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
// Fallback to backing allocator
TempAllocatorPage* page;
// We have something we need to align.
if (alignment > DEFAULT_MEM_ALIGNMENT || offset)
{
// This is actually simpler, since it will create the offset for us.
usize total_alloc_size = TempAllocatorPage.sizeof + size;
if (clear)
{
page = this.backing_allocator.calloc_aligned(total_alloc_size, alignment, TempAllocatorPage.sizeof + offset)?;
}
else
{
page = this.backing_allocator.alloc_aligned(total_alloc_size, alignment, TempAllocatorPage.sizeof + offset)?;
}
page.start = page;
page.size = size | PAGE_IS_ALIGNED;
}
else
{
// Here we might need to pad
usize padded_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, DEFAULT_MEM_ALIGNMENT);
usize total_alloc_size = padded_header_size + size;
void* alloc = clear ? this.backing_allocator.calloc(total_alloc_size) : this.backing_allocator.alloc(total_alloc_size)?;
// Find the page.
page = alloc + padded_header_size - TempAllocatorPage.sizeof;
assert(mem::ptr_is_aligned(page, $alignof(TempAllocator)));
assert(mem::ptr_is_aligned(&page.data[0], DEFAULT_MEM_ALIGNMENT));
page.start = alloc;
page.size = size;
}
// Mark it as a page
page.ident = ~(usize)0;
// Store when it was created
page.mark = this.used;
// Hook up the page.
page.prev_page = this.last_page;
this.last_page = page;
return &page.data[0];
}
// Debug helper: dump the out-of-band page list to `f`.
fn void TempAllocator.print_pages(TempAllocator* this, File f)
{
	TempAllocatorPage *last_page = this.last_page;
	if (!last_page)
	{
		f.printf("No pages.\n");
		return;
	}
	f.printf("---Pages----\n");
	uint index = 0;
	while (last_page)
	{
		// Use the page helpers instead of duplicating the tag-bit logic
		// with a hard-coded `1u64 << 63` mask.
		f.printf("%d. Alloc: %d %d at %p%s\n", ++index,
			last_page.pagesize(), last_page.mark, &last_page.data[0], last_page.is_aligned() ? " [aligned]" : "");
		last_page = last_page.prev_page;
	}
}

View File

@@ -21,6 +21,11 @@ fn usize aligned_offset(usize offset, usize alignment)
return alignment * ((offset + alignment - 1) / alignment);
}
// Round `ptr` up to the next multiple of `alignment`
// (pointer-typed counterpart of aligned_offset).
macro void* aligned_pointer(void* ptr, usize alignment)
{
	return (void*)(uptr)aligned_offset((uptr)ptr, alignment);
}
/**
* @require math::is_power_of_2(alignment)
@@ -50,8 +55,6 @@ macro void memset(void* dst, char val, usize bytes, bool $is_volatile = false, u
$$memset(dst, val, bytes, $is_volatile, $dst_align);
}
macro @clone(&value) @builtin
{
$typeof(value)* x = malloc($typeof(value));
@@ -66,56 +69,62 @@ macro malloc($Type) @builtin
fn char[] alloc_bytes(usize bytes) @inline
{
return ((char*)thread_allocator.alloc(bytes, 1))[0..bytes - 1]!!;
return ((char*)thread_allocator.alloc(bytes))[:bytes]!!;
}
fn void* alloc(usize size)
{
return thread_allocator.alloc(size)!!;
}
fn void*! alloc_checked(usize size)
{
return thread_allocator.alloc(size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void* alloc(usize size, usize alignment = 0)
fn void*! alloc_aligned(usize size, usize alignment)
{
return thread_allocator.alloc(size, alignment)!!;
return thread_allocator.alloc_aligned(size, alignment);
}
fn void* calloc(usize size)
{
return thread_allocator.calloc(size)!!;
}
fn void*! calloc_checked(usize size)
{
return thread_allocator.calloc(size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! alloc_checked(usize size, usize alignment = 0)
fn void*! calloc_aligned(usize size, usize alignment)
{
return thread_allocator.alloc(size, alignment);
return thread_allocator.calloc_aligned(size, alignment);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void* calloc(usize size, usize alignment = 0)
fn void* realloc(void *ptr, usize new_size)
{
return thread_allocator.calloc(size, alignment)!!;
return thread_allocator.realloc(ptr, new_size)!!;
}
fn void*! realloc_checked(void *ptr, usize new_size)
{
return thread_allocator.realloc(ptr, new_size);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! calloc_checked(usize size, usize alignment = 0)
fn void*! realloc_aligned(void *ptr, usize new_size, usize alignment)
{
return thread_allocator.calloc(size, alignment);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void* realloc(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment)!!;
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! realloc_checked(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment);
return thread_allocator.realloc_aligned(ptr, new_size, alignment);
}
fn void free(void* ptr) @builtin
@@ -123,6 +132,11 @@ fn void free(void* ptr) @builtin
return thread_allocator.free(ptr)!!;
}
fn void free_aligned(void* ptr)
{
return thread_allocator.free_aligned(ptr)!!;
}
/**
* Run with a specific allocator inside of the macro body.
**/
@@ -147,17 +161,17 @@ macro void @tscoped(;@body())
fn void* talloc(usize size, usize alignment = 0)
{
return temp_allocator().alloc(size, alignment)!!;
return temp_allocator().alloc_aligned(size, alignment)!!;
}
fn void* tcalloc(usize size, usize alignment = 0)
{
return temp_allocator().calloc(size, alignment)!!;
return temp_allocator().calloc_aligned(size, alignment)!!;
}
fn void* trealloc(void* ptr, usize size, usize alignment = 0)
{
return temp_allocator().realloc(ptr, size, alignment)!!;
return temp_allocator().realloc_aligned(ptr, size, alignment)!!;
}
macro void @pool(;@body) @builtin

View File

@@ -8,7 +8,7 @@ const DEFAULT_SIZE_PREFIX_ALIGNMENT = $alignof(usize);
const Allocator* NULL_ALLOCATOR = &_NULL_ALLOCATOR;
const Allocator* LIBC_ALLOCATOR = &_SYSTEM_ALLOCATOR;
define AllocatorFunction = fn void*!(Allocator* allocator, usize new_size, usize alignment, void* old_pointer, AllocationKind kind);
define AllocatorFunction = fn void*!(Allocator* allocator, usize new_size, usize alignment, usize offset, void* old_pointer, AllocationKind kind);
struct Allocator
{
@@ -21,6 +21,10 @@ enum AllocationKind
CALLOC,
REALLOC,
FREE,
ALIGNED_ALLOC,
ALIGNED_CALLOC,
ALIGNED_REALLOC,
ALIGNED_FREE,
RESET,
MARK,
}
@@ -34,50 +38,71 @@ fault AllocationFailure
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
fn void*! Allocator.alloc(Allocator* allocator, usize size, usize alignment = 0) @inline
fn void*! Allocator.alloc(Allocator* allocator, usize size) @inline
{
return allocator.function(allocator, size, alignment, null, ALLOC);
return allocator.function(allocator, size, 0, 0, null, ALLOC);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.realloc(Allocator* allocator, void* old_pointer, usize size, usize alignment = 0) @inline
fn void*! Allocator.alloc_aligned(Allocator* allocator, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, old_pointer, REALLOC);
return allocator.function(allocator, size, alignment, offset, null, ALIGNED_ALLOC);
}
fn void*! Allocator.realloc(Allocator* allocator, void* old_pointer, usize size) @inline
{
return allocator.function(allocator, size, 0, 0, old_pointer, REALLOC);
}
/**
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.realloc_aligned(Allocator* allocator, void* old_pointer, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, offset, old_pointer, ALIGNED_REALLOC);
}
fn usize! Allocator.mark(Allocator* allocator) @inline
{
return (usize)(uptr)allocator.function(allocator, 0, 0, null, MARK);
return (usize)(uptr)allocator.function(allocator, 0, 0, 0, null, MARK);
}
fn void*! Allocator.calloc(Allocator* allocator, usize size) @inline
{
return allocator.function(allocator, size, 0, 0, null, CALLOC);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
* @require alignment && math::is_power_of_2(alignment)
*/
fn void*! Allocator.calloc(Allocator* allocator, usize size, usize alignment = 0) @inline
fn void*! Allocator.calloc_aligned(Allocator* allocator, usize size, usize alignment, usize offset = 0) @inline
{
return allocator.function(allocator, size, alignment, null, CALLOC);
return allocator.function(allocator, size, alignment, offset, null, ALIGNED_CALLOC);
}
fn void! Allocator.free(Allocator* allocator, void* old_pointer) @inline
{
allocator.function(allocator, 0, 0, old_pointer, FREE)?;
allocator.function(allocator, 0, 0, 0, old_pointer, FREE)?;
}
fn void! Allocator.free_aligned(Allocator* allocator, void* old_pointer) @inline
{
allocator.function(allocator, 0, 0, 0, old_pointer, ALIGNED_FREE)?;
}
fn void Allocator.reset(Allocator* allocator, usize mark = 0)
{
allocator.function(allocator, mark, 0, null, RESET)!!;
allocator.function(allocator, mark, 0, 0, null, RESET)!!;
}
private fn usize alignment_for_allocation(usize alignment) @inline
{
if (alignment < DEFAULT_MEM_ALIGNMENT)
{
alignment = DEFAULT_SIZE_PREFIX_ALIGNMENT;
alignment = DEFAULT_MEM_ALIGNMENT;
}
return alignment;
}
@@ -95,7 +120,7 @@ struct DynamicArenaAllocator
* @require page_size >= 128
* @require this != null
**/
fn void DynamicArenaAllocator.init(DynamicArenaAllocator* this, usize page_size, Allocator* backing_allocator = mem::allocator())
fn void DynamicArenaAllocator.init(DynamicArenaAllocator* this, usize page_size, Allocator* backing_allocator = mem::current_allocator())
{
this.function = &dynamic_arena_allocator_function;
this.page = null;

View File

@@ -29,7 +29,6 @@ fn String join(char[][] s, char[] joiner)
return res;
}
fn ZString copy_zstring(char[] s)
{
usize len = s.len;

View File

@@ -158,7 +158,7 @@ fn ZString String.copy_zstr(String* str, Allocator* allocator = mem::current_all
usize str_len = str.len();
if (!str_len)
{
return (ZString)allocator.calloc(1, 1)!!;
return (ZString)allocator.calloc(1)!!;
}
char* zstr = allocator.alloc(str_len + 1)!!;
StringData* data = str.data();

View File

@@ -166,6 +166,15 @@ fn usize! File.println(File* file, char[] string)
return len + 1;
}
/**
* @param [&in] file
* @require file.file `File must be initialized`
*/
fn void File.flush(File* file)
{
libc::fflush(file.file);
}
fn File stdout()
{
return { libc::stdout() };

View File

@@ -717,14 +717,36 @@ fn usize! printf(char[] format, args...) @maydiscard
return vsnprintf(&out_putchar_fn, null, format, args);
}
fn usize! printfln(char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_putchar_fn, null, format, args)?;
putchar('\n');
return size + 1;
}
fn usize! String.printf(String* str, char[] format, args...) @maydiscard
{
return vsnprintf(&out_string_append_fn, str, format, args);
}
fn usize! String.printfln(String* str, char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_string_append_fn, str, format, args)?;
str.append('\n');
return size + 1;
}
fn usize! File.printf(File file, char[] format, args...) @maydiscard
{
return vsnprintf(&out_putchar_fn, &file, format, args);
return vsnprintf(&out_putchar_fn, &file, format, args);
}
fn usize! File.printfln(File file, char[] format, args...) @maydiscard
{
usize size = vsnprintf(&out_putchar_fn, &file, format, args)?;
file.putc('\n')?;
file.flush();
return size + 1;
}
private fn void! PrintParam.left_adjust(PrintParam* param, usize len)

View File

@@ -87,7 +87,6 @@ extern fn usize strxfrm(char* dest, char* src, usize n);
// malloc
extern fn void* malloc(usize size);
extern fn void* aligned_alloc(usize align, usize size);
extern fn void* calloc(usize count, usize size);
extern fn void* free(void*);
extern fn void* realloc(void* ptr, usize size);
@@ -104,6 +103,7 @@ $case OsType.LINUX:
extern CFile __stderr @extname("stderr");
extern fn usize malloc_usable_size(void* ptr);
macro usize malloc_size(void* ptr) { return malloc_usable_size(ptr); }
extern fn void* aligned_alloc(usize align, usize size);
macro CFile stdin() { return __stdin; }
macro CFile stdout() { return __stdout; }
macro CFile stderr() { return __stderr; }
@@ -112,7 +112,8 @@ $case OsType.MACOSX:
extern CFile __stdoutp;
extern CFile __stderrp;
extern fn usize malloc_size(void* ptr);
macro CFile stdin() { return __stdinp; }
extern fn void* aligned_alloc(usize align, usize size);
macro CFile stdin() { return __stdinp; }
macro CFile stdout() { return __stdoutp; }
macro CFile stderr() { return __stderrp; }
$case OsType.WIN32:

View File

@@ -0,0 +1,87 @@
// #target: macos-x64
module test;
import std::io;
import libc;
enum Foo
{
ABC
}
fn void print_pages()
{
mem::temp_allocator().print_pages(io::stdout());
}
fn void setstring(char* dst, char[] str)
{
foreach (char c : str)
{
dst++[0] = c;
}
dst[0] = 0;
}
fn void testAllocator(Allocator* a, int val)
{
io::println("Test");
void* data = a.alloc_aligned(val, 128, 16)!!;
io::printf("Aligned with offset %p, align 16: %s offset align 128: %s\n", data, mem::ptr_is_aligned(data, 16), mem::ptr_is_aligned(data + 16, 128));
data = a.calloc_aligned(val, 128, 16)!!;
io::printf("Aligned with offset %p, align 16: %s offset align 128: %s\n", data, mem::ptr_is_aligned(data, 16), mem::ptr_is_aligned(data + 16, 128));
data = a.realloc_aligned(data, val + 1, 128, 16)!!;
io::printf("Aligned with offset %p, align 16: %s offset align 128: %s\n", data, mem::ptr_is_aligned(data, 16), mem::ptr_is_aligned(data + 16, 128));
data = a.realloc_aligned(data, val + 1, 128, 0)!!;
io::printf("No offset %p, align 16: %s offset align 128: %s\n", data, mem::ptr_is_aligned(data, 16), mem::ptr_is_aligned(data + 16, 128));
io::printfln("Freeing %p", data);
a.free_aligned(data)!!;
}
fn void main()
{
char* small = mem::talloc(128);
setstring(small, "small");
libc::printf("Small1: %p %s\n", small, small);
print_pages();
small = mem::trealloc(small, 129, 1024 * 16);
libc::printf("Small2: %p %s\n", small, small);
print_pages();
small = mem::trealloc(small, 12933);
libc::printf("Small3: %p %s\n", small, small);
print_pages();
char* first_big = mem::talloc(9512);
void *big = mem::talloc(4095);
io::printf("Big: %p\n", big);
io::printf("Small: %p\n", mem::talloc(13));
print_pages();
@pool() {
big = mem::trealloc(big, 5067);
print_pages();
void* hidden = mem::talloc(4096);
io::printf("Hidden: %p\n", hidden);
io::printf("Big: %p\n", big);
big = mem::trealloc(big, 4096, 256);
io::printf("Big: %p\n", big);
io::printf("First big: %p\n", first_big);
print_pages();
};
mem::@tscoped()
{
io::printf("Malloc: %p\n", mem::alloc(23));
io::printf("Malloc: %p\n", mem::alloc(23));
};
io::printf("Malloc: %p\n", mem::alloc(23));
@pool()
{
io::printf("Talloc: %p\n", mem::talloc(22));
};
testAllocator(mem::temp_allocator(), 126);
testAllocator(mem::temp_allocator(), 12600);
ArenaAllocator aa;
aa.init(&&char[1024] {});
testAllocator(&aa, 126);
io::println("Test dynamic arena");
DynamicArenaAllocator dynamic_arena;
dynamic_arena.init(1024);
testAllocator(&dynamic_arena, 112);
testAllocator(&dynamic_arena, 712);
first_big[3] = 123;
}

View File

@@ -3184,7 +3184,7 @@ void gencontext_emit_binary(GenContext *c, BEValue *be_value, Expr *expr, BEValu
val = LLVMBuildExactSDiv(c->builder, val, llvm_const_int(c, type_iptrdiff, type_abi_alignment(lhs_type->pointer)), "");
break;
}
rhs_value = LLVMConstNeg(rhs_value);
rhs_value = is_constant ? LLVMConstNeg(rhs_value) : LLVMBuildNeg(c->builder, rhs_value, "");
val = llvm_emit_pointer_gep_raw(c, llvm_get_pointee_type(c, lhs_type), lhs_value, rhs_value);
break;
}

View File

@@ -368,7 +368,7 @@ if.then15: ; preds = %if.exit9
store %"char[]"* null, %"char[]"** %33, align 8
%34 = load %Head, %Head* %literal18, align 8
store %Head %34, %Head* %value, align 8
%35 = call i8* @std_core_mem_alloc(i64 8, i64 0)
%35 = call i8* @std_core_mem_alloc(i64 8)
%ptrptr = bitcast i8* %35 to %Head*
store %Head* %ptrptr, %Head** %temp, align 8
%36 = load %Head*, %Head** %temp, align 8
@@ -420,7 +420,7 @@ if.then27: ; preds = %if.exit21
store %"char[]"* null, %"char[]"** %53, align 8
%54 = getelementptr inbounds %Head, %Head* %literal32, i32 0, i32 0
store %"char[]" zeroinitializer, %"char[]"* %value34, align 8
%55 = call i8* @std_core_mem_alloc(i64 16, i64 0)
%55 = call i8* @std_core_mem_alloc(i64 16)
%ptrptr36 = bitcast i8* %55 to %"char[]"*
store %"char[]"* %ptrptr36, %"char[]"** %temp35, align 8
%56 = load %"char[]"*, %"char[]"** %temp35, align 8
@@ -447,7 +447,7 @@ noerr_block41: ; preds = %if.exit39
store %"char[]"* %61, %"char[]"** %54, align 8
%62 = load %Head, %Head* %literal32, align 8
store %Head %62, %Head* %value31, align 8
%63 = call i8* @std_core_mem_alloc(i64 8, i64 0)
%63 = call i8* @std_core_mem_alloc(i64 8)
%ptrptr43 = bitcast i8* %63 to %Head*
store %Head* %ptrptr43, %Head** %temp42, align 8
%64 = load %Head*, %Head** %temp42, align 8
@@ -488,7 +488,7 @@ if.exit49: ; preds = %if.exit21
%77 = load i32, i32* %len, align 4
%siuiext = sext i32 %77 to i64
%add = add i64 %siuiext, 1
%78 = call i8* @std_core_mem_alloc(i64 %add, i64 0)
%78 = call i8* @std_core_mem_alloc(i64 %add)
store i8* %78, i8** %str, align 8
%79 = load i8*, i8** %str, align 8
%not50 = icmp eq i8* %79, null
@@ -522,7 +522,7 @@ if.exit52: ; preds = %if.exit49
%93 = insertvalue %"char[]" undef, i8* %ptroffset, 0
%94 = insertvalue %"char[]" %93, i64 %size, 1
store %"char[]" %94, %"char[]"* %value62, align 8
%95 = call i8* @std_core_mem_alloc(i64 16, i64 0)
%95 = call i8* @std_core_mem_alloc(i64 16)
%ptrptr64 = bitcast i8* %95 to %"char[]"*
store %"char[]"* %ptrptr64, %"char[]"** %temp63, align 8
%96 = load %"char[]"*, %"char[]"** %temp63, align 8
@@ -549,7 +549,7 @@ noerr_block69: ; preds = %if.exit67
store %"char[]"* %101, %"char[]"** %89, align 8
%102 = load %Head, %Head* %literal60, align 8
store %Head %102, %Head* %value59, align 8
%103 = call i8* @std_core_mem_alloc(i64 8, i64 0)
%103 = call i8* @std_core_mem_alloc(i64 8)
%ptrptr71 = bitcast i8* %103 to %Head*
store %Head* %ptrptr71, %Head** %temp70, align 8
%104 = load %Head*, %Head** %temp70, align 8

View File

@@ -358,7 +358,7 @@ if.then15: ; preds = %if.exit9
store ptr null, ptr %25, align 8
%26 = load %Head, ptr %literal18, align 8
store %Head %26, ptr %value, align 8
%27 = call ptr @std_core_mem_alloc(i64 8, i64 0)
%27 = call ptr @std_core_mem_alloc(i64 8)
store ptr %27, ptr %temp, align 8
%28 = load ptr, ptr %temp, align 8
%not = icmp eq ptr %28, null
@@ -402,7 +402,7 @@ if.then27: ; preds = %if.exit21
store ptr null, ptr %literal32, align 8
%39 = getelementptr inbounds %Head, ptr %literal32, i32 0, i32 0
store %"char[]" zeroinitializer, ptr %value34, align 8
%40 = call ptr @std_core_mem_alloc(i64 16, i64 0)
%40 = call ptr @std_core_mem_alloc(i64 16)
store ptr %40, ptr %temp35, align 8
%41 = load ptr, ptr %temp35, align 8
%not36 = icmp eq ptr %41, null
@@ -426,7 +426,7 @@ noerr_block40: ; preds = %if.exit38
store ptr %44, ptr %39, align 8
%45 = load %Head, ptr %literal32, align 8
store %Head %45, ptr %value31, align 8
%46 = call ptr @std_core_mem_alloc(i64 8, i64 0)
%46 = call ptr @std_core_mem_alloc(i64 8)
store ptr %46, ptr %temp41, align 8
%47 = load ptr, ptr %temp41, align 8
%not42 = icmp eq ptr %47, null
@@ -462,7 +462,7 @@ if.exit47: ; preds = %if.exit21
%56 = load i32, ptr %len, align 4
%siuiext = sext i32 %56 to i64
%add = add i64 %siuiext, 1
%57 = call ptr @std_core_mem_alloc(i64 %add, i64 0)
%57 = call ptr @std_core_mem_alloc(i64 %add)
store ptr %57, ptr %str, align 8
%58 = load ptr, ptr %str, align 8
%not48 = icmp eq ptr %58, null
@@ -495,7 +495,7 @@ if.exit50: ; preds = %if.exit47
%71 = insertvalue %"char[]" undef, ptr %ptroffset, 0
%72 = insertvalue %"char[]" %71, i64 %size, 1
store %"char[]" %72, ptr %value60, align 8
%73 = call ptr @std_core_mem_alloc(i64 16, i64 0)
%73 = call ptr @std_core_mem_alloc(i64 16)
store ptr %73, ptr %temp61, align 8
%74 = load ptr, ptr %temp61, align 8
%not62 = icmp eq ptr %74, null
@@ -519,7 +519,7 @@ noerr_block66: ; preds = %if.exit64
store ptr %77, ptr %67, align 8
%78 = load %Head, ptr %literal58, align 8
store %Head %78, ptr %value57, align 8
%79 = call ptr @std_core_mem_alloc(i64 8, i64 0)
%79 = call ptr @std_core_mem_alloc(i64 8)
store ptr %79, ptr %temp67, align 8
%80 = load ptr, ptr %temp67, align 8
%not68 = icmp eq ptr %80, null