Allocators. Rename of "optenum" to fault. Memcpy and memset added. Cleanup of declaration use.

This commit is contained in:
Christoffer Lerno
2022-04-04 22:01:49 +02:00
committed by Christoffer Lerno
parent 2e2a1ca21a
commit 8743223dd6
83 changed files with 1359 additions and 593 deletions

View File

@@ -3,7 +3,7 @@
// a copy of which can be found in the LICENSE_STDLIB file.
module std::builtin;
optenum VarCastResult
// Fault set for variant casts (renamed from the old `optenum` keyword);
// TYPE_MISMATCH presumably signals the stored type did not match the request — confirm at use site.
fault VarCastResult
{
    TYPE_MISMATCH
}
@@ -60,6 +60,7 @@ fn void panic(char* message, char *file, char *function, uint line) @autoimport
while (stack)
{
libc::fprintf(@libc::stderr(), " at function %s (%s:%u)\n", stack.function, stack.file, stack.line);
if (stack == stack.prev) break;
stack = stack.prev;
}

View File

@@ -67,7 +67,7 @@ enum Seek
END = 2
}
optenum IoError
fault IoError
{
FILE_NOT_FOUND,
FILE_NOT_SEEKABLE,

View File

@@ -16,7 +16,7 @@ const DIV_PI = 0.318309886183790671537767526745028724; // 1 / pi
const DIV_2_PI = 0.636619772367581343075535053490057448; // 2 / pi
const DIV_2_SQRTPI = 1.12837916709551257389615890312154517; // 2/sqrt(pi)
const SQRT2 = 1.41421356237309504880168872420969808; // sqrt(2)
const DIV_1_SQRT2 = 0.707106781186547524400844362104849039; // 1 / sqrt(2)
const double DIV_1_SQRT2 = 0.707106781186547524400844362104849039; // 1 / sqrt(2)
const HALF_MAX = 6.5504e+4;
const HALF_MIN = 6.103515625e-5;
@@ -127,6 +127,13 @@ fn double ceil(double x) @inline
return $$ceil(x);
}
/**
* @checked x & 1
*/
// True exactly when x is nonzero and has a single bit set.
macro bool is_power_of_2(x)
{
    if (x == 0) return false;
    // A power of two shares no bits with its predecessor.
    return (x & (x - 1)) == 0;
}

View File

@@ -3,11 +3,6 @@
// a copy of which can be found in the LICENSE_STDLIB file.
module std::mem;
extern fn void* _malloc(usize bytes) @extname("malloc");
extern fn void* _realloc(void* ptr, usize bytes) @extname("realloc");
extern fn void* _calloc(usize bytes, usize elements) @extname("calloc");
extern fn void _free(void* ptr) @extname("free");
macro volatile_load(&x)
{
return $$volatile_load(&x);
@@ -18,186 +13,143 @@ macro volatile_store(&x, y)
return $$volatile_store(&x, y);
}
/**
 * Round `offset` up to the next multiple of `alignment`.
 * @require @math::is_power_of_2(alignment)
 **/
fn usize aligned_offset(usize offset, usize alignment)
{
    // Count whole alignment blocks needed, then scale back up.
    usize blocks = (offset + alignment - 1) / alignment;
    return blocks * alignment;
}
/**
 * True if `ptr` is aligned to an `alignment` byte boundary.
 * @require @math::is_power_of_2(alignment)
 **/
fn bool ptr_is_aligned(void* ptr, usize alignment) @inline
{
    // Grouped explicitly: C3 gives bitwise-& higher precedence than ==
    // (unlike C, where this expression would parse as `ptr & (mask == 0)`);
    // the parentheses make the intended mask-then-compare unambiguous.
    return ((uptr)ptr & ((uptr)alignment - 1)) == 0;
}
// Copy `size` bytes from src to dst via the @memcpy wrapper
// (default volatility and unknown alignment).
fn void copy(char* dst, char* src, usize size) @inline
{
    @memcpy(dst, src, size);
}
// Thin wrapper over the $$memcpy builtin. The $-prefixed parameters are
// compile-time: volatility and the statically known alignment of dst/src
// (0 = unknown alignment).
macro void memcpy(void* dst, void* src, usize size, bool $is_volatile = false, usize $dst_align = 0, usize $src_align = 0)
{
    $$memcpy(dst, src, size, $is_volatile, $dst_align, $src_align);
}
// Fill `bytes` bytes at dst with `val` via the @memset wrapper.
fn void set(void* dst, char val, usize bytes) @inline
{
    @memset(dst, val, bytes);
}
// Thin wrapper over the $$memset builtin; compile-time volatility and
// destination alignment (0 = unknown), mirroring the memcpy wrapper above.
macro void memset(void* dst, char val, usize bytes, bool $is_volatile = false, usize $dst_align = 0)
{
    $$memset(dst, val, bytes, $is_volatile, $dst_align);
}
// Reinterpret the bits of `expr` as $Type. Compile-time error if the
// sizes differ; the value is transferred with a byte copy.
macro bitcast(expr, $Type)
{
    var $size = (usize)($sizeof(expr));
    $assert($size == $Type.sizeof, "Cannot bitcast between types of different size.");
    $Type x = void;  // uninitialized destination, fully overwritten by the copy below
    @memcpy(&x, &expr, $size, false, $alignof($Type), $alignof(expr));
    return x;
}
// Operation selector passed to allocator callback functions.
enum AllocationKind
{
    ALLOC,    // plain allocation
    CALLOC,   // zero-initialized allocation
    REALLOC,  // resize an existing allocation
    FREE,     // release an allocation
}
optenum AllocationFailure
// Fault returned by allocators when the underlying allocation fails.
fault AllocationFailure
{
    OUT_OF_MEMORY
}
define AllocatorFunction = fn void!(void *data, void** pointer, usize bytes, usize alignment, AllocationKind kind);
private tlocal Allocator thread_allocator = { SYSTEM_ALLOCATOR, null };
struct Allocator
{
AllocatorFunction allocation_function;
AllocatorFunction function;
void *data;
}
// Scalar byte-by-byte copy (pre-builtin version of `copy`; this commit
// replaces it with the @memcpy-based wrapper).
fn void copy(char* dst, char* src, usize size)
{
    for (usize i = 0; i < size; i++) dst[i] = src[i];
}
/**
 * Allocator callback backed by the libc heap (_malloc/_calloc/_realloc/_free).
 * On success *pointer receives the (re)allocated block; FREE nulls *pointer.
 * Returns AllocationFailure.OUT_OF_MEMORY! when the underlying call returns null.
 */
fn void! system_malloc_function(void *unused, void** pointer, usize bytes, usize alignment, AllocationKind kind) @inline
{
    switch (kind)
    {
        case ALLOC:
            void* data = _malloc(bytes);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            *pointer = data;
            return;
        case CALLOC:
            // Fix: CALLOC previously had no case, so a CALLOC request fell
            // through to @unreachable(). Delegate to libc calloc (zeroed).
            void* data = _calloc(bytes, 1);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            *pointer = data;
            return;
        case REALLOC:
            void* data = _realloc(*pointer, bytes);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            *pointer = data;
            return;
        case FREE:
            _free(*pointer);
            *pointer = null;
            return;
    }
    @unreachable();
}
struct SlotAllocator
{
void* pages;
usize page_size;
usize page_count;
usize bitmask;
usize current_page;
}
fn void*! SlotAllocator.alloc(SlotAllocator *allocator, usize size)
{
void* active_page = (char*)(allocator.pages) + allocator.current_page * allocator.page_size;
void** page_pointer = (void**)(active_page);
if (*page_pointer)
{
mem::free(*page_pointer);
*page_pointer = null;
}
if (size > allocator.page_size - $sizeof(page_pointer))
{
void* mem = mem::_malloc(size);
if (!mem) return AllocationFailure.OUT_OF_MEMORY!;
*page_pointer = mem;
allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
return mem;
}
allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
return &page_pointer[1];
}
struct RingAllocator
{
char *data;
usize size;
usize offset;
}
fn void* RingAllocator.alloc(RingAllocator *allocator, usize size)
{
if (size > allocator.size) return null;
// Wraparound? If so, start at the beginning.
if (allocator.offset + size > allocator.size)
{
allocator.offset = size;
return allocator.data;
}
void* data = allocator.offset + allocator.data;
allocator.offset = (allocator.offset + size) & allocator.size;
return data;
}
fn void* RingAllocator.realloc(RingAllocator *allocator, void* ptr, usize size)
{
if (size > allocator.size) return null;
assert(allocator.data >= ptr && ptr < allocator.data + size, "Realloc on other allocator.");
// 1. The pointer is before the allocator
if (allocator.data + allocator.offset > ptr)
{
if (allocator.data + allocator.size < ptr + size)
{
// 1a. There is not enough space, we need to copy to the start.
usize pointer_offset = ptr - allocator.data;
usize copy_len = pointer_offset + size > allocator.offset ? allocator.offset - pointer_offset : size;
//memcpy(allocator.data, ptr, copy_len);
allocator.offset = size;
return allocator.data;
}
// 1b. There is enough space, so we just change the offset:
allocator.offset = ptr - allocator.data + size;
return ptr;
}
// 2. The pointer is after the allocator
// 2a. Is there sufficient space?
if (ptr + size <= allocator.data + allocator.size)
{
// Good, if so we simply change the offset and return the pointer.
allocator.offset = ptr - allocator.data + size;
return ptr;
}
// 2b. Not sufficient space, we copy to the beginning.
usize pointer_offset = ptr - allocator.data;
usize copy_len = allocator.size - (ptr - allocator.data);
if (copy_len > size) copy_len = size;
//memcpy(allocator.data, ptr, copy_len);
allocator.offset = size;
return allocator.data;
}
Allocator main_allocator = { &system_malloc_function, null };
// Typed allocation helper: allocates $Type.sizeof bytes through the
// current allocator and casts the result to $Type*.
macro malloc($Type)
{
    return ($Type*)(mem::alloc($Type.sizeof));
}
fn void* alloc(usize size, usize count = 1) @inline
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void* alloc(usize size, usize alignment = 0)
{
return _malloc(size * count);
return thread_allocator.alloc(size, alignment)!!;
}
fn void* calloc(usize size, usize elements = 1) @inline
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void*! alloc_checked(usize size, usize alignment = 0)
{
return _calloc(size, elements);
}
fn void* realloc(void *ptr, usize size) @inline
{
return _realloc(ptr, size);
}
fn void free(void* ptr) @inline
{
_free(ptr);
return thread_allocator.alloc(size, alignment);
}
const TEMP_BLOCK_SIZE = 1024;
const TEMP_PAGES = 64;
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void* calloc(usize size, usize alignment = 0)
{
return thread_allocator.calloc(size, alignment)!!;
}
private char[TEMP_BLOCK_SIZE * TEMP_PAGES] allocator_static_storage;
private void*[TEMP_PAGES] allocator_static_page_storage;
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void*! calloc_checked(usize size, usize alignment = 0)
{
return thread_allocator.calloc(size, alignment);
}
SlotAllocator default_allocator = {
.pages = &allocator_static_storage,
.page_size = TEMP_BLOCK_SIZE,
.page_count = TEMP_PAGES,
.bitmask = TEMP_PAGES - 1,
.current_page = 0,
};
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void* realloc(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment)!!;
}
/**
* @require !alignment || @math::is_power_of_2(alignment)
*/
fn void*! realloc_checked(void *ptr, usize new_size, usize alignment = 0)
{
return thread_allocator.realloc(ptr, new_size, alignment);
}
fn void free(void* ptr)
{
return thread_allocator.free(ptr)!!;
}
macro void with_allocator(Allocator allocator; @body())
{
Allocator old_allocator = thread_allocator;
thread_allocator = allocator;
defer thread_allocator = old_allocator;
@body();
}
fn void*! talloc(usize size)
{
return default_allocator.alloc(size);
}
}

280
lib/std/mem_allocator.c3 Normal file
View File

@@ -0,0 +1,280 @@
module std::mem;
define AllocatorFunction = fn void*!(void *data, usize new_size, usize alignment, void* old_pointer, AllocationKind kind);
const DEFAULT_MEM_ALIGNMENT = $alignof(void*) * 2;
Allocator main_allocator = { SYSTEM_ALLOCATOR, null };
const AllocatorFunction NULL_ALLOCATOR = &null_allocator_fn;
const AllocatorFunction SYSTEM_ALLOCATOR = &libc_allocator_fn;
/**
 * Allocate `size` bytes through this allocator's callback (0 alignment = default).
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! Allocator.alloc(Allocator *allocator, usize size, usize alignment = 0) @inline
{
    return allocator.function(allocator.data, size, alignment, null, ALLOC);
}
/**
 * Resize `old_pointer` to `size` bytes through this allocator's callback.
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! Allocator.realloc(Allocator *allocator, void* old_pointer, usize size, usize alignment = 0) @inline
{
    return allocator.function(allocator.data, size, alignment, old_pointer, REALLOC);
}
/**
 * Allocate `size` zero-initialized bytes through this allocator's callback.
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! Allocator.calloc(Allocator *allocator, usize size, usize alignment = 0) @inline
{
    return allocator.function(allocator.data, size, alignment, null, CALLOC);
}
// Release `old_pointer`; any fault from the callback is propagated via `?`.
fn void! Allocator.free(Allocator *allocator, void* old_pointer) @inline
{
    allocator.function(allocator.data, 0, 0, old_pointer, FREE)?;
}
// Bump allocator over a single fixed buffer (see init/destroy for ownership).
struct ArenaAllocator
{
    void* memory;    // backing buffer
    void* last_ptr;  // most recent allocation; enables in-place realloc/free
    usize total;     // capacity of memory in bytes
    usize used;      // bytes consumed so far
}
// Bridges a concrete allocator type to the generic AllocatorFunction shape:
// `data` must point to a live $Type instance; the request is dispatched to
// its alloc/calloc/realloc/free methods.
macro void*! allocator_to_function($Type, void* data, usize new_size, usize alignment, void* old_pointer, AllocationKind kind)
{
    $Type* allocator = data;
    switch (kind)
    {
        case ALLOC:
            return allocator.alloc(new_size, alignment) @inline;
        case CALLOC:
            return allocator.calloc(new_size, alignment) @inline;
        case REALLOC:
            return allocator.realloc(old_pointer, new_size, alignment) @inline;
        case FREE:
            // free yields no pointer; surface only its possible fault.
            allocator.free(old_pointer) @inline?;
            return null;
    }
    @unreachable();
}
// AllocatorFunction-compatible entry point dispatching to ArenaAllocator methods.
fn void*! arena_allocator_function(void* allocator, usize new_size, usize alignment, void* old_pointer, AllocationKind kind)
{
    return @allocator_to_function(ArenaAllocator, allocator, new_size, alignment, old_pointer, kind);
}
// Wrap this arena in a generic Allocator handle.
fn Allocator ArenaAllocator.to_allocator(ArenaAllocator* allocator) @inline
{
    return { &arena_allocator_function, allocator };
}
/**
 * Bump-allocate `bytes` from the arena; returns OUT_OF_MEMORY! when full.
 * A zero-byte request returns null without consuming space.
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! ArenaAllocator.alloc(ArenaAllocator* allocator, usize bytes, usize alignment = 0)
{
    if (!bytes) return null;
    if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
    // Align the absolute next-free address, not just the offset.
    iptr next = aligned_offset((iptr)allocator.memory + allocator.used, alignment);
    usize next_after = next - (iptr)allocator.memory + bytes;
    if (next_after > allocator.total) return AllocationFailure.OUT_OF_MEMORY!;
    allocator.used = next_after;
    // Remember the newest allocation so realloc/free can special-case it.
    return allocator.last_ptr = (void*)next;
}
/**
 * Allocate zero-initialized memory from the arena.
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! ArenaAllocator.calloc(ArenaAllocator* allocator, usize bytes, usize alignment = 0)
{
    // Fix: forward the requested alignment — it was previously dropped, so
    // callers asking for over-aligned zeroed memory silently got the default.
    char* bits = allocator.alloc(bytes, alignment) @inline?;
    mem::set(bits, 0, bytes);
    return bits;
}
/**
 * Grow/move an arena allocation; a null ptr behaves like alloc.
 * (Fix: the old `@require ptr != null` contract contradicted the explicit
 * null handling in the body and has been removed.)
 * @require allocator != null
 **/
fn void*! ArenaAllocator.realloc(ArenaAllocator* allocator, void *ptr, usize bytes, usize alignment = 0)
{
    if (!ptr) return allocator.alloc(bytes, alignment);
    if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
    // Fast path: extend the most recent allocation in place if alignment still fits.
    if (allocator.last_ptr == ptr && ptr_is_aligned(ptr, alignment))
    {
        usize new_used = (usize)(ptr - allocator.memory) + bytes;
        if (new_used > allocator.total) return AllocationFailure.OUT_OF_MEMORY!;
        allocator.used = new_used;
        return ptr;
    }
    // Otherwise just allocate new memory and copy.
    void* new_mem = allocator.alloc(bytes, alignment)?;
    // NOTE(review): the old block's size is unknown here, so this copies
    // min(bytes, new_mem - ptr) and may read past the old allocation's end —
    // TODO confirm and bound the copy by the true old size.
    copy(new_mem, ptr, (new_mem - ptr) > bytes ? bytes : (usize)(new_mem - ptr));
    return new_mem;
}
// Only the most recent allocation can actually be reclaimed; freeing
// anything else is a deliberate no-op (arena memory is reclaimed wholesale
// by reset/destroy).
fn void! ArenaAllocator.free(ArenaAllocator* allocator, void* ptr)
{
    if (!ptr) return;
    if (ptr == allocator.last_ptr)
    {
        // Roll the bump pointer back over the last allocation.
        allocator.used = (usize)(ptr - allocator.memory);
        allocator.last_ptr = null;
        return;
    }
}
// Acquire a backing buffer of `arena_size` bytes (via the thread allocator)
// and reset the bookkeeping; faults from the allocation are propagated.
fn void! ArenaAllocator.init(ArenaAllocator* allocator, usize arena_size)
{
    allocator.memory = alloc_checked(arena_size)?;
    allocator.total = arena_size;
    allocator.used = 0;
    allocator.last_ptr = null;
}
// Discard all allocations at once without releasing the backing buffer.
fn void ArenaAllocator.reset(ArenaAllocator* allocator)
{
    allocator.used = 0;
    allocator.last_ptr = null;
}
// Release the backing buffer and zero the capacity counters.
// NOTE(review): frees via the global free() while init used alloc_checked();
// both route through the thread allocator, so they pair up only if the
// thread allocator is unchanged between init and destroy — confirm.
fn void ArenaAllocator.destroy(ArenaAllocator* allocator)
{
    assert(allocator.memory);
    free(allocator.memory);
    allocator.total = allocator.used = 0;
}
// One page in a DynamicArenaAllocator's singly linked page chain.
private struct DynamicArenaPage
{
    void* memory;      // page data buffer
    void* prev_arena;  // previously active page (next link when walking back)
    usize total;       // page capacity in bytes
    usize used;        // bytes consumed in this page
}
// Arena that grows by chaining pages obtained from a backing allocator.
struct DynamicArenaAllocator
{
    DynamicArenaPage* page;  // current (most recently created) page, or null
    usize total;             // NOTE(review): not updated by the code shown here
    usize used;              // NOTE(review): not updated by the code shown here
    usize page_size;         // minimum size of each new page
    Allocator allocator;     // backing allocator used for page storage
}
// Initialize with a minimum page size. A zeroed allocator argument
// (function == null) means "use the current thread allocator".
fn void DynamicArenaAllocator.init(DynamicArenaAllocator* this, usize page_size, Allocator allocator = { null, null })
{
    this.page = null;
    this.used = this.total = 0;
    this.page_size = page_size;
    this.allocator = allocator.function ? allocator : thread_allocator;
}
// Free every page except the last link in the chain (the first page ever
// allocated), which is retained for reuse.
fn void! DynamicArenaAllocator.reset(DynamicArenaAllocator* this)
{
    DynamicArenaPage* page = this.page;
    Allocator allocator = this.allocator;
    while (page && page.prev_arena)
    {
        DynamicArenaPage* next_page = page.prev_arena;
        void* mem = page.memory;
        allocator.free(page)?;
        allocator.free(mem)?;
        page = next_page;
    }
    // NOTE(review): this.used/this.total and the retained page's `used`
    // counter are not reset here — confirm whether that is intentional.
    this.page = page;
}
// AllocatorFunction-compatible entry point dispatching to DynamicArenaAllocator methods.
fn void*! dynamic_arena_allocator_function(void* allocator, usize new_size, usize alignment, void* old_pointer, AllocationKind kind)
{
    return @allocator_to_function(DynamicArenaAllocator, allocator, new_size, alignment, old_pointer, kind);
}
// Wrap this dynamic arena in a generic Allocator handle.
fn Allocator DynamicArenaAllocator.to_allocator(DynamicArenaAllocator* this)
{
    return { &dynamic_arena_allocator_function, this };
}
// Free every page owned by this dynamic arena, including the retained
// first page that reset() keeps.
fn void! DynamicArenaAllocator.destroy(DynamicArenaAllocator* this)
{
    // Fix: propagate a failed reset instead of silently discarding its fault —
    // every other failable call in this function is propagated with `?`.
    this.reset()?;
    DynamicArenaPage* first_page = this.page;
    if (!first_page) return;
    void* mem = first_page.memory;
    this.allocator.free(this.page)?;
    this.page = null;
    this.allocator.free(mem)?;
}
// Individual frees are intentionally a no-op; memory is reclaimed by
// reset()/destroy().
fn void! DynamicArenaAllocator.free(DynamicArenaAllocator* allocator, void* ptr)
{
    // This can be made smarter.
    return;
}
/**
 * Allocate a fresh page of at least `size` bytes and make it current.
 * @require @math::is_power_of_2(alignment)
 */
private fn void*! DynamicArenaAllocator.alloc_new(DynamicArenaAllocator* this, usize size, usize alignment)
{
    // Pages are at least page_size; an oversized request gets a dedicated page.
    usize page_size = @max(this.page_size, size);
    void* mem = this.allocator.alloc(page_size, alignment)?;
    DynamicArenaPage*! page = this.allocator.alloc(DynamicArenaPage.sizeof);
    if (catch err = page)
    {
        // Roll back the data allocation before re-raising the page fault.
        // NOTE(review): the rollback free's own fault appears to be discarded — confirm.
        this.allocator.free(mem);
        return err!;
    }
    page.memory = mem;
    page.prev_arena = this.page;  // chain onto the existing pages
    page.total = page_size;
    page.used = size;             // the new page already holds this allocation
    this.page = page;
    return page.memory;
}
/**
 * Zero-initialized arena allocation: a normal alloc followed by set(…, 0, …).
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! DynamicArenaAllocator.calloc(DynamicArenaAllocator* allocator, usize size, usize alignment = 0)
{
    void* mem = allocator.alloc(size, alignment)?;
    set(mem, 0, size);
    return mem;
}
/**
 * Arena realloc: always allocates a new block and copies (old block is not reclaimed).
 * NOTE(review): copies `size` bytes from the OLD block even when growing,
 * which reads past the old allocation's end — TODO confirm/bound the copy.
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* allocator, void* ptr, usize size, usize alignment = 0)
{
    void* mem = allocator.alloc(size, alignment)?;
    copy(mem, ptr, size);
    return mem;
}
/**
 * Bump-allocate from the current page, spilling to a new page on overflow.
 * (Fix: `alignment` now defaults to 0 like every sibling alloc/calloc/realloc,
 * matching the `!alignment ||` contract below; existing callers are unaffected.)
 * @require !alignment || @math::is_power_of_2(alignment)
 */
fn void*! DynamicArenaAllocator.alloc(DynamicArenaAllocator* this, usize size, usize alignment = 0)
{
    DynamicArenaPage *page = this.page;
    if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
    if (!page) return this.alloc_new(size, alignment);
    // Align within the current page; fall back to a new page when it doesn't fit.
    usize start = aligned_offset((uptr)page.memory + page.used, alignment) - (usize)page.memory;
    usize new_used = start + size;
    if (new_used > page.total) return this.alloc_new(size, alignment);
    page.used = new_used;
    return page.memory + start;
}

View File

@@ -0,0 +1,62 @@
module std::mem;
// Allocator callback that always fails: any allocation request returns
// OUT_OF_MEMORY!, while the remaining kind (FREE, via default) is a no-op.
private fn void*! null_allocator_fn(void *data, usize bytes, usize alignment, void* old_pointer, AllocationKind kind)
{
    switch (kind)
    {
        case ALLOC:
        case CALLOC:
        case REALLOC:
            return AllocationFailure.OUT_OF_MEMORY!;
        default:
            return null;
    }
}
/**
 * Allocator callback over the libc heap. Alignment 0 means DEFAULT_MEM_ALIGNMENT;
 * over-aligned requests allocate `bytes + alignment` and return an offset pointer.
 * Returns AllocationFailure.OUT_OF_MEMORY! when libc returns null.
 */
fn void*! libc_allocator_fn(void *unused, usize bytes, usize alignment, void* old_pointer, AllocationKind kind) @inline
{
    if (!alignment) alignment = DEFAULT_MEM_ALIGNMENT;
    assert(@math::is_power_of_2(alignment), "Alignment was not a power of 2");
    bool over_aligned = alignment > DEFAULT_MEM_ALIGNMENT;
    void* data;
    switch (kind)
    {
        case ALLOC:
            // Null is checked BEFORE applying the alignment offset; previously
            // the offset was added first, so a failed over-aligned malloc
            // produced a nonzero "pointer" that slipped past the null check.
            data = libc::malloc(over_aligned ? bytes + alignment : bytes);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            if (over_aligned) data = (void*)aligned_offset((iptr)data, alignment);
            return data;
        case CALLOC:
            // Fix: the non-over-aligned branch previously called libc::malloc,
            // returning uninitialized memory from a calloc request.
            data = libc::calloc(over_aligned ? bytes + alignment : bytes, 1);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            if (over_aligned) data = (void*)aligned_offset((iptr)data, alignment);
            return data;
        case REALLOC:
            data = libc::realloc(old_pointer, over_aligned ? bytes + alignment : bytes);
            if (!data) return AllocationFailure.OUT_OF_MEMORY!;
            if (over_aligned) data = (void*)aligned_offset((iptr)data, alignment);
            return data;
        case FREE:
            // FIXME(review): an over-aligned pointer was offset from the block
            // libc actually returned, so free (and realloc above) pass libc a
            // pointer it never handed out; the base pointer needs to be stored.
            libc::free(old_pointer);
            return null;
    }
    @unreachable();
}

View File

@@ -0,0 +1,106 @@
module std::mem;
const TEMP_BLOCK_SIZE = 1024;
const TEMP_PAGES = 64;
private char[TEMP_BLOCK_SIZE * TEMP_PAGES] allocator_static_storage;
private void*[TEMP_PAGES] allocator_static_page_storage;
SlotAllocator default_allocator = {
.pages = &allocator_static_storage,
.page_size = TEMP_BLOCK_SIZE,
.page_count = TEMP_PAGES,
.bitmask = TEMP_PAGES - 1,
.current_page = 0,
};
// Ring of fixed-size pages for temporary allocations; each page's first
// pointer-sized slot stores an overflow allocation (see SlotAllocator.alloc).
struct SlotAllocator
{
    void* pages;        // base of page storage
    usize page_size;    // bytes per page, including the header slot
    usize page_count;
    usize bitmask;      // page_count - 1; wrap mask, so page_count must be a power of 2
    usize current_page; // index of the next page to hand out
}
// Hand out the next page in the ring. Oversized requests are satisfied from
// main_allocator and parked in the page's header slot so they are released
// when the ring wraps back around to that page.
fn void*! SlotAllocator.alloc(SlotAllocator *allocator, usize size)
{
    void* active_page = (char*)(allocator.pages) + allocator.current_page * allocator.page_size;
    void** page_pointer = (void**)(active_page);
    if (*page_pointer)
    {
        // TODO fix
        // Recycle the previous oversized allocation parked on this page.
        main_allocator.free(*page_pointer)?;
        *page_pointer = null;
    }
    if (size > allocator.page_size - $sizeof(page_pointer))
    {
        // Oversized: allocate externally and remember it in the header slot.
        void* mem = main_allocator.alloc(size)?;
        *page_pointer = mem;
        allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
        return mem;
    }
    // In-page: hand out the bytes immediately after the header slot.
    allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
    return &page_pointer[1];
}
// Bump allocator over a fixed buffer that restarts at the beginning when full.
struct RingAllocator
{
    char *data;   // backing buffer
    usize size;   // buffer capacity in bytes
    usize offset; // next free position within data
}
// Allocate `size` bytes from the ring, wrapping to the start of the buffer
// when the remaining tail is too small. Returns null only when the request
// exceeds the whole buffer.
fn void* RingAllocator.alloc(RingAllocator *allocator, usize size)
{
    if (size > allocator.size) return null;
    // Wraparound? If so, start at the beginning.
    if (allocator.offset + size > allocator.size)
    {
        allocator.offset = size;
        return allocator.data;
    }
    void* data = allocator.offset + allocator.data;
    // Fix: the offset was updated with `(offset + size) & allocator.size`,
    // which zeroes every bit below the (power-of-two) size — e.g. size 1024,
    // offset 512 masked to 0 — so later allocations overwrote live ones.
    // The wraparound check above already guarantees offset + size <= size,
    // so a plain addition is correct.
    allocator.offset = allocator.offset + size;
    return data;
}
// Resize an allocation previously handed out by this ring, extending in place
// when the tail has room and otherwise restarting at the buffer's beginning.
fn void* RingAllocator.realloc(RingAllocator *allocator, void* ptr, usize size)
{
    if (size > allocator.size) return null;
    // Fix: the containment assert was inverted (`allocator.data >= ptr`) and
    // bounded by the requested `size` rather than the buffer capacity; assert
    // that ptr actually lies inside this ring's buffer.
    assert(ptr >= allocator.data && ptr < allocator.data + allocator.size, "Realloc on other allocator.");
    // 1. The pointer is before the allocator
    if (allocator.data + allocator.offset > ptr)
    {
        if (allocator.data + allocator.size < ptr + size)
        {
            // 1a. There is not enough space, we need to copy to the start.
            usize pointer_offset = ptr - allocator.data;
            usize copy_len = pointer_offset + size > allocator.offset ? allocator.offset - pointer_offset : size;
            // FIXME(review): no data is actually moved while this stays disabled.
            //memcpy(allocator.data, ptr, copy_len);
            allocator.offset = size;
            return allocator.data;
        }
        // 1b. There is enough space, so we just change the offset:
        allocator.offset = ptr - allocator.data + size;
        return ptr;
    }
    // 2. The pointer is after the allocator
    // 2a. Is there sufficient space?
    if (ptr + size <= allocator.data + allocator.size)
    {
        // Good, if so we simply change the offset and return the pointer.
        allocator.offset = ptr - allocator.data + size;
        return ptr;
    }
    // 2b. Not sufficient space, we copy to the beginning.
    usize pointer_offset = ptr - allocator.data;
    usize copy_len = allocator.size - (ptr - allocator.data);
    if (copy_len > size) copy_len = size;
    // FIXME(review): no data is actually moved while this stays disabled.
    //memcpy(allocator.data, ptr, copy_len);
    allocator.offset = size;
    return allocator.data;
}