- Warn on use of visibility modifiers on methods. #2962

Christoffer Lerno
2026-02-21 21:10:08 +01:00
parent e1ec4b1235
commit dc52478c09
49 changed files with 907 additions and 890 deletions
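Every change below follows one mechanical pattern: helper methods carrying a visibility modifier (@local or @private) are rewritten as plain module-local functions that take the receiver as an explicit first parameter, and call sites switch from method syntax to a direct call. Where the helper is genuinely useful outside the module, as with DString.data and the VirtualMemoryAccess.to_posix/to_win32 methods near the end, the modifier is dropped instead. A minimal sketch of the pattern, with Foo and its field invented for illustration:

struct Foo
{
    int count;
}

// Before: a visibility modifier on a method now draws a warning.
fn void Foo.cleanup(&self) @local
{
    self.count = 0;
}

// After: a module-local function with the receiver made explicit.
fn void foo_cleanup(Foo* self) @local
{
    self.count = 0;
}

// Call sites change from self.cleanup() to foo_cleanup(self).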


@@ -56,7 +56,7 @@ fn BackedArenaAllocator*? new_backed_allocator(usz size, Allocator allocator)
fn void BackedArenaAllocator.destroy(&self)
{
self.reset(0);
if (self.last_page) (void)self._free_page(self.last_page);
if (self.last_page) (void)_free_page(self, self.last_page);
allocator::free(self.backing_allocator, self);
}
@@ -79,7 +79,7 @@ fn void BackedArenaAllocator.reset(&self, usz mark)
self.used = last_page.mark;
ExtraPage *to_free = last_page;
last_page = last_page.prev_page;
self._free_page(to_free)!!;
_free_page(self, to_free)!!;
}
self.last_page = last_page;
$if env::COMPILER_SAFE_MODE || env::ADDRESS_SANITIZER:
@@ -98,13 +98,13 @@ fn void BackedArenaAllocator.reset(&self, usz mark)
self.used = mark;
}
fn void? BackedArenaAllocator._free_page(&self, ExtraPage* page) @inline @local
fn void? _free_page(BackedArenaAllocator* self, ExtraPage* page) @inline @local
{
void* mem = page.start;
return self.backing_allocator.release(mem, page.is_aligned());
}
fn void*? BackedArenaAllocator._realloc_page(&self, ExtraPage* page, usz size, usz alignment) @inline @local
fn void*? _realloc_page(BackedArenaAllocator* self, ExtraPage* page, usz size, usz alignment) @inline @local
{
// Then the actual start pointer:
void* real_pointer = page.start;
@@ -133,7 +133,7 @@ fn void*? BackedArenaAllocator.resize(&self, void* pointer, usz size, usz alignm
assert(self.last_page, "Realloc of unrelated pointer");
// First grab the page
ExtraPage *page = pointer - ExtraPage.sizeof;
return self._realloc_page(page, size, alignment);
return _realloc_page(self, page, size, alignment);
}
AllocChunk* data = self.acquire(size, NO_ZERO, alignment)!;
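Note that the rewrite preserves each call site's original error handling, and this file happens to use all three C3 forms: (void) discards a fault (destroy above), !! panics on one (reset), and ! rethrows it to the caller (resize). A short sketch with an invented faultable helper:

fn void? may_fail()
{
    return mem::OUT_OF_MEMORY?;  // always faults, purely for illustration
}

fn void demo()
{
    (void) may_fail();  // discard the fault, as in destroy above
    may_fail()!!;       // panic if a fault comes back, as in reset above
}

fn void? forwards()
{
    may_fail()!;        // rethrow the fault to the caller, as in resize above
}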


@@ -137,12 +137,67 @@ fn void DynamicArenaAllocator.reset(&self)
self.page = page;
}
<*
@require size > 0 : `acquire expects size > 0`
@require !alignment || math::is_power_of_2(alignment)
@return? mem::INVALID_ALLOC_SIZE, mem::OUT_OF_MEMORY
*>
fn void*? DynamicArenaAllocator.acquire(&self, usz size, AllocInitType init_type, usz alignment) @dynamic
{
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = self.page;
void* ptr @noinit;
do SET_DONE:
{
if (!page && self.unused_page)
{
self.page = page = self.unused_page;
self.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page)
{
ptr = _alloc_new(self, size, alignment)!;
break SET_DONE;
}
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
usz new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = self.unused_page))
{
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
new_used = start + size - page.memory;
if (page.total >= new_used)
{
self.unused_page = page.prev_arena;
page.prev_arena = self.page;
self.page = page;
break ALLOCATE_NEW;
}
}
ptr = _alloc_new(self, size, alignment)!;
break SET_DONE;
}
page.used = new_used;
assert(start + size == page.memory + page.used);
ptr = start;
DynamicArenaChunk* chunk = (DynamicArenaChunk*)ptr - 1;
chunk.size = size;
};
if (init_type == ZERO) mem::clear(ptr, size, mem::DEFAULT_MEM_ALIGNMENT);
return ptr;
}
<*
@require math::is_power_of_2(alignment)
@require size > 0
@return? mem::INVALID_ALLOC_SIZE, mem::OUT_OF_MEMORY
*>
fn void*? DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment) @local
fn void*? _alloc_new(DynamicArenaAllocator* self, usz size, usz alignment) @local
{
// First, make sure that we can align it, extending the page size if needed.
usz page_size = max(self.page_size, mem::aligned_offset(size + DynamicArenaChunk.sizeof + alignment, alignment));
@@ -166,57 +221,4 @@ fn void*? DynamicArenaAllocator._alloc_new(&self, usz size, usz alignment) @loca
self.page = page;
page.current_stack_ptr = mem_start;
return mem_start;
}
<*
@require size > 0 : `acquire expects size > 0`
@require !alignment || math::is_power_of_2(alignment)
@return? mem::INVALID_ALLOC_SIZE, mem::OUT_OF_MEMORY
*>
fn void*? DynamicArenaAllocator.acquire(&self, usz size, AllocInitType init_type, usz alignment) @dynamic
{
alignment = alignment_for_allocation(alignment);
DynamicArenaPage* page = self.page;
void* ptr @noinit;
do SET_DONE:
{
if (!page && self.unused_page)
{
self.page = page = self.unused_page;
self.unused_page = page.prev_arena;
page.prev_arena = null;
}
if (!page)
{
ptr = self._alloc_new(size, alignment)!;
break SET_DONE;
}
void* start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
usz new_used = start - page.memory + size;
if ALLOCATE_NEW: (new_used > page.total)
{
if ((page = self.unused_page))
{
start = mem::aligned_pointer(page.memory + page.used + DynamicArenaChunk.sizeof, alignment);
new_used = start + size - page.memory;
if (page.total >= new_used)
{
self.unused_page = page.prev_arena;
page.prev_arena = self.page;
self.page = page;
break ALLOCATE_NEW;
}
}
ptr = self._alloc_new(size, alignment)!;
break SET_DONE;
}
page.used = new_used;
assert(start + size == page.memory + page.used);
ptr = start;
DynamicArenaChunk* chunk = (DynamicArenaChunk*)ptr - 1;
chunk.size = size;
};
if (init_type == ZERO) mem::clear(ptr, size, mem::DEFAULT_MEM_ALIGNMENT);
return ptr;
}
}


@@ -32,58 +32,58 @@ fn void*? SimpleHeapAllocator.acquire(&self, usz size, AllocInitType init_type,
{
if (init_type == ZERO)
{
return alignment > 0 ? @aligned_alloc(self._calloc, size, alignment) : self._calloc(size);
return alignment > 0 ? @aligned_alloc_fn(self, simple_alloc_calloc, size, alignment) : simple_alloc_calloc(self, size);
}
return alignment > 0 ? @aligned_alloc(self._alloc, size, alignment) : self._alloc(size);
return alignment > 0 ? @aligned_alloc_fn(self, simple_alloc_alloc, size, alignment) : simple_alloc_alloc(self, size);
}
fn void*? SimpleHeapAllocator.resize(&self, void* old_pointer, usz size, usz alignment) @dynamic
{
return alignment > 0
? @aligned_realloc(self._calloc, self._free, old_pointer, size, alignment)
: self._realloc(old_pointer, size);
? @aligned_realloc_fn(self, simple_alloc_calloc, simple_alloc_free, old_pointer, size, alignment)
: simple_alloc_realloc(self, old_pointer, size);
}
fn void SimpleHeapAllocator.release(&self, void* old_pointer, bool aligned) @dynamic
{
if (aligned)
{
@aligned_free(self._free, old_pointer)!!;
@aligned_free_fn(self, simple_alloc_free, old_pointer)!!;
}
else
{
self._free(old_pointer);
simple_alloc_free(self, old_pointer);
}
}
<*
@require old_pointer && bytes > 0
*>
fn void*? SimpleHeapAllocator._realloc(&self, void* old_pointer, usz bytes) @local
fn void*? simple_alloc_realloc(SimpleHeapAllocator* self, void* old_pointer, usz bytes) @local
{
// Find the block header.
Header* block = (Header*)old_pointer - 1;
if (block.size >= bytes) return old_pointer;
void* new = self._alloc(bytes)!;
void* new = simple_alloc_alloc(self, bytes)!;
usz max_to_copy = math::min(block.size, bytes);
mem::copy(new, old_pointer, max_to_copy);
self._free(old_pointer);
simple_alloc_free(self, old_pointer);
return new;
}
fn void*? SimpleHeapAllocator._calloc(&self, usz bytes) @local
fn void*? simple_alloc_calloc(SimpleHeapAllocator* self, usz bytes) @local
{
void* data = self._alloc(bytes)!;
void* data = simple_alloc_alloc(self, bytes)!;
mem::clear(data, bytes, mem::DEFAULT_MEM_ALIGNMENT);
return data;
}
fn void*? SimpleHeapAllocator._alloc(&self, usz bytes) @local
fn void*? simple_alloc_alloc(SimpleHeapAllocator* self, usz bytes) @local
{
usz aligned_bytes = mem::aligned_offset(bytes, mem::DEFAULT_MEM_ALIGNMENT);
if (!self.free_list)
{
self.add_block(aligned_bytes)!;
simple_alloc_add_block(self, aligned_bytes)!;
}
Header* current = self.free_list;
@@ -123,22 +123,22 @@ fn void*? SimpleHeapAllocator._alloc(&self, usz bytes) @local
current = current.next;
}
}
self.add_block(aligned_bytes)!;
return self._alloc(aligned_bytes);
simple_alloc_add_block(self, aligned_bytes)!;
return simple_alloc_alloc(self, aligned_bytes);
}
fn void? SimpleHeapAllocator.add_block(&self, usz aligned_bytes) @local
fn void? simple_alloc_add_block(SimpleHeapAllocator* self, usz aligned_bytes) @local
{
assert(mem::aligned_offset(aligned_bytes, mem::DEFAULT_MEM_ALIGNMENT) == aligned_bytes);
char[] result = self.alloc_fn(aligned_bytes + Header.sizeof)!;
Header* new_block = (Header*)result.ptr;
new_block.size = result.len - Header.sizeof;
new_block.next = null;
self._free(new_block + 1);
simple_alloc_free(self, new_block + 1);
}
fn void SimpleHeapAllocator._free(&self, void* ptr) @local
fn void simple_alloc_free(SimpleHeapAllocator* self, void* ptr) @local
{
// Empty ptr -> do nothing.
if (!ptr) return;


@@ -127,7 +127,7 @@ fn void TempAllocator.reset(&self)
{
TempAllocator* old = child;
child = old.derived;
old.destroy();
temp_allocator_destroy(old);
}
self.capacity = self.original_capacity;
$if env::ADDRESS_SANITIZER:
@@ -142,17 +142,17 @@ fn void TempAllocator.reset(&self)
fn void TempAllocator.free(&self)
{
self.reset();
self.destroy();
temp_allocator_destroy(self);
}
fn void TempAllocator.destroy(&self) @local
fn void temp_allocator_destroy(TempAllocator* self)
{
TempAllocatorPage *last_page = self.last_page;
while (last_page)
{
TempAllocatorPage *to_free = last_page;
last_page = last_page.prev_page;
self._free_page(to_free)!!;
_free_page(self, to_free)!!;
}
if (self.allocated)
{
@@ -179,33 +179,6 @@ fn void TempAllocator.release(&self, void* old_pointer, bool) @dynamic
}
fn void? TempAllocator._free_page(&self, TempAllocatorPage* page) @inline @local
{
void* mem = page.start;
return self.backing_allocator.release(mem, page.is_aligned());
}
fn void*? TempAllocator._realloc_page(&self, TempAllocatorPage* page, usz size, usz alignment) @inline @local
{
// Then the actual start pointer:
void* real_pointer = page.start;
// Walk backwards to find the pointer to this page.
TempAllocatorPage **pointer_to_prev = &self.last_page;
// Remove the page from the list
while (*pointer_to_prev != page)
{
pointer_to_prev = &((*pointer_to_prev).prev_page);
}
*pointer_to_prev = page.prev_page;
usz page_size = page.pagesize();
// Clear on size > original size.
void* data = self.acquire(size, NO_ZERO, alignment)!;
if (page_size > size) page_size = size;
mem::copy(data, &page.data[0], page_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
self.backing_allocator.release(real_pointer, page.is_aligned());
return data;
}
fn void*? TempAllocator.resize(&self, void* pointer, usz size, usz alignment) @dynamic
{
@@ -215,7 +188,7 @@ fn void*? TempAllocator.resize(&self, void* pointer, usz size, usz alignment) @d
assert(self.last_page, "Realloc of non temp pointer");
// First grab the page
TempAllocatorPage *page = pointer - TempAllocatorPage.sizeof;
return self._realloc_page(page, size, alignment);
return _realloc_page(self, page, size, alignment);
}
bool is_realloc_of_last = chunk.size + pointer == &self.data[self.used];
if (is_realloc_of_last)
@@ -326,9 +299,39 @@ fn void*? TempAllocator.acquire(&self, usz size, AllocInitType init_type, usz al
return &page.data[0];
}
fn void? _free_page(TempAllocator* self, TempAllocatorPage* page) @inline @local
{
void* mem = page.start;
return self.backing_allocator.release(mem, page.is_aligned());
}
fn void*? _realloc_page(TempAllocator* self, TempAllocatorPage* page, usz size, usz alignment) @inline @local
{
// Then the actual start pointer:
void* real_pointer = page.start;
// Walk backwards to find the pointer to this page.
TempAllocatorPage **pointer_to_prev = &self.last_page;
// Remove the page from the list
while (*pointer_to_prev != page)
{
pointer_to_prev = &((*pointer_to_prev).prev_page);
}
*pointer_to_prev = page.prev_page;
usz page_size = page.pagesize();
// Clear on size > original size.
void* data = self.acquire(size, NO_ZERO, alignment)!;
if (page_size > size) page_size = size;
mem::copy(data, &page.data[0], page_size, mem::DEFAULT_MEM_ALIGNMENT, mem::DEFAULT_MEM_ALIGNMENT);
self.backing_allocator.release(real_pointer, page.is_aligned());
return data;
}
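The unlink loop in _realloc_page above uses the pointer-to-pointer idiom: rather than tracking a separate previous node, it walks the address of each prev_page link, so removing the head needs no special case. The same technique in isolation, with an invented Node type:

struct Node
{
    Node* prev_page;
}

fn void unlink(Node** head, Node* target)
{
    Node** slot = head;
    // Walk the chain of link fields until one of them holds target.
    while (*slot != target) slot = &((*slot).prev_page);
    // Redirect that link past target, detaching it from the list.
    *slot = target.prev_page;
}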
module std::core::mem::allocator @if((env::POSIX || env::WIN32) && $feature(VMEM_TEMP));
import std::math;
tlocal VmemOptions temp_allocator_default_options = {
.shrink_on_reset = env::MEMORY_ENV != NORMAL,
.protect_unused_pages = env::COMPILER_OPT_LEVEL <= O1 || env::COMPILER_SAFE_MODE,
@@ -383,10 +386,10 @@ fn void TempAllocator.reset(&self)
}
fn void TempAllocator.free(&self)
{
self.destroy();
_destroy(self);
}
fn void TempAllocator.destroy(&self) @local
fn void _destroy(TempAllocator* self) @local
{
TempAllocator* child = self.derived;
if (!child) return;
@@ -403,4 +406,4 @@ fn void*? TempAllocator.resize(&self, void* pointer, usz size, usz alignment) @d
fn void TempAllocator.release(&self, void* old_pointer, bool b) @dynamic
{
self.vmem.release(old_pointer, b) @inline;
}
}


@@ -631,7 +631,7 @@ fn void DString.reverse(self)
}
}
fn StringData* DString.data(self) @inline @private
fn StringData* DString.data(self) @inline
{
return (StringData*)self;
}


@@ -404,6 +404,28 @@ macro void*? @aligned_alloc(#alloc_fn, usz bytes, usz alignment)
return mem;
}
<*
@require bytes > 0
@require alignment > 0
@require bytes <= isz.max
*>
macro void*? @aligned_alloc_fn(context, #alloc_fn, usz bytes, usz alignment)
{
if (alignment < void*.alignof) alignment = void*.alignof;
usz header = AlignedBlock.sizeof + alignment;
usz alignsize = bytes + header;
$if $kindof(#alloc_fn(context, bytes)) == OPTIONAL:
void* data = #alloc_fn(context, alignsize)!;
$else
void* data = #alloc_fn(context, alignsize);
$endif
void* mem = mem::aligned_pointer(data + AlignedBlock.sizeof, alignment);
AlignedBlock* desc = (AlignedBlock*)mem - 1;
assert(mem > data);
*desc = { bytes, data };
return mem;
}
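The new _fn macro variants replace the bound-method arguments of @aligned_alloc and friends: the receiver now travels as an explicit context parameter, and the $kindof(...) == OPTIONAL check picks the rethrowing call form at compile time when the supplied function returns an optional. A hedged usage sketch mirroring the SimpleHeapAllocator call sites above, with the size and alignment invented:

fn void*? example(SimpleHeapAllocator* self)
{
    // simple_alloc_calloc returns void*?, so the macro expands to the
    // error-rethrowing call form selected by the $kindof check.
    return @aligned_alloc_fn(self, simple_alloc_calloc, 256, 64);
}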
struct AlignedBlock
{
usz len;
@@ -420,6 +442,16 @@ macro void? @aligned_free(#free_fn, void* old_pointer)
$endif
}
macro void? @aligned_free_fn(context, #free_fn, void* old_pointer)
{
AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
$if $kindof(#free_fn(context, desc.start)) == OPTIONAL:
#free_fn(context, desc.start)!;
$else
#free_fn(context, desc.start);
$endif
}
<*
@require bytes > 0
@require alignment > 0
@@ -438,6 +470,23 @@ macro void*? @aligned_realloc(#calloc_fn, #free_fn, void* old_pointer, usz bytes
return new_data;
}
<*
@require bytes > 0
@require alignment > 0
*>
macro void*? @aligned_realloc_fn(context, #calloc_fn, #free_fn, void* old_pointer, usz bytes, usz alignment)
{
AlignedBlock* desc = (AlignedBlock*)old_pointer - 1;
void* data_start = desc.start;
void* new_data = @aligned_alloc_fn(context, #calloc_fn, bytes, alignment)!;
mem::copy(new_data, old_pointer, desc.len < bytes ? desc.len : bytes, 1, 1);
$if $kindof(#free_fn(context, data_start)) == OPTIONAL:
#free_fn(context, data_start)!;
$else
#free_fn(context, data_start);
$endif
return new_data;
}
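Worth noting: the realloc variant allocates through the calloc-style function, so when a block grows, only min(desc.len, bytes) bytes are copied from the old block and everything past them arrives zeroed. A hedged sketch, with the sizes and alignment invented:

fn char*? grow_example(SimpleHeapAllocator* self)
{
    char* old = @aligned_alloc_fn(self, simple_alloc_calloc, 8, 16)!;
    for (usz i = 0; i < 8; i++) old[i] = 'x';
    // Only the first 8 bytes are copied into the new block; bytes 8..32
    // are zero because allocation goes through the calloc-style function.
    return @aligned_realloc_fn(self, simple_alloc_calloc, simple_alloc_free, old, 32, 16);
}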
// All allocators
alias mem @builtin = thread_allocator;


@@ -54,7 +54,7 @@ struct FixedBlockPool
@require calculate_actual_capacity(capacity, block_size) * block_size >= block_size
: "Total memory would overflow"
*>
macro FixedBlockPool* FixedBlockPool.init(&self, Allocator allocator, usz block_size, usz capacity = INITIAL_CAPACITY, usz alignment = 0)
fn FixedBlockPool* FixedBlockPool.init(&self, Allocator allocator, usz block_size, usz capacity = INITIAL_CAPACITY, usz alignment = 0)
{
self.allocator = allocator;
self.tail = &self.head;
@@ -64,7 +64,7 @@ macro FixedBlockPool* FixedBlockPool.init(&self, Allocator allocator, usz block_
self.alignment = allocator::alignment_for_allocation(alignment);
self.page_size = capacity * self.block_size;
assert(self.page_size >= self.block_size, "Total memory would overflow %d %d", block_size, capacity);
self.head.buffer = self.allocate_page();
self.head.buffer = fixedblockpool_allocate_page(self);
$if env::COMPILER_SAFE_MODE && env::ADDRESS_SANITIZER:
asan::poison_memory_region(self.head.buffer, self.page_size);
$endif
@@ -119,7 +119,7 @@ fn void FixedBlockPool.free(&self)
$if env::COMPILER_SAFE_MODE && env::ADDRESS_SANITIZER:
asan::unpoison_memory_region(self.head.buffer, self.page_size);
$endif
self.free_page(self.head.buffer);
fixedblockpool_free_page(self, self.head.buffer);
FixedBlockPoolNode* iter = self.head.next;
while (iter)
@@ -127,7 +127,7 @@ fn void FixedBlockPool.free(&self)
$if env::COMPILER_SAFE_MODE && env::ADDRESS_SANITIZER:
asan::unpoison_memory_region(iter.buffer, self.page_size);
$endif
self.free_page(iter.buffer);
fixedblockpool_free_page(self, iter.buffer);
FixedBlockPoolNode* current = iter;
iter = iter.next;
allocator::free(self.allocator, current);
@@ -158,7 +158,7 @@ fn void* FixedBlockPool.alloc(&self)
}
void* end = self.tail.buffer + (self.tail.capacity * self.block_size);
if (self.next_free >= end) self.new_node();
if (self.next_free >= end) fixedblockpool_new_node(self);
void* ptr = self.next_free;
self.next_free += self.block_size;
$if env::COMPILER_SAFE_MODE && env::ADDRESS_SANITIZER:
@@ -172,7 +172,7 @@ fn void* FixedBlockPool.alloc(&self)
Deallocate a block from the block pool
@require self.initialized : "The block pool must be initialized"
@require self.check_ptr(ptr) : "The pointer should be part of the pool"
@require fixedblockpool_check_ptr(self, ptr) : "The pointer should be part of the pool"
*>
fn void FixedBlockPool.dealloc(&self, void* ptr)
{
@@ -193,7 +193,7 @@ fn void FixedBlockPool.dealloc(&self, void* ptr)
<*
@require self.initialized : "The block pool must be initialized"
*>
fn bool FixedBlockPool.check_ptr(&self, void *ptr) @local
fn bool fixedblockpool_check_ptr(FixedBlockPool* self, void *ptr) @local
{
FixedBlockPoolNode* iter = &self.head;
@@ -210,10 +210,10 @@ fn bool FixedBlockPool.check_ptr(&self, void *ptr) @local
<*
@require self.grow_capacity > 0 : "How many blocks will it store"
*>
fn void FixedBlockPool.new_node(&self) @local
fn void fixedblockpool_new_node(FixedBlockPool* self) @local
{
FixedBlockPoolNode* node = allocator::new(self.allocator, FixedBlockPoolNode);
node.buffer = self.allocate_page();
node.buffer = fixedblockpool_allocate_page(self);
$if env::COMPILER_SAFE_MODE && env::ADDRESS_SANITIZER:
asan::poison_memory_region(node.buffer, self.page_size);
$endif
@@ -224,14 +224,14 @@ fn void FixedBlockPool.new_node(&self) @local
self.allocated += node.capacity;
}
macro void* FixedBlockPool.allocate_page(&self) @private
macro void* fixedblockpool_allocate_page(FixedBlockPool* self) @private
{
return self.alignment > mem::DEFAULT_MEM_ALIGNMENT
? allocator::calloc_aligned(self.allocator, self.page_size, self.alignment)!!
: allocator::calloc(self.allocator, self.page_size);
}
macro void FixedBlockPool.free_page(&self, void* page) @private
macro void fixedblockpool_free_page(FixedBlockPool* self, void* page) @private
{
if (self.alignment > mem::DEFAULT_MEM_ALIGNMENT)
{


@@ -321,7 +321,7 @@ fn void? VirtualMemory.destroy(&self)
return release(self.ptr, self.size);
}
fn CInt VirtualMemoryAccess.to_posix(self) @if(env::POSIX) @private
fn CInt VirtualMemoryAccess.to_posix(self) @if(env::POSIX)
{
switch (self)
{
@@ -336,7 +336,7 @@ fn CInt VirtualMemoryAccess.to_posix(self) @if(env::POSIX) @private
}
}
fn Win32_Protect VirtualMemoryAccess.to_win32(self) @if(env::WIN32) @private
fn Win32_Protect VirtualMemoryAccess.to_win32(self) @if(env::WIN32)
{
switch (self)
{