From 164c901ae68052cf71b5e0f4b330b878d42adf11 Mon Sep 17 00:00:00 2001 From: Christoffer Lerno Date: Wed, 7 May 2025 12:52:19 +0200 Subject: [PATCH] More comments on the allocators. --- .../core/allocators/backed_arena_allocator.c3 | 19 +++++++++++---- lib/std/core/allocators/dynamic_arena.c3 | 11 +++++++++ lib/std/core/allocators/heap_allocator.c3 | 7 ++++++ lib/std/core/allocators/libc_allocator.c3 | 8 ++++--- lib/std/core/allocators/on_stack_allocator.c3 | 10 +++++++- lib/std/core/mem_allocator.c3 | 24 +++++++++++++++++-- 6 files changed, 68 insertions(+), 11 deletions(-) diff --git a/lib/std/core/allocators/backed_arena_allocator.c3 b/lib/std/core/allocators/backed_arena_allocator.c3 index 62366b157..e0af33a3e 100644 --- a/lib/std/core/allocators/backed_arena_allocator.c3 +++ b/lib/std/core/allocators/backed_arena_allocator.c3 @@ -1,12 +1,15 @@ module std::core::mem::allocator; import std::io, std::math; -struct AllocChunk @local -{ - usz size; - char[*] data; -} +<* + The backed arena allocator provides an allocator that will allocate from a pre-allocated chunk of memory + provided by its backing allocator. The allocator supports mark / reset operations, so it can be used + as a stack (push-pop) allocator. If the initial memory is used up, it will fall back to regular allocations, + that will be safely freed on `reset`. + While this allocator is similar to the dynamic arena, it supports multiple "save points", which the dynamic arena + doesn't. 
+*> struct BackedArenaAllocator (Allocator) { Allocator backing_allocator; @@ -16,6 +19,12 @@ struct BackedArenaAllocator (Allocator) char[*] data; } +struct AllocChunk @local +{ + usz size; + char[*] data; +} + const usz PAGE_IS_ALIGNED @local = (usz)isz.max + 1u; struct ExtraPage @local diff --git a/lib/std/core/allocators/dynamic_arena.c3 b/lib/std/core/allocators/dynamic_arena.c3 index cd1520c14..c238dbffc 100644 --- a/lib/std/core/allocators/dynamic_arena.c3 +++ b/lib/std/core/allocators/dynamic_arena.c3 @@ -4,6 +4,17 @@ module std::core::mem::allocator; import std::math; +<* + The dynamic arena allocator is an arena allocator that can grow by adding additional arena "pages". + It only supports reset, at which point all pages except the first one are released to the backing + allocator. + + If you want multiple save points, use the BackedArenaAllocator instead. + + The advantage over the BackedArenaAllocator is that when allocating beyond the first "page", it will + retain the characteristics of an arena allocator (allocating a large piece of memory then handing off + memory from that memory), whereas the BackedArenaAllocator will have heap allocator characteristics. +*> struct DynamicArenaAllocator (Allocator) { Allocator backing_allocator; diff --git a/lib/std/core/allocators/heap_allocator.c3 b/lib/std/core/allocators/heap_allocator.c3 index a085f73eb..61b3f8a60 100644 --- a/lib/std/core/allocators/heap_allocator.c3 +++ b/lib/std/core/allocators/heap_allocator.c3 @@ -5,6 +5,13 @@ module std::core::mem::allocator; import std::math; +<* + The SimpleHeapAllocator implements a simple heap allocator on top of an allocator function. + + It uses the given allocator function to allocate memory from some source, but never frees it. + This allocator is intended to be used in environments where there isn't any native libc malloc, + and it has to be emulated from a memory region, or wrapping linear memory as is the case for plain WASM. 
+*> struct SimpleHeapAllocator (Allocator) { MemoryAllocFn alloc_fn; diff --git a/lib/std/core/allocators/libc_allocator.c3 b/lib/std/core/allocators/libc_allocator.c3 index 026aaef8c..9ba07a01c 100644 --- a/lib/std/core/allocators/libc_allocator.c3 +++ b/lib/std/core/allocators/libc_allocator.c3 @@ -1,13 +1,15 @@ -// Copyright (c) 2021-2024 Christoffer Lerno. All rights reserved. +// Copyright (c) 2021-2025 Christoffer Lerno. All rights reserved. // Use of this source code is governed by the MIT license // a copy of which can be found in the LICENSE_STDLIB file. - module std::core::mem::allocator @if(env::LIBC); import std::io; import libc; -const LibcAllocator LIBC_ALLOCATOR = {}; +<* + The LibcAllocator is a wrapper around malloc to conform to the Allocator interface. +*> typedef LibcAllocator (Allocator, Printable) = uptr; +const LibcAllocator LIBC_ALLOCATOR = {}; fn String LibcAllocator.to_string(&self, Allocator allocator) @dynamic => "Libc allocator".copy(allocator); fn usz? LibcAllocator.to_format(&self, Formatter *format) @dynamic => format.print("Libc allocator"); diff --git a/lib/std/core/allocators/on_stack_allocator.c3 b/lib/std/core/allocators/on_stack_allocator.c3 index 45a1ed834..e6e9f55a8 100644 --- a/lib/std/core/allocators/on_stack_allocator.c3 +++ b/lib/std/core/allocators/on_stack_allocator.c3 @@ -1,5 +1,14 @@ module std::core::mem::allocator; +<* + The OnStackAllocator is similar to the ArenaAllocator: it allocates from a chunk of memory + given to it. + + The difference is that when it runs out of memory it will go directly to its backing allocator + rather than failing. + + It is utilized by the @stack_mem macro as an alternative to the temp allocator. 
+*> struct OnStackAllocator (Allocator) { Allocator backing_allocator; @@ -8,7 +17,6 @@ struct OnStackAllocator (Allocator) OnStackAllocatorExtraChunk* chunk; } - struct OnStackAllocatorExtraChunk @local { bool is_aligned; diff --git a/lib/std/core/mem_allocator.c3 b/lib/std/core/mem_allocator.c3 index 4abbd2fc2..8cbb23fc4 100644 --- a/lib/std/core/mem_allocator.c3 +++ b/lib/std/core/mem_allocator.c3 @@ -1,6 +1,18 @@ module std::core::mem::allocator; import std::math; +// C3 has multiple different allocators available: +// +// Name Arena Uses buffer OOM Fallback? Mark? Reset? +// ArenaAllocator Yes Yes No Yes Yes +// BackedArenaAllocator Yes No Yes Yes Yes +// DynamicArenaAllocator Yes No Yes No Yes +// HeapAllocator No No No No No *Note: Not for normal use +// LibcAllocator No No No No No *Note: Wraps malloc +// OnStackAllocator Yes Yes Yes No No *Note: Used by @stack_mem +// TempAllocator Yes No Yes No* No* *Note: Mark/reset using @pool +// TrackingAllocator No No N/A No No *Note: Wraps other heap allocator + const DEFAULT_SIZE_PREFIX = usz.sizeof; const DEFAULT_SIZE_PREFIX_ALIGNMENT = usz.alignof; @@ -20,13 +32,18 @@ enum AllocInitType interface Allocator { <* + Acquire memory from the allocator, with the given alignment and initialization type. + @require !alignment || math::is_power_of_2(alignment) @require alignment <= mem::MAX_MEMORY_ALIGNMENT : `alignment too big` - @require size > 0 + @require size > 0 : "The size must be 1 or more" @return? mem::INVALID_ALLOC_SIZE, mem::OUT_OF_MEMORY *> fn void*? acquire(usz size, AllocInitType init_type, usz alignment = 0); + <* + Resize acquired memory from the allocator, with the given new size and alignment. + @require !alignment || math::is_power_of_2(alignment) @require alignment <= mem::MAX_MEMORY_ALIGNMENT : `alignment too big` @require ptr != null @@ -34,8 +51,11 @@ interface Allocator @return? mem::INVALID_ALLOC_SIZE, mem::OUT_OF_MEMORY *> fn void*? 
resize(void* ptr, usz new_size, usz alignment = 0); + <* - @require ptr != null + Release memory acquired using `acquire` or `resize`. + + @require ptr != null : "Empty pointers should never be released" *> fn void release(void* ptr, bool aligned); }