Updated the memory allocator. Fixed pointer printing in printf. Added aligned_alloc to the libc module. Renamed MemoryArena -> ArenaAllocator. New temp allocator. Added @pool(), @scoped and @tscoped macros. Bumped version to 0.3.2.

This commit is contained in:
Christoffer Lerno
2022-08-01 15:25:26 +02:00
parent 272f134e78
commit 550bca79e9
15 changed files with 337 additions and 153 deletions

View File

@@ -6,7 +6,7 @@ module std::core::mem::allocator;
*/
private fn void*! arena_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
{
MemoryArena* arena = (MemoryArena*)data;
ArenaAllocator* arena = (ArenaAllocator*)data;
switch (kind)
{
case CALLOC:
@@ -16,18 +16,18 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
alignment = alignment_for_allocation(alignment);
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
if (kind == AllocationKind.CALLOC) mem::set(mem, 0, size);
if (kind == AllocationKind.CALLOC) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case REALLOC:
if (!size) nextcase FREE;
if (!old_pointer) nextcase ALLOC;
assert((uptr)old_pointer >= (uptr)arena.memory, "Pointer originates from a different allocator.");
if (size > arena.total) return AllocationFailure.OUT_OF_MEMORY!;
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
if (size > arena.data.len) return AllocationFailure.CHUNK_TOO_LARGE!;
alignment = alignment_for_allocation(alignment);
usize* old_size_ptr = (usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
usize old_size = *old_size_ptr;
// Do last allocation and alignment match?
if (arena.memory + arena.used == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
if (&arena.data[arena.used] == old_pointer + old_size && mem::ptr_is_aligned(old_pointer, alignment))
{
if (old_size >= size)
{
@@ -36,7 +36,7 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
return old_pointer;
}
usize new_used = arena.used + size - old_size;
if (new_used > arena.total) return AllocationFailure.OUT_OF_MEMORY!;
if (new_used > arena.data.len) return AllocationFailure.OUT_OF_MEMORY!;
arena.used = new_used;
*old_size_ptr = size;
return old_pointer;
@@ -44,19 +44,21 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
// Otherwise just allocate new memory.
void* mem = arena.alloc(size, alignment, DEFAULT_SIZE_PREFIX)?;
*(usize*)(mem - DEFAULT_SIZE_PREFIX) = size;
mem::copy(mem, old_pointer, old_size);
mem::memcpy(mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
return mem;
case FREE:
if (!old_pointer) return null;
assert((uptr)old_pointer >= (uptr)arena.memory, "Pointer originates from a different allocator.");
assert((uptr)old_pointer >= (uptr)arena.data.ptr, "Pointer originates from a different allocator.");
usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
if (old_pointer + old_size == arena.memory + arena.used)
if (old_pointer + old_size == &arena.data[arena.used])
{
arena.used -= old_size;
}
return null;
case MARK:
return (void*)(uptr)arena.used;
case RESET:
arena.used = 0;
arena.used = size;
return null;
}
unreachable();
@@ -69,14 +71,14 @@ private fn void*! arena_allocator_function(Allocator* data, usize size, usize al
* @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
* @require this != null
**/
private fn void*! MemoryArena.alloc(MemoryArena* this, usize size, usize alignment, usize prefixed_bytes = 0)
private fn void*! ArenaAllocator.alloc(ArenaAllocator* this, usize size, usize alignment, usize prefixed_bytes = 0)
{
void* start_mem = this.memory;
void* start_mem = this.data.ptr;
void* unaligned_pointer = start_mem + this.used + prefixed_bytes;
if ((uptr)unaligned_pointer < (uptr)start_mem) return AllocationFailure.OUT_OF_MEMORY!;
usize offset_start = mem::aligned_offset((usize)(uptr)unaligned_pointer, alignment) - (usize)(uptr)start_mem;
usize end = offset_start + size;
if (end > this.total || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
if (end > this.data.len || end < offset_start) return AllocationFailure.OUT_OF_MEMORY!;
this.used = end;
return start_mem + offset_start;
}

View File

@@ -54,7 +54,7 @@ private fn void*! DynamicArenaAllocator.realloc(DynamicArenaAllocator* this, voi
return old_pointer;
}
void* new_mem = this.alloc(size, alignment)?;
mem::copy(new_mem, old_pointer, old_size);
mem::memcpy(new_mem, old_pointer, old_size, false, DEFAULT_MEM_ALIGNMENT);
return new_mem;
}
@@ -154,7 +154,7 @@ private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size,
assert(!old_pointer, "Unexpected no old pointer for calloc.");
if (!size) return null;
void* mem = allocator.alloc(size, alignment)?;
mem::set(mem, 0, size);
mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
return mem;
case ALLOC:
assert(!old_pointer, "Unexpected no old pointer for alloc.");
@@ -174,6 +174,8 @@ private fn void*! dynamic_arena_allocator_function(Allocator* data, usize size,
if (!old_pointer) return null;
allocator.free(old_pointer);
return null;
case MARK:
unreachable("Tried to mark a dynamic arena");
case RESET:
allocator.reset();
return null;

View File

@@ -28,7 +28,8 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
case ALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::malloc(bytes + alignment), alignment);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
}
else
{
@@ -39,23 +40,36 @@ fn void*! libc_allocator_fn(Allocator* unused, usize bytes, usize alignment, voi
case CALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::calloc(bytes + alignment, 1), alignment);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
mem::memset(data, 0, bytes, false, DEFAULT_MEM_ALIGNMENT);
}
else
{
data = libc::malloc(bytes);
data = libc::calloc(bytes, 1);
}
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case REALLOC:
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
data = (void*)mem::aligned_offset((iptr)libc::realloc(old_pointer, bytes + alignment), alignment);
}
else
{
data = libc::realloc(old_pointer, bytes);
}
if (!bytes) nextcase FREE;
if (!old_pointer) nextcase CALLOC;
$if (libc::HAS_MALLOC_SIZE):
if (alignment > DEFAULT_MEM_ALIGNMENT)
{
usize size = libc::malloc_size(old_pointer);
if (alignment > bytes) bytes = alignment;
data = libc::aligned_alloc(alignment, bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
if (bytes > size)
{
mem::memset(data + size, 0, bytes - size, false, DEFAULT_MEM_ALIGNMENT);
}
mem::memcpy(data, old_pointer, size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
libc::free(old_pointer);
return data;
}
$endif;
data = libc::realloc(old_pointer, bytes);
if (!data) return AllocationFailure.OUT_OF_MEMORY!;
return data;
case RESET:

View File

@@ -0,0 +1,197 @@
module std::core::mem::allocator;
import std::io;
// Header stored immediately before every in-arena temp allocation.
private struct TempAllocatorChunk
{
	usize size;    // Allocation size; ~(usize)0 marks a backing-allocator "page" allocation instead.
	char[*] data;  // The allocation payload follows the header.
}
// A scratch allocator: a fixed inline arena, with oversized requests
// overflowing into "pages" taken from a backing allocator.
struct TempAllocator
{
	inline Allocator allocator;    // Embedded allocator header (function-pointer dispatch).
	Allocator* backing_allocator;  // Serves the allocator itself and oversized allocations.
	TempAllocatorPage* last_page;  // Most recent overflow page (linked LIFO via prev_page).
	usize used;                    // Bytes consumed from `data`; doubles as the mark value.
	usize capacity;                // Total size of `data` in bytes.
	char[*] data;                  // Arena storage allocated inline after this struct.
}
// Header for an oversized allocation served by the backing allocator.
// `ident` sits immediately before `data`, mirroring TempAllocatorChunk.size,
// so the ~(usize)0 sentinel lets _realloc distinguish pages from chunks.
struct TempAllocatorPage
{
	TempAllocatorPage* prev_page;  // Previous overflow page (LIFO list).
	usize mark;                    // Arena `used` at the time of this allocation.
	void* start;                   // Raw pointer returned by the backing allocator.
	usize size;                    // User-visible allocation size in bytes.
	usize ident;                   // ~(usize)0 sentinel (see TempAllocatorChunk.size).
	char[*] data;                  // User memory begins here.
}
/**
 * Create a new temp allocator with `size` bytes of arena storage.
 * The TempAllocator header and its arena are allocated together in a
 * single request from `backing_allocator`, which also serves any later
 * overflow pages.
 *
 * @require size >= 16
 **/
fn TempAllocator*! new_temp(usize size, Allocator* backing_allocator)
{
	TempAllocator* allocator = backing_allocator.alloc(size + TempAllocator.sizeof)?;
	allocator.last_page = null;
	allocator.function = &temp_allocator_function;
	allocator.backing_allocator = backing_allocator;
	allocator.used = 0;
	allocator.capacity = size;
	return allocator;
}
/**
 * Allocator callback for TempAllocator, dispatching on the allocation kind.
 *
 * @require !alignment || math::is_power_of_2(alignment)
 * @require data `unexpectedly missing the allocator`
 */
private fn void*! temp_allocator_function(Allocator* data, usize size, usize alignment, void* old_pointer, AllocationKind kind)
{
	TempAllocator* arena = (TempAllocator*)data;
	switch (kind)
	{
		case CALLOC:
		case ALLOC:
			assert(!old_pointer, "Unexpected old pointer for alloc.");
			if (!size) return null;
			// CALLOC additionally zeroes the returned memory (clear flag).
			return arena._alloc(size, alignment_for_allocation(alignment), kind == AllocationKind.CALLOC);
		case REALLOC:
			// Degenerate reallocs fall through to free/alloc.
			if (!size) nextcase FREE;
			if (!old_pointer) nextcase ALLOC;
			return arena._realloc(old_pointer, size, alignment_for_allocation(alignment));
		case FREE:
			if (!old_pointer) return null;
			assert((uptr)old_pointer >= (uptr)&arena.data, "Pointer originates from a different allocator.");
			// NOTE(review): reads the size via DEFAULT_SIZE_PREFIX, while _alloc
			// writes a TempAllocatorChunk header — confirm these layouts agree.
			usize old_size = *(usize*)(old_pointer - DEFAULT_SIZE_PREFIX);
			// Only the most recent allocation can actually be reclaimed.
			if (old_pointer + old_size == &arena.data[arena.used])
			{
				arena.used -= old_size;
			}
			return null;
		case MARK:
			// The current usage counter doubles as the mark value.
			return (void*)(uptr)arena.used;
		case RESET:
			// `size` carries the mark to reset back to.
			arena._reset(size)?;
			return null;
	}
	unreachable();
}
// Roll the allocator back to `mark`: free every overflow page created at or
// after the mark, then rewind the arena usage counter to the mark.
private fn void! TempAllocator._reset(TempAllocator* this, usize mark)
{
	TempAllocatorPage *last_page = this.last_page;
	while (last_page)
	{
		// Pages are pushed LIFO, so stop at the first page older than the mark.
		if (last_page.mark < mark) break;
		void* mem = last_page.start;
		last_page = last_page.prev_page;
		this.backing_allocator.free(mem)?;
	}
	this.last_page = last_page;
	this.used = mark;
}
/**
 * Resize a temp allocation.
 *
 * Two cases: (1) the pointer is a "page" allocation served by the backing
 * allocator (identified by the ~(usize)0 sentinel in the header just before
 * the data), which is resized through the backing allocator; (2) a normal
 * in-arena chunk, which is re-allocated via _alloc and copied.
 *
 * Fixes vs. previous version: removed leftover debug io::printf calls (one
 * used %s with a usize), gave the page-path memcpy the same 6-argument form
 * used everywhere else in this module, and clamped both copy lengths to the
 * new size so a shrinking realloc cannot overrun the fresh allocation.
 *
 * @require this != null
 **/
private fn void*! TempAllocator._realloc(TempAllocator* this, void* pointer, usize size, usize alignment)
{
	TempAllocatorChunk *chunk = pointer - TempAllocatorChunk.sizeof;
	if (chunk.size == ~(usize)0)
	{
		// A full page allocation served by the backing allocator.
		assert(this.last_page, "Realloc of non temp pointer");
		// First grab the page
		TempAllocatorPage *page = pointer - TempAllocatorPage.sizeof;
		// Then the actual start pointer:
		void* real_pointer = page.start;
		// Walk backwards to find the pointer to this page.
		TempAllocatorPage **pointer_to_prev = &this.last_page;
		// Remove the page from the list
		while (*pointer_to_prev != page)
		{
			pointer_to_prev = &((*pointer_to_prev).prev_page);
		}
		*pointer_to_prev = page.prev_page;
		// Find the new header size
		usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
		// And the old one.
		uptrdiff prev_header_size = pointer - real_pointer;
		// If the new header size needs to move, we're out of luck.
		// We need to make a copy
		if (actual_header_size > prev_header_size)
		{
			// Clear only if the new size is bigger than the old one.
			void* data = this._alloc(size, alignment, size > page.size)?;
			// Copy at most the new size: the fresh allocation is only `size`
			// bytes, so a shrinking realloc must not copy page.size bytes.
			usize copy_size = size < page.size ? size : page.size;
			mem::memcpy(data, pointer, copy_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
			this.backing_allocator.free(real_pointer)?;
			return data;
		}
		// Header still fits: resize the page in place via the backing allocator,
		// then re-link it as the newest page.
		void* new_start = this.backing_allocator.realloc(real_pointer, prev_header_size + size, alignment)?;
		page = new_start + prev_header_size - TempAllocatorPage.sizeof;
		page.mark = this.used;
		page.prev_page = this.last_page;
		this.last_page = page;
		page.size = size;
		page.start = new_start;
		return &page.data;
	}
	assert(pointer < &this.data + this.capacity && pointer >= &this.data, "This is not a temp allocated pointer.");
	assert(pointer < &this.data + this.used, "This is a stale temp pointer.");
	// TODO optimize last allocation
	TempAllocatorChunk* data = this._alloc(size, alignment, size > chunk.size)?;
	// Copy at most the new size to avoid overrunning a smaller allocation.
	usize copy_size = size < chunk.size ? size : chunk.size;
	mem::memcpy(data, pointer, copy_size, false, DEFAULT_MEM_ALIGNMENT, DEFAULT_MEM_ALIGNMENT);
	return data;
}
/**
 * Allocate `size` bytes from the arena, falling back to a backing-allocator
 * "page" when the arena is exhausted. `clear` zero-fills the returned memory.
 *
 * Fixes vs. previous version: the overflow path computed the header with
 * TempAllocator.sizeof, but the header actually written (and read back by
 * _realloc, which uses TempAllocatorPage.sizeof) is a TempAllocatorPage —
 * this also makes the `&page.data == mem` assert hold by construction.
 * Error propagation from the backing allocator is now explicit on both the
 * calloc and alloc branches.
 *
 * @require alignment > 0 `alignment must be non zero`
 * @require math::is_power_of_2(alignment)
 * @require size > 0
 * @require alignment <= MAX_MEMORY_ALIGNMENT `alignment too big`
 * @require this != null
 **/
private fn void*! TempAllocator._alloc(TempAllocator* this, usize size, usize alignment, bool clear)
{
	void* start_mem = &this.data;
	uptr starting_ptr = (uptr)start_mem + this.used;
	// Place the chunk header aligned, then align the payload that follows it.
	uptr aligned_header_start = mem::aligned_offset(starting_ptr, $alignof(TempAllocatorChunk));
	uptr unaligned_data_start = aligned_header_start + TempAllocatorChunk.sizeof;
	usize data_start = mem::aligned_offset(unaligned_data_start, alignment);
	usize new_usage = data_start + size - (uptr)start_mem;
	// Fallback to backing allocator
	if (new_usage > this.capacity)
	{
		// Enlarge the page header if needed so the payload is aligned.
		usize actual_header_size = mem::aligned_offset(TempAllocatorPage.sizeof, alignment);
		usize total_alloc_size = actual_header_size + size;
		void* start;
		if (clear)
		{
			start = this.backing_allocator.calloc(total_alloc_size, alignment)?;
		}
		else
		{
			start = this.backing_allocator.alloc(total_alloc_size, alignment)?;
		}
		// Move forward to the memory
		void* mem = start + actual_header_size;
		// The page header sits immediately before the returned memory.
		TempAllocatorPage* page = mem - TempAllocatorPage.sizeof;
		page.start = start;
		page.ident = ~(usize)0; // Sentinel: marks this as a page allocation.
		page.mark = this.used;
		page.size = size;
		page.prev_page = this.last_page;
		this.last_page = page;
		assert(&page.data == mem, "Expected match");
		return mem;
	}
	// In-arena path: write the chunk header, bump usage, return the payload.
	TempAllocatorChunk* chunk_start = (TempAllocatorChunk*)(data_start - TempAllocatorChunk.sizeof);
	chunk_start.size = size;
	this.used = new_usage;
	void* mem = &chunk_start.data;
	if (clear) mem::memset(mem, 0, size, false, DEFAULT_MEM_ALIGNMENT);
	return mem;
}

View File

@@ -56,3 +56,4 @@ const CompilerOptLevel COMPILER_OPT_LEVEL = (CompilerOptLevel)($$COMPILER_OPT_LE
const bool BIG_ENDIAN = $$PLATFORM_BIG_ENDIAN;
const bool I128_SUPPORT = $$PLATFORM_I128_SUPPORTED;
const bool COMPILER_SAFE_MODE = $$COMPILER_SAFE_MODE;
const usize TEMP_ALLOCATOR_SIZE = 128 * 1024;

View File

@@ -126,7 +126,7 @@ fn void free(void* ptr) @builtin
/**
* Run with a specific allocator inside of the macro body.
**/
macro void @with_allocator(Allocator* allocator; @body())
macro void @scoped(Allocator* allocator; @body())
{
Allocator* old_allocator = thread_allocator;
thread_allocator = allocator;
@@ -134,12 +134,52 @@ macro void @with_allocator(Allocator* allocator; @body())
@body();
}
fn void*! talloc(usize size)
macro void @tscoped(;@body())
{
return temp_allocator.alloc(size);
Allocator* old_allocator = thread_allocator;
TempAllocator* temp = temp_allocator();
usize mark = temp.mark()!!;
thread_allocator = temp;
defer temp.reset(mark);
defer thread_allocator = old_allocator;
@body();
}
// Allocate `size` bytes of scratch memory from the thread's temp allocator.
// Panics (!!) on allocation failure instead of returning an error.
fn void* talloc(usize size, usize alignment = 0)
{
	return temp_allocator().alloc(size, alignment)!!;
}
// Allocate `size` zero-initialized bytes from the thread's temp allocator.
// Panics (!!) on allocation failure instead of returning an error.
fn void* tcalloc(usize size, usize alignment = 0)
{
	return temp_allocator().calloc(size, alignment)!!;
}
// Resize a temp allocation made with talloc/tcalloc.
// Panics (!!) on allocation failure instead of returning an error.
fn void* trealloc(void* ptr, usize size, usize alignment = 0)
{
	return temp_allocator().realloc(ptr, size, alignment)!!;
}
// Run @body with the temp allocator state saved and restored: any temp
// allocations made inside the block are released when it exits.
macro void @pool(;@body) @builtin
{
	TempAllocator* temp = temp_allocator();
	usize mark = temp.used; // Save the current usage as the rollback mark.
	defer temp.reset(mark); // Roll back on scope exit.
	@body();
}
private tlocal Allocator* thread_allocator = allocator::LIBC_ALLOCATOR;
private tlocal TempAllocator* thread_temp_allocator = null;
// Return the thread-local temp allocator, lazily creating it on first use
// with env::TEMP_ALLOCATOR_SIZE bytes backed by the libc allocator.
// `!!` panics if that one-time initial allocation fails.
macro TempAllocator* temp_allocator()
{
	if (!thread_temp_allocator)
	{
		thread_temp_allocator = allocator::new_temp(env::TEMP_ALLOCATOR_SIZE, allocator::LIBC_ALLOCATOR)!!;
	}
	return thread_temp_allocator;
}
macro Allocator* current_allocator()
{
return thread_allocator;

View File

@@ -22,12 +22,14 @@ enum AllocationKind
REALLOC,
FREE,
RESET,
MARK,
}
fault AllocationFailure
{
OUT_OF_MEMORY,
UNSUPPORTED_OPERATION,
CHUNK_TOO_LARGE,
}
@@ -48,6 +50,11 @@ fn void*! Allocator.realloc(Allocator* allocator, void* old_pointer, usize size,
return allocator.function(allocator, size, alignment, old_pointer, REALLOC);
}
fn usize! Allocator.mark(Allocator* allocator) @inline
{
return (usize)(uptr)allocator.function(allocator, 0, 0, null, MARK);
}
/**
* @require !alignment || math::is_power_of_2(alignment)
*/
@@ -61,9 +68,9 @@ fn void! Allocator.free(Allocator* allocator, void* old_pointer) @inline
allocator.function(allocator, 0, 0, old_pointer, FREE)?;
}
fn void Allocator.reset(Allocator* allocator)
fn void Allocator.reset(Allocator* allocator, usize mark = 0)
{
allocator.function(allocator, 0, 0, null, RESET)!!;
allocator.function(allocator, mark, 0, null, RESET)!!;
}
private fn usize alignment_for_allocation(usize alignment) @inline
@@ -121,11 +128,10 @@ fn void DynamicArenaAllocator.destroy(DynamicArenaAllocator* this)
}
struct MemoryArena
struct ArenaAllocator
{
inline Allocator allocator;
void* memory;
usize total;
char[] data;
usize used;
}
@@ -134,18 +140,17 @@ struct MemoryArena
*
* @require this != null
**/
fn void MemoryArena.init(MemoryArena* this, char[] data)
fn void ArenaAllocator.init(ArenaAllocator* this, char[] data)
{
this.function = &arena_allocator_function;
this.memory = data.ptr;
this.total = data.len;
this.data = data;
this.used = 0;
}
/**
* @require this != null
**/
fn void MemoryArena.reset(MemoryArena* this)
fn void ArenaAllocator.reset(ArenaAllocator* this)
{
this.used = 0;
}

View File

@@ -12,11 +12,29 @@ macro alloc($Type, usize elements)
return ptr[:elements];
}
/**
 * Temp-allocate an array of `elements` values of $Type and return it
 * as a slice. Memory is NOT zero-initialized (see tmake for that).
 *
 * @require usize.max / elements > $Type.sizeof
 **/
macro talloc($Type, usize elements)
{
	$Type* ptr = mem::talloc($Type.sizeof * elements, $alignof($Type[1]));
	return ptr[:elements];
}
/**
* @require (usize.max / elements > $Type.sizeof)
**/
macro make($Type, usize elements)
{
$Type* ptr = mem::calloc($sizeof($Type) * elements, $alignof($Type));
$Type* ptr = mem::calloc($sizeof($Type) * elements, $alignof($Type[1]));
return ptr[:elements];
}
/**
 * Temp-allocate a zero-initialized array of `elements` values of $Type
 * and return it as a slice.
 *
 * @require (usize.max / elements > $Type.sizeof)
 **/
macro tmake($Type, usize elements)
{
	$Type* ptr = mem::tcalloc($sizeof($Type) * elements, $alignof($Type[1]));
	return ptr[:elements];
}

View File

@@ -1,106 +0,0 @@
module std::core::mem;
// Static backing storage for the (legacy) slot-based temp allocator:
// TEMP_PAGES pages of TEMP_BLOCK_SIZE bytes each.
const TEMP_BLOCK_SIZE = 1024;
const TEMP_PAGES = 64; // Must be a power of two: TEMP_PAGES - 1 is used as a ring mask.
private char[TEMP_BLOCK_SIZE * TEMP_PAGES] allocator_static_storage;
// NOTE(review): declared but not referenced by the initializer below — verify intent.
private void*[TEMP_PAGES] allocator_static_page_storage;
SlotAllocator temp_allocator = {
	.pages = &allocator_static_storage,
	.page_size = TEMP_BLOCK_SIZE,
	.page_count = TEMP_PAGES,
	.bitmask = TEMP_PAGES - 1,
	.current_page = 0,
};
// Ring of fixed-size pages handed out round-robin.
struct SlotAllocator
{
	void* pages;        // Base of the page storage (page_count * page_size bytes).
	usize page_size;    // Bytes per page, including the leading void* bookkeeping slot.
	usize page_count;   // Number of pages in the ring.
	usize bitmask;      // page_count - 1; assumes page_count is a power of two.
	usize current_page; // Index of the next page to hand out.
}
// Hand out `size` bytes from the next ring slot. Each page begins with a
// void* recording an overflow allocation made through thread_allocator;
// that pointer is freed the next time the slot comes around.
fn void*! SlotAllocator.alloc(SlotAllocator *allocator, usize size)
{
	void* active_page = (char*)(allocator.pages) + allocator.current_page * allocator.page_size;
	void** page_pointer = (void**)(active_page);
	if (*page_pointer)
	{
		// TODO fix
		// Release the overflow allocation previously parked in this slot.
		thread_allocator.free(*page_pointer)?;
		*page_pointer = null;
	}
	if (size > allocator.page_size - $sizeof(page_pointer))
	{
		// Too big for a slot: fall back to the thread allocator and remember
		// the pointer so the next reuse of this slot frees it.
		void* mem = thread_allocator.alloc(size)?;
		*page_pointer = mem;
		allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
		return mem;
	}
	allocator.current_page = (allocator.current_page + 1) & (allocator.bitmask);
	// In-page memory starts right after the bookkeeping pointer.
	return &page_pointer[1];
}
// Simple bump allocator over a fixed buffer that wraps to the start when full.
struct RingAllocator
{
	char *data;   // Backing buffer.
	usize size;   // Buffer capacity in bytes.
	usize offset; // Next free position within the buffer.
}
// Bump-allocate `size` bytes from the ring buffer, wrapping to the start
// when the request does not fit in the remaining space. Returns null only
// when the request exceeds the whole buffer.
fn void* RingAllocator.alloc(RingAllocator *allocator, usize size)
{
	if (size > allocator.size) return null;
	// Wraparound? If so, start at the beginning.
	if (allocator.offset + size > allocator.size)
	{
		allocator.offset = size;
		return allocator.data;
	}
	void* data = allocator.offset + allocator.data;
	// NOTE(review): `& allocator.size` only masks correctly if `size` is a
	// power of two; `%` (or a plain `+`, given the check above) may be intended.
	allocator.offset = (allocator.offset + size) & allocator.size;
	return data;
}
// Resize an allocation inside the ring buffer. Tries to extend in place by
// moving the offset; otherwise restarts at the buffer's beginning.
// NOTE(review): the memcpy calls below are commented out, so data is NOT
// actually copied on relocation — confirm whether this was intentional.
fn void* RingAllocator.realloc(RingAllocator *allocator, void* ptr, usize size)
{
	if (size > allocator.size) return null;
	// NOTE(review): the first comparison looks inverted (`ptr >= allocator.data`
	// seems intended) and the upper bound probably should use allocator.size.
	assert(allocator.data >= ptr && ptr < allocator.data + size, "Realloc on other allocator.");
	// 1. The pointer is before the allocator
	if (allocator.data + allocator.offset > ptr)
	{
		if (allocator.data + allocator.size < ptr + size)
		{
			// 1a. There is not enough space, we need to copy to the start.
			usize pointer_offset = ptr - allocator.data;
			usize copy_len = pointer_offset + size > allocator.offset ? allocator.offset - pointer_offset : size;
			//memcpy(allocator.data, ptr, copy_len);
			allocator.offset = size;
			return allocator.data;
		}
		// 1b. There is enough space, so we just change the offset:
		allocator.offset = ptr - allocator.data + size;
		return ptr;
	}
	// 2. The pointer is after the allocator
	// 2a. Is there sufficient space?
	if (ptr + size <= allocator.data + allocator.size)
	{
		// Good, if so we simply change the offset and return the pointer.
		allocator.offset = ptr - allocator.data + size;
		return ptr;
	}
	// 2b. Not sufficient space, we copy to the beginning.
	usize pointer_offset = ptr - allocator.data;
	usize copy_len = allocator.size - (ptr - allocator.data);
	if (copy_len > size) copy_len = size;
	//memcpy(allocator.data, ptr, copy_len);
	allocator.offset = size;
	return allocator.data;
}

View File

@@ -42,7 +42,7 @@ fn ZString copy_zstring(char[] s)
fn ZString tcopy_zstring(char[] s)
{
usize len = s.len;
char* str = mem::talloc(len + 1)!!;
char* str = mem::talloc(len + 1);
mem::copy(str, s.ptr, len);
str[len] = 0;
return (ZString)str;
@@ -129,7 +129,7 @@ fn char[] tcopy(char[] s)
fn char[] tconcat(char[] s1, char[] s2)
{
usize full_len = s1.len + s2.len;
char* str = mem::talloc(full_len + 1)!!;
char* str = mem::talloc(full_len + 1);
usize s1_len = s1.len;
mem::copy(str, s1.ptr, s1_len);
mem::copy(str + s1_len, s2.ptr, s2.len);

View File

@@ -41,8 +41,8 @@ fn int println(char *message = "") @inline
fn void! File.open(File* file, char[] filename, char[] mode)
{
char* filename_copy = mem::talloc(filename.len + 1)!!;
char* mode_copy = mem::talloc(mode.len + 1)!!;
char* filename_copy = mem::talloc(filename.len + 1);
char* mode_copy = mem::talloc(mode.len + 1);
mem::copy(filename_copy, (char*)(filename), filename.len);
mem::copy(mode_copy, (char*)(mode), mode.len);

View File

@@ -674,7 +674,7 @@ private fn NtoaType int_from_variant(variant arg, bool *is_neg)
if (arg.type.kind == TypeKind.POINTER)
{
return (NtoaType)(uptr)(void*)arg.ptr;
return (NtoaType)(uptr)*(void**)arg.ptr;
}
switch (arg)
{
@@ -873,7 +873,6 @@ private fn usize! vsnprintf(OutputFn out, void* data, char[] format, variant[] v
out_str(&param, current)?;
continue;
case 'p':
param.width = (uint)(void*.sizeof * 2);
param.flags.zeropad = true;
param.flags.hash = true;
base = 16;

View File

@@ -87,6 +87,7 @@ extern fn usize strxfrm(char* dest, char* src, usize n);
// malloc
extern fn void* malloc(usize size);
extern fn void* aligned_alloc(usize align, usize size);
extern fn void* calloc(usize count, usize size);
extern fn void* free(void*);
extern fn void* realloc(void* ptr, usize size);
@@ -95,11 +96,14 @@ extern fn void* realloc(void* ptr, usize size);
define Fpos = long;
define CFile = void*;
$switch (env::OS_TYPE):
$case OsType.LINUX:
extern CFile __stdin @extname("stdin");
extern CFile __stdout @extname("stdout");
extern CFile __stderr @extname("stderr");
extern fn usize malloc_usable_size(void* ptr);
macro usize malloc_size(void* ptr) { return malloc_usable_size(ptr); }
macro CFile stdin() { return __stdin; }
macro CFile stdout() { return __stdout; }
macro CFile stderr() { return __stderr; }
@@ -107,11 +111,14 @@ $case OsType.MACOSX:
extern CFile __stdinp;
extern CFile __stdoutp;
extern CFile __stderrp;
extern fn usize malloc_size(void* ptr);
macro CFile stdin() { return __stdinp; }
macro CFile stdout() { return __stdoutp; }
macro CFile stderr() { return __stderrp; }
$case OsType.WIN32:
extern fn CFile __acrt_iob_func(CInt c);
extern fn usize _msize(void* ptr);
macro usize malloc_size(void* ptr) { return _msize(ptr); }
macro CFile stdin() { return __acrt_iob_func(0); }
macro CFile stdout() { return __acrt_iob_func(1); }
macro CFile stderr() { return __acrt_iob_func(2); }
@@ -121,6 +128,11 @@ $default:
macro CFile stderr() { return (CFile*)(uptr)2; }
$endswitch;
const HAS_MALLOC_SIZE =
env::OS_TYPE == OsType.LINUX
|| env::OS_TYPE == OsType.WIN32
|| env::OS_TYPE == OsType.MACOSX;
// The following needs to be set per arch+os
// For now I have simply pulled the defaults from MacOS
const int SEEK_SET = 0;