memory.hpp no longer uses member mappings by default

Edward R. Gonzalez 2024-11-30 17:18:49 -05:00
parent 5527a27f7b
commit 06deb1e836
9 changed files with 170 additions and 102 deletions

View File

@@ -19,7 +19,7 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
 {
 	if ( ( last->TotalUsed + size ) > last->TotalSize )
 	{
-		Arena bucket = Arena::init_from_allocator( heap(), Global_BucketSize );
+		Arena bucket = arena_init_from_allocator( heap(), Global_BucketSize );
 		if ( bucket.PhysicalStart == nullptr )
 			GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");
@@ -30,7 +30,7 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
 		last = & Global_AllocatorBuckets.back();
 	}
-	return alloc_align( * last, size, alignment );
+	return alloc_align( allocator_info(* last), size, alignment );
 }
 case EAllocation_FREE:
 {
@@ -46,7 +46,7 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
 {
 	if ( last->TotalUsed + size > last->TotalSize )
 	{
-		Arena bucket = Arena::init_from_allocator( heap(), Global_BucketSize );
+		Arena bucket = arena_init_from_allocator( heap(), Global_BucketSize );
 		if ( bucket.PhysicalStart == nullptr )
 			GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");
@@ -240,7 +240,7 @@ void init()
 	if ( Global_AllocatorBuckets == nullptr )
 		GEN_FATAL( "Failed to reserve memory for Global_AllocatorBuckets");
-	Arena bucket = Arena::init_from_allocator( heap(), Global_BucketSize );
+	Arena bucket = arena_init_from_allocator( heap(), Global_BucketSize );
 	if ( bucket.PhysicalStart == nullptr )
 		GEN_FATAL( "Failed to create first bucket for Global_AllocatorBuckets");
@@ -264,16 +264,16 @@ void init()
 	// Setup the code pool and code entries arena.
 	{
-		Pool code_pool = Pool::init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
+		Pool code_pool = pool_init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
 		if ( code_pool.PhysicalStart == nullptr )
 			GEN_FATAL( "gen::init: Failed to initialize the code pool" );
 		CodePools.append( code_pool );
-		LexArena = Arena::init_from_allocator( Allocator_Lexer, LexAllocator_Size );
-		Arena string_arena = Arena::init_from_allocator( Allocator_StringArena, SizePer_StringArena );
+		LexArena = arena_init_from_allocator( Allocator_Lexer, LexAllocator_Size );
+		Arena string_arena = arena_init_from_allocator( Allocator_StringArena, SizePer_StringArena );
 		if ( string_arena.PhysicalStart == nullptr )
 			GEN_FATAL( "gen::init: Failed to initialize the string arena" );
@@ -303,7 +303,7 @@ void deinit()
 	do
 	{
 		Pool* code_pool = & CodePools[index];
-		code_pool->free();
+		free(* code_pool);
 		index++;
 	}
 	while ( left--, left );
@@ -313,7 +313,7 @@ void deinit()
 	do
 	{
 		Arena* string_arena = & StringArenas[index];
-		string_arena->free();
+		free(* string_arena);
 		index++;
 	}
 	while ( left--, left );
@@ -323,7 +323,7 @@ void deinit()
 	CodePools.free();
 	StringArenas.free();
-	LexArena.free();
+	free(LexArena);
 	PreprocessorDefines.free();
@@ -332,7 +332,7 @@ void deinit()
 	do
 	{
 		Arena* bucket = & Global_AllocatorBuckets[ index ];
-		bucket->free();
+		free(* bucket);
 		index++;
 	}
 	while ( left--, left );
@@ -348,7 +348,7 @@ void reset()
 	do
 	{
 		Pool* code_pool = & CodePools[index];
-		code_pool->clear();
+		clear(* code_pool);
 		index++;
 	}
 	while ( left--, left );
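Taken together, the deinit() and reset() hunks swap Pool/Arena member calls for free-function overloads. A minimal lifecycle sketch under that API, assuming gen.hpp is included; the backing allocator and block count here are placeholders, not the library's defaults:

// Hedged sketch of the Pool lifecycle with the free-function API.
Pool  pool = pool_init( heap(), 64, sizeof(AST) );        // was Pool::init(...)
void* node = alloc( allocator_info(pool), sizeof(AST) );  // hand the pool out as an AllocatorInfo
clear(pool);                                              // was pool.clear()
free(pool);                                               // was pool.free()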
@@ -376,7 +376,7 @@ AllocatorInfo get_string_allocator( s32 str_length )
 	if ( last->TotalUsed + ssize(size_req) > last->TotalSize )
 	{
-		Arena new_arena = Arena::init_from_allocator( Allocator_StringArena, SizePer_StringArena );
+		Arena new_arena = arena_init_from_allocator( Allocator_StringArena, SizePer_StringArena );
 		if ( ! StringArenas.append( new_arena ) )
 			GEN_FATAL( "gen::get_string_allocator: Failed to allocate a new string arena" );
@@ -384,7 +384,7 @@ AllocatorInfo get_string_allocator( s32 str_length )
 		last = & StringArenas.back();
 	}
-	return * last;
+	return allocator_info(* last);
 }
 // Will either make or retrieve a code string.
@@ -411,7 +411,7 @@ Code make_code()
 	Pool* allocator = & CodePools.back();
 	if ( allocator->FreeList == nullptr )
 	{
-		Pool code_pool = Pool::init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
+		Pool code_pool = pool_init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
 		if ( code_pool.PhysicalStart == nullptr )
 			GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePool allocator returned nullptr." );
@@ -422,7 +422,7 @@ Code make_code()
 		allocator = & CodePools.back();
 	}
-	Code result { rcast( AST*, alloc( * allocator, sizeof(AST) )) };
+	Code result { rcast( AST*, alloc( allocator_info(* allocator), sizeof(AST) )) };
 	mem_set( result.ast, 0, sizeof(AST) );
 	// result->Type = ECode::Invalid;

View File

@@ -16,8 +16,8 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
 	local_persist
 	char tok_map_mem[ TokenFmt_TokenMap_MemSize ];
-	tok_map_arena = init_from_memory( tok_map_mem, sizeof(tok_map_mem) );
-	tok_map = HashTable<StrC>::init( tok_map_arena );
+	tok_map_arena = arena_init_from_memory( tok_map_mem, sizeof(tok_map_mem) );
+	tok_map = HashTable<StrC>::init( allocator_info(tok_map_arena) );
 	s32 left = num_tokens - 1;
@@ -95,7 +95,7 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
 	}
 	tok_map.clear();
-	tok_map_arena.free();
+	free(tok_map_arena);
 	ssize result = buf_size - remaining;
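The pattern these two hunks touch — an arena over fixed local memory, handed to a container through allocator_info, then torn down with the free-function free — condenses to the following sketch (gen.hpp assumed; the buffer size is arbitrary):

// Hedged sketch: a stack-backed arena feeding a HashTable, then released.
char  backing[ kilobytes(4) ];
Arena arena = arena_init_from_memory( backing, sizeof(backing) );
HashTable<StrC> table = HashTable<StrC>::init( allocator_info(arena) );
// ... populate and query `table` ...
table.clear();
free(arena);   // replaces the old arena.free() member call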

View File

@@ -132,12 +132,12 @@ bool TokArray::__eat( TokType type )
 internal
 void init()
 {
-	Tokens = Array<Token>::init_reserve( LexArena
+	Tokens = array_init_reserve<Token>( allocator_info(LexArena)
 		, ( LexAllocator_Size - sizeof( ArrayHeader ) ) / sizeof(Token)
 	);
-	defines_map_arena = Arena_256KB::init();
-	defines = HashTable<StrC>::init_reserve( defines_map_arena, 256 );
+	fixed_arena_init(defines_map_arena);
+	defines = HashTable<StrC>::init_reserve( allocator_info(defines_map_arena), 256 );
 }
 internal
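A condensed sketch of the container setup the lexer now performs through free functions; the FixedArena size, the reserve counts, and the use of that arena as the array backing are illustrative stand-ins for the lexer's actual globals:

// Hedged sketch: fixed_arena_init plus AllocatorInfo-based reserve calls.
FixedArena< kilobytes(256) > map_arena;
fixed_arena_init(map_arena);                                               // was Arena_256KB::init()
Array<Token>    tokens  = array_init_reserve<Token>( allocator_info(map_arena), 128 );
HashTable<StrC> defines = HashTable<StrC>::init_reserve( allocator_info(map_arena), 256 );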
@@ -713,8 +713,8 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
 	local_persist
 	char interface_arr_mem[ kilobytes(4) ] {0};
 	Array<CodeType> interfaces; {
-		Arena arena = init_from_memory( interface_arr_mem, kilobytes(4) );
-		Array<CodeType>::init_reserve( arena, 4 );
+		Arena arena = arena_init_from_memory( interface_arr_mem, kilobytes(4) );
+		array_init_reserve<CodeType>( allocator_info(arena), 4 );
 	}
 	// TODO(Ed) : Make an AST_DerivedType, we'll store any arbitrary derived type into there as a linear linked list of them.

View File

@@ -126,10 +126,18 @@ typedef s32 b32;
 using mem_ptr = void*;
 using mem_ptr_const = void const*;
+#if ! GEN_COMPILER_C
 template<typename Type> uptr to_uptr( Type* ptr ) { return (uptr)ptr; }
 template<typename Type> sptr to_sptr( Type* ptr ) { return (sptr)ptr; }
 template<typename Type> mem_ptr to_mem_ptr ( Type ptr ) { return (mem_ptr) ptr; }
 template<typename Type> mem_ptr_const to_mem_ptr_const( Type ptr ) { return (mem_ptr_const)ptr; }
+#else
+#define to_uptr( ptr )          ((uptr)(ptr))
+#define to_sptr( ptr )          ((sptr)(ptr))
+#define to_mem_ptr( ptr )       ((mem_ptr)(ptr))
+#define to_mem_ptr_const( ptr ) ((mem_ptr_const)(ptr))
+#endif
 #pragma endregion Basic Types
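The point of the macro fallbacks is that call sites are spelled identically whether the header is compiled as C or C++; only the mechanism behind the name changes. A small hedged sketch (the variables are illustrative):

// Hedged sketch: same spelling, template in C++ / cast macro in C.
int  value   = 42;
uptr address = to_uptr( & value );                 // (uptr)&value either way
mem_ptr_const view = to_mem_ptr_const( & value );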

View File

@@ -23,17 +23,32 @@
 #define bitfield_is_equal( Type, Field, Mask ) ( (Type(Mask) & Type(Field)) == Type(Mask) )
 #endif
-#ifndef ccast
-#define ccast( type, value ) ( const_cast< type >( (value) ) )
-#endif
-#ifndef pcast
-#define pcast( type, value ) ( * reinterpret_cast< type* >( & ( value ) ) )
-#endif
-#ifndef rcast
-#define rcast( type, value ) reinterpret_cast< type >( value )
-#endif
-#ifndef scast
-#define scast( type, value ) static_cast< type >( value )
-#endif
+#if ! GEN_COMPILER_C
+#	ifndef ccast
+#	define ccast( type, value ) ( const_cast< type >( (value) ) )
+#	endif
+#	ifndef pcast
+#	define pcast( type, value ) ( * reinterpret_cast< type* >( & ( value ) ) )
+#	endif
+#	ifndef rcast
+#	define rcast( type, value ) reinterpret_cast< type >( value )
+#	endif
+#	ifndef scast
+#	define scast( type, value ) static_cast< type >( value )
+#	endif
+#else
+#	ifndef ccast
+#	define ccast( type, value ) ( (type)(value) )
+#	endif
+#	ifndef pcast
+#	define pcast( type, value ) ( * (type*)(& (value)) )
+#	endif
+#	ifndef rcast
+#	define rcast( type, value ) ( (type)(value) )
+#	endif
+#	ifndef scast
+#	define scast( type, value ) ( (type)(value) )
+#	endif
+#endif
 #ifndef stringize
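A brief hedged sketch of why the dual cast definitions matter: the same cast spellings now compile under both languages, resolving to C++ casts or plain C casts as appropriate (the variables are illustrative):

// Hedged sketch: identical cast call sites under C and C++.
char const* readonly = "text";
char* writable  = ccast( char*, readonly );   // const_cast in C++, (char*) in C
ssize bits      = rcast( ssize, writable );   // reinterpret_cast in C++, (ssize) in C
u32   truncated = scast( u32,   bits );       // static_cast in C++, (u32) in C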
@@ -123,20 +138,20 @@
 #define min( a, b ) ( (a < b) ? (a) : (b) )
 #endif
-#if defined( _MSC_VER ) || defined( GEN_COMPILER_TINYC )
+#if GEN_COMPILER_MSVC || GEN_COMPILER_TINYC
 #	define offset_of( Type, element ) ( ( GEN_NS( ssize ) ) & ( ( ( Type* )0 )->element ) )
 #else
 #	define offset_of( Type, element ) __builtin_offsetof( Type, element )
 #endif
 #ifndef forceinline
-#	ifdef GEN_COMPILER_MSVC
+#	if GEN_COMPILER_MSVC
 #	define forceinline __forceinline
 #	define neverinline __declspec( noinline )
-#	elif defined(GEN_COMPILER_GCC)
+#	elif GEN_COMPILER_GCC
 #	define forceinline inline __attribute__((__always_inline__))
 #	define neverinline __attribute__( ( __noinline__ ) )
-#	elif defined(GEN_COMPILER_CLANG)
+#	elif GEN_COMPILER_CLANG
 #	if __has_attribute(__always_inline__)
 #	define forceinline inline __attribute__((__always_inline__))
 #	define neverinline __attribute__( ( __noinline__ ) )
@@ -151,11 +166,11 @@
 #endif
 #ifndef neverinline
-#	ifdef GEN_COMPILER_MSVC
+#	if GEN_COMPILER_MSVC
 #	define neverinline __declspec( noinline )
-#	elif defined(GEN_COMPILER_GCC)
+#	elif GEN_COMPILER_GCC
 #	define neverinline __attribute__( ( __noinline__ ) )
-#	elif defined(GEN_COMPILER_CLANG)
+#	elif GEN_COMPILER_CLANG
 #	if __has_attribute(__always_inline__)
 #	define neverinline __attribute__( ( __noinline__ ) )
 #	else
@@ -166,4 +181,20 @@
 #	endif
 #endif
+#if !defined(GEN_SUPPORT_CPP_MEMBER_FEATURES) && (!GEN_COMPILER_C || __STDC_VERSION__ < 202311L)
+#	define GEN_SUPPORT_CPP_MEMBER_FEATURES 0
+#endif
+#if !defined(typeof) && (!GEN_COMPILER_C || __STDC_VERSION__ < 202311L)
+#	if ! GEN_COMPILER_C
+#		define typeof
+#	elif defined(_MSC_VER)
+#		define typeof(x) __typeof(x)
+#	elif defined(__GNUC__) || defined(__clang__)
+#		define typeof(x) __typeof__(x)
+#	else
+#		error "Compiler not supported"
+#	endif
+#endif
 #pragma endregion Macros

View File

@@ -14,13 +14,23 @@
 #define GEN__HIGHS ( GEN__ONES * ( GEN_U8_MAX / 2 + 1 ) )
 #define GEN__HAS_ZERO( x ) ( ( ( x ) - GEN__ONES ) & ~( x ) & GEN__HIGHS )
+#if ! GEN_COMPILER_C
 template< class Type >
 void swap( Type& a, Type& b )
 {
 	Type tmp = a;
 	a = b;
 	b = tmp;
 }
+#else
+#define swap( a, b )              \
+	do                            \
+	{                             \
+		typeof(a) temp = (a);     \
+		(a) = (b);                \
+		(b) = temp;               \
+	} while(0)
+#endif
 //! Checks if value is power of 2.
 b32 is_power_of_two( ssize x );
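A hedged usage sketch of swap after this change; the call site is identical in both modes, with the C path leaning on the typeof fallback added in macros.hpp (the variables are illustrative):

// Hedged sketch: one spelling, template in C++ / typeof-based macro in C.
int lhs = 1, rhs = 2;
swap( lhs, rhs );
// lhs == 2 and rhs == 1 afterwards in either language.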
@@ -179,8 +189,8 @@ AllocatorInfo allocator_info( Arena& arena );
 void* arena_allocator_proc(void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags);
 // Add these declarations after the Arena struct
-Arena init_from_allocator(AllocatorInfo backing, ssize size);
-Arena init_from_memory( void* start, ssize size );
+Arena arena_init_from_allocator(AllocatorInfo backing, ssize size);
+Arena arena_init_from_memory( void* start, ssize size );
 Arena init_sub(Arena& parent, ssize size);
 ssize alignment_of(Arena& arena, ssize alignment);
@@ -201,14 +211,14 @@ struct Arena
 	ssize TotalUsed;
 	ssize TempCount;
-#if 1
+#if GEN_SUPPORT_CPP_MEMBER_FEATURES
 #pragma region Member Mapping
 	forceinline operator AllocatorInfo() { return GEN_NS allocator_info(* this); }
 	forceinline static void* allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags ) { return GEN_NS arena_allocator_proc( allocator_data, type, size, alignment, old_memory, old_size, flags ); }
-	forceinline static Arena init_from_memory( void* start, ssize size ) { return GEN_NS init_from_memory( start, size ); }
-	forceinline static Arena init_from_allocator( AllocatorInfo backing, ssize size ) { return GEN_NS init_from_allocator( backing, size ); }
-	forceinline static Arena init_sub( Arena& parent, ssize size ) { return GEN_NS init_from_allocator( parent.Backing, size ); }
+	forceinline static Arena init_from_memory( void* start, ssize size ) { return GEN_NS arena_init_from_memory( start, size ); }
+	forceinline static Arena init_from_allocator( AllocatorInfo backing, ssize size ) { return GEN_NS arena_init_from_allocator( backing, size ); }
+	forceinline static Arena init_sub( Arena& parent, ssize size ) { return GEN_NS arena_init_from_allocator( parent.Backing, size ); }
 	forceinline ssize alignment_of( ssize alignment ) { return GEN_NS alignment_of(* this, alignment); }
 	forceinline void free() { return GEN_NS free(* this); }
 	forceinline ssize size_remaining( ssize alignment ) { return GEN_NS size_remaining(* this, alignment); }
@@ -229,7 +239,7 @@ AllocatorInfo allocator_info( Arena& arena ) {
 }
 inline
-Arena init_from_memory( void* start, ssize size )
+Arena arena_init_from_memory( void* start, ssize size )
 {
 	Arena arena = {
 		{ nullptr, nullptr },
@@ -242,39 +252,36 @@ Arena arena_init_from_memory( void* start, ssize size )
 }
 inline
-Arena init_from_allocator(AllocatorInfo backing, ssize size)
-{
-	Arena result =
-	{
-		backing,
-		alloc(backing, size),
-		size,
-		0,
-		0
-	};
-	return result;
-}
+Arena arena_init_from_allocator(AllocatorInfo backing, ssize size) {
+	Arena result = {
+		backing,
+		alloc(backing, size),
+		size,
+		0,
+		0
+	};
+	return result;
+}
 inline
-Arena init_sub(Arena& parent, ssize size)
-{
-	return init_from_allocator(parent.Backing, size);
-}
+Arena init_sub(Arena& parent, ssize size) {
+	return arena_init_from_allocator(parent.Backing, size);
+}
 inline
 ssize alignment_of(Arena& arena, ssize alignment)
 {
 	ssize alignment_offset, result_pointer, mask;
 	GEN_ASSERT(is_power_of_two(alignment));
 	alignment_offset = 0;
 	result_pointer = (ssize)arena.PhysicalStart + arena.TotalUsed;
 	mask = alignment - 1;
 	if (result_pointer & mask)
 		alignment_offset = alignment - (result_pointer & mask);
 	return alignment_offset;
 }
 #pragma push_macro("check")
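As a quick sanity check of alignment_of: it returns the padding needed to bring PhysicalStart + TotalUsed up to the requested power-of-two boundary. A hedged sketch (the arena setup and the manual TotalUsed poke are illustrative only):

// Hedged sketch: padding required to reach an 8-byte boundary.
char  backing[64];
Arena arena = arena_init_from_memory( backing, sizeof(backing) );
arena.TotalUsed = 3;                   // pretend 3 bytes are already in use
ssize pad = alignment_of( arena, 8 );  // 5 when `backing` itself starts 8-byte aligned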
@@ -289,18 +296,18 @@ void check(Arena& arena)
 inline
 void free(Arena& arena)
 {
 	if (arena.Backing.Proc)
 	{
-		gen::free(arena.Backing, arena.PhysicalStart);
+		GEN_NS free(arena.Backing, arena.PhysicalStart);
 		arena.PhysicalStart = nullptr;
 	}
 }
 inline
 ssize size_remaining(Arena& arena, ssize alignment)
 {
 	ssize result = arena.TotalSize - (arena.TotalUsed + alignment_of(arena, alignment));
 	return result;
 }
 #pragma endregion Arena
@@ -317,15 +324,15 @@ template<s32 Size> ssize size_remaining(FixedArena<Size>& fixed_arena
 template< s32 Size >
 struct FixedArena
 {
 	char memory[Size];
 	Arena arena;
-#if 1
+#if GEN_SUPPORT_CPP_MEMBER_FEATURES
 #pragma region Member Mapping
 	forceinline operator AllocatorInfo() { return GEN_NS allocator_info(* this); }
 	forceinline static FixedArena init() { FixedArena result; GEN_NS fixed_arena_init<Size>(result); return result; }
 	forceinline ssize size_remaining(ssize alignment) { GEN_NS size_remaining(*this, alignment); }
 #pragma endregion Member Mapping
 #endif
 };
@@ -336,7 +343,7 @@ AllocatorInfo allocator_info( FixedArena<Size>& fixed_arena ) { return { arena_a
 template<s32 Size> inline
 void fixed_arena_init(FixedArena<Size>& result) {
 	zero_size(& result.memory[0], Size);
-	result.arena = init_from_memory(& result.memory[0], Size);
+	result.arena = arena_init_from_memory(& result.memory[0], Size);
 }
 template<s32 Size> inline
@@ -378,6 +385,7 @@ struct Pool
 	ssize TotalSize;
 	ssize NumBlocks;
+#if GEN_SUPPORT_CPP_MEMBER_FEATURES
 #pragma region Member Mapping
 	forceinline operator AllocatorInfo() { return GEN_NS allocator_info(* this); }
@@ -387,6 +395,7 @@ struct Pool
 	forceinline void clear() { GEN_NS clear(* this); }
 	forceinline void free() { GEN_NS free(* this); }
 #pragma endregion
+#endif
 };
 inline

View File

@@ -101,6 +101,14 @@
 #	define GEN_GCC_VERSION_CHECK(major,minor,patch) (0)
 #endif
+#ifndef GEN_COMPILER_C
+#	if defined(__STDC_VERSION__)
+#		define GEN_COMPILER_C 1
+#	else
+#		define GEN_COMPILER_C 0
+#	endif
+#endif
 #pragma endregion Platform Detection
 #pragma region Mandatory Includes
@@ -114,7 +122,7 @@
 #pragma endregion Mandatory Includes
-#ifdef GEN_DONT_USE_NAMESPACE
+#if GEN_DONT_USE_NAMESPACE || GEN_COMPILER_C
 #	define GEN_NS
 #	define GEN_NS_BEGIN
 #	define GEN_NS_END
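The practical effect: with GEN_DONT_USE_NAMESPACE set or when compiling as C, the namespace macros expand to nothing, so GEN_NS-qualified references collapse to bare identifiers. A hedged sketch (the call is illustrative):

// Hedged sketch: GEN_NS-qualified code works with or without the gen namespace.
GEN_NS Arena scratch = GEN_NS arena_init_from_allocator( GEN_NS heap(), 4096 );
// Reads gen::Arena / gen::arena_init_from_allocator when the namespace is enabled,
// and plain Arena / arena_init_from_allocator otherwise.
GEN_NS free( scratch );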

View File

@@ -11,9 +11,9 @@ using namespace gen;
 CodeBody gen_ecode( char const* path )
 {
 	char scratch_mem[kilobytes(1)];
-	Arena scratch = Arena::init_from_memory( scratch_mem, sizeof(scratch_mem) );
-	file_read_contents( scratch, zero_terminate, path );
+	Arena scratch = arena_init_from_memory( scratch_mem, sizeof(scratch_mem) );
+	file_read_contents( allocator_info(scratch), zero_terminate, path );
 	CSV_Object csv_nodes;
 	csv_parse( &csv_nodes, scratch_mem, GlobalAllocator, false );
@@ -57,9 +57,9 @@ CodeBody gen_ecode( char const* path )
 CodeBody gen_eoperator( char const* path )
 {
 	char scratch_mem[kilobytes(4)];
-	Arena scratch = Arena::init_from_memory( scratch_mem, sizeof(scratch_mem) );
-	file_read_contents( scratch, zero_terminate, path );
+	Arena scratch = arena_init_from_memory( scratch_mem, sizeof(scratch_mem) );
+	file_read_contents( allocator_info(scratch), zero_terminate, path );
 	CSV_Object csv_nodes;
 	csv_parse( &csv_nodes, scratch_mem, GlobalAllocator, false );
@@ -113,9 +113,9 @@ CodeBody gen_eoperator( char const* path )
 CodeBody gen_especifier( char const* path )
 {
 	char scratch_mem[kilobytes(4)];
-	Arena scratch = Arena::init_from_memory( scratch_mem, sizeof(scratch_mem) );
-	file_read_contents( scratch, zero_terminate, path );
+	Arena scratch = arena_init_from_memory( scratch_mem, sizeof(scratch_mem) );
+	file_read_contents( allocator_info(scratch), zero_terminate, path );
 	CSV_Object csv_nodes;
 	csv_parse( &csv_nodes, scratch_mem, GlobalAllocator, false );
@@ -218,14 +218,16 @@ CodeBody gen_especifier( char const* path )
 CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
 {
 	char scratch_mem[kilobytes(16)];
-	Arena scratch = Arena::init_from_memory( scratch_mem, sizeof(scratch_mem) );
-	FileContents enum_content = file_read_contents( scratch, zero_terminate, etok_path );
+	Arena scratch = arena_init_from_memory( scratch_mem, sizeof(scratch_mem) );
+	AllocatorInfo scratch_info = allocator_info(scratch);
+	FileContents enum_content = file_read_contents( scratch_info, zero_terminate, etok_path );
 	CSV_Object csv_enum_nodes;
 	csv_parse( &csv_enum_nodes, rcast(char*, enum_content.data), GlobalAllocator, false );
-	FileContents attrib_content = file_read_contents( scratch, zero_terminate, attr_path );
+	FileContents attrib_content = file_read_contents( scratch_info, zero_terminate, attr_path );
 	CSV_Object csv_attr_nodes;
 	csv_parse( &csv_attr_nodes, rcast(char*, attrib_content.data), GlobalAllocator, false );
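All four generators share one scratch-read pattern, consolidated here as a hedged sketch; the CSV path and buffer size are placeholders, and gen.hpp plus the bootstrap globals (GlobalAllocator) are assumed:

// Hedged sketch: stack buffer -> Arena -> AllocatorInfo -> file read -> CSV parse.
char scratch_mem[ kilobytes(4) ];
Arena         scratch      = arena_init_from_memory( scratch_mem, sizeof(scratch_mem) );
AllocatorInfo scratch_info = allocator_info(scratch);
FileContents  contents     = file_read_contents( scratch_info, zero_terminate, "path/to/table.csv" );
CSV_Object csv_nodes;
csv_parse( & csv_nodes, rcast(char*, contents.data), GlobalAllocator, false );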

View File

@@ -0,0 +1,10 @@
+#pragma once
+#include "gen.hpp"
+GEN_NS_BEGIN
+#include "dependencies/parsing.hpp"
+GEN_NS_END
+using namespace gen;