mirror of
https://github.com/Ed94/gencpp.git
synced 2025-06-15 03:01:47 -07:00
Reduce C++ feature usage of the Array container.
Almost ready to be interoperable with C.
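The pattern applied throughout the diff below: Array<T> member-function calls are replaced with free-function equivalents, so the call sites no longer depend on C++ member syntax and can later compile as C. A minimal before/after sketch, for illustration only, using names that appear in this diff:

// Before: C++ member-call style
Global_AllocatorBuckets = Array<Arena>::init_reserve( heap(), 128 );
Global_AllocatorBuckets.append( bucket );
Arena* last = & Global_AllocatorBuckets.back();
Global_AllocatorBuckets.free();

// After: free-function style (array_init_reserve<Arena> still takes a template
// argument, which is why the commit message says "almost" C-interoperable)
Global_AllocatorBuckets = array_init_reserve<Arena>( heap(), 128 );
append( Global_AllocatorBuckets, bucket );
Arena* last = & back( Global_AllocatorBuckets );
free( Global_AllocatorBuckets );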
@@ -11,7 +11,7 @@ internal void deinit();
 internal
 void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
 {
-Arena* last = & Global_AllocatorBuckets.back();
+Arena* last = & back(Global_AllocatorBuckets);

 switch ( type )
 {
@@ -24,10 +24,10 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
 if ( bucket.PhysicalStart == nullptr )
 GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");

-if ( ! Global_AllocatorBuckets.append( bucket ) )
+if ( ! append( Global_AllocatorBuckets, bucket ) )
 GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");

-last = & Global_AllocatorBuckets.back();
+last = & back(Global_AllocatorBuckets);
 }

 return alloc_align( allocator_info(* last), size, alignment );
@@ -51,10 +51,10 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
 if ( bucket.PhysicalStart == nullptr )
 GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");

-if ( ! Global_AllocatorBuckets.append( bucket ) )
+if ( ! append( Global_AllocatorBuckets, bucket ) )
 GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");

-last = & Global_AllocatorBuckets.back();
+last = & back(Global_AllocatorBuckets);
 }

 void* result = alloc_align( last->Backing, size, alignment );
@@ -235,7 +235,7 @@ void init()
 {
 GlobalAllocator = AllocatorInfo { & Global_Allocator_Proc, nullptr };

-Global_AllocatorBuckets = Array<Arena>::init_reserve( heap(), 128 );
+Global_AllocatorBuckets = array_init_reserve<Arena>( heap(), 128 );

 if ( Global_AllocatorBuckets == nullptr )
 GEN_FATAL( "Failed to reserve memory for Global_AllocatorBuckets");
@@ -245,18 +245,17 @@ void init()
 if ( bucket.PhysicalStart == nullptr )
 GEN_FATAL( "Failed to create first bucket for Global_AllocatorBuckets");

-Global_AllocatorBuckets.append( bucket );
-
+append( Global_AllocatorBuckets, bucket );
 }

 // Setup the arrays
 {
-CodePools = Array<Pool>::init_reserve( Allocator_DataArrays, InitSize_DataArrays );
+CodePools = array_init_reserve<Pool>( Allocator_DataArrays, InitSize_DataArrays );

 if ( CodePools == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the CodePools array" );

-StringArenas = Array<Arena>::init_reserve( Allocator_DataArrays, InitSize_DataArrays );
+StringArenas = array_init_reserve<Arena>( Allocator_DataArrays, InitSize_DataArrays );

 if ( StringArenas == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the StringArenas array" );
@@ -269,7 +268,7 @@ void init()
 if ( code_pool.PhysicalStart == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the code pool" );

-CodePools.append( code_pool );
+append(CodePools, code_pool );

 LexArena = arena_init_from_allocator( Allocator_Lexer, LexAllocator_Size );

@@ -278,7 +277,7 @@ void init()
 if ( string_arena.PhysicalStart == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the string arena" );

-StringArenas.append( string_arena );
+append(StringArenas, string_arena );
 }

 // Setup the hash tables
@@ -290,7 +289,7 @@ void init()
 }

 // Preprocessor Defines
-PreprocessorDefines = Array<StringCached>::init_reserve( GlobalAllocator, kilobytes(1) );
+PreprocessorDefines = array_init_reserve<StringCached>( GlobalAllocator, kilobytes(1) );

 define_constants();
 parser::init();
@@ -299,7 +298,7 @@ void init()
 void deinit()
 {
 usize index = 0;
-usize left = CodePools.num();
+usize left = num(CodePools);
 do
 {
 Pool* code_pool = & CodePools[index];
@@ -309,7 +308,7 @@ void deinit()
 while ( left--, left );

 index = 0;
-left = StringArenas.num();
+left = num(StringArenas);
 do
 {
 Arena* string_arena = & StringArenas[index];
@@ -320,15 +319,15 @@ void deinit()

 StringCache.destroy();

-CodePools.free();
-StringArenas.free();
+free(CodePools);
+free(StringArenas);

 free(LexArena);

-PreprocessorDefines.free();
+free(PreprocessorDefines);

 index = 0;
-left = Global_AllocatorBuckets.num();
+left = num(Global_AllocatorBuckets);
 do
 {
 Arena* bucket = & Global_AllocatorBuckets[ index ];
@@ -337,14 +336,14 @@ void deinit()
 }
 while ( left--, left );

-Global_AllocatorBuckets.free();
+free(Global_AllocatorBuckets);
 parser::deinit();
 }

 void reset()
 {
 s32 index = 0;
-s32 left = CodePools.num();
+s32 left = num(CodePools);
 do
 {
 Pool* code_pool = & CodePools[index];
@@ -354,7 +353,7 @@ void reset()
 while ( left--, left );

 index = 0;
-left = StringArenas.num();
+left = num(StringArenas);
 do
 {
 Arena* string_arena = & StringArenas[index];
@@ -363,14 +362,14 @@ void reset()
 }
 while ( left--, left );

-StringCache.clear();
+clear(StringCache);

 define_constants();
 }

 AllocatorInfo get_string_allocator( s32 str_length )
 {
-Arena* last = & StringArenas.back();
+Arena* last = & back(StringArenas);

 usize size_req = str_length + sizeof(StringHeader) + sizeof(char*);

@@ -378,10 +377,10 @@ AllocatorInfo get_string_allocator( s32 str_length )
 {
 Arena new_arena = arena_init_from_allocator( Allocator_StringArena, SizePer_StringArena );

-if ( ! StringArenas.append( new_arena ) )
+if ( ! append(StringArenas, new_arena ) )
 GEN_FATAL( "gen::get_string_allocator: Failed to allocate a new string arena" );

-last = & StringArenas.back();
+last = & back(StringArenas);
 }

 return allocator_info(* last);
@@ -408,7 +407,7 @@ StringCached get_cached_string( StrC str )
 // Used internally to retireve a Code object form the CodePool.
 Code make_code()
 {
-Pool* allocator = & CodePools.back();
+Pool* allocator = & back(CodePools);
 if ( allocator->FreeList == nullptr )
 {
 Pool code_pool = pool_init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
@@ -416,10 +415,10 @@ Code make_code()
 if ( code_pool.PhysicalStart == nullptr )
 GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePool allcoator returned nullptr." );

-if ( ! CodePools.append( code_pool ) )
+if ( ! append( CodePools, code_pool ) )
 GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePools failed to append new pool." );

-allocator = & CodePools.back();
+allocator = & back(CodePools);
 }

 Code result { rcast( AST*, alloc( allocator_info(* allocator), sizeof(AST) )) };
@@ -222,7 +222,7 @@ s32 lex_preprocessor_directive(
 , Token& token )
 {
 char const* hash = scanner;
-Tokens.append( { hash, 1, TokType::Preprocess_Hash, line, column, TF_Preprocess } );
+append(Tokens, { hash, 1, TokType::Preprocess_Hash, line, column, TF_Preprocess } );

 move_forward();
 SkipWhitespace();
@@ -298,14 +298,14 @@ s32 lex_preprocessor_directive(

 token.Length = token.Length + token.Text - hash;
 token.Text = hash;
-Tokens.append( token );
+append(Tokens, token );
 return Lex_Continue; // Skip found token, its all handled here.
 }

 if ( token.Type == TokType::Preprocess_Else || token.Type == TokType::Preprocess_EndIf )
 {
 token.Flags |= TF_Preprocess_Cond;
-Tokens.append( token );
+append(Tokens, token );
 end_line();
 return Lex_Continue;
 }
@@ -314,7 +314,7 @@ s32 lex_preprocessor_directive(
 token.Flags |= TF_Preprocess_Cond;
 }

-Tokens.append( token );
+append(Tokens, token );

 SkipWhitespace();

@@ -338,7 +338,7 @@ s32 lex_preprocessor_directive(
 name.Length++;
 }

-Tokens.append( name );
+append(Tokens, name );

 u64 key = crc32( name.Text, name.Length );
 defines.set( key, name );
@@ -384,7 +384,7 @@ s32 lex_preprocessor_directive(
 move_forward();
 }

-Tokens.append( preprocess_content );
+append(Tokens, preprocess_content );
 return Lex_Continue; // Skip found token, its all handled here.
 }

@@ -446,7 +446,7 @@ s32 lex_preprocessor_directive(
 preprocess_content.Length++;
 }

-Tokens.append( preprocess_content );
+append(Tokens, preprocess_content );
 return Lex_Continue; // Skip found token, its all handled here.
 }

@@ -461,7 +461,7 @@ void lex_found_token( StrC& content
 {
 if ( token.Type != TokType::Invalid )
 {
-Tokens.append( token );
+append(Tokens, token );
 return;
 }

@@ -488,7 +488,7 @@ void lex_found_token( StrC& content
 }

 token.Type = type;
-Tokens.append( token );
+append(Tokens, token );
 return;
 }

@@ -498,7 +498,7 @@ void lex_found_token( StrC& content
 {
 token.Type = type;
 token.Flags |= TF_Specifier;
-Tokens.append( token );
+append(Tokens, token );
 return;
 }

@@ -506,7 +506,7 @@ void lex_found_token( StrC& content
 if ( type != TokType::Invalid )
 {
 token.Type = type;
-Tokens.append( token );
+append(Tokens, token );
 return;
 }

@@ -558,7 +558,7 @@ void lex_found_token( StrC& content
 token.Type = TokType::Identifier;
 }

-Tokens.append( token );
+append(Tokens, token );
 }

@@ -582,7 +582,7 @@ TokArray lex( StrC content )
 return { { nullptr }, 0 };
 }

-for ( StringCached entry : PreprocessorDefines )
+foreach( StringCached, entry, PreprocessorDefines )
 {
 s32 length = 0;
 char const* scanner = entry.Data;
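The foreach usage above replaces a C++ range-based for. The macro itself is not shown in this diff; a hypothetical sketch of a value-binding foreach over the free-function Array API could look like the following (assumed helpers: num() and operator[] on the array, ssize from the library):

// Hypothetical sketch, not taken from the repository. Iterates by index and
// binds a value copy of each element; the inner for runs exactly once per
// element (a break in the body exits only this inner binding loop).
#define foreach( Type_, var_, array_ )                                        \
    for ( ssize var_##_idx = 0; var_##_idx < num( array_ ); ++ var_##_idx )   \
        for ( Type_ var_ = ( array_ )[ var_##_idx ], * var_##_once = & var_;  \
              var_##_once != nullptr; var_##_once = nullptr )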
@@ -600,7 +600,7 @@ TokArray lex( StrC content )
 defines.set( key, entry );
 }

-Tokens.clear();
+clear(Tokens);

 while (left )
 {
@@ -630,7 +630,7 @@ TokArray lex( StrC content )
 token.Type = TokType::NewLine;
 token.Length++;

-Tokens.append( token );
+append(Tokens, token );
 continue;
 }
 }
@@ -1099,7 +1099,7 @@ TokArray lex( StrC content )
 move_forward();
 token.Length++;
 }
-Tokens.append( token );
+append(Tokens, token );
 continue;
 }
 else if ( current == '*' )
@@ -1135,7 +1135,7 @@ TokArray lex( StrC content )
 move_forward();
 token.Length++;
 }
-Tokens.append( token );
+append(Tokens, token );
 // end_line();
 continue;
 }
@@ -1228,9 +1228,9 @@ TokArray lex( StrC content )
 }
 else
 {
-s32 start = max( 0, Tokens.num() - 100 );
+s32 start = max( 0, num(Tokens) - 100 );
 log_fmt("\n%d\n", start);
-for ( s32 idx = start; idx < Tokens.num(); idx++ )
+for ( s32 idx = start; idx < num(Tokens); idx++ )
 {
 log_fmt( "Token %d Type: %s : %.*s\n"
 , idx
@@ -1253,7 +1253,7 @@ TokArray lex( StrC content )
 lex_found_token( content, left, scanner, line, column, defines, token );
 }

-if ( Tokens.num() == 0 )
+if ( num(Tokens) == 0 )
 {
 log_failure( "Failed to lex any tokens" );
 return { { nullptr }, 0 };
@@ -48,11 +48,11 @@ struct ParseContext
 String result = String::make_reserve( GlobalAllocator, kilobytes(4) );

 Token scope_start = Scope->Start;
-Token last_valid = Tokens.Idx >= Tokens.Arr.num() ? Tokens.Arr[Tokens.Arr.num() -1] : Tokens.current();
+Token last_valid = Tokens.Idx >= num(Tokens.Arr) ? Tokens.Arr[num(Tokens.Arr) -1] : Tokens.current();

 sptr length = scope_start.Length;
 char const* current = scope_start.Text + length;
-while ( current <= Tokens.Arr.back().Text && *current != '\n' && length < 74 )
+while ( current <= back(Tokens.Arr).Text && *current != '\n' && length < 74 )
 {
 current++;
 length++;
@@ -96,7 +96,7 @@ global ParseContext Context;

 bool TokArray::__eat( TokType type )
 {
-if ( Arr.num() - Idx <= 0 )
+if ( num(Arr) - Idx <= 0 )
 {
 log_failure( "No tokens left.\n%s", Context.to_string() );
 return false;
@@ -167,7 +167,7 @@ if ( def.Ptr == nullptr ) \
 # define prevtok Context.Tokens.previous()
 # define nexttok Context.Tokens.next()
 # define eat( Type_ ) Context.Tokens.__eat( Type_ )
-# define left ( Context.Tokens.Arr.num() - Context.Tokens.Idx )
+# define left ( num(Context.Tokens.Arr) - Context.Tokens.Idx )

 #ifdef check
 #define CHECK_WAS_DEFINED
@@ -745,7 +745,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
 }
 Token interface_tok = parse_identifier();

-interfaces.append( def_type( interface_tok ) );
+append(interfaces, def_type( interface_tok ) );
 // <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Name>, ...
 }
 }
@@ -777,7 +777,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
 if ( inline_cmt )
 result->InlineCmt = inline_cmt;

-interfaces.free();
+free(interfaces);
 return result;
 }

@@ -1152,7 +1152,7 @@ Code parse_complicated_definition( TokType which )

 s32 idx = tokens.Idx;
 s32 level = 0;
-for ( ; idx < tokens.Arr.num(); idx++ )
+for ( ; idx < num(tokens.Arr); idx++ )
 {
 if ( tokens[ idx ].Type == TokType::BraceCurly_Open )
 level++;
@@ -1837,7 +1837,7 @@ CodeBody parse_global_nspace( CodeT which )
 bool found_operator_cast_outside_class_implmentation = false;
 s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
 {
 Token tok = Context.Tokens[ idx ];

@@ -1909,14 +1909,14 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )

 s32 idx = tokens.Idx;
 Token nav = tokens[ idx ];
-for ( ; idx < tokens.Arr.num(); idx++, nav = tokens[ idx ] )
+for ( ; idx < num(tokens.Arr); idx++, nav = tokens[ idx ] )
 {
 if ( nav.Text[0] == '<' )
 {
 // Skip templated expressions as they mey have expressions with the () operators
 s32 capture_level = 0;
 s32 template_level = 0;
-for ( ; idx < tokens.Arr.num(); idx++, nav = tokens[idx] )
+for ( ; idx < num(tokens.Arr); idx++, nav = tokens[idx] )
 {
 if (nav.Text[ 0 ] == '<')
 ++ template_level;
@@ -2511,7 +2511,7 @@ Code parse_operator_function_or_variable( bool expects_function, CodeAttributes
 bool found_operator = false;
 s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
 {
 Token tok = Context.Tokens[ idx ];

@@ -4348,7 +4348,7 @@ CodeTemplate parse_template()
 bool found_operator_cast_outside_class_implmentation = false;
 s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
 {
 Token tok = Context.Tokens[ idx ];

@@ -4896,7 +4896,7 @@ CodeTypedef parse_typedef()

 s32 idx = tokens.Idx;
 s32 level = 0;
-for ( ; idx < tokens.Arr.num(); idx ++ )
+for ( ; idx < num(tokens.Arr); idx ++ )
 {
 if ( tokens[idx].Type == TokType::BraceCurly_Open )
 level++;