Mirror of https://github.com/Ed94/gencpp.git (synced 2024-12-22 07:44:45 -08:00)
Reduce C++ feature usage of the Array container.
Almost ready to be interoperable with C.
This commit is contained in:
parent cc245cc263
commit 6d04165b96
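The change moves Array (and HashTable) call sites from C++ member methods to free functions, so the same interface can later be expressed in plain C. A minimal usage sketch of the free-function style, assuming gencpp's gen.hpp is available; only names that appear in the diff below are used, and this sketch is an illustration, not part of the commit:

#include "gen.hpp"
using namespace gen;

void array_usage_sketch()
{
	// Member style, still available while GEN_SUPPORT_CPP_MEMBER_FEATURES is enabled:
	//   Array<int> values = Array<int>::init_reserve( heap(), 8 );
	//   values.append( 1 );
	//   values.free();

	// Free-function style the commit migrates call sites to:
	Array<int> values = array_init_reserve<int>( heap(), 8 );
	append( values, 1 );
	append( values, 2 );

	int   last  = back( values );             // 2
	usize count = get_header( values )->Num;  // 2
	free( values );

	(void) last; (void) count;
}
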
@@ -18,6 +18,32 @@ GEN_NS_END

using namespace gen;

constexpr char const* generation_notice =
"// This file was generated automatially by gencpp's c_library.cpp"
"(See: https://github.com/Ed94/gencpp)\n\n";

constexpr StrC roll_own_dependencies_guard_start = txt(R"(
//! If its desired to roll your own dependencies, define GEN_ROLL_OWN_DEPENDENCIES before including this file.
// Dependencies are derived from the c-zpl library: https://github.com/zpl-c/zpl
#ifndef GEN_ROLL_OWN_DEPENDENCIES
)");

constexpr StrC roll_own_dependencies_guard_end = txt(R"(
// GEN_ROLL_OWN_DEPENDENCIES
#endif
)");

constexpr StrC implementation_guard_start = txt(R"(
#pragma region GENCPP IMPLEMENTATION GUARD
#if defined(GEN_IMPLEMENTATION) && ! defined(GEN_IMPLEMENTED)
# define GEN_IMPLEMENTED
)");

constexpr StrC implementation_guard_end = txt(R"(
#endif
#pragma endregion GENCPP IMPLEMENTATION GUARD
)");

void format_file( char const* path )
{
String resolved_path = String::make(GlobalAllocator, to_str(path));

@@ -59,6 +85,20 @@ int gen_main()
#define project_dir "../project/"
gen::init();

Code push_ignores = scan_file( project_dir "helpers/push_ignores.inline.hpp" );
Code pop_ignores = scan_file( project_dir "helpers/pop_ignores.inline.hpp" );
Code single_header_start = scan_file( "components/header_start.hpp" );

Builder
header = Builder::open( "gen/gen.hpp" );
header.print_fmt( generation_notice );
header.print_fmt("#pragma once\n\n");
header.print( push_ignores );

// Headers
{

}

gen::deinit();
return 0;

@@ -11,7 +11,7 @@ internal void deinit();
internal
void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
{
-Arena* last = & Global_AllocatorBuckets.back();
+Arena* last = & back(Global_AllocatorBuckets);

switch ( type )
{
@@ -24,10 +24,10 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
if ( bucket.PhysicalStart == nullptr )
GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");

-if ( ! Global_AllocatorBuckets.append( bucket ) )
+if ( ! append( Global_AllocatorBuckets, bucket ) )
GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");

-last = & Global_AllocatorBuckets.back();
+last = & back(Global_AllocatorBuckets);
}

return alloc_align( allocator_info(* last), size, alignment );
@@ -51,10 +51,10 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
if ( bucket.PhysicalStart == nullptr )
GEN_FATAL( "Failed to create bucket for Global_AllocatorBuckets");

-if ( ! Global_AllocatorBuckets.append( bucket ) )
+if ( ! append( Global_AllocatorBuckets, bucket ) )
GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");

-last = & Global_AllocatorBuckets.back();
+last = & back(Global_AllocatorBuckets);
}

void* result = alloc_align( last->Backing, size, alignment );
@@ -235,7 +235,7 @@ void init()
{
GlobalAllocator = AllocatorInfo { & Global_Allocator_Proc, nullptr };

-Global_AllocatorBuckets = Array<Arena>::init_reserve( heap(), 128 );
+Global_AllocatorBuckets = array_init_reserve<Arena>( heap(), 128 );

if ( Global_AllocatorBuckets == nullptr )
GEN_FATAL( "Failed to reserve memory for Global_AllocatorBuckets");
@@ -245,18 +245,17 @@ void init()
if ( bucket.PhysicalStart == nullptr )
GEN_FATAL( "Failed to create first bucket for Global_AllocatorBuckets");

-Global_AllocatorBuckets.append( bucket );
+append( Global_AllocatorBuckets, bucket );
}

// Setup the arrays
{
-CodePools = Array<Pool>::init_reserve( Allocator_DataArrays, InitSize_DataArrays );
+CodePools = array_init_reserve<Pool>( Allocator_DataArrays, InitSize_DataArrays );

if ( CodePools == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the CodePools array" );

-StringArenas = Array<Arena>::init_reserve( Allocator_DataArrays, InitSize_DataArrays );
+StringArenas = array_init_reserve<Arena>( Allocator_DataArrays, InitSize_DataArrays );

if ( StringArenas == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the StringArenas array" );
@@ -269,7 +268,7 @@ void init()
if ( code_pool.PhysicalStart == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the code pool" );

-CodePools.append( code_pool );
+append(CodePools, code_pool );

LexArena = arena_init_from_allocator( Allocator_Lexer, LexAllocator_Size );

@@ -278,7 +277,7 @@ void init()
if ( string_arena.PhysicalStart == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the string arena" );

-StringArenas.append( string_arena );
+append(StringArenas, string_arena );
}

// Setup the hash tables
@@ -290,7 +289,7 @@ void init()
}

// Preprocessor Defines
-PreprocessorDefines = Array<StringCached>::init_reserve( GlobalAllocator, kilobytes(1) );
+PreprocessorDefines = array_init_reserve<StringCached>( GlobalAllocator, kilobytes(1) );

define_constants();
parser::init();
@@ -299,7 +298,7 @@ void init()
void deinit()
{
usize index = 0;
-usize left = CodePools.num();
+usize left = num(CodePools);
do
{
Pool* code_pool = & CodePools[index];
@@ -309,7 +308,7 @@ void deinit()
while ( left--, left );

index = 0;
-left = StringArenas.num();
+left = num(StringArenas);
do
{
Arena* string_arena = & StringArenas[index];
@@ -320,15 +319,15 @@ void deinit()

StringCache.destroy();

-CodePools.free();
-StringArenas.free();
+free(CodePools);
+free(StringArenas);

free(LexArena);

-PreprocessorDefines.free();
+free(PreprocessorDefines);

index = 0;
-left = Global_AllocatorBuckets.num();
+left = num(Global_AllocatorBuckets);
do
{
Arena* bucket = & Global_AllocatorBuckets[ index ];
@@ -337,14 +336,14 @@ void deinit()
}
while ( left--, left );

-Global_AllocatorBuckets.free();
+free(Global_AllocatorBuckets);
parser::deinit();
}

void reset()
{
s32 index = 0;
-s32 left = CodePools.num();
+s32 left = num(CodePools);
do
{
Pool* code_pool = & CodePools[index];
@@ -354,7 +353,7 @@ void reset()
while ( left--, left );

index = 0;
-left = StringArenas.num();
+left = num(StringArenas);
do
{
Arena* string_arena = & StringArenas[index];
@@ -363,14 +362,14 @@ void reset()
}
while ( left--, left );

-StringCache.clear();
+clear(StringCache);

define_constants();
}

AllocatorInfo get_string_allocator( s32 str_length )
{
-Arena* last = & StringArenas.back();
+Arena* last = & back(StringArenas);

usize size_req = str_length + sizeof(StringHeader) + sizeof(char*);

@@ -378,10 +377,10 @@ AllocatorInfo get_string_allocator( s32 str_length )
{
Arena new_arena = arena_init_from_allocator( Allocator_StringArena, SizePer_StringArena );

-if ( ! StringArenas.append( new_arena ) )
+if ( ! append(StringArenas, new_arena ) )
GEN_FATAL( "gen::get_string_allocator: Failed to allocate a new string arena" );

-last = & StringArenas.back();
+last = & back(StringArenas);
}

return allocator_info(* last);
@@ -408,7 +407,7 @@ StringCached get_cached_string( StrC str )
// Used internally to retireve a Code object form the CodePool.
Code make_code()
{
-Pool* allocator = & CodePools.back();
+Pool* allocator = & back(CodePools);
if ( allocator->FreeList == nullptr )
{
Pool code_pool = pool_init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
@@ -416,10 +415,10 @@ Code make_code()
if ( code_pool.PhysicalStart == nullptr )
GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePool allcoator returned nullptr." );

-if ( ! CodePools.append( code_pool ) )
+if ( ! append( CodePools, code_pool ) )
GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePools failed to append new pool." );

-allocator = & CodePools.back();
+allocator = & back(CodePools);
}

Code result { rcast( AST*, alloc( allocator_info(* allocator), sizeof(AST) )) };

@@ -222,7 +222,7 @@ s32 lex_preprocessor_directive(
, Token& token )
{
char const* hash = scanner;
-Tokens.append( { hash, 1, TokType::Preprocess_Hash, line, column, TF_Preprocess } );
+append(Tokens, { hash, 1, TokType::Preprocess_Hash, line, column, TF_Preprocess } );

move_forward();
SkipWhitespace();
@@ -298,14 +298,14 @@ s32 lex_preprocessor_directive(

token.Length = token.Length + token.Text - hash;
token.Text = hash;
-Tokens.append( token );
+append(Tokens, token );
return Lex_Continue; // Skip found token, its all handled here.
}

if ( token.Type == TokType::Preprocess_Else || token.Type == TokType::Preprocess_EndIf )
{
token.Flags |= TF_Preprocess_Cond;
-Tokens.append( token );
+append(Tokens, token );
end_line();
return Lex_Continue;
}
@@ -314,7 +314,7 @@ s32 lex_preprocessor_directive(
token.Flags |= TF_Preprocess_Cond;
}

-Tokens.append( token );
+append(Tokens, token );

SkipWhitespace();

@@ -338,7 +338,7 @@ s32 lex_preprocessor_directive(
name.Length++;
}

-Tokens.append( name );
+append(Tokens, name );

u64 key = crc32( name.Text, name.Length );
defines.set( key, name );
@@ -384,7 +384,7 @@ s32 lex_preprocessor_directive(
move_forward();
}

-Tokens.append( preprocess_content );
+append(Tokens, preprocess_content );
return Lex_Continue; // Skip found token, its all handled here.
}

@@ -446,7 +446,7 @@ s32 lex_preprocessor_directive(
preprocess_content.Length++;
}

-Tokens.append( preprocess_content );
+append(Tokens, preprocess_content );
return Lex_Continue; // Skip found token, its all handled here.
}

@@ -461,7 +461,7 @@ void lex_found_token( StrC& content
{
if ( token.Type != TokType::Invalid )
{
-Tokens.append( token );
+append(Tokens, token );
return;
}

@@ -488,7 +488,7 @@ void lex_found_token( StrC& content
}

token.Type = type;
-Tokens.append( token );
+append(Tokens, token );
return;
}

@@ -498,7 +498,7 @@ void lex_found_token( StrC& content
{
token.Type = type;
token.Flags |= TF_Specifier;
-Tokens.append( token );
+append(Tokens, token );
return;
}

@@ -506,7 +506,7 @@ void lex_found_token( StrC& content
if ( type != TokType::Invalid )
{
token.Type = type;
-Tokens.append( token );
+append(Tokens, token );
return;
}

@@ -558,7 +558,7 @@ void lex_found_token( StrC& content
token.Type = TokType::Identifier;
}

-Tokens.append( token );
+append(Tokens, token );
}

@@ -582,7 +582,7 @@ TokArray lex( StrC content )
return { { nullptr }, 0 };
}

-for ( StringCached entry : PreprocessorDefines )
+foreach( StringCached, entry, PreprocessorDefines )
{
s32 length = 0;
char const* scanner = entry.Data;
@@ -600,7 +600,7 @@ TokArray lex( StrC content )
defines.set( key, entry );
}

-Tokens.clear();
+clear(Tokens);

while (left )
{
@@ -630,7 +630,7 @@ TokArray lex( StrC content )
token.Type = TokType::NewLine;
token.Length++;

-Tokens.append( token );
+append(Tokens, token );
continue;
}
}
@@ -1099,7 +1099,7 @@ TokArray lex( StrC content )
move_forward();
token.Length++;
}
-Tokens.append( token );
+append(Tokens, token );
continue;
}
else if ( current == '*' )
@@ -1135,7 +1135,7 @@ TokArray lex( StrC content )
move_forward();
token.Length++;
}
-Tokens.append( token );
+append(Tokens, token );
// end_line();
continue;
}
@@ -1228,9 +1228,9 @@ TokArray lex( StrC content )
}
else
{
-s32 start = max( 0, Tokens.num() - 100 );
+s32 start = max( 0, num(Tokens) - 100 );
log_fmt("\n%d\n", start);
-for ( s32 idx = start; idx < Tokens.num(); idx++ )
+for ( s32 idx = start; idx < num(Tokens); idx++ )
{
log_fmt( "Token %d Type: %s : %.*s\n"
, idx
@@ -1253,7 +1253,7 @@ TokArray lex( StrC content )
lex_found_token( content, left, scanner, line, column, defines, token );
}

-if ( Tokens.num() == 0 )
+if ( num(Tokens) == 0 )
{
log_failure( "Failed to lex any tokens" );
return { { nullptr }, 0 };

@@ -48,11 +48,11 @@ struct ParseContext
String result = String::make_reserve( GlobalAllocator, kilobytes(4) );

Token scope_start = Scope->Start;
-Token last_valid = Tokens.Idx >= Tokens.Arr.num() ? Tokens.Arr[Tokens.Arr.num() -1] : Tokens.current();
+Token last_valid = Tokens.Idx >= num(Tokens.Arr) ? Tokens.Arr[num(Tokens.Arr) -1] : Tokens.current();

sptr length = scope_start.Length;
char const* current = scope_start.Text + length;
-while ( current <= Tokens.Arr.back().Text && *current != '\n' && length < 74 )
+while ( current <= back(Tokens.Arr).Text && *current != '\n' && length < 74 )
{
current++;
length++;
@@ -96,7 +96,7 @@ global ParseContext Context;

bool TokArray::__eat( TokType type )
{
-if ( Arr.num() - Idx <= 0 )
+if ( num(Arr) - Idx <= 0 )
{
log_failure( "No tokens left.\n%s", Context.to_string() );
return false;
@@ -167,7 +167,7 @@ if ( def.Ptr == nullptr ) \
# define prevtok Context.Tokens.previous()
# define nexttok Context.Tokens.next()
# define eat( Type_ ) Context.Tokens.__eat( Type_ )
-# define left ( Context.Tokens.Arr.num() - Context.Tokens.Idx )
+# define left ( num(Context.Tokens.Arr) - Context.Tokens.Idx )

#ifdef check
#define CHECK_WAS_DEFINED
@@ -745,7 +745,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
}
Token interface_tok = parse_identifier();

-interfaces.append( def_type( interface_tok ) );
+append(interfaces, def_type( interface_tok ) );
// <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Name>, ...
}
}
@@ -777,7 +777,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
if ( inline_cmt )
result->InlineCmt = inline_cmt;

-interfaces.free();
+free(interfaces);
return result;
}

@@ -1152,7 +1152,7 @@ Code parse_complicated_definition( TokType which )

s32 idx = tokens.Idx;
s32 level = 0;
-for ( ; idx < tokens.Arr.num(); idx++ )
+for ( ; idx < num(tokens.Arr); idx++ )
{
if ( tokens[ idx ].Type == TokType::BraceCurly_Open )
level++;
@@ -1837,7 +1837,7 @@ CodeBody parse_global_nspace( CodeT which )
bool found_operator_cast_outside_class_implmentation = false;
s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
{
Token tok = Context.Tokens[ idx ];

@@ -1909,14 +1909,14 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )

s32 idx = tokens.Idx;
Token nav = tokens[ idx ];
-for ( ; idx < tokens.Arr.num(); idx++, nav = tokens[ idx ] )
+for ( ; idx < num(tokens.Arr); idx++, nav = tokens[ idx ] )
{
if ( nav.Text[0] == '<' )
{
// Skip templated expressions as they mey have expressions with the () operators
s32 capture_level = 0;
s32 template_level = 0;
-for ( ; idx < tokens.Arr.num(); idx++, nav = tokens[idx] )
+for ( ; idx < num(tokens.Arr); idx++, nav = tokens[idx] )
{
if (nav.Text[ 0 ] == '<')
++ template_level;
@@ -2511,7 +2511,7 @@ Code parse_operator_function_or_variable( bool expects_function, CodeAttributes
bool found_operator = false;
s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
{
Token tok = Context.Tokens[ idx ];

@@ -4348,7 +4348,7 @@ CodeTemplate parse_template()
bool found_operator_cast_outside_class_implmentation = false;
s32 idx = Context.Tokens.Idx;

-for ( ; idx < Context.Tokens.Arr.num(); idx++ )
+for ( ; idx < num(Context.Tokens.Arr); idx++ )
{
Token tok = Context.Tokens[ idx ];

@@ -4896,7 +4896,7 @@ CodeTypedef parse_typedef()

s32 idx = tokens.Idx;
s32 level = 0;
-for ( ; idx < tokens.Arr.num(); idx ++ )
+for ( ; idx < num(tokens.Arr); idx ++ )
{
if ( tokens[idx].Type == TokType::BraceCurly_Open )
level++;

@@ -14,12 +14,26 @@ template<class TType>
using TRemoveConst = typename RemoveConst<TType>::Type;

#pragma region Array
#if ! GEN_COMPILER_C
#define Array(Type) Array<Type>

// #define array_init(Type, ...) array_init <Type>(__VA_ARGS__)
// #define array_init_reserve(Type, ...) array_init_reserve<Type>(__VA_ARGS__)
#endif

struct ArrayHeader;

#if GEN_SUPPORT_CPP_MEMBER_FEATURES
template<class Type> struct Array;
#else
template<class Type>
using Array = Type*;
#endif

usize array_grow_formula(ssize value);

template<class Type> Array<Type> array_init(AllocatorInfo allocator);
template<class Type> Array<Type> array_init_reserve(AllocatorInfo allocator, ssize capacity);
template<class Type> usize array_grow_formula(ssize value);
template<class Type> bool append(Array<Type>& array, Array<Type> other);
template<class Type> bool append(Array<Type>& array, Type value);
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num);
@@ -38,18 +52,22 @@ template<class Type> bool resize(Array<Type>& array, usize num);
template<class Type> bool set_capacity(Array<Type>& array, usize new_capacity);
template<class Type> ArrayHeader* get_header(Array<Type>& array);

template<class Type> forceinline Type* begin(Array<Type>& array) { return array; }
template<class Type> forceinline Type* end(Array<Type>& array) { return array + get_header(array)->Num; }
template<class Type> forceinline Type* next(Type* entry) { return entry + 1; }

struct ArrayHeader {
AllocatorInfo Allocator;
usize Capacity;
usize Num;
};

#if GEN_SUPPORT_CPP_MEMBER_FEATURES
template<class Type>
struct Array
{
Type* Data;

#if 1
#pragma region Member Mapping
forceinline static Array init(AllocatorInfo allocator) { return GEN_NS array_init<Type>(allocator); }
forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return GEN_NS array_init_reserve<Type>(allocator, capacity); }
@@ -78,12 +96,12 @@ struct Array
forceinline Type* begin() { return Data; }
forceinline Type* end() { return Data + get_header()->Num; }
#pragma endregion Member Mapping
#endif
};
#endif

template<class Type> inline
Array<Type> array_init(AllocatorInfo allocator) {
-return array_init_reserve<Type>(allocator, array_grow_formula<Type>(0));
+return array_init_reserve<Type>(allocator, array_grow_formula(0));
}

template<class Type> inline
@@ -101,7 +119,6 @@ Array<Type> array_init_reserve(AllocatorInfo allocator, ssize capacity)
return {rcast(Type*, header + 1)};
}

template<class Type> inline
usize array_grow_formula(ssize value) {
return 2 * value + 8;
}
@@ -123,7 +140,7 @@ bool append(Array<Type>& array, Type value)
header = get_header(array);
}

-array.Data[header->Num] = value;
+array[header->Num] = value;
header->Num++;

return true;
@@ -166,7 +183,7 @@ bool append_at(Array<Type>& array, Type item, usize idx)
header = get_header(array);
}

-Type* target = array.Data + idx;
+Type* target = array + idx;

mem_move(target + 1, target, (header->Num - idx) * sizeof(Type));
header->Num++;
@@ -205,7 +222,7 @@ bool append_at(Array<Type>& array, Type* items, usize item_num, usize idx)
template<class Type> inline
Type& back(Array<Type>& array) {
ArrayHeader* header = get_header(array);
-return array.Data[header->Num - 1];
+return array[header->Num - 1];
}

template<class Type> inline
@@ -224,7 +241,7 @@ bool fill(Array<Type>& array, usize begin, usize end, Type value)

for (ssize idx = ssize(begin); idx < ssize(end); idx++)
{
-array.Data[idx] = value;
+array[idx] = value;
}

return true;
@@ -234,20 +251,22 @@ template<class Type> inline
void free(Array<Type>& array) {
ArrayHeader* header = get_header(array);
gen::free(header->Allocator, header);
-array.Data = nullptr;
+Type*& Data = rcast(Type*&, array);
+Data = nullptr;
}

template<class Type> inline
ArrayHeader* get_header(Array<Type>& array) {
using NonConstType = TRemoveConst<Type>;
-return rcast(ArrayHeader*, const_cast<NonConstType*>(array.Data)) - 1;
+Type* Data = array; // This should do nothing in C but in C++ gets member Data struct.
+return rcast(ArrayHeader*, const_cast<NonConstType*>(Data)) - 1;
}

template<class Type> inline
bool grow(Array<Type>& array, usize min_capacity)
{
ArrayHeader* header = get_header(array);
-usize new_capacity = array_grow_formula<Type>(header->Capacity);
+usize new_capacity = array_grow_formula(header->Capacity);

if (new_capacity < min_capacity)
new_capacity = min_capacity;
@@ -273,7 +292,7 @@ void remove_at(Array<Type>& array, usize idx)
ArrayHeader* header = get_header(array);
GEN_ASSERT(idx < header->Num);

-mem_move(array.Data + idx, array.Data + idx + 1, sizeof(Type) * (header->Num - idx - 1));
+mem_move(array + idx, array + idx + 1, sizeof(Type) * (header->Num - idx - 1));
header->Num--;
}

@@ -329,7 +348,8 @@ bool set_capacity(Array<Type>& array, usize new_capacity)

GEN_NS free(header->Allocator, header);

-array.Data = rcast(Type*, new_header + 1);
+Type*& Data = rcast(Type*&, array);
+Data = rcast(Type*, new_header + 1);
return true;
}
#pragma endregion Array
@@ -371,11 +391,11 @@ template<class Type> bool full(HashTable<Type>& table);
template<class Type> void map(HashTable<Type>& table, void (*map_proc)(u64 key, Type value));
template<class Type> void map_mut(HashTable<Type>& table, void (*map_proc)(u64 key, Type* value));

static constexpr f32 HashTable_CriticalLoadScale = 0.7f;

template<typename Type>
struct HashTable
{
static constexpr f32 CriticalLoadScale = 0.7f;

Array<ssize> Hashes;
Array<HashTableEntry<Type>> Entries;

@@ -411,26 +431,26 @@ HashTable<Type> hashtable_init_reserve(AllocatorInfo allocator, usize num)
{
HashTable<Type> result = { { nullptr }, { nullptr } };

-result.Hashes = Array<ssize>::init_reserve(allocator, num);
-result.Hashes.get_header()->Num = num;
-result.Hashes.resize(num);
-result.Hashes.fill(0, num, -1);
+result.Hashes = array_init_reserve<ssize>(allocator, num);
+get_header(result.Hashes)->Num = num;
+resize(result.Hashes, num);
+fill<ssize>(result.Hashes, 0, num, -1);

-result.Entries = Array<HashTableEntry<Type>>::init_reserve(allocator, num);
+result.Entries = array_init_reserve<HashTableEntry<Type>>(allocator, num);
return result;
}

template<typename Type> inline
void clear(HashTable<Type>& table) {
-table.Entries.clear();
-table.Hashes.fill(0, table.Hashes.num(), -1);
+clear(table.Entries);
+fill<ssize>(table.Hashes, 0, num(table.Hashes), -1);
}

template<typename Type> inline
void destroy(HashTable<Type>& table) {
-if (table.Hashes && table.Hashes.get_header()->Capacity) {
-table.Hashes.free();
-table.Entries.free();
+if (table.Hashes && get_header(table.Hashes)->Capacity) {
+free(table.Hashes);
+free(table.Entries);
}
}

@@ -463,7 +483,7 @@ void map_mut(HashTable<Type>& table, void (*map_proc)(u64 key, Type* value)) {

template<typename Type> inline
void grow(HashTable<Type>& table) {
-ssize new_num = Array<HashTableEntry<Type>>::grow_formula(table.Entries.num());
+ssize new_num = array_grow_formula(num(table.Entries));
rehash(table, new_num);
}

@@ -471,9 +491,9 @@ template<typename Type> inline
void rehash(HashTable<Type>& table, ssize new_num)
{
ssize last_added_index;
-HashTable<Type> new_ht = hashtable_init_reserve<Type>(table.Hashes.get_header()->Allocator, new_num);
+HashTable<Type> new_ht = hashtable_init_reserve<Type>(get_header(table.Hashes)->Allocator, new_num);

-for (ssize idx = 0; idx < ssize(table.Entries.num()); ++idx)
+for (ssize idx = 0; idx < ssize(num(table.Entries)); ++idx)
{
HashTableFindResult find_result;
HashTableEntry<Type>& entry = table.Entries[idx];
@@ -580,8 +600,8 @@ ssize add_entry(HashTable<Type>& table, u64 key) {
ssize idx;
HashTableEntry<Type> entry = { key, -1 };

-idx = table.Entries.num();
-table.Entries.append(entry);
+idx = num(table.Entries);
+append(table.Entries, entry);
return idx;
}

@@ -590,9 +610,9 @@ HashTableFindResult find(HashTable<Type>& table, u64 key)
{
HashTableFindResult result = { -1, -1, -1 };

-if (table.Hashes.num() > 0)
+if (num(table.Hashes) > 0)
{
-result.HashIndex = key % table.Hashes.num();
+result.HashIndex = key % num(table.Hashes);
result.EntryIndex = table.Hashes[result.HashIndex];

while (result.EntryIndex >= 0)
@@ -610,8 +630,8 @@ HashTableFindResult find(HashTable<Type>& table, u64 key)

template<typename Type> inline
bool full(HashTable<Type>& table) {
-usize critical_load = usize(HashTable<Type>::CriticalLoadScale * f32(table.Hashes.num()));
-b32 result = table.Entries.num() > critical_load;
+usize critical_load = usize(HashTable_CriticalLoadScale * f32(num(table.Hashes)));
+b32 result = num(table.Entries) > critical_load;
return result;
}
#pragma endregion HashTable

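The pointer-style Array above works because the allocation carries its bookkeeping in an ArrayHeader placed immediately before the element data, and get_header() simply steps back over it. A self-contained sketch of that layout (the demo_* names are made up for illustration and are not gencpp's API; growth and allocator plumbing are omitted):

#include <cstdlib>
#include <cstdio>

struct ArrayHeader {
	size_t Capacity;
	size_t Num;
};

template<class Type>
Type* demo_array_init_reserve( size_t capacity ) {
	// One allocation holds the header followed by the element storage.
	ArrayHeader* header = (ArrayHeader*) malloc( sizeof(ArrayHeader) + capacity * sizeof(Type) );
	header->Capacity = capacity;
	header->Num      = 0;
	return (Type*)( header + 1 );   // the user only ever sees the data pointer
}

template<class Type>
ArrayHeader* demo_get_header( Type* array ) {
	return ((ArrayHeader*) array) - 1;   // step back over the header
}

template<class Type>
void demo_append( Type* array, Type value ) {
	ArrayHeader* header = demo_get_header( array );
	array[ header->Num++ ] = value;      // no growth handling in this sketch
}

int main() {
	int* values = demo_array_init_reserve<int>( 4 );
	demo_append( values, 7 );
	demo_append( values, 9 );
	printf( "num = %zu, back = %d\n",
		demo_get_header( values )->Num,
		values[ demo_get_header( values )->Num - 1 ] );
	free( demo_get_header( values ) );   // free the original allocation, not the data pointer
	return 0;
}
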
@@ -505,7 +505,7 @@ b8 file_stream_new( FileInfo* file, AllocatorInfo allocator )
d->allocator = allocator;
d->flags = EFileStream_CLONE_WRITABLE;
d->cap = 0;
-d->buf = Array<u8>::init( allocator );
+d->buf = array_init<u8>( allocator );

if ( ! d->buf )
return false;
@@ -531,7 +531,7 @@ b8 file_stream_open( FileInfo* file, AllocatorInfo allocator, u8* buffer, ssize
d->flags = flags;
if ( d->flags & EFileStream_CLONE_WRITABLE )
{
-Array<u8> arr = Array<u8>::init_reserve( allocator, size );
+Array<u8> arr = array_init_reserve<u8>( allocator, size );
d->buf = arr;

if ( ! d->buf )
@@ -540,7 +540,7 @@ b8 file_stream_open( FileInfo* file, AllocatorInfo allocator, u8* buffer, ssize
mem_copy( d->buf, buffer, size );
d->cap = size;

-arr.get_header()->Num = size;
+get_header(arr)->Num = size;
}
else
{
@@ -610,9 +610,9 @@ GEN_FILE_WRITE_AT_PROC( _memory_file_write )
{
Array<u8> arr = { d->buf };

-if ( arr.get_header()->Capacity < usize(new_cap) )
+if ( get_header(arr)->Capacity < usize(new_cap) )
{
-if ( ! arr.grow( ( s64 )( new_cap ) ) )
+if ( ! grow( arr, ( s64 )( new_cap ) ) )
return false;
d->buf = arr;
}
@@ -626,7 +626,7 @@ GEN_FILE_WRITE_AT_PROC( _memory_file_write )

mem_copy( d->buf + offset + rwlen, pointer_add_const( buffer, rwlen ), extralen );
d->cap = new_cap;
-arr.get_header()->Capacity = new_cap;
+get_header(arr)->Capacity = new_cap;
}
else
{
@@ -647,7 +647,7 @@ GEN_FILE_CLOSE_PROC( _memory_file_close )
if ( d->flags & EFileStream_CLONE_WRITABLE )
{
Array<u8> arr = { d->buf };
-arr.free();
+free(arr);
}

free( allocator, d );

@@ -187,7 +187,7 @@

#if !defined(typeof) && (!GEN_COMPILER_C || __STDC_VERSION__ < 202311L)
# if ! GEN_COMPILER_C
-# define typeof
+# define typeof decltype
# elif defined(_MSC_VER)
# define typeof(x) __typeof(x)
# elif defined(__GNUC__) || defined(__clang__)
@@ -197,4 +197,12 @@
# endif
#endif

// This is intended to only really be used internally or with the C-library variant
// C++ users can just use the for-range directly.
#if GEN_COMPILER_C
# define foreach(Type, entry_id, iterable) for ( Type entry_id = begin(iterable); entry_id != end(iterable); entry_id = next(entry_id) )
#else
# define foreach(Type, entry_id, iterable) for ( Type entry_id : iterable )
#endif

#pragma endregion Macros

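As the lexer hunk above shows (foreach( StringCached, entry, PreprocessorDefines )), the C expansion of foreach only needs the begin/end/next helpers forward-declared in the containers hunk, and the loop variable it declares is then a pointer into the container. A self-contained sketch of that expansion, compiled as C++ for brevity; IntSpan and its helpers are invented for the example and are not gencpp's:

#include <cstdio>

// Same shape as the C-mode macro above.
#define foreach(Type, entry_id, iterable) \
	for ( Type entry_id = begin(iterable); entry_id != end(iterable); entry_id = next(entry_id) )

struct IntSpan { int* Data; int Num; };

int* begin( IntSpan& s ) { return s.Data; }          // first element
int* end  ( IntSpan& s ) { return s.Data + s.Num; }  // one past the last element
int* next ( int* entry )  { return entry + 1; }      // advance the cursor

int main() {
	int     storage[] = { 1, 2, 3 };
	IntSpan span      = { storage, 3 };

	foreach( int*, entry, span )
		printf( "%d\n", *entry );
	return 0;
}
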
@@ -23,7 +23,7 @@ u8 adt_make_branch( ADT_Node* node, AllocatorInfo backing, char const* name, b32
node->type = type;
node->name = name;
node->parent = parent;
-node->nodes = Array<ADT_Node>::init( backing );
+node->nodes = array_init<ADT_Node>( backing );

if ( ! node->nodes )
return EADT_ERROR_OUT_OF_MEMORY;
@@ -36,12 +36,12 @@ u8 adt_destroy_branch( ADT_Node* node )
GEN_ASSERT_NOT_NULL( node );
if ( ( node->type == EADT_TYPE_OBJECT || node->type == EADT_TYPE_ARRAY ) && node->nodes )
{
-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); ++i )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); ++i )
{
adt_destroy_branch( node->nodes + i );
}

-node->nodes.free();
+free(node->nodes);
}
return 0;
}
@@ -66,7 +66,7 @@ ADT_Node* adt_find( ADT_Node* node, char const* name, b32 deep_search )
return NULL;
}

-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); i++ )
{
if ( ! str_compare( node->nodes[ i ].name, name ) )
{
@@ -76,7 +76,7 @@ ADT_Node* adt_find( ADT_Node* node, char const* name, b32 deep_search )

if ( deep_search )
{
-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); i++ )
{
ADT_Node* res = adt_find( node->nodes + i, name, deep_search );

@@ -132,7 +132,7 @@ internal ADT_Node* _adt_get_value( ADT_Node* node, char const* value )

internal ADT_Node* _adt_get_field( ADT_Node* node, char* name, char* value )
{
-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); i++ )
{
if ( ! str_compare( node->nodes[ i ].name, name ) )
{
@@ -207,7 +207,7 @@ ADT_Node* adt_query( ADT_Node* node, char const* uri )
/* run a value comparison against any child that is an object node */
else if ( node->type == EADT_TYPE_ARRAY )
{
-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); i++ )
{
ADT_Node* child = &node->nodes[ i ];
if ( child->type != EADT_TYPE_OBJECT )
@@ -225,7 +225,7 @@ ADT_Node* adt_query( ADT_Node* node, char const* uri )
/* [value] */
else
{
-for ( ssize i = 0; i < scast(ssize, node->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(node->nodes)); i++ )
{
ADT_Node* child = &node->nodes[ i ];
if ( _adt_get_value( child, l_b2 ) )
@@ -257,7 +257,7 @@ ADT_Node* adt_query( ADT_Node* node, char const* uri )
else
{
ssize idx = ( ssize )str_to_i64( buf, NULL, 10 );
-if ( idx >= 0 && idx < scast(ssize, node->nodes.num()) )
+if ( idx >= 0 && idx < scast(ssize, num(node->nodes)) )
{
found_node = &node->nodes[ idx ];

@@ -282,12 +282,12 @@ ADT_Node* adt_alloc_at( ADT_Node* parent, ssize index )
if ( ! parent->nodes )
return NULL;

-if ( index < 0 || index > scast(ssize, parent->nodes.num()) )
+if ( index < 0 || index > scast(ssize, num(parent->nodes)) )
return NULL;

ADT_Node o = { 0 };
o.parent = parent;
-if ( ! parent->nodes.append_at( o, index ) )
+if ( ! append_at( parent->nodes, o, index ) )
return NULL;

return parent->nodes + index;
@@ -303,7 +303,7 @@ ADT_Node* adt_alloc( ADT_Node* parent )
if ( ! parent->nodes )
return NULL;

-return adt_alloc_at( parent, parent->nodes.num() );
+return adt_alloc_at( parent, num(parent->nodes) );
}

b8 adt_set_obj( ADT_Node* obj, char const* name, AllocatorInfo backing )
@@ -357,7 +357,7 @@ ADT_Node* adt_move_node( ADT_Node* node, ADT_Node* new_parent )
GEN_ASSERT_NOT_NULL( node );
GEN_ASSERT_NOT_NULL( new_parent );
GEN_ASSERT( new_parent->type == EADT_TYPE_ARRAY || new_parent->type == EADT_TYPE_OBJECT );
-return adt_move_node_at( node, new_parent, new_parent->nodes.num() );
+return adt_move_node_at( node, new_parent, num(new_parent->nodes) );
}

void adt_swap_nodes( ADT_Node* node, ADT_Node* other_node )
@@ -381,7 +381,7 @@ void adt_remove_node( ADT_Node* node )
GEN_ASSERT_NOT_NULL( node->parent );
ADT_Node* parent = node->parent;
ssize index = ( pointer_diff( parent->nodes, node ) / size_of( ADT_Node ) );
-parent->nodes.remove_at( index );
+remove_at( parent->nodes, index );
}

ADT_Node* adt_append_obj( ADT_Node* parent, char const* name )
@@ -389,7 +389,7 @@ ADT_Node* adt_append_obj( ADT_Node* parent, char const* name )
ADT_Node* o = adt_alloc( parent );
if ( ! o )
return NULL;
-if ( adt_set_obj( o, name, parent->nodes.get_header()->Allocator ) )
+if ( adt_set_obj( o, name, get_header(parent->nodes)->Allocator ) )
{
adt_remove_node( o );
return NULL;
@@ -402,7 +402,7 @@ ADT_Node* adt_append_arr( ADT_Node* parent, char const* name )
ADT_Node* o = adt_alloc( parent );
if ( ! o )
return NULL;
-if ( adt_set_arr( o, name, parent->nodes.get_header()->Allocator ) )
+if ( adt_set_arr( o, name, get_header(parent->nodes)->Allocator ) )
{
adt_remove_node( o );
return NULL;
@@ -946,12 +946,12 @@ u8 csv_parse_delimiter( CSV_Object* root, char* text, AllocatorInfo allocator, b
}
}

-if ( columnIndex >= scast(ssize, root->nodes.num()) )
+if ( columnIndex >= scast(ssize, num(root->nodes)) )
{
adt_append_arr( root, NULL );
}

-root->nodes[ columnIndex ].nodes.append( rowItem );
+append(root->nodes[ columnIndex ].nodes, rowItem );

if ( delimiter == delim )
{
@@ -979,7 +979,7 @@ u8 csv_parse_delimiter( CSV_Object* root, char* text, AllocatorInfo allocator, b
}
while ( *currentChar );

-if ( root->nodes.num() == 0 )
+if (num( root->nodes) == 0 )
{
GEN_CSV_ASSERT( "unexpected end of input. stream is empty." );
error = ECSV_Error__UNEXPECTED_END_OF_INPUT;
@@ -989,12 +989,12 @@ u8 csv_parse_delimiter( CSV_Object* root, char* text, AllocatorInfo allocator, b
/* consider first row as a header. */
if ( has_header )
{
-for ( ssize i = 0; i < scast(ssize, root->nodes.num()); i++ )
+for ( ssize i = 0; i < scast(ssize, num(root->nodes)); i++ )
{
CSV_Object* col = root->nodes + i;
CSV_Object* hdr = col->nodes;
col->name = hdr->string;
-col->nodes.remove_at( 0 );
+remove_at(col->nodes, 0 );
}
}

@@ -1057,11 +1057,11 @@ void csv_write_delimiter( FileInfo* file, CSV_Object* obj, char delimiter )
GEN_ASSERT_NOT_NULL( file );
GEN_ASSERT_NOT_NULL( obj );
GEN_ASSERT( obj->nodes );
-ssize cols = obj->nodes.num();
+ssize cols = num(obj->nodes);
if ( cols == 0 )
return;

-ssize rows = obj->nodes[ 0 ].nodes.num();
+ssize rows = num(obj->nodes[ 0 ].nodes);
if ( rows == 0 )
return;

@@ -1,3 +1,5 @@
#define GEN_SUPPORT_CPP_MEMBER_FEATURES 1

#ifdef GEN_INTELLISENSE_DIRECTIVES
# pragma once
#endif

@@ -70,7 +70,7 @@ CodeBody gen_eoperator( char const* path )
String enum_entries = String::make_reserve( GlobalAllocator, kilobytes(1) );
String to_str_entries = String::make_reserve( GlobalAllocator, kilobytes(1) );

-for (usize idx = 0; idx < enum_strs.num(); idx++)
+for (usize idx = 0; idx < num(enum_strs); idx++)
{
char const* enum_str = enum_strs[idx].string;
char const* entry_to_str = str_strs [idx].string;
@@ -126,7 +126,7 @@ CodeBody gen_especifier( char const* path )
String enum_entries = String::make_reserve( GlobalAllocator, kilobytes(1) );
String to_str_entries = String::make_reserve( GlobalAllocator, kilobytes(1) );

-for (usize idx = 0; idx < enum_strs.num(); idx++)
+for (usize idx = 0; idx < num(enum_strs); idx++)
{
char const* enum_str = enum_strs[idx].string;
char const* entry_to_str = str_strs [idx].string;
@@ -243,7 +243,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
String to_str_attributes = String::make_reserve( GlobalAllocator, kilobytes(4) );
String attribute_define_entries = String::make_reserve( GlobalAllocator, kilobytes(4) );

-for (usize idx = 0; idx < enum_strs.num(); idx++)
+for (usize idx = 0; idx < num(enum_strs); idx++)
{
char const* enum_str = enum_strs[idx].string;
char const* entry_to_str = enum_str_strs [idx].string;
@@ -252,7 +252,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
to_str_entries.append_fmt( "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
}

-for ( usize idx = 0; idx < attribute_strs.num(); idx++ )
+for ( usize idx = 0; idx < num(attribute_strs); idx++ )
{
char const* attribute_str = attribute_strs[idx].string;
char const* entry_to_str = attribute_str_strs [idx].string;
@@ -261,7 +261,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
to_str_attributes.append_fmt( "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
attribute_define_entries.append_fmt( "Entry( Attribute_%s, \"%s\" )", attribute_str, entry_to_str );

-if ( idx < attribute_strs.num() - 1 )
+if ( idx < num(attribute_strs) - 1 )
attribute_define_entries.append( " \\\n");
else
attribute_define_entries.append( "\n");

@@ -192,7 +192,40 @@ if ( $singleheader )

if ( $c_library )
{
$path_build = join-path $path_c_library build
$path_gen = join-path $path_c_library gen

if ( -not(Test-Path($path_build) )) {
New-Item -ItemType Directory -Path $path_build
}
if ( -not(Test-Path($path_gen) )) {
New-Item -ItemType Directory -Path $path_gen
}

$includes = @( $path_project )
$unit = join-path $path_c_library "c_library.cpp"
$executable = join-path $path_build "c_library.exe"

$compiler_args = @()
$compiler_args += ( $flag_define + 'GEN_TIME' )

$linker_args = @(
$flag_link_win_subsystem_console
)

build-simple $path_build $includes $compiler_args $linker_args $unit $executable

Push-Location $path_singleheader
if ( Test-Path( $executable ) ) {
write-host "`nRunning c_library generator"
$time_taken = Measure-Command { & $executable
| ForEach-Object {
write-host `t $_ -ForegroundColor Green
}
}
write-host "`nc_library generator completed in $($time_taken.TotalMilliseconds) ms"
}
Pop-Location
}

if ( $unreal )