successful compile of c_library for the platform, macros, basic_types, debug, and memory headers (and the newly generated C code)

2024-12-05 00:40:51 -05:00
parent cae1555b11
commit a3e7ec4c72
20 changed files with 471 additions and 248 deletions

View File

@ -221,12 +221,12 @@ void to_string_fwd( CodeClass self, String* result )
String to_string(CodeDefine define)
{
return string_fmt_buf( GlobalAllocator, "#define %SC %SC\n", define->Name, define->Content );
return string_fmt_buf( GlobalAllocator, "#define %SC %SC", define->Name, define->Content );
}
void to_string(CodeDefine define, String* result )
{
string_append_fmt( result, "#define %SC %SC\n", define->Name, define->Content );
string_append_fmt( result, "#define %SC %SC", define->Name, define->Content );
}
String to_string(CodeDestructor self)
@ -919,7 +919,7 @@ void to_string_ifdef(CodePreprocessCond cond, String* result )
void to_string_ifndef(CodePreprocessCond cond, String* result )
{
string_append_fmt( result, "#ifndef %SC\n", cond->Content );
string_append_fmt( result, "#ifndef %SC", cond->Content );
}
void to_string_elif(CodePreprocessCond cond, String* result )

View File

@ -11,7 +11,7 @@ internal void deinit();
internal
void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
{
Arena* last = array_back(& Global_AllocatorBuckets);
Arena* last = array_back(Global_AllocatorBuckets);
switch ( type )
{
@ -27,7 +27,7 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
if ( ! array_append( & Global_AllocatorBuckets, bucket ) )
GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");
last = array_back(& Global_AllocatorBuckets);
last = array_back(Global_AllocatorBuckets);
}
return alloc_align( arena_allocator_info(last), size, alignment );
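
The call-site changes above reflect the C-style Array API: array_back now takes the array handle by value (the handle already refers to the backing buffer), while operations that may reallocate, such as array_append, keep taking its address. A minimal usage sketch under that assumption, not part of the commit:

// Sketch only: read access vs. mutation with the Array API.
local_persist char backing[ kilobytes(4) ];
Arena bucket = arena_init_from_memory( backing, sizeof(backing) );
Array<Arena> buckets = array_init_reserve<Arena>( GlobalAllocator, 4 );
array_append( & buckets, bucket );    // may grow and reallocate, so it takes & buckets
Arena* last = array_back( buckets );  // pure read, now takes the handle by value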
@ -54,7 +54,7 @@ void* Global_Allocator_Proc( void* allocator_data, AllocType type, ssize size, s
if ( ! array_append( & Global_AllocatorBuckets, bucket ) )
GEN_FATAL( "Failed to append bucket to Global_AllocatorBuckets");
last = array_back(& Global_AllocatorBuckets);
last = array_back(Global_AllocatorBuckets);
}
void* result = alloc_align( last->Backing, size, alignment );
@ -296,7 +296,7 @@ void init()
PreprocessorDefines = array_init_reserve<StringCached>( GlobalAllocator, kilobytes(1) );
define_constants();
parser::init();
GEN_NS_PARSER init();
}
void deinit()
@ -321,14 +321,14 @@ void deinit()
}
while ( left--, left );
destroy(& StringCache);
hashtable_destroy(StringCache);
array_free( & CodePools);
array_free( & StringArenas);
array_free( CodePools);
array_free( StringArenas);
arena_free(& LexArena);
array_free(& PreprocessorDefines);
array_free(PreprocessorDefines);
index = 0;
left = array_num(Global_AllocatorBuckets);
@ -340,8 +340,8 @@ void deinit()
}
while ( left--, left );
array_free(& Global_AllocatorBuckets);
parser::deinit();
array_free(Global_AllocatorBuckets);
GEN_NS_PARSER deinit();
}
void reset()
@ -366,14 +366,14 @@ void reset()
}
while ( left--, left );
clear(StringCache);
hashtable_clear(StringCache);
define_constants();
}
AllocatorInfo get_string_allocator( s32 str_length )
{
Arena* last = array_back(& StringArenas);
Arena* last = array_back(StringArenas);
usize size_req = str_length + sizeof(StringHeader) + sizeof(char*);
@ -384,7 +384,7 @@ AllocatorInfo get_string_allocator( s32 str_length )
if ( ! array_append( & StringArenas, new_arena ) )
GEN_FATAL( "gen::get_string_allocator: Failed to allocate a new string arena" );
last = array_back(& StringArenas);
last = array_back(StringArenas);
}
return arena_allocator_info(last);
@ -396,14 +396,14 @@ StringCached get_cached_string( StrC str )
s32 hash_length = str.Len > kilobytes(1) ? kilobytes(1) : str.Len;
u64 key = crc32( str.Ptr, hash_length );
{
StringCached* result = get(StringCache, key );
StringCached* result = hashtable_get(StringCache, key );
if ( result )
return * result;
}
String result = string_make_strc( get_string_allocator( str.Len ), str );
set(& StringCache, key, { str.Len, result } );
StrC result = string_to_strc( string_make_strc( get_string_allocator( str.Len ), str ));
hashtable_set(StringCache, key, result );
return { str.Len, result };
}
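
The generic get/set/clear/destroy overloads are replaced by prefixed hashtable_* procedures that take the table handle directly. A short sketch of the renamed API, assuming the HashTable(StrC) flavor used throughout this commit:

// Sketch only: mirrors the hashtable_* calls introduced in this commit.
HashTable(StrC) table = hashtable_init( StrC, GlobalAllocator );
u64 key = crc32( "max_count", 9 );
hashtable_set( table, key, txt("256") );
StrC* found = hashtable_get( table, key );  // nullptr when the key is absent
hashtable_clear( table );                   // drops entries, keeps capacity
hashtable_destroy( table );                 // releases the backing memory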
@ -411,7 +411,7 @@ StringCached get_cached_string( StrC str )
// Used internally to retrieve a Code object from the CodePool.
Code make_code()
{
Pool* allocator = array_back( & CodePools);
Pool* allocator = array_back( CodePools);
if ( allocator->FreeList == nullptr )
{
Pool code_pool = pool_init( Allocator_CodePool, CodePool_NumBlocks, sizeof(AST) );
@ -422,7 +422,7 @@ Code make_code()
if ( ! array_append( & CodePools, code_pool ) )
GEN_FATAL( "gen::make_code: Failed to allocate a new code pool - CodePools failed to append new pool." );
allocator = array_back( & CodePools);
allocator = array_back( CodePools);
}
Code result { rcast( AST*, alloc( pool_allocator_info(allocator), sizeof(AST) )) };

View File

@ -9,15 +9,13 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
ssize remaining = buf_size;
local_persist
Arena tok_map_arena;
FixedArena<TokenFmt_TokenMap_MemSize> tok_map_arena;
fixed_arena_init( & tok_map_arena);
local_persist
HashTable(StrC) tok_map;
{
local_persist
char tok_map_mem[ TokenFmt_TokenMap_MemSize ];
tok_map_arena = arena_init_from_memory( tok_map_mem, sizeof(tok_map_mem) );
tok_map = hashtable_init(StrC, arena_allocator_info(& tok_map_arena) );
tok_map = hashtable_init(StrC, fixed_arena_allocator_info(& tok_map_arena) );
s32 left = num_tokens - 1;
@ -27,7 +25,7 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
StrC value = va_arg( va, StrC );
u32 key = crc32( token, str_len(token) );
set(& tok_map, key, value );
hashtable_set( tok_map, key, value );
}
}
@ -63,7 +61,7 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
char const* token = fmt + 1;
u32 key = crc32( token, tok_len );
StrC* value = get(tok_map, key );
StrC* value = hashtable_get(tok_map, key );
if ( value )
{
@ -93,8 +91,8 @@ ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list va )
}
}
clear(tok_map);
arena_free(& tok_map_arena);
hashtable_clear(tok_map);
fixed_arena_free(& tok_map_arena);
ssize result = buf_size - remaining;
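
token_fmt_va now uses the FixedArena wrapper, which bundles its backing storage with the arena state, instead of a local_persist buffer plus arena_init_from_memory. A rough sketch of the pattern, assuming the fixed_arena_* procedures shown above:

// Sketch only: scratch storage via FixedArena instead of a hand-rolled static buffer.
FixedArena<TokenFmt_TokenMap_MemSize> scratch;
fixed_arena_init( & scratch );
HashTable(StrC) map = hashtable_init( StrC, fixed_arena_allocator_info( & scratch ) );
// ... populate and query the map ...
hashtable_clear( map );
fixed_arena_free( & scratch );  // resets the arena; the storage itself lives inline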

View File

@ -635,7 +635,7 @@ CodeDefine def_define( StrC name, StrC content )
result->Content = get_cached_string( txt("") );
}
else
result->Content = get_cached_string( content );
result->Content = get_cached_string( string_to_strc(string_fmt_buf(GlobalAllocator, "%SC\n", content)) );
return result;
}
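
Taken together with the to_string(CodeDefine) change earlier in this commit, the trailing newline now lives in the define's cached Content rather than being appended at serialization time. A minimal round-trip sketch, assuming the def_define/to_string signatures shown in this diff:

// Sketch only: the newline is baked into Content by def_define, not added by to_string.
CodeDefine def = def_define( txt("MAX_COUNT"), txt("256") );
String out = to_string( def );
// Expected output: "#define MAX_COUNT 256\n"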
@ -644,13 +644,13 @@ CodeDestructor def_destructor( Opts_def_destructor p )
{
Code body = p.body;
CodeSpecifiers specifiers = p.specifiers;
if ( specifiers && specifiers->Type != CT_Specifiers )
{
log_failure( "gen::def_destructor: specifiers was not a 'Specifiers' type: %s", debug_str(specifiers) );
return InvalidCode;
}
CodeDestructor result = (CodeDestructor) make_code();
if ( specifiers )

View File

@ -124,6 +124,21 @@ Token* current(TokArray* self, bool skip_formatting )
return & self->Arr[self->Idx];
}
Token* peek(TokArray self, bool skip_formatting)
{
s32 idx = self.Idx;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
idx++;
return & self.Arr[idx];
}
return & self.Arr[idx];
}
Token* previous(TokArray self, bool skip_formatting)
{
s32 idx = self.Idx;
@ -340,7 +355,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
array_append( & Tokens, name );
u64 key = crc32( name.Text, name.Length );
set(& ctx->defines, key, to_str(name) );
hashtable_set(ctx->defines, key, to_str(name) );
}
Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
@ -432,12 +447,13 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( current == '\r' )
{
move_forward();
break;
//move_forward();
}
if ( current == '\n' )
{
move_forward();
//move_forward();
break;
}
@ -509,7 +525,7 @@ void lex_found_token( LexContext* ctx )
else
key = crc32( ctx->token.Text, ctx->token.Length );
StrC* define = get(ctx->defines, key );
StrC* define = hashtable_get(ctx->defines, key );
if ( define )
{
ctx->token.Type = Tok_Preprocess_Macro;
@ -537,14 +553,16 @@ void lex_found_token( LexContext* ctx )
ctx->token.Length++;
}
if ( current == '\r' && ctx->scanner[1] == '\n' )
{
move_forward();
}
else if ( current == '\n' )
{
move_forward();
}
//if ( current == '\r' && ctx->scanner[1] == '\n' )
//{
// move_forward();
// ctx->token.Length++;
//}
//else if ( current == '\n' )
//{
// move_forward();
// ctx->token.Length++;
//}
}
else
{
@ -554,7 +572,6 @@ void lex_found_token( LexContext* ctx )
array_append( & Tokens, ctx->token );
}
neverinline
// TokArray lex( Array<Token> tokens, StrC content )
TokArray lex( StrC content )
@ -593,7 +610,7 @@ TokArray lex( StrC content )
}
u64 key = crc32( * entry, length );
set(& c.defines, key, (StrC) * entry );
hashtable_set(c.defines, key, (StrC) * entry );
}
array_clear(Tokens);
@ -645,7 +662,29 @@ TokArray lex( StrC content )
switch ( result )
{
case Lex_Continue:
{
//TokType last_type = Tokens[array_get_header(Tokens)->Num - 2].Type;
//if ( last_type == Tok_Preprocess_Pragma )
{
c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
if ( current == '\r')
{
move_forward();
c.token.Length = 1;
}
if ( current == '\n' )
{
c.token.Type = Tok_NewLine;
c.token.Length++;
move_forward();
array_append( & Tokens, c.token );
}
}
continue;
}
case Lex_ReturnNull:
return { {}, 0 };
@ -1245,8 +1284,30 @@ TokArray lex( StrC content )
}
}
FoundToken:
lex_found_token( ctx );
FoundToken:
{
lex_found_token( ctx );
TokType last_type = array_back(Tokens)->Type;
if ( last_type == Tok_Preprocess_Macro )
{
c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
if ( current == '\r')
{
move_forward();
c.token.Length = 1;
}
if ( current == '\n' )
{
c.token.Type = Tok_NewLine;
c.token.Length++;
move_forward();
array_append( & Tokens, c.token );
continue;
}
}
}
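
Both the Lex_Continue path and the post-macro path above now materialize an explicit Tok_NewLine token instead of silently consuming the line ending, so the parser can treat the newline as a boundary after a bare macro. Roughly, for a hypothetical input line the appended tokens would be:

// Sketch only (hypothetical input): a macro used as a bare statement.
//   source:  MY_EXPORT_MACRO\r\n
//   tokens:  [ ..., Tok_Preprocess_Macro "MY_EXPORT_MACRO", Tok_NewLine (Length 2) ]
// A CR/LF pair folds into a single Tok_NewLine; a lone '\r' with no '\n' emits nothing.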
}
if ( array_num(Tokens) == 0 )
@ -1255,7 +1316,7 @@ TokArray lex( StrC content )
return { {}, 0 };
}
clear(defines);
hashtable_clear(defines);
// defines_map_arena.free();
return { Tokens, 0 };
}

View File

@ -54,7 +54,7 @@ String to_string(ParseContext ctx)
sptr length = scope_start.Length;
char const* current = scope_start.Text + length;
while ( current <= array_back( & ctx.Tokens.Arr)->Text && *current != '\n' && length < 74 )
while ( current <= array_back( ctx.Tokens.Arr)->Text && *current != '\n' && length < 74 )
{
current++;
length++;
@ -141,7 +141,7 @@ void init()
);
fixed_arena_init(& defines_map_arena);
defines = hashtable_init_reserve(StrC, allocator_info( & defines_map_arena), 256 );
defines = hashtable_init_reserve(StrC, fixed_arena_allocator_info( & defines_map_arena), 256 );
}
internal
@ -170,8 +170,9 @@ bool _check_parse_args( StrC def, char const* func_name )
return true;
}
# define currtok_noskip (* current( & Context.Tokens, dont_skip_formatting ))
# define currtok (* current( & Context.Tokens, skip_formatting ))
# define currtok_noskip (* current( & Context.Tokens, dont_skip_formatting ))
# define currtok (* current( & Context.Tokens, skip_formatting ))
# define peektok (* peek(Context.Tokens, skip_formatting))
# define prevtok (* previous( Context.Tokens, dont_skip_formatting))
# define nexttok (* next( Context.Tokens, skip_formatting ))
# define eat( Type_ ) __eat( & Context.Tokens, Type_ )
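
currtok and currtok_noskip go through current(&Context.Tokens, …), which takes the token array by pointer, so any formatting skip it performs affects the shared cursor; the new peektok uses peek, which works on a copy and a local index and leaves Idx untouched, while prevtok looks one token back with formatting included. A hedged sketch of the intended lookahead pattern:

// Sketch only: decide on a path via peektok, then consume via eat().
if ( peektok.Type == Tok_Statement_End )
{
	Token stmt_end = currtok;   // the cursor only moves once we actually consume
	eat( Tok_Statement_End );
	// ...
}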
@ -720,7 +721,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
// <ModuleFlags> <class/struct> <Attributes> <Name>
local_persist
char interface_arr_mem[ kilobytes(4) ] {0};
char interface_arr_mem[ kilobytes(4) ] {0};
Array<CodeTypename> interfaces; {
Arena arena = arena_init_from_memory( interface_arr_mem, kilobytes(4) );
interfaces = array_init_reserve(CodeTypename, arena_allocator_info(& arena), 4 );
@ -786,7 +787,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
if ( inline_cmt )
result->InlineCmt = inline_cmt;
array_free(& interfaces);
array_free(interfaces);
return result;
}
@ -1708,8 +1709,17 @@ CodeBody parse_global_nspace( CodeType which )
break;
case Tok_Preprocess_Macro:
{
member = parse_simple_preprocess( Tok_Preprocess_Macro );
// <Macro>
if ( member == Code_Invalid )
{
log_failure( "Failed to parse member\n%s", to_string(Context) );
pop(& Context);
return InvalidCode;
}
}
break;
case Tok_Preprocess_Pragma:
@ -2935,7 +2945,7 @@ Code parse_simple_preprocess( TokType which )
eat( which );
// <Macro>
if ( currtok.Type == Tok_BraceCurly_Open )
if ( peektok.Type == Tok_BraceCurly_Open )
{
// Eat the block scope right after the macro. We're assuming the macro defines a function definition's signature
eat( Tok_BraceCurly_Open );
@ -2977,7 +2987,7 @@ Code parse_simple_preprocess( TokType which )
{
if ( str_compare_len( Context.Scope->Prev->ProcName.Ptr, "parse_typedef", Context.Scope->Prev->ProcName.Len ) != 0 )
{
if ( check( Tok_Statement_End ))
if ( peektok.Type == Tok_Statement_End )
{
Token stmt_end = currtok;
eat( Tok_Statement_End );
@ -2989,7 +2999,7 @@ Code parse_simple_preprocess( TokType which )
}
}
tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)tok.Text;
tok.Length = ( (sptr)currtok_noskip.Text + currtok_noskip.Length ) - (sptr)tok.Text;
}
char const* content = str_fmt_buf( "%.*s ", tok.Length, tok.Text );
@ -3554,7 +3564,7 @@ CodeEnum parse_enum( bool inplace_def )
push_scope();
Specifier specs_found[16] { Spec_NumSpecifiers };
s32 NumSpecifiers = 0;
s32 NumSpecifiers = 0;
CodeAttributes attributes = { nullptr };
@ -3589,7 +3599,7 @@ CodeEnum parse_enum( bool inplace_def )
// enum <class> <Attributes> <Name>
b32 use_macro_underlying = false;
Code underlying_macro = { nullptr };
Code underlying_macro = { nullptr };
if ( currtok.Type == Tok_Assign_Classifer )
{
eat( Tok_Assign_Classifer );
@ -3701,9 +3711,9 @@ CodeEnum parse_enum( bool inplace_def )
eat( Tok_Operator );
// <Name> =
while ( currtok_noskip.Type != Tok_Comma && currtok_noskip.Type != Tok_BraceCurly_Close )
while ( currtok.Type != Tok_Comma && currtok.Type != Tok_BraceCurly_Close )
{
eat( currtok_noskip.Type );
eat( currtok.Type );
}
}
// <Name> = <Expression>
@ -3717,6 +3727,9 @@ CodeEnum parse_enum( bool inplace_def )
if ( currtok.Type == Tok_Comma )
{
//Token prev = * previous(Context.Tokens, dont_skip_formatting);
//entry.Length = ( (sptr)prev.Text + prev.Length ) - (sptr)entry.Text;
eat( Tok_Comma );
// <Name> = <Expression> <Macro>,
}
@ -3727,10 +3740,9 @@ CodeEnum parse_enum( bool inplace_def )
// eat( Tok_Comment );
// <Name> = <Expression> <Macro>, // <Inline Comment>
// }
//Token prev = * previous(Context.Tokens, dont_skip_formatting);
entry.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)entry.Text;
Token prev = * previous(Context.Tokens, dont_skip_formatting);
entry.Length = ( (sptr)prev.Text + prev.Length ) - (sptr)entry.Text;
member = untyped_str( to_str(entry) );
break;
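
The entry's length is recovered with pointer arithmetic: the distance from the entry's first character to the end of the last consumed token, which works because all tokens point into the same contiguous source buffer. A small worked sketch:

// Sketch only (hypothetical enum entry): recovering a source span from token pointers.
//   source:   "Value = (1 << 4),"
//              ^ entry.Text                ^ prevtok.Text == entry.Text + 15, prevtok.Length == 1
//   entry.Length = ((sptr)prevtok.Text + prevtok.Length) - (sptr)entry.Text;   // == 16 -> "Value = (1 << 4)"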