diff --git a/gen_c_library/c_library.cpp b/gen_c_library/c_library.cpp index 5e675b6..46d5fd6 100644 --- a/gen_c_library/c_library.cpp +++ b/gen_c_library/c_library.cpp @@ -456,6 +456,12 @@ do \ CodeBody header_parsing = def_body(CT_Global_Body); for ( Code entry = parsed_header_parsing.begin(); entry != parsed_header_parsing.end(); ++ entry ) switch (entry->Type) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Preprocess_Pragma: { if ( entry->Content.contains(txt("ADT")) ) @@ -546,6 +552,12 @@ do \ CodeBody types = def_body(CT_Global_Body); for ( Code entry = parsed_types.begin(); entry != parsed_types.end(); ++ entry ) switch(entry->Type) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Using: { CodeUsing using_ver = cast(CodeUsing, entry); @@ -600,6 +612,12 @@ do \ CodeBody ast = def_body(CT_Global_Body); for ( Code entry = parsed_ast.begin(); entry != parsed_ast.end(); ++ entry ) switch (entry->Type) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Preprocess_If: { CodePreprocessCond cond = cast(CodePreprocessCond, entry); @@ -992,6 +1010,12 @@ R"(#define ( code ) _Generic( (code), \ CodeBody inlines = def_body(CT_Global_Body); for ( Code entry = parsed_inlines.begin(); entry != parsed_inlines.end(); ++ entry ) switch( entry->Type ) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Function: { // Were going to wrap usage of these procedures into generic selectors in code_types.hpp section, @@ -1019,6 +1043,12 @@ R"(#define ( code ) _Generic( (code), \ CodeBody header_end = 
def_body(CT_Global_Body); for ( Code entry = parsed_header_end.begin(); entry != parsed_header_end.end(); ++ entry, ++ idx ) switch( entry->Type ) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Variable: { CodeVar var = cast(CodeVar, entry); @@ -1058,13 +1088,14 @@ R"(#define ( code ) _Generic( (code), \ #pragma region Resolve Components CodeBody array_arena = gen_array(txt("Arena"), txt("Array_Arena")); CodeBody array_pool = gen_array(txt("Pool"), txt("Array_Pool")); + CodeBody array_token = gen_array(txt("Token"), txt("Array_Token")); Code src_static_data = scan_file( project_dir "components/static_data.cpp" ); Code src_ast_case_macros = scan_file( project_dir "components/ast_case_macros.cpp" ); Code src_code_serialization = scan_file( project_dir "components/code_serialization.cpp" ); Code src_interface = scan_file( project_dir "components/interface.cpp" ); - Code src_lexer = scan_file( project_dir "components/lexer.cpp" ); - Code src_parser = scan_file( project_dir "components/parser.cpp" ); + // Code src_lexer = scan_file( project_dir "components/lexer.cpp" ); + // Code src_parser = scan_file( project_dir "components/parser.cpp" ); Code src_parsing_interface = scan_file( project_dir "components/interface.parsing.cpp" ); Code src_untyped = scan_file( project_dir "components/interface.untyped.cpp" ); @@ -1072,6 +1103,12 @@ R"(#define ( code ) _Generic( (code), \ CodeBody src_ast = def_body(CT_Global_Body); for ( Code entry = parsed_src_ast.begin(); entry != parsed_src_ast.end(); ++ entry ) switch( entry ->Type ) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + case CT_Function: { // Were going to wrap usage of these procedures into generic selectors in code_types.hpp section, @@ -1098,6 +1135,12 @@ R"(#define ( code ) 
_Generic( (code), \ CodeBody src_upfront = def_body(CT_Global_Body); for ( Code entry = parsed_src_upfront.begin(); entry != parsed_src_upfront.end(); ++ entry ) switch( entry ->Type ) { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + + case CT_Enum: { convert_cpp_enum_to_c(cast(CodeEnum, entry), src_upfront); } @@ -1137,6 +1180,97 @@ R"(#define ( code ) _Generic( (code), \ src_upfront.append(entry); break; } + + CodeBody parsed_src_lexer = parse_file( project_dir "components/lexer.cpp" ); + CodeBody src_lexer = def_body(CT_Global_Body); + for ( Code entry = parsed_src_lexer.begin(); entry != parsed_src_lexer.end(); ++ entry ) switch( entry ->Type ) + { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + + case CT_Enum: + { + if (entry->Name.Len) + { + convert_cpp_enum_to_c(cast(CodeEnum, entry), src_lexer); + } + } + break; + + case CT_Struct: + { + if ( entry->Name.is_equal(txt("Token"))) + { + // Add struct Token forward and typedef early.
+ CodeStruct token_fwd = parse_struct(code( struct Token; )); + CodeTypedef token_typedef = parse_typedef(code( typedef struct Token Token; )); + header_parsing.append(token_fwd); + header_parsing.append(token_typedef); + + // Skip typedef since we added it + b32 continue_for = true; + for (Code array_entry = array_token.begin(); continue_for && array_entry != array_token.end(); ++ array_entry) switch (array_entry->Type) + { + case CT_Typedef: + { + // pop the array entry + array_token->NumEntries -= 1; + Code next = array_entry->Next; + Code prev = array_entry->Prev; + next->Prev = array_entry->Prev; + prev->Next = next; + if ( array_token->Front == array_entry ) + array_token->Front = next; + + src_lexer.append(array_entry); + continue_for = false; + } + break; + } + + // Append the struct + src_lexer.append(entry); + + // Append the token array + src_lexer.append(array_token); + continue; + } + + CodeTypedef struct_tdef = parse_typedef(token_fmt("name", entry->Name, stringize( typedef struct <name> <name>; ))); + src_lexer.append(entry); + src_lexer.append(struct_tdef); + } + break; + + default: + src_lexer.append(entry); + break; + } + + CodeBody parsed_src_parser = parse_file( project_dir "components/parser.cpp" ); + CodeBody src_parser = def_body(CT_Global_Body); + for ( Code entry = parsed_src_parser.begin(); entry != parsed_src_parser.end(); ++ entry ) switch( entry ->Type ) + { + case CT_Preprocess_IfDef: + { + ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_header_strings, header_strings ); + } + break; + + case CT_Struct: + { + CodeTypedef tdef = parse_typedef(token_fmt("name", entry->Name, stringize( typedef struct <name> <name>; ))); + header_memory.append(entry); + header_memory.append(tdef); + } break; + + default: + src_parser.append(entry); + break; + } #pragma endregion Resolve Components // THERE SHOULD BE NO NEW GENERIC CONTAINER DEFINTIONS PAST THIS POINT (It will not have slots for the generic selection generated macros) @@ -1285,7 +1419,7 @@
R"(#define ( code ) _Generic( (code), \ header.print( format_code_to_untyped(src_upfront) ); header.print_fmt( "\n#pragma region Parsing\n\n" ); header.print( format_code_to_untyped(etoktype) ); - // header.print( lexer ); + header.print( format_code_to_untyped(src_lexer) ); // header.print( parser ); // header.print( parsing_interface ); header.print_fmt( "\n#pragma endregion Parsing\n" ); diff --git a/project/bootstrap.cpp b/project/bootstrap.cpp index 8eb2188..c78c930 100644 --- a/project/bootstrap.cpp +++ b/project/bootstrap.cpp @@ -246,9 +246,7 @@ int gen_main() CodeBody etoktype = gen_etoktype( "enums/ETokType.csv", "enums/AttributeTokens.csv" ); //CodeNS nspaced_etoktype = def_namespace( name(parser), def_namespace_body( args(etoktype)) ); CodeBody nspaced_etoktype = def_global_body( args( - untyped_str(txt("GEN_NS_PARSER_BEGIN\n")), - etoktype, - untyped_str(txt("GEN_NS_PARSER_END\n")) + etoktype )); Builder diff --git a/project/components/gen/etoktype.cpp b/project/components/gen/etoktype.cpp index c3ad2f6..574f516 100644 --- a/project/components/gen/etoktype.cpp +++ b/project/components/gen/etoktype.cpp @@ -6,6 +6,7 @@ // This file was generated automatially by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp) GEN_NS_PARSER_BEGIN + #define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" ) enum TokType : u32 @@ -112,7 +113,7 @@ enum TokType : u32 inline StrC toktype_to_str( TokType type ) { - local_persist StrC lookup[] { + local_persist StrC lookup[] = { { sizeof( "__invalid__" ), "__invalid__" }, { sizeof( "private" ), "private" }, { sizeof( "protected" ), "protected" }, diff --git a/project/components/interface.parsing.cpp b/project/components/interface.parsing.cpp index e2d6ee9..bdc9910 100644 --- a/project/components/interface.parsing.cpp +++ b/project/components/interface.parsing.cpp @@ -12,7 +12,7 @@ CodeClass parse_class( StrC def ) { GEN_USING_NS_PARSER; check_parse_args( 
def ); - + TokArray toks = lex( def ); if ( toks.Arr == nullptr ) return InvalidCode; @@ -20,7 +20,7 @@ CodeClass parse_class( StrC def ) Context.Tokens = toks; push_scope(); CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class ); - pop(& Context); + parser_pop(& Context); return result; } @@ -39,9 +39,9 @@ CodeConstructor parse_constructor( StrC def ) Specifier specs_found[ 16 ] { Spec_NumSpecifiers }; s32 NumSpecifiers = 0; - while ( left && is_specifier(currtok) ) + while ( left && tok_is_specifier(currtok) ) { - Specifier spec = strc_to_specifier( to_str(currtok) ); + Specifier spec = strc_to_specifier( tok_to_str(currtok) ); b32 ignore_spec = false; @@ -59,8 +59,8 @@ CodeConstructor parse_constructor( StrC def ) break; default : - log_failure( "Invalid specifier %s for variable\n%s", spec_to_str( spec ), to_string(Context) ); - pop(& Context); + log_failure( "Invalid specifier %s for variable\n%s", spec_to_str( spec ), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -109,7 +109,7 @@ CodeEnum parse_enum( StrC def ) TokArray toks = lex( def ); if ( toks.Arr == nullptr ) { - pop(& Context); + parser_pop(& Context); return InvalidCode; } @@ -181,7 +181,7 @@ CodeBody parse_global_body( StrC def ) Context.Tokens = toks; push_scope(); CodeBody result = parse_global_nspace( CT_Global_Body ); - pop(& Context); + parser_pop(& Context); return result; } @@ -236,7 +236,7 @@ CodeStruct parse_struct( StrC def ) Context.Tokens = toks; push_scope(); CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct ); - pop(& Context); + parser_pop(& Context); return result; } diff --git a/project/components/lexer.cpp b/project/components/lexer.cpp index 3041f32..9c09e6a 100644 --- a/project/components/lexer.cpp +++ b/project/components/lexer.cpp @@ -35,62 +35,62 @@ struct Token constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null }; -AccessSpec to_access_specifier(Token tok) +AccessSpec 
tok_to_access_specifier(Token tok) { return scast(AccessSpec, tok.Type); } -StrC to_str(Token tok) +StrC tok_to_str(Token tok) { return { tok.Length, tok.Text }; } -bool is_valid( Token tok ) +bool tok_is_valid( Token tok ) { return tok.Text && tok.Length && tok.Type != Tok_Invalid; } -bool is_access_operator(Token tok) +bool tok_is_access_operator(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_AccessOperator ); } -bool is_access_specifier(Token tok) +bool tok_is_access_specifier(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_AccessSpecifier ); } -bool is_attribute(Token tok) +bool tok_is_attribute(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_Attribute ); } -bool is_operator(Token tok) +bool tok_is_operator(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_Operator ); } -bool is_preprocessor(Token tok) +bool tok_is_preprocessor(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_Preprocess ); } -bool is_preprocess_cond(Token tok) +bool tok_is_preprocess_cond(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_Preprocess_Cond ); } -bool is_specifier(Token tok) +bool tok_is_specifier(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_Specifier ); } -bool is_end_definition(Token tok) +bool tok_is_end_definition(Token tok) { return bitfield_is_equal( u32, tok.Flags, TF_EndDefinition ); } -String to_string(Token tok) +String tok_to_string(Token tok) { String result = string_make_reserve( GlobalAllocator, kilobytes(4) ); @@ -111,9 +111,9 @@ struct TokArray s32 Idx; }; -bool __eat( TokType type ); +bool lex__eat( TokType type ); -Token* current(TokArray* self, bool skip_formatting ) +Token* lex_current(TokArray* self, bool skip_formatting ) { if ( skip_formatting ) { @@ -124,7 +124,7 @@ Token* current(TokArray* self, bool skip_formatting ) return & self->Arr[self->Idx]; } -Token* peek(TokArray self, bool skip_formatting) +Token* lex_peek(TokArray self, bool skip_formatting) { s32 idx = self.Idx; @@ -139,7 
+139,7 @@ Token* peek(TokArray self, bool skip_formatting) return & self.Arr[idx]; } -Token* previous(TokArray self, bool skip_formatting) +Token* lex_previous(TokArray self, bool skip_formatting) { s32 idx = self.Idx; @@ -154,7 +154,7 @@ Token* previous(TokArray self, bool skip_formatting) return & self.Arr[idx - 1]; } -Token* next(TokArray self, bool skip_formatting) +Token* lex_next(TokArray self, bool skip_formatting) { s32 idx = self.Idx; @@ -169,9 +169,9 @@ Token* next(TokArray self, bool skip_formatting) return & self.Arr[idx + 1]; } -global Arena_256KB defines_map_arena; -global HashTable(StrC) defines; -global Array(Token) Tokens; +global Arena_256KB Lexer_defines_map_arena; +global HashTable(StrC) Lexer_defines; +global Array(Token) Lexer_Tokens; #define current ( * ctx->scanner ) @@ -190,7 +190,7 @@ global Array(Token) Tokens; ctx->scanner++; \ } -#define SkipWhitespace() \ +#define skip_whitespace() \ while ( ctx->left && char_is_space( current ) ) \ { \ move_forward(); \ @@ -237,10 +237,10 @@ s32 lex_preprocessor_directive( LexContext* ctx ) { char const* hash = ctx->scanner; Token hash_tok = { hash, 1, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess }; - array_append( Tokens, hash_tok ); + array_append( Lexer_Tokens, hash_tok ); move_forward(); - SkipWhitespace(); + skip_whitespace(); ctx->token.Text = ctx->scanner; while (ctx->left && ! char_is_space(current) ) @@ -249,7 +249,7 @@ s32 lex_preprocessor_directive( LexContext* ctx ) ctx->token.Length++; } - ctx->token.Type = strc_to_toktype( to_str(ctx->token) ); + ctx->token.Type = strc_to_toktype( tok_to_str(ctx->token) ); bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma; if ( ! 
is_preprocessor ) @@ -313,14 +313,14 @@ s32 lex_preprocessor_directive( LexContext* ctx ) ctx->token.Length = ctx->token.Length + ctx->token.Text - hash; ctx->token.Text = hash; - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); return Lex_Continue; // Skip found token, its all handled here. } if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf ) { ctx->token.Flags |= TF_Preprocess_Cond; - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); end_line(); return Lex_Continue; } @@ -329,9 +329,9 @@ s32 lex_preprocessor_directive( LexContext* ctx ) ctx->token.Flags |= TF_Preprocess_Cond; } - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); - SkipWhitespace(); + skip_whitespace(); if ( ctx->token.Type == Tok_Preprocess_Define ) { @@ -353,10 +353,10 @@ s32 lex_preprocessor_directive( LexContext* ctx ) name.Length++; } - array_append( Tokens, name ); + array_append( Lexer_Tokens, name ); u64 key = crc32( name.Text, name.Length ); - hashtable_set(ctx->defines, key, to_str(name) ); + hashtable_set(ctx->defines, key, tok_to_str(name) ); } Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess }; @@ -399,7 +399,7 @@ s32 lex_preprocessor_directive( LexContext* ctx ) move_forward(); } - array_append( Tokens, preprocess_content ); + array_append( Lexer_Tokens, preprocess_content ); return Lex_Continue; // Skip found token, its all handled here. } @@ -462,7 +462,7 @@ s32 lex_preprocessor_directive( LexContext* ctx ) preprocess_content.Length++; } - array_append( Tokens, preprocess_content ); + array_append( Lexer_Tokens, preprocess_content ); return Lex_Continue; // Skip found token, its all handled here. 
} @@ -471,11 +471,11 @@ void lex_found_token( LexContext* ctx ) { if ( ctx->token.Type != Tok_Invalid ) { - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); return; } - TokType type = strc_to_toktype( to_str(ctx->token) ); + TokType type = strc_to_toktype( tok_to_str(ctx->token) ); if (type <= Tok_Access_Public && type >= Tok_Access_Private ) { @@ -489,7 +489,7 @@ void lex_found_token( LexContext* ctx ) if ( type == Tok_Decl_Extern_Linkage ) { - SkipWhitespace(); + skip_whitespace(); if ( current != '"' ) { @@ -498,7 +498,7 @@ void lex_found_token( LexContext* ctx ) } ctx->token.Type = type; - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); return; } @@ -508,7 +508,7 @@ void lex_found_token( LexContext* ctx ) { ctx->token.Type = type; ctx->token.Flags |= TF_Specifier; - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); return; } @@ -516,7 +516,7 @@ void lex_found_token( LexContext* ctx ) if ( type != Tok_Invalid ) { ctx->token.Type = type; - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); return; } @@ -570,7 +570,7 @@ void lex_found_token( LexContext* ctx ) ctx->token.Type = Tok_Identifier; } - array_append( Tokens, ctx->token ); + array_append( Lexer_Tokens, ctx->token ); } neverinline @@ -581,7 +581,7 @@ TokArray lex( StrC content ) c.content = content; c.left = content.Len; c.scanner = content.Ptr; - c.defines = defines; + c.defines = Lexer_defines; char const* word = c.scanner; s32 word_length = 0; @@ -589,7 +589,7 @@ TokArray lex( StrC content ) c.line = 1; c.column = 1; - SkipWhitespace(); + skip_whitespace(); if ( c.left <= 0 ) { log_failure( "gen::lex: no tokens found (only whitespace provided)" ); @@ -614,7 +614,7 @@ TokArray lex( StrC content ) hashtable_set(c.defines, key, (StrC) * entry ); } - array_clear(Tokens); + array_clear(Lexer_Tokens); while (c.left ) { @@ -644,14 +644,14 @@ TokArray lex( StrC content ) c.token.Type = 
Tok_NewLine; c.token.Length++; - array_append( Tokens, c.token ); + array_append( Lexer_Tokens, c.token ); continue; } } c.token.Length = 0; - SkipWhitespace(); + skip_whitespace(); if ( c.left <= 0 ) break; @@ -680,7 +680,7 @@ TokArray lex( StrC content ) c.token.Length++; move_forward(); - array_append( Tokens, c.token ); + array_append( Lexer_Tokens, c.token ); } } @@ -1135,7 +1135,7 @@ TokArray lex( StrC content ) move_forward(); c.token.Length++; } - array_append( Tokens, c.token ); + array_append( Lexer_Tokens, c.token ); continue; } else if ( current == '*' ) @@ -1171,7 +1171,7 @@ TokArray lex( StrC content ) move_forward(); c.token.Length++; } - array_append( Tokens, c.token ); + array_append( Lexer_Tokens, c.token ); // end_line(); continue; } @@ -1264,14 +1264,14 @@ TokArray lex( StrC content ) } else { - s32 start = max( 0, array_num(Tokens) - 100 ); + s32 start = max( 0, array_num(Lexer_Tokens) - 100 ); log_fmt("\n%d\n", start); - for ( s32 idx = start; idx < array_num(Tokens); idx++ ) + for ( s32 idx = start; idx < array_num(Lexer_Tokens); idx++ ) { log_fmt( "Token %d Type: %s : %.*s\n" , idx - , toktype_to_str( Tokens[ idx ].Type ).Ptr - , Tokens[ idx ].Length, Tokens[ idx ].Text + , toktype_to_str( Lexer_Tokens[ idx ].Type ).Ptr + , Lexer_Tokens[ idx ].Length, Lexer_Tokens[ idx ].Text ); } @@ -1288,7 +1288,7 @@ TokArray lex( StrC content ) FoundToken: { lex_found_token( ctx ); - TokType last_type = array_back(Tokens)->Type; + TokType last_type = array_back(Lexer_Tokens)->Type; if ( last_type == Tok_Preprocess_Macro ) { c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null }; @@ -1304,22 +1304,23 @@ TokArray lex( StrC content ) c.token.Length++; move_forward(); - array_append( Tokens, c.token ); + array_append( Lexer_Tokens, c.token ); continue; } } } } - if ( array_num(Tokens) == 0 ) + if ( array_num(Lexer_Tokens) == 0 ) { log_failure( "Failed to lex any tokens" ); return { {}, 0 }; } - hashtable_clear(defines); + 
hashtable_clear(Lexer_defines); // defines_map_arena.free(); - return { Tokens, 0 }; + TokArray result = { Lexer_Tokens, 0 }; + return result; } #undef current #undef move_forward diff --git a/project/components/parser.cpp b/project/components/parser.cpp index 4f92118..d856244 100644 --- a/project/components/parser.cpp +++ b/project/components/parser.cpp @@ -27,7 +27,7 @@ struct ParseContext StackNode* Scope; }; -void push( ParseContext* ctx, StackNode* node ) +void parser_push( ParseContext* ctx, StackNode* node ) { node->Prev = ctx->Scope; ctx->Scope = node; @@ -37,7 +37,7 @@ void push( ParseContext* ctx, StackNode* node ) #endif } -void pop(ParseContext* ctx) +void parser_pop(ParseContext* ctx) { #if 0 && Build_Debug log_fmt("\tPopping Context: %.*s\n", Scope->ProcName.Len, Scope->ProcName.Ptr ); @@ -45,12 +45,12 @@ void pop(ParseContext* ctx) ctx->Scope = ctx->Scope->Prev; } -String to_string(ParseContext ctx) +String parser_to_string(ParseContext ctx) { String result = string_make_reserve( GlobalAllocator, kilobytes(4) ); Token scope_start = ctx.Scope->Start; - Token last_valid = ctx.Tokens.Idx >= array_num(ctx.Tokens.Arr) ? ctx.Tokens.Arr[array_num(ctx.Tokens.Arr) -1] : (* current(& ctx.Tokens, true)); + Token last_valid = ctx.Tokens.Idx >= array_num(ctx.Tokens.Arr) ? 
ctx.Tokens.Arr[array_num(ctx.Tokens.Arr) -1] : (* lex_current(& ctx.Tokens, true)); sptr length = scope_start.Length; char const* current = scope_start.Text + length; @@ -77,7 +77,7 @@ String to_string(ParseContext ctx) s32 level = 0; do { - if ( is_valid(curr_scope->Name) ) + if ( tok_is_valid(curr_scope->Name) ) { string_append_fmt(& result, "\t%d: %s, AST Name: %.*s\n", level, curr_scope->ProcName.Ptr, curr_scope->Name.Length, curr_scope->Name.Text ); } @@ -95,11 +95,11 @@ String to_string(ParseContext ctx) global ParseContext Context; -bool __eat(TokArray* self, TokType type ) +bool lex__eat(TokArray* self, TokType type ) { if ( array_num(self->Arr) - self->Idx <= 0 ) { - log_failure( "No tokens left.\n%s", to_string(Context) ); + log_failure( "No tokens left.\n%s", parser_to_string(Context) ); return false; } @@ -113,13 +113,13 @@ bool __eat(TokArray* self, TokType type ) if ( at_idx.Type != type ) { - Token tok = * current( self, skip_formatting ); + Token tok = * lex_current( self, skip_formatting ); log_failure( "Parse Error, TokArray::eat, Expected: ' %s ' not ' %.*s ' (%d, %d)`\n%s" , toktype_to_str(type).Ptr , at_idx.Length, at_idx.Text , tok.Line , tok.Column - , to_string(Context) + , parser_to_string(Context) ); return false; @@ -136,18 +136,18 @@ bool __eat(TokArray* self, TokType type ) internal void parser_init() { - Tokens = array_init_reserve(Token, arena_allocator_info( & LexArena) + Lexer_Tokens = array_init_reserve(Token, arena_allocator_info( & LexArena) , ( LexAllocator_Size - sizeof( ArrayHeader ) ) / sizeof(Token) ); - fixed_arena_init(& defines_map_arena); - defines = hashtable_init_reserve(StrC, fixed_arena_allocator_info( & defines_map_arena), 256 ); + fixed_arena_init(& Lexer_defines_map_arena); + Lexer_defines = hashtable_init_reserve(StrC, fixed_arena_allocator_info( & Lexer_defines_map_arena), 256 ); } internal void parser_deinit() { - parser::Tokens = { nullptr }; + Lexer_Tokens = { nullptr }; } #pragma region Helper Macros @@ 
-158,24 +158,24 @@ bool _check_parse_args( StrC def, char const* func_name ) if ( def.Len <= 0 ) { log_failure( str_fmt_buf("gen::%s: length must greater than 0", func_name) ); - pop(& Context); + parser_pop(& Context); return false; } if ( def.Ptr == nullptr ) { log_failure( str_fmt_buf("gen::%s: def was null", func_name) ); - pop(& Context); + parser_pop(& Context); return false; } return true; } -# define currtok_noskip (* current( & Context.Tokens, dont_skip_formatting )) -# define currtok (* current( & Context.Tokens, skip_formatting )) -# define peektok (* peek(Context.Tokens, skip_formatting)) -# define prevtok (* previous( Context.Tokens, dont_skip_formatting)) -# define nexttok (* next( Context.Tokens, skip_formatting )) -# define eat( Type_ ) __eat( & Context.Tokens, Type_ ) +# define currtok_noskip (* lex_current( & Context.Tokens, dont_skip_formatting )) +# define currtok (* lex_current( & Context.Tokens, skip_formatting )) +# define peektok (* lex_peek(Context.Tokens, skip_formatting)) +# define prevtok (* lex_previous( Context.Tokens, dont_skip_formatting)) +# define nexttok (* lex_next( Context.Tokens, skip_formatting )) +# define eat( Type_ ) lex__eat( & Context.Tokens, Type_ ) # define left ( array_num(Context.Tokens.Arr) - Context.Tokens.Idx ) #ifdef check @@ -187,9 +187,9 @@ bool _check_parse_args( StrC def, char const* func_name ) # define check_noskip( Type_ ) ( left && currtok_noskip.Type == Type_ ) # define check( Type_ ) ( left && currtok.Type == Type_ ) -# define push_scope() \ - parser::StackNode scope { nullptr, currtok_noskip, parser::NullToken, txt( __func__ ) }; \ - push( & parser::Context, & scope ) +# define push_scope() \ + GEN_NS_PARSER StackNode scope { nullptr, currtok_noskip, GEN_NS_PARSER NullToken, txt( __func__ ) }; \ + parser_push( & parser::Context, & scope ) #pragma endregion Helper Macros @@ -498,11 +498,11 @@ Code parse_array_decl() if ( check( Tok_Operator ) && currtok.Text[0] == '[' && currtok.Text[1] == ']' ) { - Code 
array_expr = untyped_str( to_str(currtok) ); + Code array_expr = untyped_str( tok_to_str(currtok) ); eat( Tok_Operator ); // [] - pop(& Context); + parser_pop(& Context); return array_expr; } @@ -513,15 +513,15 @@ Code parse_array_decl() if ( left == 0 ) { - log_failure( "Error, unexpected end of array declaration ( '[]' scope started )\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Error, unexpected end of array declaration ( '[]' scope started )\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } if ( currtok.Type == Tok_BraceSquare_Close ) { - log_failure( "Error, empty array expression in definition\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Error, empty array expression in definition\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -534,20 +534,20 @@ Code parse_array_decl() untyped_tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)untyped_tok.Text; - Code array_expr = untyped_str( to_str(untyped_tok) ); + Code array_expr = untyped_str( tok_to_str(untyped_tok) ); // [ if ( left == 0 ) { - log_failure( "Error, unexpected end of array declaration, expected ]\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Error, unexpected end of array declaration, expected ]\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } if ( currtok.Type != Tok_BraceSquare_Close ) { - log_failure( "%s: Error, expected ] in array declaration, not %s\n%s", toktype_to_str( currtok.Type ), to_string(Context) ); - pop(& Context); + log_failure( "%s: Error, expected ] in array declaration, not %s\n%s", toktype_to_str( currtok.Type ), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -563,11 +563,11 @@ Code parse_array_decl() array_expr->Next.ast = adjacent_arr_expr.ast; } - pop(& Context); + parser_pop(& Context); return array_expr; } - pop(& Context); + parser_pop(& Context); return { nullptr }; } @@ -581,7 +581,7 
@@ CodeAttributes parse_attributes() // There can be more than one attribute. If there is flatten them to a single string. // TODO(Ed): Support keeping an linked list of attributes similar to parameters - while ( left && is_attribute(currtok) ) + while ( left && tok_is_attribute(currtok) ) { if ( check( Tok_Attribute_Open ) ) { @@ -635,7 +635,7 @@ CodeAttributes parse_attributes() len = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )start.Text; } - else if ( is_attribute(currtok) ) + else if ( tok_is_attribute(currtok) ) { eat( currtok.Type ); // @@ -665,7 +665,7 @@ CodeAttributes parse_attributes() if ( len > 0 ) { StrC attribute_txt = { len, start.Text }; - pop(& Context); + parser_pop(& Context); String name_stripped = strip_formatting( attribute_txt, strip_formatting_dont_preserve_newlines ); @@ -678,7 +678,7 @@ CodeAttributes parse_attributes() return ( CodeAttributes )result; } - pop(& Context); + parser_pop(& Context); return { nullptr }; } @@ -687,7 +687,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false ) { if ( which != Tok_Decl_Class && which != Tok_Decl_Struct ) { - log_failure( "Error, expected class or struct, not %s\n%s", toktype_to_str( which ), to_string(Context) ); + log_failure( "Error, expected class or struct, not %s\n%s", toktype_to_str( which ), parser_to_string(Context) ); return InvalidCode; } @@ -734,15 +734,15 @@ Code parse_class_struct( TokType which, bool inplace_def = false ) eat( Tok_Assign_Classifer ); // : - if ( is_access_specifier(currtok) ) + if ( tok_is_access_specifier(currtok) ) { - access = to_access_specifier(currtok); + access = tok_to_access_specifier(currtok); // : eat( currtok.Type ); } Token parent_tok = parse_identifier(); - parent = def_type( to_str(parent_tok) ); + parent = def_type( tok_to_str(parent_tok) ); // : while ( check(Tok_Comma) ) @@ -750,13 +750,13 @@ Code parse_class_struct( TokType which, bool inplace_def = false ) eat( Tok_Comma ); // : , - if ( is_access_specifier(currtok) ) + 
if ( tok_is_access_specifier(currtok) ) { eat(currtok.Type); } Token interface_tok = parse_identifier(); - array_append( interfaces, def_type( to_str(interface_tok) ) ); + array_append( interfaces, def_type( tok_to_str(interface_tok) ) ); // : , ... } } @@ -780,10 +780,10 @@ Code parse_class_struct( TokType which, bool inplace_def = false ) } if ( which == Tok_Decl_Class ) - result = def_class( to_str(name), { body, parent, access, attributes, nullptr, 0, mflags } ); + result = def_class( tok_to_str(name), { body, parent, access, attributes, nullptr, 0, mflags } ); else - result = def_struct( to_str(name), { body, (CodeTypename)parent, access, attributes, nullptr, 0, mflags } ); + result = def_struct( tok_to_str(name), { body, (CodeTypename)parent, access, attributes, nullptr, 0, mflags } ); if ( inline_cmt ) result->InlineCmt = inline_cmt; @@ -827,7 +827,7 @@ CodeBody parse_class_struct_body( TokType which, Token name ) case Tok_Statement_End: { // TODO(Ed): Convert this to a general warning procedure - log_fmt("Dangling end statement found %S\n", to_string(currtok_noskip)); + log_fmt("Dangling end statement found %S\n", tok_to_string(currtok_noskip)); eat( Tok_Statement_End ); continue; } @@ -993,9 +993,9 @@ CodeBody parse_class_struct_body( TokType which, Token name ) Specifier specs_found[16] { Spec_NumSpecifiers }; s32 NumSpecifiers = 0; - while ( left && is_specifier(currtok) ) + while ( left && tok_is_specifier(currtok) ) { - Specifier spec = strc_to_specifier( to_str(currtok) ); + Specifier spec = strc_to_specifier( tok_to_str(currtok) ); b32 ignore_spec = false; @@ -1022,8 +1022,8 @@ CodeBody parse_class_struct_body( TokType which, Token name ) break; default: - log_failure( "Invalid specifier %s for variable\n%s", spec_to_str(spec), to_string(Context) ); - pop(& Context); + log_failure( "Invalid specifier %s for variable\n%s", spec_to_str(spec), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1042,7 +1042,7 @@ CodeBody 
parse_class_struct_body( TokType which, Token name ) } // - if ( is_attribute(currtok) ) + if ( tok_is_attribute(currtok) ) { // Unfortuantely Unreal has code where there is attirbutes before specifiers CodeAttributes more_attributes = parse_attributes(); @@ -1052,7 +1052,8 @@ CodeBody parse_class_struct_body( TokType which, Token name ) String fused = string_make_reserve( GlobalAllocator, attributes->Content.Len + more_attributes->Content.Len ); string_append_fmt( & fused, "%S %S", attributes->Content, more_attributes->Content ); - attributes->Name = get_cached_string( { string_length(fused), fused }); + StrC attrib_name = { string_length(fused), fused }; + attributes->Name = get_cached_string( attrib_name ); attributes->Content = attributes->Name; // } @@ -1112,15 +1113,15 @@ CodeBody parse_class_struct_body( TokType which, Token name ) eat( currtok.Type ); } - member = untyped_str( to_str(untyped_tok) ); + member = untyped_str( tok_to_str(untyped_tok) ); // Something unknown break; } if ( member == Code_Invalid ) { - log_failure( "Failed to parse member\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Failed to parse member\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1129,7 +1130,7 @@ CodeBody parse_class_struct_body( TokType which, Token name ) eat( Tok_BraceCurly_Close ); // { } - pop(& Context); + parser_pop(& Context); return result; } @@ -1141,12 +1142,12 @@ CodeComment parse_comment() CodeComment result = (CodeComment) make_code(); result->Type = CT_Comment; - result->Content = get_cached_string( to_str(currtok_noskip) ); + result->Content = get_cached_string( tok_to_str(currtok_noskip) ); result->Name = result->Content; // result->Token = currtok_noskip; eat( Tok_Comment ); - pop(& Context); + parser_pop(& Context); return result; } @@ -1178,18 +1179,18 @@ Code parse_complicated_definition( TokType which ) // Its a forward declaration only Code result = parse_forward_or_definition( which, is_inplace 
); // ; - pop(& Context); + parser_pop(& Context); return result; } Token tok = tokens.Arr[ idx - 1 ]; - if ( is_specifier(tok) && spec_is_trailing( strc_to_specifier( to_str(tok))) ) + if ( tok_is_specifier(tok) && spec_is_trailing( strc_to_specifier( tok_to_str(tok))) ) { // (...) ...; s32 spec_idx = idx - 1; Token spec = tokens.Arr[spec_idx]; - while ( is_specifier(spec) && spec_is_trailing( strc_to_specifier( to_str(spec))) ) + while ( tok_is_specifier(spec) && spec_is_trailing( strc_to_specifier( tok_to_str(spec))) ) { -- spec_idx; spec = tokens.Arr[spec_idx]; @@ -1202,12 +1203,12 @@ Code parse_complicated_definition( TokType which ) Code result = parse_operator_function_or_variable( false, { nullptr }, { nullptr } ); // , or Name> ... - pop(& Context); + parser_pop(& Context); return result; } - log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) ); - pop(& Context); + log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } if ( tok.Type == Tok_Identifier ) @@ -1239,7 +1240,7 @@ Code parse_complicated_definition( TokType which ) // : ; ok_to_parse = true; Code result = parse_enum(); - pop(& Context); + parser_pop(& Context); return result; } else if ( is_indirection ) @@ -1251,14 +1252,14 @@ Code parse_complicated_definition( TokType which ) if ( ! ok_to_parse ) { - log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) ); - pop(& Context); + log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } Code result = parse_operator_function_or_variable( false, { nullptr }, { nullptr } ); // , or Name> ... 
- pop(& Context); + parser_pop(& Context); return result; } else if ( tok.Type >= Tok_Type_Unsigned && tok.Type <= Tok_Type_MS_W64 ) @@ -1270,8 +1271,8 @@ Code parse_complicated_definition( TokType which ) && ( tokens.Arr[idx - 4].Type != which)) ) { - log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) ); - pop(& Context); + log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1279,7 +1280,7 @@ Code parse_complicated_definition( TokType which ) // : ; // : ; Code result = parse_enum(); - pop(& Context); + parser_pop(& Context); return result; } else if ( tok.Type == Tok_BraceCurly_Close ) @@ -1287,7 +1288,7 @@ Code parse_complicated_definition( TokType which ) // Its a definition Code result = parse_forward_or_definition( which, is_inplace ); // { ... }; - pop(& Context); + parser_pop(& Context); return result; } else if ( tok.Type == Tok_BraceSquare_Close ) @@ -1295,13 +1296,13 @@ Code parse_complicated_definition( TokType which ) // Its an array definition Code result = parse_operator_function_or_variable( false, { nullptr }, { nullptr } ); // [ ... ]; - pop(& Context); + parser_pop(& Context); return result; } else { - log_failure( "Unsupported or bad member definition after %s declaration\n%S", toktype_to_str(which).Ptr, to_string(Context) ); - pop(& Context); + log_failure( "Unsupported or bad member definition after %s declaration\n%S", toktype_to_str(which).Ptr, parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } } @@ -1319,38 +1320,38 @@ CodeDefine parse_define() if ( ! 
check( Tok_Identifier ) ) { - log_failure( "Error, expected identifier after #define\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Error, expected identifier after #define\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } Context.Scope->Name = currtok; - define->Name = get_cached_string( to_str(currtok) ); + define->Name = get_cached_string( tok_to_str(currtok) ); eat( Tok_Identifier ); // #define if ( ! check( Tok_Preprocess_Content )) { - log_failure( "Error, expected content after #define %s\n%s", define->Name, to_string(Context) ); - pop(& Context); + log_failure( "Error, expected content after #define %s\n%s", define->Name, parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } if ( currtok.Length == 0 ) { - define->Content = get_cached_string( to_str(currtok) ); + define->Content = get_cached_string( tok_to_str(currtok) ); eat( Tok_Preprocess_Content ); // #define - pop(& Context); + parser_pop(& Context); return define; } - define->Content = get_cached_string( string_to_strc( strip_formatting( to_str(currtok), strip_formatting_dont_preserve_newlines )) ); + define->Content = get_cached_string( string_to_strc( strip_formatting( tok_to_str(currtok), strip_formatting_dont_preserve_newlines )) ); eat( Tok_Preprocess_Content ); // #define - pop(& Context); + parser_pop(& Context); return define; } @@ -1366,8 +1367,8 @@ Code parse_assignment_expression() if ( currtok.Type == Tok_Statement_End && currtok.Type != Tok_Comma ) { - log_failure( "Expected expression after assignment operator\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Expected expression after assignment operator\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1387,7 +1388,7 @@ Code parse_assignment_expression() } expr_tok.Length = ( ( sptr )currtok.Text + currtok.Length ) - ( sptr )expr_tok.Text - 1; - expr = untyped_str( to_str(expr_tok) ); + expr = untyped_str( 
tok_to_str(expr_tok) ); // = return expr; } @@ -1418,7 +1419,7 @@ Code parse_forward_or_definition( TokType which, bool is_inplace ) default: log_failure( "Error, wrong token type given to parse_complicated_definition " "(only supports class, enum, struct, union) \n%s" - , to_string(Context) ); + , parser_to_string(Context) ); return InvalidCode; } @@ -1439,16 +1440,16 @@ CodeFn parse_function_after_name( // ( ) // TODO(Ed), Review old comment : These have to be kept separate from the return type's specifiers. - while ( left && is_specifier(currtok) ) + while ( left && tok_is_specifier(currtok) ) { if ( specifiers.ast == nullptr ) { - specifiers = def_specifier( strc_to_specifier( to_str(currtok)) ); + specifiers = def_specifier( strc_to_specifier( tok_to_str(currtok)) ); eat( currtok.Type ); continue; } - specifiers_append(specifiers, strc_to_specifier( to_str(currtok)) ); + specifiers_append(specifiers, strc_to_specifier( tok_to_str(currtok)) ); eat( currtok.Type ); } // ( ) @@ -1460,7 +1461,7 @@ CodeFn parse_function_after_name( body = parse_function_body(); if ( body == Code_Invalid ) { - pop(& Context); + parser_pop(& Context); return InvalidCode; } // ( ) { } @@ -1491,7 +1492,7 @@ CodeFn parse_function_after_name( } String - name_stripped = string_make_strc( GlobalAllocator, to_str(name) ); + name_stripped = string_make_strc( GlobalAllocator, tok_to_str(name) ); strip_space(name_stripped); CodeFn @@ -1509,8 +1510,8 @@ CodeFn parse_function_after_name( default: { - log_failure("Body must be either of Function_Body or Untyped type, %s\n%s", code_debug_str(body), to_string(Context)); - pop(& Context); + log_failure("Body must be either of Function_Body or Untyped type, %s\n%s", code_debug_str(body), parser_to_string(Context)); + parser_pop(& Context); return InvalidCode; } } @@ -1537,14 +1538,13 @@ CodeFn parse_function_after_name( if ( inline_cmt ) result->InlineCmt = inline_cmt; - pop(& Context); + parser_pop(& Context); return result; } internal Code 
parse_function_body() { - push_scope(); eat( Tok_BraceCurly_Open ); @@ -1579,7 +1579,7 @@ Code parse_function_body() eat( Tok_BraceCurly_Close ); - pop(& Context); + parser_pop(& Context); return result; } @@ -1616,15 +1616,15 @@ CodeBody parse_global_nspace( CodeType which ) { case Tok_Comma: { - log_failure("Dangling comma found: %S\nContext:\n%S", to_string(currtok), to_string(Context)); - pop( & Context); + log_failure("Dangling comma found: %S\nContext:\n%S", tok_to_string(currtok), parser_to_string(Context)); + parser_pop( & Context); return InvalidCode; } break; case Tok_Statement_End: { // TODO(Ed): Convert this to a general warning procedure - log_fmt("Dangling end statement found %S\n", to_string(currtok_noskip)); + log_fmt("Dangling end statement found %S\n", tok_to_string(currtok_noskip)); eat( Tok_Statement_End ); continue; } @@ -1650,7 +1650,7 @@ CodeBody parse_global_nspace( CodeType which ) case Tok_Decl_Extern_Linkage: if ( which == CT_Extern_Linkage_Body ) - log_failure( "Nested extern linkage\n%s", to_string(Context) ); + log_failure( "Nested extern linkage\n%s", parser_to_string(Context) ); member = parse_extern_link(); // extern "..." { ... } @@ -1723,8 +1723,8 @@ CodeBody parse_global_nspace( CodeType which ) if ( member == Code_Invalid ) { - log_failure( "Failed to parse member\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Failed to parse member\n%s", parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } } @@ -1747,7 +1747,7 @@ CodeBody parse_global_nspace( CodeType which ) case Tok_Module_Export: if ( which == CT_Export_Body ) - log_failure( "Nested export declaration\n%s", to_string(Context) ); + log_failure( "Nested export declaration\n%s", parser_to_string(Context) ); member = parse_export_body(); // export { ... 
} @@ -1784,9 +1784,9 @@ CodeBody parse_global_nspace( CodeType which ) Specifier specs_found[16] { Spec_NumSpecifiers }; s32 NumSpecifiers = 0; - while ( left && is_specifier(currtok) ) + while ( left && tok_is_specifier(currtok) ) { - Specifier spec = strc_to_specifier( to_str(currtok) ); + Specifier spec = strc_to_specifier( tok_to_str(currtok) ); bool ignore_spec = false; @@ -1816,8 +1816,8 @@ CodeBody parse_global_nspace( CodeType which ) default: StrC spec_str = spec_to_str(spec); - log_failure( "Invalid specifier %.*s for variable\n%s", spec_str.Len, spec_str, to_string(Context) ); - pop(& Context); + log_failure( "Invalid specifier %.*s for variable\n%s", spec_str.Len, spec_str, parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1892,8 +1892,8 @@ CodeBody parse_global_nspace( CodeType which ) if ( member == Code_Invalid ) { - log_failure( "Failed to parse member\nToken: %s\nContext:\n%s", to_string(currtok_noskip), to_string(Context) ); - pop(& Context); + log_failure( "Failed to parse member\nToken: %S\nContext:\n%S", tok_to_string(currtok_noskip), parser_to_string(Context) ); + parser_pop(& Context); return InvalidCode; } @@ -1905,7 +1905,7 @@ CodeBody parse_global_nspace( CodeType which ) eat( Tok_BraceCurly_Close ); // { } - pop(& Context); + parser_pop(& Context); return result; } @@ -2060,8 +2060,8 @@ Token parse_identifier( bool* possible_member_function ) if ( left == 0 ) { - log_failure( "Error, unexpected end of static symbol identifier\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Error, unexpected end of static symbol identifier\n%s", parser_to_string(Context) ); + parser_pop(& Context); return { nullptr, 0, Tok_Invalid }; } @@ -2071,12 +2071,12 @@ Token parse_identifier( bool* possible_member_function ) if (is_destructor) { name.Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )name.Text; - pop(& Context); + parser_pop(& Context); return name; } - log_failure( "Error, had a ~ operator after 
%S but not a destructor\n%s", toktype_to_str( prevtok.Type ), to_string(Context) ); - pop(& Context); + log_failure( "Error, had a ~ operator after %S but not a destructor\n%s", toktype_to_str( prevtok.Type ), parser_to_string(Context) ); + parser_pop(& Context); return { nullptr, 0, Tok_Invalid }; } @@ -2087,16 +2087,16 @@ Token parse_identifier( bool* possible_member_function ) else { - log_failure( "Found a member function pointer identifier but the parsing context did not expect it\n%s", to_string(Context) ); - pop(& Context); + log_failure( "Found a member function pointer identifier but the parsing context did not expect it\n%s", parser_to_string(Context) ); + parser_pop(& Context); return { nullptr, 0, Tok_Invalid }; } } if ( currtok.Type != Tok_Identifier ) { - log_failure( "Error, expected static symbol identifier, not %s\n%s", toktype_to_str( currtok.Type ), to_string(Context) ); - pop(& Context); + log_failure( "Error, expected static symbol identifier, not %s\n%s", toktype_to_str( currtok.Type ), parser_to_string(Context) ); + parser_pop(& Context); return { nullptr, 0, Tok_Invalid }; } @@ -2109,7 +2109,7 @@ Token parse_identifier( bool* possible_member_function ) } //