10 Commits

Author SHA1 Message Date
Ed_
822a3b07dc misc changes (technically, old was never pushed...) 2025-06-15 17:56:26 -04:00
Ed_
685bba36d5 .gitignore/ai 2025-04-23 19:52:27 -04:00
Ed_
346e8e3305 fixes for CodeSpecifiers cpp mappings 2025-04-02 21:12:31 -04:00
Ed_
bfc754e66c progress 2025-03-19 13:01:51 -04:00
Ed_
84f4fc5ae9 lexer/parser revamp progress: got c11 ver compiling (synchup before continuation) 2025-03-19 12:09:07 -04:00
Ed_
ad5cb6597b progress on parser and lexer revamp 2025-03-19 11:48:40 -04:00
Ed_
1c7dd4ab32 WIP Broken: Working towards major changes to lexer
Lexer tokens now tracked using TokenSlice. ParserContext as the id for current token during traversal.
Last progress on errors needs updates to interface.parsing.cpp
Not adding message support yet as that will need to be revamped with the logging, right now just focused on getting both lexer and parser to use proper info data structures.
Thinking of separating ParseContext from the general lib Context, plan is to just have the parser context have its allocator and other references it needs.
So there will seem to be redundant parameter passing to some procedures for now.
The big end goal of this other than the parser's compression is the ability to support multi-threading.
Immediate concern other than making sure everything necessary is only within ParseContext, etc are things related to logging, or otherwise that are not thread dependent.
Those can get guarded but I don't have full intuition on what will have that (most likely the library's provided allocator/s as well will need guards introduced).
I'll concern myself more with charting those out once things are at least lifted properly. Worst case a trivial situation can be achieved by the user by just abusing multiple contexts/allocators/etc as we already have in place.
2025-03-18 03:26:14 -04:00
Ed_
1e7fdcec16 preparing to revamp lexer 2025-03-17 01:20:56 -04:00
Ed_
2ed36506b1 progress on modularizing parser paths 2025-03-17 01:09:46 -04:00
Ed_
790087aa3c progress on modularizing parser paths 2025-03-16 23:13:46 -04:00
14 changed files with 1175 additions and 969 deletions

2
.gitignore vendored
View File

@ -44,3 +44,5 @@ test/c_library/gen
test/cpp_library/gen
!scripts/helpers/refactor.exe
# ai/**

View File

@ -252,7 +252,7 @@ struct CodeSpecifiers
#if ! GEN_C_LIKE_CPP
Using_Code( CodeSpecifiers );
bool append( Specifier spec ) { return specifiers_append(* this, spec); }
s32 has( Specifier spec ) { return specifiers_has(* this, spec); }
bool has( Specifier spec ) { return specifiers_has(* this, spec); }
s32 index_of( Specifier spec ) { return specifiers_index_of(* this, spec); }
s32 remove( Specifier to_remove ) { return specifiers_remove(* this, to_remove); }
StrBuilder to_strbuilder() { return specifiers_to_strbuilder(* this ); }
@ -1074,11 +1074,12 @@ forceinline bool has_entries (CodeParams params ) {
forceinline StrBuilder to_strbuilder(CodeParams params ) { return params_to_strbuilder(params); }
forceinline void to_strbuilder(CodeParams params, StrBuilder& result ) { return params_to_strbuilder_ref(params, & result); }
forceinline bool append (CodeSpecifiers specifiers, Specifier spec) { return specifiers_append(specifiers, spec); }
forceinline s32 has (CodeSpecifiers specifiers, Specifier spec) { return specifiers_has(specifiers, spec); }
forceinline s32 remove (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
forceinline StrBuilder to_strbuilder(CodeSpecifiers specifiers) { return specifiers_to_strbuilder(specifiers); }
forceinline void to_strbuilder(CodeSpecifiers specifiers, StrBuilder& result) { return specifiers_to_strbuilder_ref(specifiers, & result); }
forceinline bool append (CodeSpecifiers specifiers, Specifier spec) { return specifiers_append(specifiers, spec); }
forceinline bool has (CodeSpecifiers specifiers, Specifier spec) { return specifiers_has(specifiers, spec); }
forceinline s32 index_of (CodeSpecifiers specifiers, Specifier spec) { return specifiers_index_of(specifiers, spec); }
forceinline s32 remove (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
forceinline StrBuilder to_strbuilder (CodeSpecifiers specifiers) { return specifiers_to_strbuilder(specifiers); }
forceinline void to_strbuilder (CodeSpecifiers specifiers, StrBuilder& result) { return specifiers_to_strbuilder_ref(specifiers, & result); }
forceinline void add_interface (CodeStruct self, CodeTypename interface) { return struct_add_interface(self, interface); }
forceinline StrBuilder to_strbuilder (CodeStruct self) { return struct_to_strbuilder(self); }

View File

@ -3,8 +3,8 @@
#include "code_serialization.cpp"
#endif
internal void parser_init();
internal void parser_deinit();
internal void parser_init(Context* ctx);
internal void parser_deinit(Context* ctx);
internal
void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
@ -343,7 +343,7 @@ void init(Context* ctx)
}
define_constants();
parser_init();
parser_init(ctx);
++ context_counter;
}
@ -392,7 +392,7 @@ void deinit(Context* ctx)
while ( left--, left );
array_free( ctx->Fallback_AllocatorBuckets);
}
parser_deinit();
parser_deinit(ctx);
if (_ctx == ctx)
_ctx = nullptr;

View File

@ -15,38 +15,6 @@
\▓▓▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓
*/
enum LogLevel //: u32
{
LL_Null,
LL_Note,
LL_Warning,
LL_Error,
LL_Fatal,
LL_UnderlyingType = GEN_U32_MAX,
};
typedef enum LogLevel LogLevel;
Str loglevel_to_str(LogLevel level)
{
local_persist
Str lookup[] = {
{ "Null", sizeof("Null") - 1 },
{ "Note", sizeof("Note") - 1 },
{ "Warning", sizeof("Info") - 1 },
{ "Error", sizeof("Error") - 1 },
{ "Fatal", sizeof("Fatal") - 1 },
};
return lookup[level];
}
struct LogEntry
{
Str msg;
LogLevel level;
};
typedef void LoggerProc(LogEntry entry);
// Note(Ed): This is subject to heavily change
// with upcoming changes to the library's fallback (default) allocations strategy;
// and major changes to lexer/parser context usage.
@ -110,9 +78,6 @@ struct Context
StringTable StrCache;
// TODO(Ed): This needs to be just handled by a parser context
Array(Token) Lexer_Tokens;
// TODO(Ed): Active parse context vs a parse result need to be separated conceptually
ParseContext parser;
@ -125,14 +90,6 @@ struct Context
// An implicit context interface will be provided instead as wrapper procedures as convience.
GEN_API extern Context* _ctx;
// By default this library will either crash or exit if an error is detected while generating codes.
// Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur.
#ifdef GEN_DONT_USE_FATAL
#define log_failure log_fmt
#else
#define log_failure GEN_FATAL
#endif
// TODO(Ed): Swap all usage of this with logger_fmt (then rename logger_fmt to log_fmt)
inline
ssize log_fmt(char const* fmt, ...)
@ -151,7 +108,7 @@ inline
void logger_fmt(Context* ctx, LogLevel level, char const* fmt, ...)
{
local_persist thread_local
PrintF_Buffer buf = struct_zero(PrintF_Buffer);
PrintF_Buffer buf = struct_zero_init();
va_list va;
va_start(va, fmt);
@ -396,25 +353,16 @@ forceinline CodeBody def_union_body ( s32 num, Code* codes )
struct ParseStackNode
{
ParseStackNode* Prev;
ParseStackNode* prev;
TokenSlice Tokens;
Token* Start;
Str Name; // The name of the AST node (if parsed)
Str ProcName; // The name of the procedure
Code CodeRel; // Relevant AST node
TokenSlice tokens;
Token* start;
Str name; // The name of the AST node (if parsed)
Str proc_name; // The name of the procedure
Code code_rel; // Relevant AST node
// TODO(Ed): When an error occurs, the parse stack is not released and instead the scope is left dangling.
};
typedef struct ParseMessage ParseMessage;
struct ParseMessage
{
ParseMessage* Next;
ParseStackNode* Scope;
Str Log;
LogLevel Level;
};
struct ParseInfo
{
ParseMessage* messages;

View File

@ -10,50 +10,68 @@
ParseInfo wip_parse_str(LexedInfo lexed, ParseOpts* opts)
{
TokArray toks;
if (lexed.tokens.Num == 0 && lexed.tokens.Ptr == nullptr) {
check_parse_args(lexed.text);
toks = lex(lexed.text);
// TODO(Ed): Lift this.
Context* ctx = _ctx;
TokenSlice slice = { toks.Arr, scast(s32, array_num(toks.Arr)) };
lexed.tokens = slice;
if (lexed.tokens.num == 0 && lexed.tokens.ptr == nullptr) {
check_parse_args(lexed.text);
lexed = lex(ctx, lexed.text);
}
ParseInfo info = struct_zero(ParseInfo);
info.lexed = lexed;
// TODO(Ed): ParseInfo should be set to the parser context.
_ctx->parser.Tokens = toks;
push_scope();
CodeBody result = parse_global_nspace(CT_Global_Body);
parser_pop(& _ctx->parser);
ctx->parser = struct_zero(ParseContext);
ctx->parser.tokens = lexed.tokens;
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeBody result = parse_global_nspace(ctx,CT_Global_Body);
parser_pop(& ctx->parser);
return info;
}
CodeClass parse_class( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
push_scope();
CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class, parser_not_inplace_def );
parser_pop(& _ctx->parser);
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeClass result = (CodeClass) parse_class_struct( ctx, Tok_Decl_Class, parser_not_inplace_def );
parser_pop(& ctx->parser);
return result;
}
CodeConstructor parse_constructor( Str def )
CodeConstructor parse_constructor(Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
// TODO(Ed): Constructors can have prefix attributes
CodeSpecifiers specifiers = NullCode;
@ -80,8 +98,8 @@ CodeConstructor parse_constructor( Str def )
break;
default :
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(_ctx->parser) );
parser_pop(& _ctx->parser);
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(& ctx->parser, ctx->Allocator_Temp) );
parser_pop(& ctx->parser);
return InvalidCode;
}
@ -94,247 +112,337 @@ CodeConstructor parse_constructor( Str def )
eat( currtok.Type );
}
if ( NumSpecifiers )
{
if ( NumSpecifiers ) {
specifiers = def_specifiers_arr( NumSpecifiers, specs_found );
// <specifiers> ...
}
_ctx->parser.Tokens = toks;
CodeConstructor result = parser_parse_constructor( specifiers );
CodeConstructor result = parser_parse_constructor(ctx, specifiers);
parser_pop(& ctx->parser);
return result;
}
CodeDefine parse_define( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
push_scope();
CodeDefine result = parser_parse_define();
parser_pop(& _ctx->parser);
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeDefine result = parser_parse_define(ctx);
parser_pop(& ctx->parser);
return result;
}
CodeDestructor parse_destructor( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
// TODO(Ed): Destructors can have prefix attributes
// TODO(Ed): Destructors can have virtual
_ctx->parser.Tokens = toks;
CodeDestructor result = parser_parse_destructor(NullCode);
CodeDestructor result = parser_parse_destructor(ctx, NullCode);
return result;
}
CodeEnum parse_enum( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
{
parser_pop(& _ctx->parser);
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr ) {
return InvalidCode;
}
_ctx->parser.Tokens = toks;
return parser_parse_enum( parser_not_inplace_def);
return parser_parse_enum(ctx, parser_not_inplace_def);
}
CodeBody parse_export_body( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_export_body();
return parser_parse_export_body(ctx);
}
CodeExtern parse_extern_link( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_extern_link();
return parser_parse_extern_link(ctx);
}
CodeFriend parse_friend( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_friend();
return parser_parse_friend(ctx);
}
CodeFn parse_function( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return (CodeFn) parser_parse_function();
return (CodeFn) parser_parse_function(ctx);
}
CodeBody parse_global_body( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
push_scope();
CodeBody result = parse_global_nspace( CT_Global_Body );
parser_pop(& _ctx->parser);
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeBody result = parse_global_nspace(ctx, CT_Global_Body );
parser_pop(& ctx->parser);
return result;
}
CodeNS parse_namespace( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_namespace();
return parser_parse_namespace(ctx);
}
CodeOperator parse_operator( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return (CodeOperator) parser_parse_operator();
return (CodeOperator) parser_parse_operator(ctx);
}
CodeOpCast parse_operator_cast( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_operator_cast(NullCode);
return parser_parse_operator_cast(ctx, NullCode);
}
CodeStruct parse_struct( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
push_scope();
CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct, parser_not_inplace_def );
parser_pop(& _ctx->parser);
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeStruct result = (CodeStruct) parse_class_struct( ctx, Tok_Decl_Struct, parser_not_inplace_def );
parser_pop(& ctx->parser);
return result;
}
CodeTemplate parse_template( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_template();
return parser_parse_template(ctx);
}
CodeTypename parse_type( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_type( parser_not_from_template, nullptr);
return parser_parse_type( ctx, parser_not_from_template, nullptr);
}
CodeTypedef parse_typedef( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_typedef();
return parser_parse_typedef(ctx);
}
CodeUnion parse_union( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_union( parser_not_inplace_def);
return parser_parse_union(ctx, parser_not_inplace_def);
}
CodeUsing parse_using( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_using();
return parser_parse_using(ctx);
}
CodeVar parse_variable( Str def )
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def );
TokArray toks = lex( def );
if ( toks.Arr == nullptr )
ctx->parser = struct_zero(ParseContext);
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode;
_ctx->parser.Tokens = toks;
return parser_parse_variable();
return parser_parse_variable(ctx);
}
// Undef helper macros

View File

@ -179,7 +179,7 @@ Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... )
Code untyped_toks( TokenSlice tokens )
{
if ( tokens.Num == 0 ) {
if ( tokens.num == 0 ) {
log_failure( "untyped_toks: empty token slice" );
return InvalidCode;
}

View File

@ -4,9 +4,9 @@
#include "gen/etoktype.hpp"
#endif
StrBuilder tok_to_strbuilder(Token tok)
StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok)
{
StrBuilder result = strbuilder_make_reserve( _ctx->Allocator_Temp, kilobytes(4) );
StrBuilder result = strbuilder_make_reserve( ainfo, kilobytes(4) );
Str type_str = toktype_to_str( tok.Type );
strbuilder_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
@ -17,55 +17,55 @@ StrBuilder tok_to_strbuilder(Token tok)
return result;
}
bool lex__eat( TokArray* self, TokType type );
bool lex__eat(Context* ctx, ParseContext* self, TokType type );
Token* lex_current(TokArray* self, bool skip_formatting )
Token* lex_current(ParseContext* self, bool skip_formatting )
{
if ( skip_formatting )
{
while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
self->Idx++;
while ( self->tokens.ptr[self->token_id].Type == Tok_NewLine || self->tokens.ptr[self->token_id].Type == Tok_Comment )
self->token_id++;
}
return & self->Arr[self->Idx];
return & self->tokens.ptr[self->token_id];
}
Token* lex_peek(TokArray self, bool skip_formatting)
Token* lex_peek(ParseContext const* self, bool skip_formatting)
{
s32 idx = self.Idx;
s32 idx = self->token_id;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
idx++;
return & self.Arr[idx];
return & self->tokens.ptr[idx];
}
return & self.Arr[idx];
return & self->tokens.ptr[idx];
}
Token* lex_previous(TokArray self, bool skip_formatting)
Token* lex_previous(ParseContext const* self, bool skip_formatting)
{
s32 idx = self.Idx;
s32 idx = self->token_id;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
idx --;
return & self.Arr[idx];
return & self->tokens.ptr[idx];
}
return & self.Arr[idx - 1];
return & self->tokens.ptr[idx - 1];
}
Token* lex_next(TokArray self, bool skip_formatting)
Token* lex_next(ParseContext const* self, bool skip_formatting)
{
s32 idx = self.Idx;
s32 idx = self->token_id;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
idx++;
return & self.Arr[idx + 1];
return & self->tokens.ptr[idx + 1];
}
return & self.Arr[idx + 1];
return & self->tokens.ptr[idx + 1];
}
enum
@ -137,7 +137,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
);
// GEN_DEBUG_TRAP();
}
array_append( _ctx->Lexer_Tokens, name );
array_append(ctx->tokens, name);
if ( ctx->left && (* ctx->scanner) == '(' )
{
@ -152,7 +152,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
}
Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess };
array_append( _ctx->Lexer_Tokens, opening_paren );
array_append(ctx->tokens, opening_paren);
move_forward();
Token last_parameter = {};
@ -168,7 +168,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
move_forward();
move_forward();
array_append(_ctx->Lexer_Tokens, parameter);
array_append(ctx->tokens, parameter);
skip_whitespace();
last_parameter = parameter;
@ -202,7 +202,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
move_forward();
parameter.Text.Len++;
}
array_append(_ctx->Lexer_Tokens, parameter);
array_append(ctx->tokens, parameter);
skip_whitespace();
last_parameter = parameter;
}
@ -229,7 +229,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
return Lex_ReturnNull;
}
Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess };
array_append(_ctx->Lexer_Tokens, comma);
array_append(ctx->tokens, comma);
move_forward();
}
@ -243,7 +243,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
return Lex_ReturnNull;
}
Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess };
array_append(_ctx->Lexer_Tokens, closing_paren);
array_append(ctx->tokens, closing_paren);
move_forward();
}
else if ( registered_macro && macro_is_functional( * registered_macro) ) {
@ -268,7 +268,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
{
char const* hash = ctx->scanner;
Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
array_append( _ctx->Lexer_Tokens, hash_tok );
array_append(ctx->tokens, hash_tok);
move_forward();
skip_whitespace();
@ -344,14 +344,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
ctx->token.Text.Ptr = hash;
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
return Lex_Continue; // Skip found token, its all handled here.
}
if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
{
ctx->token.Flags |= TF_Preprocess_Cond;
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
end_line();
return Lex_Continue;
}
@ -360,7 +360,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Flags |= TF_Preprocess_Cond;
}
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
skip_whitespace();
@ -379,7 +379,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( (* ctx->scanner) != '"' && (* ctx->scanner) != '<' )
{
StrBuilder directive_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );
StrBuilder directive_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );
log_failure( "gen::Parser::lex: Expected '\"' or '<' after #include, not '%c' (%d, %d)\n%s"
, (* ctx->scanner)
@ -411,7 +411,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
move_forward();
}
array_append( _ctx->Lexer_Tokens, preprocess_content );
array_append(ctx->tokens, preprocess_content);
return Lex_Continue; // Skip found token, its all handled here.
}
@ -446,8 +446,8 @@ s32 lex_preprocessor_directive( LexContext* ctx )
}
else
{
StrBuilder directive_str = strbuilder_make_length( _ctx->Allocator_Temp, ctx->token.Text.Ptr, ctx->token.Text.Len );
StrBuilder content_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );
StrBuilder directive_str = strbuilder_make_length( ctx->allocator_temp, ctx->token.Text.Ptr, ctx->token.Text.Len );
StrBuilder content_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );
log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)"
" in preprocessor directive '%s' (%d, %d)\n%s"
@ -475,14 +475,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
preprocess_content.Text.Len++;
}
array_append( _ctx->Lexer_Tokens, preprocess_content );
array_append(ctx->tokens, preprocess_content);
return Lex_Continue; // Skip found token, its all handled here.
}
void lex_found_token( LexContext* ctx )
{
if ( ctx->token.Type != Tok_Invalid ) {
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
return;
}
@ -508,7 +508,7 @@ void lex_found_token( LexContext* ctx )
}
ctx->token.Type = type;
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
return;
}
if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
@ -517,13 +517,13 @@ void lex_found_token( LexContext* ctx )
{
ctx->token.Type = type;
ctx->token.Flags |= TF_Specifier;
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
return;
}
if ( type != Tok_Invalid )
{
ctx->token.Type = type;
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
return;
}
@ -561,7 +561,7 @@ void lex_found_token( LexContext* ctx )
ctx->token.Type = Tok_Identifier;
}
array_append( _ctx->Lexer_Tokens, ctx->token );
array_append(ctx->tokens, ctx->token);
}
// TODO(Ed): We should dynamically allocate the lexer's array in Allocator_DyanmicContainers.
@ -569,45 +569,34 @@ void lex_found_token( LexContext* ctx )
// TODO(Ed): We need to to attempt to recover from a lex failure?
neverinline
// void lex( Array<Token> tokens, Str content )
TokArray lex( Str content )
LexedInfo lex(Context* lib_ctx, Str content)
{
LexContext c; LexContext* ctx = & c;
c.content = content;
c.left = content.Len;
c.scanner = content.Ptr;
LexedInfo info = struct_zero(LexedInfo);
char const* word = c.scanner;
s32 word_length = 0;
LexContext c = struct_zero(LexContext); LexContext* ctx = & c;
c.allocator_temp = lib_ctx->Allocator_Temp;
c.content = content;
c.left = content.Len;
c.scanner = content.Ptr;
c.line = 1;
c.column = 1;
c.tokens = array_init_reserve(Token, lib_ctx->Allocator_DyanmicContainers, lib_ctx->InitSize_LexerTokens );
c.line = 1;
c.column = 1;
// TODO(Ed): Re-implement to new constraints:
// 1. Ability to continue on error
// 2. Return a lexed info.
skip_whitespace();
if ( c.left <= 0 )
{
if ( c.left <= 0 ) {
log_failure( "gen::lex: no tokens found (only whitespace provided)" );
TokArray null_array = {};
return null_array;
return info;
}
array_clear(_ctx->Lexer_Tokens);
b32 preprocess_args = true;
while (c.left )
{
#if 0
if (Tokens.num())
{
log_fmt("\nLastTok: %SB", Tokens.back().to_strbuilder());
}
#endif
{
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
c.token = thanks_c;
}
c.token = struct_init(Token) { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
bool is_define = false;
@ -626,7 +615,7 @@ TokArray lex( Str content )
c.token.Type = Tok_NewLine;
c.token.Text.Len++;
array_append( _ctx->Lexer_Tokens, c.token );
array_append(c.tokens, c.token);
continue;
}
}
@ -665,7 +654,7 @@ TokArray lex( Str content )
c.token.Text.Len++;
move_forward();
array_append( _ctx->Lexer_Tokens, c.token );
array_append(c.tokens, c.token);
}
}
continue;
@ -673,8 +662,7 @@ TokArray lex( Str content )
case Lex_ReturnNull:
{
TokArray tok_array = {};
return tok_array;
return info;
}
}
}
@ -701,7 +689,7 @@ TokArray lex( Str content )
}
else
{
StrBuilder context_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) );
StrBuilder context_str = strbuilder_fmt_buf( lib_ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) );
log_failure( "gen::lex: invalid varadic argument, expected '...' got '..%c' (%d, %d)\n%s", (* ctx->scanner), c.line, c.column, context_str );
}
@ -1122,7 +1110,7 @@ TokArray lex( Str content )
move_forward();
c.token.Text.Len++;
}
array_append( _ctx->Lexer_Tokens, c.token );
array_append(c.tokens, c.token);
continue;
}
else if ( (* ctx->scanner) == '*' )
@ -1158,7 +1146,7 @@ TokArray lex( Str content )
move_forward();
c.token.Text.Len++;
}
array_append( _ctx->Lexer_Tokens, c.token );
array_append(c.tokens, c.token);
// end_line();
continue;
}
@ -1246,14 +1234,14 @@ TokArray lex( Str content )
}
else
{
s32 start = max( 0, array_num(_ctx->Lexer_Tokens) - 100 );
s32 start = max( 0, array_num(c.tokens) - 100 );
log_fmt("\n%d\n", start);
for ( s32 idx = start; idx < array_num(_ctx->Lexer_Tokens); idx++ )
for ( s32 idx = start; idx < array_num(c.tokens); idx++ )
{
log_fmt( "Token %d Type: %s : %.*s\n"
, idx
, toktype_to_str( _ctx->Lexer_Tokens[ idx ].Type ).Ptr
, _ctx->Lexer_Tokens[ idx ].Text.Len, _ctx->Lexer_Tokens[ idx ].Text.Ptr
, toktype_to_str( c.tokens[ idx ].Type ).Ptr
, c.tokens[ idx ].Text.Len, c.tokens[ idx ].Text.Ptr
);
}
@ -1269,7 +1257,7 @@ TokArray lex( Str content )
FoundToken:
{
lex_found_token( ctx );
TokType last_type = array_back(_ctx->Lexer_Tokens)->Type;
TokType last_type = array_back(c.tokens)->Type;
if ( last_type == Tok_Preprocess_Macro_Stmt || last_type == Tok_Preprocess_Macro_Expr )
{
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
@ -1284,21 +1272,22 @@ TokArray lex( Str content )
c.token.Text.Len++;
move_forward();
array_append( _ctx->Lexer_Tokens, c.token );
array_append(c.tokens, c.token);
continue;
}
}
}
}
if ( array_num(_ctx->Lexer_Tokens) == 0 ) {
if ( array_num(c.tokens) == 0 ) {
log_failure( "Failed to lex any tokens" );
TokArray tok_array = {};
return tok_array;
return info;
}
TokArray result = { _ctx->Lexer_Tokens, 0 };
return result;
info.messages = c.messages;
info.text = content;
info.tokens = struct_init(TokenSlice) { pcast(Token*, c.tokens), scast(s32, array_num(c.tokens)) };
return info;
}
#undef move_forward

File diff suppressed because it is too large Load Diff

View File

@ -89,16 +89,16 @@ bool tok_is_end_definition(Token tok) {
return bitfield_is_set( u32, tok.Flags, TF_EndDefinition );
}
StrBuilder tok_to_strbuilder(Token tok);
StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok);
struct TokenSlice
{
Token* Ptr;
s32 Num;
Token* ptr;
s32 num;
#if GEN_COMPILER_CPP
forceinline operator Token* () const { return Ptr; }
forceinline Token& operator[]( ssize index ) const { return Ptr[index]; }
forceinline operator Token* () const { return ptr; }
forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
#endif
};
@ -118,28 +118,52 @@ struct TokArray
s32 Idx;
};
typedef struct LexerMessage LexerMessage;
// One diagnostic produced while lexing, stored as a node in an intrusive
// singly-linked list (head lives in LexContext.messages / LexedInfo.messages).
struct LexerMessage
{
LexerMessage* next;    // next diagnostic in the list, null-terminated
Str content;           // message text — presumably allocated from the lexer's temp allocator; TODO confirm
LogLevel level;        // severity (LL_Note .. LL_Fatal)
};
// All mutable state the lexer threads through a single lex() run.
// Collecting it in one struct (instead of library-global fields such as the
// old _ctx->Lexer_Tokens) is groundwork for multi-threaded lexing.
struct LexContext
{
AllocatorInfo allocator_temp;  // scratch allocator for transient strings (e.g. error context buffers)
LexerMessage* messages;        // head of the diagnostics list accumulated so far
Str content;                   // full source text being lexed
s32 left;                      // bytes remaining ahead of `scanner`
char const* scanner;           // cursor into `content`, advanced by move_forward()
s32 line;                      // 1-based? current line for diagnostics — TODO confirm base
s32 column;                    // current column for diagnostics
Token token;                   // token currently being assembled before array_append(tokens, token)
Array(Token) tokens;           // tokens emitted so far; becomes LexedInfo.tokens on success
};
struct LexedInfo
{
Str text;
TokenSlice tokens;
LexerMessage* messages;
Str text;
TokenSlice tokens;
};
typedef struct ParseStackNode ParseStackNode;
typedef struct ParseMessage ParseMessage;
// One diagnostic produced while parsing, stored as a node in an intrusive
// singly-linked list; also records the parse-stack scope it was raised in.
struct ParseMessage
{
ParseMessage*   Next;     // next diagnostic in the list, null-terminated
ParseStackNode* Scope;    // parser stack frame active when the message was raised
Str             Content;  // message text
LogLevel        Level;    // severity (LL_Note .. LL_Fatal)
};
struct ParseContext
{
TokArray Tokens;
ParseStackNode* Scope;
ParseMessage* messages;
ParseStackNode* scope;
// TokArray Tokens;
TokenSlice tokens;
s32 token_id;
};
enum MacroType : u16

View File

@ -31,6 +31,47 @@
*/
// Severity levels for library diagnostics (lexer/parser messages, LogEntry).
// Values are contiguous from LL_Null so they can index lookup tables
// (see loglevel_to_str).
enum LogLevel //: u32
{
LL_Null,
LL_Note,
LL_Warning,
LL_Error,
LL_Fatal,
LL_UnderlyingType = GEN_U32_MAX, // sentinel pinning the enum's storage to 32 bits in C (mirrors the `: u32` intent above)
};
typedef enum LogLevel LogLevel;
// Maps a LogLevel to its display name as a Str (pointer + length, no allocation).
// Out-of-range values — including the LL_UnderlyingType sentinel — fall back to
// the "Null" entry instead of indexing past the table.
Str loglevel_to_str(LogLevel level)
{
	local_persist
	Str lookup[] = {
		// sizeof(literal) - 1 excludes the null terminator.
		{ "Null",    sizeof("Null")    - 1 },
		{ "Note",    sizeof("Note")    - 1 },
		{ "Warning", sizeof("Warning") - 1 }, // BUGFIX: was sizeof("Info") - 1, which truncated "Warning" to 4 chars
		{ "Error",   sizeof("Error")   - 1 },
		{ "Fatal",   sizeof("Fatal")   - 1 },
	};
	// Guard against LL_UnderlyingType (GEN_U32_MAX) or a corrupt value;
	// the old unguarded lookup[level] read far out of bounds for those.
	if ( level < LL_Null || level > LL_Fatal )
		return lookup[LL_Null];
	return lookup[level];
}
typedef struct LogEntry LogEntry;
// A single formatted log record handed to a user-provided LoggerProc.
struct LogEntry
{
Str      msg;    // formatted message text
LogLevel level;  // severity of this entry
};
typedef void LoggerProc(LogEntry entry);
// By default this library will either crash or exit if an error is detected while generating codes.
// Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur.
#ifdef GEN_DONT_USE_FATAL
#define log_failure log_fmt
#else
#define log_failure GEN_FATAL
#endif
enum AccessSpec : u32
{
AccessSpec_Default,

View File

@ -17,6 +17,39 @@ template <class TType> struct RemovePtr<TType*> { typedef TType Type; };
template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
#pragma region Slice
#if 0
// Generic non-owning view over a contiguous run of Type (disabled draft;
// TokenSlice is the hand-rolled instantiation currently in use).
#define Slice(Type) Slice<Type>
template<class Type> struct Slice;

// Bounds-checked element access; returns a pointer to the element inside the
// viewed storage (the slice does not own it).
template<class Type>
Type* slice_get(Slice<Type> self, ssize id) {
	GEN_ASSERT(id > -1);
	GEN_ASSERT(id < self.len);
	// BUGFIX: was `return self.ptr[id];` — returned the element by value from
	// a function declared to return Type*, a type error once this compiles.
	return & self.ptr[id];
}

template<class Type>
struct Slice
{
	Type* ptr;
	ssize len;

#if GEN_COMPILER_CPP
	// BUGFIX: these operators were copy-pasted from TokenSlice and still
	// referenced Token* / Token&; a generic Slice must use Type.
	forceinline operator Type* () const { return ptr; }
	forceinline Type& operator[]( ssize index ) const { return ptr[index]; }

	forceinline Type* begin() { return ptr; }
	forceinline Type* end()   { return ptr + len; }
#endif
#if ! GEN_C_LIKE_CPP && GEN_COMPILER_CPP
	forceinline Type& back() { return ptr[len - 1]; }
#endif
};
#endif
#pragma endregion Slice
#pragma region Array
#define Array(Type) Array<Type>
@ -26,10 +59,8 @@ template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
struct ArrayHeader;
#if GEN_COMPILER_CPP
template<class Type> struct Array;
# define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
#endif
template<class Type> struct Array;
#define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
usize array_grow_formula(ssize value);
@ -59,12 +90,12 @@ struct ArrayHeader {
usize Num;
};
#if GEN_COMPILER_CPP
template<class Type>
struct Array
{
Type* Data;
#if ! GEN_C_LIKE_CPP
#pragma region Member Mapping
forceinline static Array init(AllocatorInfo allocator) { return array_init<Type>(allocator); }
forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); }
@ -88,6 +119,7 @@ struct Array
forceinline bool resize(usize num) { return array_resize<Type>(this, num); }
forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); }
#pragma endregion Member Mapping
#endif
forceinline operator Type*() { return Data; }
forceinline operator Type const*() const { return Data; }
@ -99,9 +131,8 @@ struct Array
using DataType = Type;
};
#endif
#if GEN_COMPILER_CPP && 0
#if 0
template<class Type> bool append(Array<Type>& array, Array<Type> other) { return append( & array, other ); }
template<class Type> bool append(Array<Type>& array, Type value) { return append( & array, value ); }
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); }

View File

@ -300,9 +300,9 @@
#ifndef struct_init
# if GEN_COMPILER_CPP
# define struct_init(type, value) value
# define struct_init(type)
# else
# define struct_init(type, value) (type) value
# define struct_init(type) (type)
# endif
#endif
@ -310,7 +310,15 @@
# if GEN_COMPILER_CPP
# define struct_zero(type) {}
# else
# define struct_zero(type) {0}
# define struct_zero(type) (type) {0}
# endif
#endif
#ifndef struct_zero_init
# if GEN_COMPILER_CPP
# define struct_zero_init() {}
# else
# define struct_zero_init() {0}
# endif
#endif

View File

@ -320,7 +320,7 @@ inline
StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...)
{
local_persist thread_local
PrintF_Buffer buf = struct_zero(PrintF_Buffer);
PrintF_Buffer buf = struct_zero_init();
va_list va;
va_start(va, fmt);

View File

@ -132,6 +132,7 @@ if ( $vendor -match "clang" )
$flag_all_c = @('-x', 'c')
$flag_c11 = '-std=c11'
$flag_all_cpp = '-x c++'
$flag_charset_utf8 = '-utf-8'
$flag_compile = '-c'
$flag_color_diagnostics = '-fcolor-diagnostics'
$flag_no_color_diagnostics = '-fno-color-diagnostics'
@ -371,6 +372,7 @@ if ( $vendor -match "msvc" )
$flag_all_c = '/TC'
$flag_c11 = '/std:c11'
$flag_all_cpp = '/TP'
$flag_charset_utf8 = '/utf-8'
$flag_compile = '/c'
$flag_debug = '/Zi'
$flag_define = '/D'