Mirror of https://github.com/Ed94/gencpp.git (synced 2025-06-15 03:01:47 -07:00)
The library can now construct its own AST and serialize itself (singleheader). Still need to validate that they match.
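For context on the commit message: the round-trip being described is parsing source into gencpp's AST and serializing it back out with to_string(). Below is a minimal, hedged sketch of that flow; parse_global_body and the init/deinit lifecycle calls are assumptions about the library's public interface, not something shown in this diff.

```cpp
// Hedged sketch of the AST round-trip the commit message refers to.
// Assumes gencpp exposes init/deinit and parse_global_body; only to_string()
// and txt_StrC appear in the diff below.
#include "gen.hpp"
using namespace gen;

int main()
{
	init();

	StrC src = txt_StrC( "#pragma once\nstruct Foo { int Bar; };\n" );

	CodeBody body   = parse_global_body( src ); // construct the AST from source
	String   result = body.to_string();         // serialize the AST back to text

	// "Still need to validate that they match": comparing result against src
	// is the validation step the commit message says is still pending.

	deinit();
	return 0;
}
```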
@@ -29,6 +29,10 @@ String AST::to_string()
  log_failure("Attempted to serialize invalid code! - %s", Parent ? Parent->debug_str() : Name );
  break;

+ case NewLine:
+ result.append("\n");
+ break;
+
  case Untyped:
  case Execution:
  result.append( Content );
@@ -257,7 +261,7 @@ String AST::to_string()
  s32 left = NumEntries;
  while ( left-- )
  {
- result.append_fmt( "%s\n", curr.to_string() );
+ result.append_fmt( "%s", curr.to_string() );
  ++curr;
  }

@@ -455,9 +459,9 @@ String AST::to_string()
  if ( Specs )
  {
  if ( Name && Name.length() )
- result.append_fmt( "%.*soperator %s()", Name.length(), Name, EOperator::to_str( Op ));
+ result.append_fmt( "%.*soperator %s()", Name.length(), Name, ValueType->to_string() );
  else
- result.append_fmt( "operator %s()", EOperator::to_str( Op ) );
+ result.append_fmt( "operator %s()", ValueType->to_string() );

  CodeSpecifiers specs = cast<CodeSpecifiers>();

@@ -481,7 +485,7 @@ String AST::to_string()
  case Operator_Cast_Fwd:
  if ( Specs )
  {
- result.append_fmt( "operator %s()", ValueType->to_string() );
+ result.append_fmt( "operator %s()", ValueType->to_string() );

  CodeSpecifiers specs = cast<CodeSpecifiers>();

@@ -524,9 +528,8 @@ String AST::to_string()
  }
  }
  break;

  case Preprocess_Define:
- result.append_fmt( "#define %s \\\n%s\n", Name, Content );
+ result.append_fmt( "#define %s %s", Name, Content );
  break;

  case Preprocess_If:
@@ -550,11 +553,11 @@ String AST::to_string()
  break;

  case Preprocess_Else:
- result.append_fmt( "#else" );
+ result.append_fmt( "\n#else" );
  break;

  case Preprocess_EndIf:
- result.append_fmt( "#endif\n" );
+ result.append_fmt( "#endif" );
  break;

  case Preprocess_Pragma:
@@ -796,8 +799,21 @@ String AST::to_string()
  }
  break;

+ case Class_Body:
+ #if 0
+ {
+ Code curr = Front->cast<Code>();
+ s32 left = NumEntries;
+ while ( left -- )
+ {
+ result.append_fmt( "%s", curr.to_string() );
+ ++curr;
+ }
+ }
+ break;
+ #endif
+
  case Enum_Body:
- case Class_Body:
  case Extern_Linkage_Body:
  case Function_Body:
  case Global_Body:
@@ -809,7 +825,11 @@ String AST::to_string()
  s32 left = NumEntries;
  while ( left -- )
  {
- result.append_fmt( "%s\n", curr.to_string() );
+ result.append_fmt( "%s", curr.to_string() );
+
+ if ( curr->Type != ECode::NewLine )
+ result.append( "\n" );
+
  ++curr;
  }
  }
@@ -57,6 +57,9 @@ extern CodeAttributes attrib_api_import;
  extern Code module_global_fragment;
  extern Code module_private_fragment;

+ // Exposed, but this is really used for parsing.
+ extern Code fmt_newline;
+
  extern CodePragma pragma_once;

  extern CodeParam param_varadic;
@@ -112,6 +112,10 @@ void define_constants()
  module_private_fragment->Content = module_private_fragment->Name;
  module_private_fragment.set_global();

+ fmt_newline = make_code();
+ fmt_newline->Type = ECode::NewLine;
+ fmt_newline.set_global();
+
  pragma_once = (CodePragma) make_code();
  pragma_once->Type = ECode::Untyped;
  pragma_once->Name = get_cached_string( txt_StrC("once") );
@@ -63,19 +63,28 @@ namespace Parser

  bool __eat( TokType type );

- Token& current()
+ Token& current( bool skip_new_lines = true )
  {
- while ( Arr[Idx].Type == TokType::Empty_Line )
- Idx++;
+ if ( skip_new_lines )
+ {
+ while ( Arr[Idx].Type == TokType::NewLine )
+ Idx++;
+ }

  return Arr[Idx];
  }

- Token& previous()
+ Token& previous( bool skip_new_lines = false )
  {
  s32 idx = this->Idx;
- while ( Arr[Idx].Type == TokType::Empty_Line )
- idx--;
+
+ if ( skip_new_lines )
+ {
+ while ( Arr[idx].Type == TokType::NewLine )
+ idx--;
+
+ return Arr[idx];
+ }

  return Arr[idx - 1];
  }
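The hunk above is the heart of the formatting-preservation change: TokArray::current and TokArray::previous now take a skip_new_lines flag, so the parser can either skip NewLine tokens (the default, used by currtok) or see them (currtok_noskip, via the dont_skip_new_lines constant added further down). A standalone sketch of the cursor pattern, with simplified types rather than gencpp's own:

```cpp
// Simplified illustration of the skip-newlines token cursor; TokType, Token,
// and TokArray here are stand-ins, not gencpp's definitions.
#include <cstddef>
#include <vector>

enum class TokType { NewLine, Identifier, EndOfFile };

struct Token { TokType Type; };

struct TokArray
{
	std::vector<Token> Arr;
	std::size_t        Idx = 0;

	// Default lookahead skips formatting tokens; pass false to observe them.
	Token& current( bool skip_new_lines = true )
	{
		if ( skip_new_lines )
		{
			while ( Idx < Arr.size() && Arr[Idx].Type == TokType::NewLine )
				Idx++;
		}
		return Arr[Idx];
	}
};

// Mirrors the constant the commit adds; `currtok_noskip` expands to
// Tokens.current( dont_skip_new_lines ).
constexpr bool dont_skip_new_lines = false;

int main()
{
	TokArray tokens{ { { TokType::NewLine }, { TokType::Identifier }, { TokType::EndOfFile } } };

	// The default form skips the leading newline and lands on the identifier;
	// the noskip form would have returned the NewLine token instead.
	bool skipped = ( tokens.current().Type == TokType::Identifier );
	return skipped ? 0 : 1;
}
```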
@@ -86,6 +95,8 @@ namespace Parser
  }
  };

+ constexpr bool dont_skip_new_lines = false;
+
  struct StackNode
  {
  StackNode* Prev;
@@ -145,7 +156,7 @@ namespace Parser
  {
  if ( curr_scope->Name )
  {
- result.append_fmt("\t%d: %s, AST Name: %.*s\n", level, curr_scope->ProcName.Ptr, curr_scope->Name.Length, (StrC)curr_scope->Name );
+ result.append_fmt("\t%d: %s, AST Name: %.*s\n", level, curr_scope->ProcName.Ptr, curr_scope->Name.Length, curr_scope->Name.Text );
  }
  else
  {
@@ -170,19 +181,16 @@ namespace Parser
  return false;
  }

- if ( Arr[Idx].Type == TokType::Empty_Line && type != TokType::Empty_Line )
+ if ( Arr[Idx].Type == TokType::NewLine && type != TokType::NewLine )
  {
  Idx++;
  return log_fmt( "Auto-skipping empty line (%d, %d)\n", current().Line, current().Column );
  }

  if ( Arr[Idx].Type != type )
  {
- String token_str = String::make( GlobalAllocator, { Arr[Idx].Length, Arr[Idx].Text } );
-
- log_failure( "Parse Error, TokArray::eat, Expected: ' %s ' not ' %s ' (%d, %d)`\n%s"
- , ETokType::to_str(type)
- , token_str
+ log_failure( "Parse Error, TokArray::eat, Expected: ' %s ' not ' %.*s ' (%d, %d)`\n%s"
+ , ETokType::to_str(type).Ptr
+ , Arr[Idx].Length, Arr[Idx].Text
  , current().Line
  , current().Column
  , Context.to_string()
@@ -255,27 +263,31 @@ namespace Parser

  while (left )
  {
- Token token = { nullptr, 0, TokType::Invalid, line, column, false };
+ Token token = { scanner, 0, TokType::Invalid, line, column, false };

  bool is_define = false;

  if ( column == 1 )
  {
- token.Text = scanner;
-
+ if ( current == '\r')
+ {
+ move_forward();
+ token.Length = 1;
+ }
+
  if ( current == '\n' )
  {
- token.Type = TokType::Empty_Line;
+ token.Type = TokType::NewLine;
  token.Length ++;
  move_forward();

  Tokens.append( token );
  continue;
  }
  }

  token.Length = 0;

  SkipWhitespace();
  if ( left <= 0 )
  break;
@@ -321,27 +333,30 @@ namespace Parser
  if ( current == '\r' )
  {
  move_forward();
- // token.Length++;
+ token.Length++;
  }

  if ( current == '\n' )
  {
  move_forward();
- // token.Length++;
+ token.Length++;
  continue;
  }
  else
  {
- String directive_str = String::make_length( GlobalAllocator, token.Text, token.Length );
-
  log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)"
- " in preprocessor directive (%d, %d)\n%s"
+ " in preprocessor directive (%d, %d)\n%.100s"
  , current, line, column
- , token.Line, token.Column, directive_str );
+ , token.Line, token.Column, token.Text );
  break;
  }
  }

+ if ( current == '\r' )
+ {
+ move_forward();
+ }
+
  if ( current == '\n' )
  {
  move_forward();
@@ -353,8 +368,8 @@ namespace Parser
  token.Length++;
  }

- token.Length = token.Length + token.Text - hash;
  token.Text = hash;
+ token.Length = (sptr)token.Text + token.Length - (sptr)hash;
  Tokens.append( token );
  continue; // Skip found token, its all handled here.
  }
@@ -817,11 +832,22 @@ namespace Parser
  token.Text = scanner;
  token.Length = 0;

- while ( left && current != '\n' )
+ while ( left && current != '\n' && current != '\r' )
  {
  move_forward();
  token.Length++;
  }
+
+ if ( current == '\r' )
+ {
+ move_forward();
+ }
+
+ if ( current == '\n' )
+ {
+ move_forward();
+ // token.Length++;
+ }
  }
  else if ( current == '*' )
  {
@@ -986,6 +1012,17 @@ namespace Parser
  move_forward();
  token.Length++;
  }

+ if ( current == '\r' )
+ {
+ move_forward();
+ // token.Length++;
+ }
+ if ( current == '\n' )
+ {
+ move_forward();
+ // token.Length++;
+ }
  }
  else
  {
@@ -1042,16 +1079,14 @@ if ( def.Ptr == nullptr ) \
  return CodeInvalid; \
  }

- # define currtok Context.Tokens.current()
- # define prevtok Context.Tokens.previous()
- # define eat( Type_ ) Context.Tokens.__eat( Type_ )
- # define left ( Context.Tokens.Arr.num() - Context.Tokens.Idx )
+ # define currtok_noskip Context.Tokens.current( dont_skip_new_lines )
+ # define currtok Context.Tokens.current()
+ # define prevtok Context.Tokens.previous()
+ # define eat( Type_ ) Context.Tokens.__eat( Type_ )
+ # define left ( Context.Tokens.Arr.num() - Context.Tokens.Idx )

- # define check( Type_ ) \
- ( left \
- && (currtok.Type == TokType::Empty_Line ? \
- eat( TokType::Empty_Line) : true) \
- && currtok.Type == Type_ )
+ # define check_noskip( Type_ ) ( left && currtok_noskip.Type == Type_ )
+ # define check( Type_ ) ( left && currtok.Type == Type_ )

  # define push_scope() \
  StackNode scope { nullptr, currtok, NullToken, txt_StrC( __func__ ) }; \
@@ -2415,7 +2450,7 @@ CodeBody parse_class_struct_body( Parser::TokType which )
  else
  result->Type = Struct_Body;

- while ( left && currtok.Type != TokType::BraceCurly_Close )
+ while ( left && currtok_noskip.Type != TokType::BraceCurly_Close )
  {
  Code member = Code::Invalid;
  CodeAttributes attributes = { nullptr };
@@ -2423,12 +2458,13 @@ CodeBody parse_class_struct_body( Parser::TokType which )

  bool expects_function = false;

- switch ( currtok.Type )
+ Context.Scope->Start = currtok_noskip;
+
+ switch ( currtok_noskip.Type )
  {
- case TokType::Empty_Line:
- // Empty lines are auto skipped by Tokens.current()
- member = untyped_str( Context.Tokens.Arr[ Context.Tokens.Idx] );
- eat( TokType::Empty_Line );
+ case TokType::NewLine:
+ member = fmt_newline;
+ eat( TokType::NewLine );
  break;

  case TokType::Comment:
@@ -2773,7 +2809,7 @@ CodeBody parse_global_nspace( CodeT which )
  result = (CodeBody) make_code();
  result->Type = which;

- while ( left && currtok.Type != TokType::BraceCurly_Close )
+ while ( left && currtok_noskip.Type != TokType::BraceCurly_Close )
  {
  Code member = Code::Invalid;
  CodeAttributes attributes = { nullptr };
@@ -2781,14 +2817,14 @@ CodeBody parse_global_nspace( CodeT which )

  bool expects_function = false;

- Context.Scope->Start = currtok;
+ Context.Scope->Start = currtok_noskip;

- switch ( currtok.Type )
+ switch ( currtok_noskip.Type )
  {
- case TokType::Empty_Line:
+ case TokType::NewLine:
  // Empty lines are auto skipped by Tokens.current()
- member = untyped_str( Context.Tokens.Arr[ Context.Tokens.Idx] );
- eat( TokType::Empty_Line );
+ member = fmt_newline;
+ eat( TokType::NewLine );
  break;

  case TokType::Comment:
@@ -3103,14 +3139,13 @@ CodeEnum parse_enum( bool inplace_def )

  Code member = CodeInvalid;

- while ( currtok.Type != TokType::BraceCurly_Close )
+ while ( left && currtok_noskip.Type != TokType::BraceCurly_Close )
  {
- switch ( currtok.Type )
+ switch ( currtok_noskip.Type )
  {
- case TokType::Empty_Line:
- // Empty lines are auto skipped by Tokens.current()
- member = untyped_str( Context.Tokens.Arr[ Context.Tokens.Idx] );
- eat( TokType::Empty_Line );
+ case TokType::NewLine:
+ member = untyped_str( currtok_noskip );
+ eat( TokType::NewLine );
  break;

  case TokType::Comment:
@@ -3160,9 +3195,9 @@ CodeEnum parse_enum( bool inplace_def )
  {
  eat( TokType::Operator );

- while ( currtok.Type != TokType::Comma && currtok.Type != TokType::BraceCurly_Close )
+ while ( currtok_noskip.Type != TokType::Comma && currtok_noskip.Type != TokType::BraceCurly_Close )
  {
- eat( currtok.Type );
+ eat( currtok_noskip.Type );
  }
  }

@@ -3653,15 +3688,15 @@ CodeOpCast parse_operator_cast()
  if ( currtok.Type == TokType::BraceCurly_Open )
  level++;

- else if ( currtok.Type == TokType::BraceCurly_Close && level > 0 )
+ else if ( currtok.Type == TokType::BraceCurly_Close )
  level--;

  eat( currtok.Type );
  }
- eat( TokType::BraceCurly_Close );
-
  body_str.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)body_str.Text;

+ eat( TokType::BraceCurly_Close );
+
  body = untyped_str( body_str );
  }
  else
@@ -4175,15 +4210,15 @@ CodeUnion parse_union( bool inplace_def )
  body = make_code();
  body->Type = ECode::Union_Body;

- while ( ! check( TokType::BraceCurly_Close ) )
+ while ( ! check_noskip( TokType::BraceCurly_Close ) )
  {
  Code member = { nullptr };
- switch ( currtok.Type )
+ switch ( currtok_noskip.Type )
  {
- case TokType::Empty_Line:
+ case TokType::NewLine:
  // Empty lines are auto skipped by Tokens.current()
- member = untyped_str( Context.Tokens.Arr[ Context.Tokens.Idx] );
- eat( TokType::Empty_Line );
+ member = fmt_newline;
+ eat( TokType::NewLine );
  break;

  case TokType::Comment:
@@ -812,8 +812,13 @@ CodeOperator def_operator( OperatorT op, StrC nspace
  return CodeInvalid;
  }

- char const* name = str_fmt_buf( "%.*soperator %s", nspace.Len, nspace.Ptr, to_str(op) );
+ char const* name = nullptr;

+ StrC op_str = to_str( op );
+ if ( nspace.Len > 0 )
+ name = str_fmt_buf( "%.*soperator %.*s", nspace.Len, nspace.Ptr, op_str.Len, op_str.Ptr );
+ else
+ name = str_fmt_buf( "operator %.*s", op_str.Len, op_str.Ptr );
  CodeOperator
  result = (CodeOperator) make_code();
  result->Name = get_cached_string( { str_len(name), name } );
@@ -2,6 +2,8 @@
  # error Gen.hpp : GEN_TIME not defined
  #endif

+ #include "gen.hpp"
+
  //! If its desired to roll your own dependencies, define GEN_ROLL_OWN_DEPENDENCIES before including this file.
  //! Dependencies are derived from the c-zpl library: https://github.com/zpl-c/zpl
  #ifndef GEN_ROLL_OWN_DEPENDENCIES
@@ -32,6 +32,8 @@ global CodeAttributes attrib_api_import;
  global Code module_global_fragment;
  global Code module_private_fragment;

+ global Code fmt_newline;
+
  global CodeParam param_varadic;

  global CodePragma pragma_once;
@@ -5,6 +5,7 @@ namespace ECode
  # define Define_Types \
  Entry( Invalid ) \
  Entry( Untyped ) \
+ Entry( NewLine ) \
  Entry( Comment ) \
  Entry( Access_Private ) \
  Entry( Access_Protected ) \
@@ -57,14 +57,14 @@ namespace EOperator
  };

  inline
- char const* to_str( Type op )
+ StrC to_str( Type op )
  {
  local_persist
- char const* lookup[ Num_Ops ] = {
- # define Entry( Type_, Token_ ) stringize(Token_),
+ StrC lookup[ Num_Ops ] = {
+ # define Entry( Type_, Token_ ) { sizeof(stringize(Token_)), stringize(Token_) },
  Define_Operators
  # undef Entry
- ","
+ txt_StrC(",")
  };

  return lookup[ op ];
@@ -50,10 +50,10 @@ namespace Parser
  Entry( Decl_Typedef, "typedef" ) \
  Entry( Decl_Using, "using" ) \
  Entry( Decl_Union, "union" ) \
- Entry( Empty_Line, "__empty_line__" ) \
  Entry( Identifier, "__identifier__" ) \
  Entry( Module_Import, "import" ) \
  Entry( Module_Export, "export" ) \
+ Entry( NewLine, "__NewLine__" ) \
  Entry( Number, "__number__" ) \
  Entry( Operator, "__operator__" ) \
  Entry( Preprocess_Define, "define") \
@@ -144,12 +144,12 @@ namespace Parser
  }

  internal inline
- char const* to_str( Type type )
+ StrC to_str( Type type )
  {
  local_persist
- char const* lookup[(u32)NumTokens] =
+ StrC lookup[(u32)NumTokens] =
  {
- # define Entry( Name_, Str_ ) Str_,
+ # define Entry( Name_, Str_ ) { sizeof(Str_), Str_ },
  Define_TokType
  GEN_DEFINE_ATTRIBUTE_TOKENS
  # undef Entry
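The last two hunks switch EOperator::to_str and ETokType::to_str from char const* to a length-bounded StrC, which is why call sites in the parser move to %.*s formatting with an explicit Len/Ptr pair. A standalone illustration of that calling pattern (StrC here is modeled on gencpp's { Len, Ptr } layout; to_str_example is a stand-in, not the library's lookup table):

```cpp
// Length-bounded string formatting: the reason callers switch to %.*s.
#include <cstdio>

struct StrC { long Len; char const* Ptr; }; // modeled on gencpp's StrC

StrC to_str_example() { return { 2, "+=" }; } // stand-in for EOperator::to_str

int main()
{
	StrC op = to_str_example();
	// No null terminator is assumed; the precision caps the bytes printed.
	std::printf( "operator %.*s\n", (int)op.Len, op.Ptr );
	return 0;
}
```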