Mirror of https://github.com/Ed94/gencpp.git (synced 2024-12-22 07:44:45 -08:00)

Commit 16d0e0834f (parent 76ac3a0f93)
All global vars (except concepts) have been retrofitted to the library's new Context struct.
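A minimal usage sketch of what this retrofit means for callers, pieced together from the hunks below (Context, gen::init, gen::deinit, gen_main, parse_struct, txt, and CodeStruct all appear in this diff; the include path and the parsed snippet are illustrative assumptions):

// Sketch only, not code from this commit: library state now lives in a
// user-owned Context that is passed to init/deinit instead of file-scope globals.
#include "gen/gen.hpp"   // assumed path; the metaprograms in this diff emit gen/gen.hpp

int gen_main()
{
    Context ctx {};
    gen::init( & ctx );   // the first context initialized becomes the base one (_ctx) and defines the cached constants

    CodeStruct point = parse_struct( txt("struct Point { int x; int y; };") );
    // parser and lexer state (parser.Tokens, Lexer_Tokens, Lexer_defines) now hangs off _ctx rather than globals

    gen::deinit( & ctx ); // decrements the context counter and frees the arenas and code pools owned by ctx
    return 0;
}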
@ -60,7 +60,7 @@ int gen_main()
|
||||
builder_print( & header_especifier, format(especifier) );
|
||||
builder_write( & header_especifier);
|
||||
|
||||
Builder header_etoktype = builder_open( "components/gen/etoktype.cpp" );
|
||||
Builder header_etoktype = builder_open( "components/gen/etoktype.hpp" );
|
||||
builder_print( & header_etoktype, gen_component_header );
|
||||
builder_print( & header_etoktype, format(etoktype) );
|
||||
builder_write( & header_etoktype);
|
||||
|
@ -3,9 +3,6 @@
|
||||
#include "static_data.cpp"
|
||||
#endif
|
||||
|
||||
global Code Code_Global;
|
||||
global Code Code_Invalid;
|
||||
|
||||
// This serializes all the data-members in a "debug" format, where each member is printed with its associated value.
|
||||
Str code_debug_str(Code self)
|
||||
{
|
||||
@ -1283,6 +1280,5 @@ bool code_validate_body(Code self)
|
||||
log_failure( "AST::validate_body: Invalid this AST does not have a body %S", code_debug_str(self) );
|
||||
return false;
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
@ -230,21 +230,6 @@ struct CodeUsing;
|
||||
struct CodeVar;
|
||||
#endif
|
||||
|
||||
GEN_NS_PARSER_BEGIN
|
||||
|
||||
struct Token;
|
||||
|
||||
GEN_NS_PARSER_END
|
||||
|
||||
#if GEN_COMPILER_CPP
|
||||
// Note(Ed): This is to alleviate an edge case with parsing usings or typedefs where I don't really have it setup
|
||||
// to parse a 'namespace' macro or a type with a macro.
|
||||
// I have ideas for ways to pack that into the typedef/using ast, but for now just keeping it like this
|
||||
#define ParserTokenType GEN_NS_PARSER Token
|
||||
typedef ParserTokenType Token;
|
||||
#undef ParserTokenType
|
||||
#endif
|
||||
|
||||
#if GEN_COMPILER_CPP
|
||||
template< class Type> forceinline Type tmpl_cast( Code self ) { return * rcast( Type*, & self ); }
|
||||
#endif
|
||||
@ -455,7 +440,7 @@ static_assert( sizeof(AST) == AST_POD_Size, "ERROR: AST is not size of AST_POD_S
|
||||
struct InvalidCode_ImplictCaster;
|
||||
#define InvalidCode (InvalidCode_ImplictCaster{})
|
||||
#else
|
||||
#define InvalidCode (void*){ (void*)Code_Invalid }
|
||||
#define InvalidCode (void*){ (void*)_ctx->Code_Invalid }
|
||||
#endif
|
||||
|
||||
#if GEN_COMPILER_CPP
|
||||
|
@ -5,8 +5,6 @@
|
||||
|
||||
// This file was generated automatically by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp)
|
||||
|
||||
GEN_NS_PARSER_BEGIN
|
||||
|
||||
#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )
|
||||
|
||||
enum TokType : u32
|
||||
@ -231,5 +229,3 @@ inline TokType str_to_toktype( Str str )
|
||||
}
|
||||
return Tok_Invalid;
|
||||
}
|
||||
|
||||
GEN_NS_PARSER_END
|
@ -3,10 +3,8 @@
|
||||
#include "code_serialization.cpp"
|
||||
#endif
|
||||
|
||||
GEN_NS_PARSER_BEGIN
|
||||
internal void parser_init();
|
||||
internal void parser_deinit();
|
||||
GEN_NS_PARSER_END
|
||||
|
||||
internal
|
||||
void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
|
||||
@ -76,6 +74,10 @@ void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size,
|
||||
internal
|
||||
void define_constants()
|
||||
{
|
||||
// We only initialize these if there is no base context.
|
||||
if ( context_counter > 0 )
|
||||
return;
|
||||
|
||||
Code_Global = make_code();
|
||||
Code_Global->Name = cache_str( txt("Global Code") );
|
||||
Code_Global->Content = Code_Global->Name;
|
||||
@ -208,12 +210,13 @@ void define_constants()
|
||||
enum_underlying_sig = txt("enum_underlying(");
|
||||
}
|
||||
array_append( _ctx->PreprocessorDefines, enum_underlying_sig);
|
||||
|
||||
# undef def_constant_spec
|
||||
}
|
||||
|
||||
void init(Context* ctx)
|
||||
{
|
||||
do_once() {
|
||||
context_counter = 0;
|
||||
}
|
||||
AllocatorInfo fallback_allocator = { & fallback_allocator_proc, nullptr };
|
||||
|
||||
b32 using_fallback_allocator = false;
|
||||
@ -307,23 +310,25 @@ void init(Context* ctx)
|
||||
GEN_FATAL( "gen::init: Failed to initialize the string arena" );
|
||||
array_append( ctx->StringArenas, strbuilder_arena );
|
||||
}
|
||||
|
||||
// Setup the hash tables
|
||||
{
|
||||
ctx->StrCache = hashtable_init(StrCached, ctx->Allocator_DyanmicContainers);
|
||||
if ( ctx->StrCache.Entries == nullptr )
|
||||
GEN_FATAL( "gen::init: Failed to initialize the StringCache");
|
||||
}
|
||||
|
||||
// Preprocessor Defines
|
||||
ctx->PreprocessorDefines = array_init_reserve(StrCached, ctx->Allocator_DyanmicContainers, kilobytes(1) );
|
||||
|
||||
define_constants();
|
||||
GEN_NS_PARSER parser_init();
|
||||
parser_init();
|
||||
|
||||
++ context_counter;
|
||||
}
|
||||
|
||||
void deinit(Context* ctx)
|
||||
{
|
||||
GEN_ASSERT(context_counter);
|
||||
GEN_ASSERT_MSG(context_counter > 0, "Attempted to deinit a context that for some reason wasn't accounted for!");
|
||||
usize index = 0;
|
||||
usize left = array_num(ctx->CodePools);
|
||||
do
|
||||
@ -353,8 +358,10 @@ void deinit(Context* ctx)
|
||||
|
||||
array_free(ctx->PreprocessorDefines);
|
||||
|
||||
index = 0;
|
||||
left = array_num( ctx->Fallback_AllocatorBuckets);
|
||||
if (left)
|
||||
{
|
||||
index = 0;
|
||||
do
|
||||
{
|
||||
Arena* bucket = & ctx->Fallback_AllocatorBuckets[ index ];
|
||||
@ -362,12 +369,13 @@ void deinit(Context* ctx)
|
||||
index++;
|
||||
}
|
||||
while ( left--, left );
|
||||
|
||||
array_free( ctx->Fallback_AllocatorBuckets);
|
||||
GEN_NS_PARSER parser_deinit();
|
||||
}
|
||||
parser_deinit();
|
||||
|
||||
if (_ctx == ctx)
|
||||
_ctx = nullptr;
|
||||
-- context_counter;
|
||||
}
|
||||
|
||||
void reset(Context* ctx)
|
||||
@ -413,7 +421,6 @@ AllocatorInfo get_cached_str_allocator( s32 str_length )
|
||||
|
||||
last = array_back( _ctx->StringArenas);
|
||||
}
|
||||
|
||||
return arena_allocator_info(last);
|
||||
}
|
||||
|
||||
@ -451,7 +458,6 @@ Code make_code()
|
||||
|
||||
allocator = array_back( _ctx->CodePools);
|
||||
}
|
||||
|
||||
Code result = { rcast( AST*, alloc( pool_allocator_info(allocator), sizeof(AST) )) };
|
||||
mem_set( rcast(void*, cast(AST*, result)), 0, sizeof(AST) );
|
||||
return result;
|
||||
|
@ -33,6 +33,7 @@ struct LogEntry
|
||||
typedef void LoggerCallback(LogEntry entry);
|
||||
#endif
|
||||
|
||||
|
||||
// Note(Ed): This is subject to heavy change
|
||||
// with upcoming changes to the library's fallback (default) allocations strategy;
|
||||
// and major changes to lexer/parser context usage.
|
||||
@ -64,6 +65,9 @@ struct Context
|
||||
u32 InitSize_LexArena;
|
||||
u32 SizePer_StringArena;
|
||||
|
||||
// TODO(Ed): Symbol Table
|
||||
// Keep track of all resolved symbols (namespaced identifiers)
|
||||
|
||||
// Parser
|
||||
|
||||
// Used by the lexer to persistently treat all these identifiers as preprocessor defines.
|
||||
@ -72,6 +76,7 @@ struct Context
|
||||
Array(StrCached) PreprocessorDefines;
|
||||
|
||||
// Backend
|
||||
|
||||
// The fallback allocator is utilized if any of the three allocators above is not specified by the user.
|
||||
u32 InitSize_Fallback_Allocator_Bucket_Size;
|
||||
Array(Arena) Fallback_AllocatorBuckets;
|
||||
@ -81,12 +86,19 @@ struct Context
|
||||
Array(Pool) CodePools;
|
||||
Array(Arena) StringArenas;
|
||||
|
||||
Arena LexArena;
|
||||
|
||||
StringTable StrCache;
|
||||
|
||||
// TODO(Ed): This needs to be just handled by a parser context
|
||||
|
||||
Arena LexArena;
|
||||
StringTable Lexer_defines;
|
||||
Array(Token) Lexer_Tokens;
|
||||
|
||||
// TODO(Ed): Active parse context vs a parse result need to be separated conceptually
|
||||
ParseContext parser;
|
||||
};
|
||||
|
||||
// Initialize the library.
|
||||
// Initialize the library. The first ctx initialized must exist for the lifetime of other contexts that come after, as it's the one that
|
||||
void init(Context* ctx);
|
||||
|
||||
// Currently manually frees the arenas; code for checking for leaks.
|
||||
@ -283,7 +295,6 @@ CodeBody def_union_body ( s32 num, Code* codes );
|
||||
// TODO(Ed) : Implement the new parser API design.
|
||||
|
||||
#if 0
|
||||
GEN_NS_PARSER_BEGIN
|
||||
struct StackNode
|
||||
{
|
||||
StackNode* Prev;
|
||||
@ -299,7 +310,6 @@ struct Error
|
||||
StrBuilder message;
|
||||
StackNode* context_stack;
|
||||
};
|
||||
GEN_NS_PARSER_END
|
||||
|
||||
struct ParseInfo
|
||||
{
|
||||
|
@ -10,23 +10,21 @@
|
||||
|
||||
CodeClass parse_class( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class, parser_not_inplace_def );
|
||||
parser_pop(& parser_ctx);
|
||||
parser_pop(& _ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeConstructor parse_constructor( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
@ -59,8 +57,8 @@ CodeConstructor parse_constructor( Str def )
|
||||
break;
|
||||
|
||||
default :
|
||||
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(parser_ctx) );
|
||||
parser_pop(& parser_ctx);
|
||||
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(_ctx->parser) );
|
||||
parser_pop(& _ctx->parser);
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
@ -79,14 +77,13 @@ CodeConstructor parse_constructor( Str def )
|
||||
// <specifiers> ...
|
||||
}
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
CodeConstructor result = parser_parse_constructor( specifiers );
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeDestructor parse_destructor( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
@ -96,225 +93,209 @@ CodeDestructor parse_destructor( Str def )
|
||||
// TODO(Ed): Destructors can have prefix attributes
|
||||
// TODO(Ed): Destructors can have virtual
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
CodeDestructor result = parser_parse_destructor(NullCode);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeEnum parse_enum( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
{
|
||||
parser_pop(& parser_ctx);
|
||||
parser_pop(& _ctx->parser);
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_enum( parser_not_inplace_def);
|
||||
}
|
||||
|
||||
CodeBody parse_export_body( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_export_body();
|
||||
}
|
||||
|
||||
CodeExtern parse_extern_link( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_extern_link();
|
||||
}
|
||||
|
||||
CodeFriend parse_friend( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_friend();
|
||||
}
|
||||
|
||||
CodeFn parse_function( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return (CodeFn) parser_parse_function();
|
||||
}
|
||||
|
||||
CodeBody parse_global_body( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeBody result = parse_global_nspace( CT_Global_Body );
|
||||
parser_pop(& parser_ctx);
|
||||
parser_pop(& _ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeNS parse_namespace( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_namespace();
|
||||
}
|
||||
|
||||
CodeOperator parse_operator( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return (CodeOperator) parser_parse_operator();
|
||||
}
|
||||
|
||||
CodeOpCast parse_operator_cast( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_operator_cast(NullCode);
|
||||
}
|
||||
|
||||
CodeStruct parse_struct( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct, parser_not_inplace_def );
|
||||
parser_pop(& parser_ctx);
|
||||
parser_pop(& _ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeTemplate parse_template( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_template();
|
||||
}
|
||||
|
||||
CodeTypename parse_type( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_type( parser_not_from_template, nullptr);
|
||||
}
|
||||
|
||||
CodeTypedef parse_typedef( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_typedef();
|
||||
}
|
||||
|
||||
CodeUnion parse_union( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_union( parser_not_inplace_def);
|
||||
}
|
||||
|
||||
CodeUsing parse_using( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_using();
|
||||
}
|
||||
|
||||
CodeVar parse_variable( Str def )
|
||||
{
|
||||
GEN_USING_NS_PARSER;
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
parser_ctx.Tokens = toks;
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_variable();
|
||||
}
|
||||
|
||||
|
@ -1352,15 +1352,12 @@ CodeBody def_class_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::"
|
||||
"def_class_body"
|
||||
": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_CLASS_UNALLOWED_TYPES:
|
||||
@ -1370,7 +1367,6 @@ CodeBody def_class_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1386,18 +1382,14 @@ CodeBody def_class_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Function_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_class_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_CLASS_UNALLOWED_TYPES:
|
||||
@ -1407,7 +1399,6 @@ CodeBody def_class_body( s32 num, Code* codes )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1429,19 +1420,14 @@ CodeBody def_enum_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if ( ! entry )
|
||||
{
|
||||
if ( ! entry ) {
|
||||
log_failure("gen::def_enum_body: Provided a null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment )
|
||||
{
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment ) {
|
||||
log_failure("gen::def_enum_body: Entry type is not allowed - %s. Must be of untyped or comment type.", code_debug_str(entry) );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
body_append(result, entry );
|
||||
}
|
||||
while ( num--, num > 0 );
|
||||
@ -1457,23 +1443,17 @@ CodeBody def_enum_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Enum_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
|
||||
if ( ! entry )
|
||||
{
|
||||
if ( ! entry ) {
|
||||
log_failure("gen::def_enum_body: Provided a null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment )
|
||||
{
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment ) {
|
||||
log_failure("gen::def_enum_body: Entry type is not allowed: %s", code_debug_str(entry) );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
body_append(result, entry );
|
||||
}
|
||||
while ( codes++, num--, num > 0 );
|
||||
@ -1495,13 +1475,11 @@ CodeBody def_export_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if ( ! entry)
|
||||
{
|
||||
log_failure("gen::" "def_export_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_EXPORT_UNALLOWED_TYPES:
|
||||
@ -1511,7 +1489,6 @@ CodeBody def_export_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1527,18 +1504,14 @@ CodeBody def_export_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Export_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_export_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_EXPORT_UNALLOWED_TYPES:
|
||||
@ -1548,7 +1521,6 @@ CodeBody def_export_body( s32 num, Code* codes )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1570,13 +1542,10 @@ CodeBody def_extern_link_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_extern_linkage_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_EXTERN_LINKAGE_UNALLOWED_TYPES:
|
||||
@ -1586,7 +1555,6 @@ CodeBody def_extern_link_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1602,18 +1570,15 @@ CodeBody def_extern_link_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Extern_Linkage_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
log_failure("gen::" "def_extern_linkage_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_EXTERN_LINKAGE_UNALLOWED_TYPES:
|
||||
@ -1623,7 +1588,6 @@ CodeBody def_extern_link_body( s32 num, Code* codes )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1645,16 +1609,12 @@ CodeBody def_function_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" stringize(def_function_body) ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
|
||||
GEN_AST_BODY_FUNCTION_UNALLOWED_TYPES:
|
||||
log_failure("gen::" stringize(def_function_body) ": Entry type is not allowed: %s", code_debug_str(entry));
|
||||
return InvalidCode;
|
||||
@ -1662,7 +1622,6 @@ CodeBody def_function_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1678,18 +1637,14 @@ CodeBody def_function_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Function_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if (!entry) {
|
||||
log_failure("gen::" "def_function_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_FUNCTION_UNALLOWED_TYPES:
|
||||
@ -1720,13 +1675,10 @@ CodeBody def_global_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_global_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
case CT_Global_Body:
|
||||
@ -1741,7 +1693,6 @@ CodeBody def_global_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1757,18 +1708,14 @@ CodeBody def_global_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Global_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_global_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
case CT_Global_Body:
|
||||
@ -1804,13 +1751,10 @@ CodeBody def_namespace_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_namespace_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_NAMESPACE_UNALLOWED_TYPES:
|
||||
@ -1820,7 +1764,6 @@ CodeBody def_namespace_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1836,18 +1779,14 @@ CodeBody def_namespace_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Global_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_namespace_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_NAMESPACE_UNALLOWED_TYPES:
|
||||
@ -1856,7 +1795,6 @@ CodeBody def_namespace_body( s32 num, Code* codes )
|
||||
|
||||
default: break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -1875,26 +1813,20 @@ CodeParams def_params( s32 num, ... )
|
||||
CodeParams param = pcast( CodeParams, pod );
|
||||
|
||||
null_check( def_params, param );
|
||||
|
||||
if ( param->Type != CT_Parameters )
|
||||
{
|
||||
if ( param->Type != CT_Parameters ) {
|
||||
log_failure( "gen::def_params: param %d is not a Parameters", num - num + 1 );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
CodeParams result = (CodeParams) code_duplicate(param);
|
||||
|
||||
while ( -- num )
|
||||
{
|
||||
pod = va_arg(va, Code_POD);
|
||||
param = pcast( CodeParams, pod );
|
||||
|
||||
if ( param->Type != CT_Parameters )
|
||||
{
|
||||
if ( param->Type != CT_Parameters ) {
|
||||
log_failure( "gen::def_params: param %d is not a Parameters", num - num + 1 );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
params_append(result, param );
|
||||
}
|
||||
va_end(va);
|
||||
@ -1907,18 +1839,14 @@ CodeParams def_params( s32 num, CodeParams* codes )
|
||||
def_body_code_array_start( def_params );
|
||||
|
||||
# define check_current(current) \
|
||||
if ( current == nullptr ) \
|
||||
{ \
|
||||
if ( current == nullptr ) { \
|
||||
log_failure("gen::def_params: Provide a null code in codes array"); \
|
||||
return InvalidCode; \
|
||||
} \
|
||||
\
|
||||
if (current->Type != CT_Parameters ) \
|
||||
{ \
|
||||
if (current->Type != CT_Parameters ) { \
|
||||
log_failure("gen::def_params: Code in coes array is not of paramter type - %s", code_debug_str(current) ); \
|
||||
return InvalidCode; \
|
||||
}
|
||||
|
||||
CodeParams current = (CodeParams)code_duplicate(* codes);
|
||||
check_current(current);
|
||||
|
||||
@ -1927,9 +1855,7 @@ CodeParams def_params( s32 num, CodeParams* codes )
|
||||
result->Name = current->Name;
|
||||
result->Type = current->Type;
|
||||
result->ValueType = current->ValueType;
|
||||
|
||||
while( codes++, current = * codes, num--, num > 0 )
|
||||
{
|
||||
while( codes++, current = * codes, num--, num > 0 ) {
|
||||
check_current(current);
|
||||
params_append(result, current );
|
||||
}
|
||||
@ -1940,28 +1866,22 @@ CodeParams def_params( s32 num, CodeParams* codes )
|
||||
|
||||
CodeSpecifiers def_specifiers( s32 num, ... )
|
||||
{
|
||||
if ( num <= 0 )
|
||||
{
|
||||
if ( num <= 0 ) {
|
||||
log_failure("gen::def_specifiers: num cannot be zero or less");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( num > AST_ArrSpecs_Cap )
|
||||
{
|
||||
if ( num > AST_ArrSpecs_Cap ) {
|
||||
log_failure("gen::def_specifiers: num of speciifers to define AST larger than AST specicifier capacity - %d", num);
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
CodeSpecifiers
|
||||
result = (CodeSpecifiers) make_code();
|
||||
result->Type = CT_Specifiers;
|
||||
|
||||
va_list va;
|
||||
va_start(va, num);
|
||||
do
|
||||
{
|
||||
do {
|
||||
Specifier type = (Specifier)va_arg(va, int);
|
||||
|
||||
specifiers_append(result, type );
|
||||
}
|
||||
while ( --num, num );
|
||||
@ -1972,25 +1892,20 @@ CodeSpecifiers def_specifiers( s32 num, ... )
|
||||
|
||||
CodeSpecifiers def_specifiers( s32 num, Specifier* specs )
|
||||
{
|
||||
if ( num <= 0 )
|
||||
{
|
||||
if ( num <= 0 ) {
|
||||
log_failure("gen::def_specifiers: num cannot be zero or less");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( num > AST_ArrSpecs_Cap )
|
||||
{
|
||||
if ( num > AST_ArrSpecs_Cap ) {
|
||||
log_failure("gen::def_specifiers: num of speciifers to define AST larger than AST specicifier capacity - %d", num);
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
CodeSpecifiers
|
||||
result = (CodeSpecifiers) make_code();
|
||||
result->Type = CT_Specifiers;
|
||||
|
||||
s32 idx = 0;
|
||||
do
|
||||
{
|
||||
do {
|
||||
specifiers_append(result, specs[idx] );
|
||||
idx++;
|
||||
}
|
||||
@ -2013,13 +1928,10 @@ CodeBody def_struct_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast(Code, pod);
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_struct_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_STRUCT_UNALLOWED_TYPES:
|
||||
@ -2029,7 +1941,6 @@ CodeBody def_struct_body( s32 num, ... )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -2045,18 +1956,14 @@ CodeBody def_struct_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Struct_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
codes++;
|
||||
|
||||
if (!entry)
|
||||
{
|
||||
if ( ! entry) {
|
||||
log_failure("gen::" "def_struct_body" ": Provided an null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
switch (entry->Type)
|
||||
{
|
||||
GEN_AST_BODY_STRUCT_UNALLOWED_TYPES:
|
||||
@ -2066,7 +1973,6 @@ CodeBody def_struct_body( s32 num, Code* codes )
|
||||
default:
|
||||
break;
|
||||
}
|
||||
|
||||
body_append(result, entry);
|
||||
}
|
||||
while (num--, num > 0);
|
||||
@ -2088,19 +1994,14 @@ CodeBody def_union_body( s32 num, ... )
|
||||
{
|
||||
Code_POD pod = va_arg(va, Code_POD);
|
||||
Code entry = pcast( Code, pod );
|
||||
|
||||
if ( ! entry )
|
||||
{
|
||||
if ( ! entry ) {
|
||||
log_failure("gen::def_union_body: Provided a null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment )
|
||||
{
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment ) {
|
||||
log_failure("gen::def_union_body: Entry type is not allowed - %s. Must be of untyped or comment type.", code_debug_str(entry) );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
body_append(result, entry );
|
||||
}
|
||||
while ( num--, num > 0 );
|
||||
@ -2116,23 +2017,17 @@ CodeBody def_union_body( s32 num, Code* codes )
|
||||
CodeBody
|
||||
result = (CodeBody) make_code();
|
||||
result->Type = CT_Union_Body;
|
||||
|
||||
do
|
||||
{
|
||||
Code entry = *codes;
|
||||
|
||||
if ( ! entry )
|
||||
{
|
||||
if ( ! entry ) {
|
||||
log_failure("gen::def_union_body: Provided a null entry");
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment )
|
||||
{
|
||||
if ( entry->Type != CT_Untyped && entry->Type != CT_Comment ) {
|
||||
log_failure("gen::def_union_body: Entry type is not allowed: %s", code_debug_str(entry) );
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
body_append(result, entry );
|
||||
}
|
||||
while ( codes++, num--, num > 0 );
|
||||
|
@ -4,92 +4,6 @@
|
||||
#include "gen/etoktype.cpp"
|
||||
#endif
|
||||
|
||||
GEN_NS_PARSER_BEGIN
|
||||
|
||||
enum TokFlags : u32
|
||||
{
|
||||
TF_Operator = bit(0),
|
||||
TF_Assign = bit(1),
|
||||
TF_Preprocess = bit(2),
|
||||
TF_Preprocess_Cond = bit(3),
|
||||
TF_Attribute = bit(6),
|
||||
TF_AccessOperator = bit( 7 ),
|
||||
TF_AccessSpecifier = bit( 8 ),
|
||||
TF_Specifier = bit( 9 ),
|
||||
TF_EndDefinition = bit( 10 ), // Either ; or }
|
||||
TF_Formatting = bit( 11 ),
|
||||
TF_Literal = bit( 12 ),
|
||||
|
||||
TF_Null = 0,
|
||||
TF_UnderlyingType = GEN_U32_MAX,
|
||||
};
|
||||
|
||||
struct Token
|
||||
{
|
||||
Str Text;
|
||||
TokType Type;
|
||||
s32 Line;
|
||||
s32 Column;
|
||||
u32 Flags;
|
||||
};
|
||||
|
||||
constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null };
|
||||
|
||||
forceinline
|
||||
AccessSpec tok_to_access_specifier(Token tok) {
|
||||
return scast(AccessSpec, tok.Type);
|
||||
}
|
||||
|
||||
forceinline
|
||||
Str tok_to_str(Token tok) {
|
||||
return tok.Text;
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_valid( Token tok ) {
|
||||
return tok.Text.Ptr && tok.Text.Len && tok.Type != Tok_Invalid;
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_access_operator(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_AccessOperator );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_access_specifier(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_AccessSpecifier );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_attribute(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Attribute );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_operator(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Operator );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_preprocessor(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_preprocess_cond(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess_Cond );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_specifier(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Specifier );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_end_definition(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_EndDefinition );
|
||||
}
|
||||
|
||||
StrBuilder tok_to_strbuilder(Token tok)
|
||||
{
|
||||
StrBuilder result = strbuilder_make_reserve( _ctx->Allocator_Temp, kilobytes(4) );
|
||||
@ -103,11 +17,6 @@ StrBuilder tok_to_strbuilder(Token tok)
|
||||
return result;
|
||||
}
|
||||
|
||||
struct TokArray
|
||||
{
|
||||
Array(Token) Arr;
|
||||
s32 Idx;
|
||||
};
|
||||
|
||||
bool lex__eat( TokArray* self, TokType type );
|
||||
|
||||
@ -160,27 +69,12 @@ Token* lex_next(TokArray self, bool skip_formatting)
|
||||
return & self.Arr[idx + 1];
|
||||
}
|
||||
|
||||
global FixedArena_256KB Lexer_defines_map_arena;
|
||||
global StringTable Lexer_defines;
|
||||
global Array(Token) Lexer_Tokens;
|
||||
|
||||
enum
|
||||
{
|
||||
Lex_Continue,
|
||||
Lex_ReturnNull,
|
||||
};
|
||||
|
||||
struct LexContext
|
||||
{
|
||||
Str content;
|
||||
s32 left;
|
||||
char const* scanner;
|
||||
s32 line;
|
||||
s32 column;
|
||||
StringTable defines;
|
||||
Token token;
|
||||
};
|
||||
|
||||
forceinline
|
||||
void lexer_move_forward( LexContext* ctx )
|
||||
{
|
||||
@ -224,7 +118,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
{
|
||||
char const* hash = ctx->scanner;
|
||||
Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
|
||||
array_append( Lexer_Tokens, hash_tok );
|
||||
array_append( _ctx->Lexer_Tokens, hash_tok );
|
||||
|
||||
move_forward();
|
||||
skip_whitespace();
|
||||
@ -300,14 +194,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
|
||||
ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
|
||||
ctx->token.Text.Ptr = hash;
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
return Lex_Continue; // Skip found token, it's all handled here.
|
||||
}
|
||||
|
||||
if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
|
||||
{
|
||||
ctx->token.Flags |= TF_Preprocess_Cond;
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
end_line();
|
||||
return Lex_Continue;
|
||||
}
|
||||
@ -316,7 +210,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
ctx->token.Flags |= TF_Preprocess_Cond;
|
||||
}
|
||||
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
|
||||
skip_whitespace();
|
||||
|
||||
@ -340,7 +234,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
name.Text.Len++;
|
||||
}
|
||||
|
||||
array_append( Lexer_Tokens, name );
|
||||
array_append( _ctx->Lexer_Tokens, name );
|
||||
|
||||
u64 key = crc32( name.Text.Ptr, name.Text.Len );
|
||||
hashtable_set(ctx->defines, key, tok_to_str(name) );
|
||||
@ -386,7 +280,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
move_forward();
|
||||
}
|
||||
|
||||
array_append( Lexer_Tokens, preprocess_content );
|
||||
array_append( _ctx->Lexer_Tokens, preprocess_content );
|
||||
return Lex_Continue; // Skip found token, it's all handled here.
|
||||
}
|
||||
|
||||
@ -449,7 +343,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
preprocess_content.Text.Len++;
|
||||
}
|
||||
|
||||
array_append( Lexer_Tokens, preprocess_content );
|
||||
array_append( _ctx->Lexer_Tokens, preprocess_content );
|
||||
return Lex_Continue; // Skip found token, it's all handled here.
|
||||
}
|
||||
|
||||
@ -458,7 +352,7 @@ void lex_found_token( LexContext* ctx )
|
||||
{
|
||||
if ( ctx->token.Type != Tok_Invalid )
|
||||
{
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
return;
|
||||
}
|
||||
|
||||
@ -485,7 +379,7 @@ void lex_found_token( LexContext* ctx )
|
||||
}
|
||||
|
||||
ctx->token.Type = type;
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
return;
|
||||
}
|
||||
|
||||
@ -495,7 +389,7 @@ void lex_found_token( LexContext* ctx )
|
||||
{
|
||||
ctx->token.Type = type;
|
||||
ctx->token.Flags |= TF_Specifier;
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
return;
|
||||
}
|
||||
|
||||
@ -503,7 +397,7 @@ void lex_found_token( LexContext* ctx )
|
||||
if ( type != Tok_Invalid )
|
||||
{
|
||||
ctx->token.Type = type;
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
return;
|
||||
}
|
||||
|
||||
@ -557,7 +451,7 @@ void lex_found_token( LexContext* ctx )
|
||||
ctx->token.Type = Tok_Identifier;
|
||||
}
|
||||
|
||||
array_append( Lexer_Tokens, ctx->token );
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
}
|
||||
|
||||
neverinline
|
||||
@ -568,7 +462,7 @@ TokArray lex( Str content )
|
||||
c.content = content;
|
||||
c.left = content.Len;
|
||||
c.scanner = content.Ptr;
|
||||
c.defines = Lexer_defines;
|
||||
c.defines = _ctx->Lexer_defines;
|
||||
|
||||
char const* word = c.scanner;
|
||||
s32 word_length = 0;
|
||||
@ -602,7 +496,7 @@ TokArray lex( Str content )
|
||||
hashtable_set(c.defines, key, * entry );
|
||||
}
|
||||
|
||||
array_clear(Lexer_Tokens);
|
||||
array_clear(_ctx->Lexer_Tokens);
|
||||
|
||||
while (c.left )
|
||||
{
|
||||
@ -635,7 +529,7 @@ TokArray lex( Str content )
|
||||
c.token.Type = Tok_NewLine;
|
||||
c.token.Text.Len++;
|
||||
|
||||
array_append( Lexer_Tokens, c.token );
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -674,7 +568,7 @@ TokArray lex( Str content )
|
||||
c.token.Text.Len++;
|
||||
move_forward();
|
||||
|
||||
array_append( Lexer_Tokens, c.token );
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
}
|
||||
}
|
||||
continue;
|
||||
@ -1131,7 +1025,7 @@ TokArray lex( Str content )
|
||||
move_forward();
|
||||
c.token.Text.Len++;
|
||||
}
|
||||
array_append( Lexer_Tokens, c.token );
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
continue;
|
||||
}
|
||||
else if ( (* ctx->scanner) == '*' )
|
||||
@ -1167,7 +1061,7 @@ TokArray lex( Str content )
|
||||
move_forward();
|
||||
c.token.Text.Len++;
|
||||
}
|
||||
array_append( Lexer_Tokens, c.token );
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
// end_line();
|
||||
continue;
|
||||
}
|
||||
@ -1260,14 +1154,14 @@ TokArray lex( Str content )
|
||||
}
|
||||
else
|
||||
{
|
||||
s32 start = max( 0, array_num(Lexer_Tokens) - 100 );
|
||||
s32 start = max( 0, array_num(_ctx->Lexer_Tokens) - 100 );
|
||||
log_fmt("\n%d\n", start);
|
||||
for ( s32 idx = start; idx < array_num(Lexer_Tokens); idx++ )
|
||||
for ( s32 idx = start; idx < array_num(_ctx->Lexer_Tokens); idx++ )
|
||||
{
|
||||
log_fmt( "Token %d Type: %s : %.*s\n"
|
||||
, idx
|
||||
, toktype_to_str( Lexer_Tokens[ idx ].Type ).Ptr
|
||||
, Lexer_Tokens[ idx ].Text.Len, Lexer_Tokens[ idx ].Text.Ptr
|
||||
, toktype_to_str( _ctx->Lexer_Tokens[ idx ].Type ).Ptr
|
||||
, _ctx->Lexer_Tokens[ idx ].Text.Len, _ctx->Lexer_Tokens[ idx ].Text.Ptr
|
||||
);
|
||||
}
|
||||
|
||||
@ -1284,7 +1178,7 @@ TokArray lex( Str content )
|
||||
FoundToken:
|
||||
{
|
||||
lex_found_token( ctx );
|
||||
TokType last_type = array_back(Lexer_Tokens)->Type;
|
||||
TokType last_type = array_back(_ctx->Lexer_Tokens)->Type;
|
||||
if ( last_type == Tok_Preprocess_Macro )
|
||||
{
|
||||
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
|
||||
@ -1301,14 +1195,14 @@ TokArray lex( Str content )
|
||||
c.token.Text.Len++;
|
||||
move_forward();
|
||||
|
||||
array_append( Lexer_Tokens, c.token );
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( array_num(Lexer_Tokens) == 0 )
|
||||
if ( array_num(_ctx->Lexer_Tokens) == 0 )
|
||||
{
|
||||
log_failure( "Failed to lex any tokens" );
|
||||
{
|
||||
@ -1317,13 +1211,11 @@ TokArray lex( Str content )
|
||||
}
|
||||
}
|
||||
|
||||
hashtable_clear(Lexer_defines);
|
||||
hashtable_clear(_ctx->Lexer_defines);
|
||||
// defines_map_arena.free();
|
||||
TokArray result = { Lexer_Tokens, 0 };
|
||||
TokArray result = { _ctx->Lexer_Tokens, 0 };
|
||||
return result;
|
||||
}
|
||||
#undef move_forward
|
||||
#undef skip_whitespace
|
||||
#undef end_line
|
||||
|
||||
GEN_NS_PARSER_END
|
||||
|
(File diff suppressed because it is too large.)

New file: base/components/parser_types.hpp (125 lines)
@ -0,0 +1,125 @@
|
||||
#ifdef GEN_INTELLISENSE_DIRECTIVES
|
||||
#pragma once
|
||||
#include "types.hpp"
|
||||
#include "gen/ecode.hpp"
|
||||
#include "gen/eoperator.hpp"
|
||||
#include "gen/especifier.hpp"
|
||||
#endif
|
||||
|
||||
enum TokFlags : u32
|
||||
{
|
||||
TF_Operator = bit(0),
|
||||
TF_Assign = bit(1),
|
||||
TF_Preprocess = bit(2),
|
||||
TF_Preprocess_Cond = bit(3),
|
||||
TF_Attribute = bit(6),
|
||||
TF_AccessOperator = bit( 7 ),
|
||||
TF_AccessSpecifier = bit( 8 ),
|
||||
TF_Specifier = bit( 9 ),
|
||||
TF_EndDefinition = bit( 10 ), // Either ; or }
|
||||
TF_Formatting = bit( 11 ),
|
||||
TF_Literal = bit( 12 ),
|
||||
|
||||
TF_Null = 0,
|
||||
TF_UnderlyingType = GEN_U32_MAX,
|
||||
};
|
||||
|
||||
struct Token
|
||||
{
|
||||
Str Text;
|
||||
TokType Type;
|
||||
s32 Line;
|
||||
s32 Column;
|
||||
u32 Flags;
|
||||
};
|
||||
|
||||
constexpr Token NullToken { {}, Tok_Invalid, 0, 0, TF_Null };
|
||||
|
||||
forceinline
|
||||
AccessSpec tok_to_access_specifier(Token tok) {
|
||||
return scast(AccessSpec, tok.Type);
|
||||
}
|
||||
|
||||
forceinline
|
||||
Str tok_to_str(Token tok) {
|
||||
return tok.Text;
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_valid( Token tok ) {
|
||||
return tok.Text.Ptr && tok.Text.Len && tok.Type != Tok_Invalid;
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_access_operator(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_AccessOperator );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_access_specifier(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_AccessSpecifier );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_attribute(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Attribute );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_operator(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Operator );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_preprocessor(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_preprocess_cond(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess_Cond );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_specifier(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_Specifier );
|
||||
}
|
||||
|
||||
forceinline
|
||||
bool tok_is_end_definition(Token tok) {
|
||||
return bitfield_is_equal( u32, tok.Flags, TF_EndDefinition );
|
||||
}
|
||||
|
||||
StrBuilder tok_to_strbuilder(Token tok);
|
||||
|
||||
struct TokArray
|
||||
{
|
||||
Array(Token) Arr;
|
||||
s32 Idx;
|
||||
};
|
||||
|
||||
struct LexContext
|
||||
{
|
||||
Str content;
|
||||
s32 left;
|
||||
char const* scanner;
|
||||
s32 line;
|
||||
s32 column;
|
||||
StringTable defines;
|
||||
Token token;
|
||||
};
|
||||
|
||||
struct StackNode
|
||||
{
|
||||
StackNode* Prev;
|
||||
|
||||
Token* Start;
|
||||
Str Name; // The name of the AST node (if parsed)
|
||||
Str ProcName; // The name of the procedure
|
||||
};
|
||||
|
||||
struct ParseContext
|
||||
{
|
||||
TokArray Tokens;
|
||||
StackNode* Scope;
|
||||
};
|
@ -7,8 +7,13 @@
|
||||
global Context* _ctx;
|
||||
|
||||
#pragma region Constants
|
||||
global u32 context_counter;
|
||||
|
||||
global Str enum_underlying_sig;
|
||||
|
||||
global Code Code_Global;
|
||||
global Code Code_Invalid;
|
||||
|
||||
global Code access_public;
|
||||
global Code access_protected;
|
||||
global Code access_private;
|
||||
|
@ -284,7 +284,7 @@ GEN_FILE_OPEN_PROC( _posix_file_open )
|
||||
|
||||
internal void _dirinfo_free_entry( DirEntry* entry );
|
||||
|
||||
// TODO : Is this a bad idea?
|
||||
// TODO(zpl) : Is this a bad idea?
|
||||
global b32 _std_file_set = false;
|
||||
global FileInfo _std_files[ EFileStandard_COUNT ] = {
|
||||
{
|
||||
|
@ -143,27 +143,15 @@
|
||||
|
||||
#if GEN_DONT_USE_NAMESPACE || GEN_COMPILER_C
|
||||
# if GEN_COMPILER_C
|
||||
# define GEN_NS_PARSER_BEGIN
|
||||
# define GEN_NS_PARSER_END
|
||||
# define GEN_USING_NS_PARSER
|
||||
# define GEN_NS_PARSER
|
||||
# define GEN_NS
|
||||
# define GEN_NS_BEGIN
|
||||
# define GEN_NS_END
|
||||
# else
|
||||
# define GEN_NS_PARSER_BEGIN namespace parser {
|
||||
# define GEN_NS_PARSER_END }
|
||||
# define GEN_USING_NS_PARSER using namespace parser
|
||||
# define GEN_NS_PARSER parser::
|
||||
# define GEN_NS ::
|
||||
# define GEN_NS_BEGIN
|
||||
# define GEN_NS_END
|
||||
# endif
|
||||
#else
|
||||
# define GEN_NS_PARSER_BEGIN namespace parser {
|
||||
# define GEN_NS_PARSER_END }
|
||||
# define GEN_NS_PARSER parser::
|
||||
# define GEN_USING_NS_PARSER using namespace parser
|
||||
# define GEN_NS gen::
|
||||
# define GEN_NS_BEGIN namespace gen {
|
||||
# define GEN_NS_END }
|
||||
|
@ -31,7 +31,6 @@ GEN_NS_BEGIN
|
||||
|
||||
#include "components/interface.cpp"
|
||||
#include "components/interface.upfront.cpp"
|
||||
#include "components/gen/etoktype.cpp"
|
||||
#include "components/lexer.cpp"
|
||||
#include "components/parser.cpp"
|
||||
#include "components/interface.parsing.cpp"
|
||||
|
@ -20,6 +20,8 @@ GEN_NS_BEGIN
|
||||
#include "components/gen/ecodetypes.hpp"
|
||||
#include "components/gen/eoperator.hpp"
|
||||
#include "components/gen/especifier.hpp"
|
||||
#include "components/gen/etoktype.hpp"
|
||||
#include "components/parser_types.hpp"
|
||||
|
||||
#include "components/ast.hpp"
|
||||
#include "components/code_types.hpp"
|
||||
|
@ -449,7 +449,6 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
|
||||
#pragma pop_macro("do_once_end")
|
||||
|
||||
CodeBody result = def_body(CT_Global_Body);
|
||||
body_append(result, untyped_str(txt("GEN_NS_PARSER_BEGIN\n\n")));
|
||||
body_append(result, attribute_entires_def);
|
||||
body_append(result, enum_code);
|
||||
if (use_c_definition)
|
||||
@ -459,7 +458,6 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
|
||||
}
|
||||
body_append(result, to_str);
|
||||
body_append(result, to_type);
|
||||
body_append(result, untyped_str(txt("\nGEN_NS_PARSER_END\n\n")));
|
||||
return result;
|
||||
}
|
||||
|
||||
|
@ -127,6 +127,7 @@ int gen_main()
|
||||
{
|
||||
Code header_start = scan_file( path_base "components/header_start.hpp" );
|
||||
Code types = scan_file( path_base "components/types.hpp" );
|
||||
Code parser_types = scan_file( path_base "components/parser_types.hpp" );
|
||||
Code ast = scan_file( path_base "components/ast.hpp" );
|
||||
Code ast_types = scan_file( path_base "components/ast_types.hpp" );
|
||||
Code code_types = scan_file( path_base "components/code_types.hpp" );
|
||||
@ -137,6 +138,7 @@ int gen_main()
|
||||
CodeBody ecode = gen_ecode ( path_base "enums/ECodeTypes.csv" );
|
||||
CodeBody eoperator = gen_eoperator ( path_base "enums/EOperator.csv" );
|
||||
CodeBody especifier = gen_especifier( path_base "enums/ESpecifier.csv" );
|
||||
CodeBody etoktype = gen_etoktype ( path_base "enums/ETokType.csv", path_base "enums/AttributeTokens.csv" );
|
||||
CodeBody ast_inlines = gen_ast_inlines();
|
||||
|
||||
Builder _header = builder_open( "gen/gen.hpp" );
|
||||
@ -156,6 +158,8 @@ int gen_main()
|
||||
builder_print( header, fmt_newline);
|
||||
builder_print( header, format(especifier) );
|
||||
builder_print( header, fmt_newline);
|
||||
builder_print( header, format(etoktype));
|
||||
builder_print( header, parser_types);
|
||||
builder_print_fmt( header, "#pragma endregion Types\n\n" );
|
||||
|
||||
builder_print_fmt( header, "#pragma region AST\n" );
|
||||
@ -193,12 +197,6 @@ int gen_main()
|
||||
Code parsing_interface = scan_file( path_base "components/interface.parsing.cpp" );
|
||||
Code untyped = scan_file( path_base "components/interface.untyped.cpp" );
|
||||
|
||||
CodeBody etoktype = gen_etoktype( path_base "enums/ETokType.csv", path_base "enums/AttributeTokens.csv" );
|
||||
CodeBody nspaced_etoktype = def_global_body( args(
|
||||
etoktype
|
||||
));
|
||||
Code formatted_toktype = format(nspaced_etoktype);
|
||||
|
||||
Builder _src = builder_open( "gen/gen.cpp" );
|
||||
Builder* src = & _src;
|
||||
builder_print_fmt( src, generation_notice );
|
||||
@ -218,7 +216,6 @@ int gen_main()
|
||||
builder_print( src, interface );
|
||||
builder_print( src, upfront );
|
||||
builder_print_fmt( src, "\n#pragma region Parsing\n\n" );
|
||||
builder_print( src, formatted_toktype );
|
||||
builder_print( src, lexer );
|
||||
builder_print( src, parser );
|
||||
builder_print( src, parsing_interface );
|
||||
|
@ -54,7 +54,8 @@ Code format( Code code ) {
|
||||
|
||||
int gen_main()
|
||||
{
|
||||
gen::init();
|
||||
Context ctx {};
|
||||
gen::init( & ctx);
|
||||
|
||||
Code push_ignores = scan_file( path_base "helpers/push_ignores.inline.hpp" );
|
||||
Code pop_ignores = scan_file( path_base "helpers/pop_ignores.inline.hpp" );
|
||||
@ -112,6 +113,7 @@ int gen_main()
|
||||
}
|
||||
|
||||
Code types = scan_file( path_base "components/types.hpp" );
|
||||
Code parser_types = scan_file( path_base "components/parser_types.hpp");
|
||||
Code ast = scan_file( path_base "components/ast.hpp" );
|
||||
Code ast_types = scan_file( path_base "components/ast_types.hpp" );
|
||||
Code code_types = scan_file( path_base "components/code_types.hpp" );
|
||||
@ -122,6 +124,7 @@ int gen_main()
|
||||
CodeBody ecode = gen_ecode ( path_base "enums/ECodeTypes.csv" );
|
||||
CodeBody eoperator = gen_eoperator ( path_base "enums/EOperator.csv" );
|
||||
CodeBody especifier = gen_especifier( path_base "enums/ESpecifier.csv" );
|
||||
CodeBody etoktype = gen_etoktype ( path_base "enums/ETokType.csv", path_base "enums/AttributeTokens.csv" );
|
||||
CodeBody ast_inlines = gen_ast_inlines();
|
||||
|
||||
header.print_fmt( "GEN_NS_BEGIN\n\n" );
|
||||
@ -135,6 +138,9 @@ int gen_main()
|
||||
header.print( fmt_newline );
|
||||
header.print( format( especifier ));
|
||||
header.print( fmt_newline );
|
||||
header.print( format( etoktype ));
|
||||
header.print( parser_types );
|
||||
header.print( fmt_newline );
|
||||
header.print_fmt("#pragma endregion Types\n\n");
|
||||
|
||||
header.print_fmt("#pragma region AST\n");
|
||||
@ -215,8 +221,6 @@ int gen_main()
|
||||
Code parsing_interface = scan_file( path_base "components/interface.parsing.cpp" );
|
||||
Code untyped = scan_file( path_base "components/interface.untyped.cpp" );
|
||||
|
||||
CodeBody etoktype = gen_etoktype( path_base "enums/ETokType.csv", path_base "enums/AttributeTokens.csv" );
|
||||
|
||||
header.print_fmt( "\nGEN_NS_BEGIN\n");
|
||||
header.print( static_data );
|
||||
|
||||
@ -230,7 +234,6 @@ int gen_main()
|
||||
header.print( interface );
|
||||
header.print( upfront );
|
||||
header.print_fmt( "\n#pragma region Parsing\n\n" );
|
||||
header.print( format(etoktype) );
|
||||
header.print( lexer );
|
||||
header.print( parser );
|
||||
header.print( parsing_interface );
|
||||
@ -255,6 +258,6 @@ int gen_main()
|
||||
header.print( pop_ignores );
|
||||
header.write();
|
||||
|
||||
gen::deinit();
|
||||
gen::deinit( & ctx);
|
||||
return 0;
|
||||
}
|
||||
|
@ -7,12 +7,20 @@
|
||||
https://github.com/Ed94/gencpp
|
||||
|
||||
This is a variant intended for use with Unreal Engine 5
|
||||
|
||||
! ----------------------------------------------------------------------- VERSION: v0.20-Alpha !
|
||||
! ============================================================================================ !
|
||||
! WARNING: THIS IS AN ALPHA VERSION OF THE LIBRARY, USE AT YOUR OWN DISCRETION !
|
||||
! NEVER DO CODE GENERATION WITHOUT AT LEAST HAVING CONTENT IN A CODEBASE UNDER VERSION CONTROL !
|
||||
! ============================================================================================ !
|
||||
https://github.com/Ed94/gencpp --------------------------------------------------------------.
|
||||
| _____ _____ _ _ |
|
||||
| / ____) / ____} | | | |
|
||||
| | / ___ ___ _ __ ___ _ __ _ __ | {___ | |__ _ _, __ _, ___ __| | |
|
||||
| | |{_ |/ _ \ '_ \ / __} '_ l| '_ l `\___ \| __/ _` |/ _` |/ _ \/ _` | |
|
||||
| | l__j | ___/ | | | {__; |+l } |+l | ____) | l| (_| | {_| | ___/ (_| | |
|
||||
| \_____|\___}_l |_|\___} ,__/| ,__/ (_____/ \__\__/_|\__, |\___}\__,_l |
|
||||
| Unreal Engine | | | | __} | |
|
||||
| l_l l_l {___/ |
|
||||
! ----------------------------------------------------------------------- VERSION: v0.20-Alpha |
|
||||
! ============================================================================================ |
|
||||
! WARNING: THIS IS AN ALPHA VERSION OF THE LIBRARY, USE AT YOUR OWN DISCRETION |
|
||||
! NEVER DO CODE GENERATION WITHOUT AT LEAST HAVING CONTENT IN A CODEBASE UNDER VERSION CONTROL |
|
||||
! ============================================================================================ /
|
||||
*/
|
||||
#if ! defined(GEN_DONT_ENFORCE_GEN_TIME_GUARD) && ! defined(GEN_TIME)
|
||||
# error Gen.hpp : GEN_TIME not defined
|
||||
|
@ -55,7 +55,8 @@ Code format( Code code ) {
|
||||
|
||||
int gen_main()
|
||||
{
|
||||
gen::init();
|
||||
Context ctx {};
|
||||
gen::init( & ctx);
|
||||
|
||||
Code push_ignores = scan_file( path_base "helpers/push_ignores.inline.hpp" );
|
||||
Code pop_ignores = scan_file( path_base "helpers/pop_ignores.inline.hpp" );
|
||||
@ -67,7 +68,7 @@ int gen_main()
|
||||
{
|
||||
CodeBody macros = def_body( CT_Global_Body );
|
||||
{
|
||||
FileContents content = file_read_contents( FallbackAllocator, true, path_base "dependencies/macros.hpp" );
|
||||
FileContents content = file_read_contents( ctx.Allocator_Temp, file_zero_terminate, path_base "dependencies/macros.hpp" );
|
||||
CodeBody ori_macros = parse_global_body( Str { (char const*)content.data, content.size });
|
||||
|
||||
for (Code code = ori_macros.begin();
|
||||
@ -175,6 +176,7 @@ int gen_main()
|
||||
{
|
||||
Code header_start = scan_file( "components/header_start.hpp" );
|
||||
Code types = scan_file( path_base "components/types.hpp" );
|
||||
Code parser_types = scan_file( path_base "components/parser_types.hpp");
|
||||
Code ast = scan_file( path_base "components/ast.hpp" );
|
||||
Code ast_types = scan_file( path_base "components/ast_types.hpp" );
|
||||
Code code_types = scan_file( path_base "components/code_types.hpp" );
|
||||
@ -187,6 +189,11 @@ int gen_main()
|
||||
CodeBody especifier = gen_especifier( path_base "enums/ESpecifier.csv" );
|
||||
CodeBody ast_inlines = gen_ast_inlines();
|
||||
|
||||
// Note(Ed): The Attribute tokens need to be expanded and regenerated on a per-project/installation of this library for a specific codebase of Unreal.
|
||||
// We can support an arbitrary set of modules or plugin apis for parsing
|
||||
// but it's up to the user to define them all (this will just provide what I've used up till now).
|
||||
CodeBody etoktype = gen_etoktype( path_base "enums/ETokType.csv", "enums/AttributeTokens.csv" );
|
||||
|
||||
Builder
|
||||
header = Builder::open( "gen/gen.hpp" );
|
||||
header.print_fmt( generation_notice );
|
||||
@ -205,6 +212,8 @@ int gen_main()
|
||||
header.print( fmt_newline );
|
||||
header.print( format(especifier) );
|
||||
header.print( fmt_newline );
|
||||
header.print( format(etoktype) );
|
||||
header.print( parser_types );
|
||||
header.print_fmt( "#pragma endregion Types\n\n" );
|
||||
|
||||
header.print_fmt( "#pragma region AST\n" );
|
||||
@ -242,11 +251,6 @@ int gen_main()
|
||||
Code parsing_interface = scan_file( path_base "components/interface.parsing.cpp" );
|
||||
Code untyped = scan_file( path_base "components/interface.untyped.cpp" );
|
||||
|
||||
// Note(Ed): The Attribute tokens need to be expanded and regenerated on a per-project/installation of this library for a specific codebase of Unreal.
|
||||
// We can support an arbitrary set of modules or plugin apis for parsing
|
||||
// but it's up to the user to define them all (this will just provide what I've used up till now).
|
||||
CodeBody etoktype = gen_etoktype( path_base "enums/ETokType.csv", "enums/AttributeTokens.csv" );
|
||||
|
||||
Builder
|
||||
src = Builder::open( "gen/gen.cpp" );
|
||||
src.print_fmt( generation_notice );
|
||||
@ -268,7 +272,6 @@ int gen_main()
|
||||
src.print( interface );
|
||||
src.print( upfront );
|
||||
src.print_fmt( "\n#pragma region Parsing\n\n" );
|
||||
src.print( format(etoktype) );
|
||||
src.print( lexer );
|
||||
src.print( parser );
|
||||
src.print( parsing_interface );
|
||||
|
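Taken together, the lexer and parser hunks above replace file-scope globals (Lexer_Tokens, Lexer_defines, parser_ctx) with members of the Context struct, reached through the library's _ctx pointer. A hedged sketch of the resulting before/after access pattern (array_append, Token, internal, and _ctx->Lexer_Tokens are taken from the hunks; the wrapper function itself is hypothetical):

// Illustrative only: the access-pattern change implied by this commit.
internal void example_push_token( Token tok )
{
    // Before: array_append( Lexer_Tokens, tok );   // file-scope global owned by lexer.cpp
    // After:  the token array is a Context member, so it is reached through _ctx.
    array_append( _ctx->Lexer_Tokens, tok );
}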