mirror of https://github.com/Ed94/gencpp.git
synced 2025-06-15 03:01:47 -07:00

Compare commits · 18 commits

Commits (SHA1):
685bba36d5
346e8e3305
bfc754e66c
84f4fc5ae9
ad5cb6597b
1c7dd4ab32
1e7fdcec16
2ed36506b1
790087aa3c
441a46daaa
26623075ad
7ea90ef349
6d531fdf97
3b81eea688
844d431e1c
727b54c341
ef72d27f3e
75b1d42cca
.gitignore (vendored)

@@ -44,3 +44,5 @@ test/c_library/gen
test/cpp_library/gen

!scripts/helpers/refactor.exe

ai/**
@@ -3,7 +3,7 @@
# include "helpers/push_ignores.inline.hpp"
# include "components/header_start.hpp"
# include "components/types.hpp"
# include "components/gen/ecode.hpp"
# include "components/gen/ecodetypes.hpp"
# include "components/gen/eoperator.hpp"
# include "components/gen/especifier.hpp"
# include "components/ast.hpp"

@@ -3,7 +3,7 @@
# include "helpers/push_ignores.inline.hpp"
# include "components/header_start.hpp"
# include "components/types.hpp"
# include "components/gen/ecode.hpp"
# include "components/gen/ecodetypes.hpp"
# include "components/gen/eoperator.hpp"
# include "components/gen/especifier.hpp"
# include "components/ast.hpp"
@@ -1,9 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "types.hpp"
#include "gen/ecode.hpp"
#include "gen/eoperator.hpp"
#include "gen/especifier.hpp"
#include "parser_types.hpp"
#endif

/*
@@ -406,7 +403,8 @@ struct AST
		Code PostNameMacro; // Only used with parameters for specifically UE_REQUIRES (Thanks Unreal)
	};
};
StrCached  Content;     // Attributes, Comment, Execution, Include
TokenSlice ContentToks; // TODO(Ed): Use a token slice for content
struct {
	Specifier ArrSpecs[AST_ArrSpecs_Cap]; // Specifiers
	Code      NextSpecs;                  // Specifiers; If ArrSpecs is full, then NextSpecs is used.
@@ -422,7 +420,7 @@ struct AST
	Code Next;
	Code Back;
};
Token*   Token; // Reference to starting token, only available if it was derived from parsing.
Token*   Token; // Reference to starting token, only available if it was derived from parsing. // TODO(Ed): Change this to a token slice.
Code     Parent;
CodeType Type;
// CodeFlag CodeFlags;
@@ -38,13 +38,13 @@ void body_to_strbuilder_export( CodeBody body, StrBuilder* result )
	GEN_ASSERT(result != nullptr);
	strbuilder_append_fmt( result, "export\n{\n" );

	Code curr = cast(Code, body);
	Code curr = body->Front;
	s32  left = body->NumEntries;
	while ( left-- )
	{
		code_to_strbuilder_ref(curr, result);
		// strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
		++curr;
		curr = curr->Next;
	}

	strbuilder_append_fmt( result, "};\n" );
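The fix above replaces pointer arithmetic (++curr) with traversal of the body's intrusive list through Front/Next. A minimal illustrative sketch of the same traversal pattern (not from the commit; count_entries is a hypothetical helper):

// Illustrative only: walk a CodeBody the way the corrected serializer does.
void count_entries( CodeBody body )
{
	Code curr  = body->Front;      // first entry of the body, not a cast of the body itself
	s32  left  = body->NumEntries;
	s32  count = 0;
	while ( left-- )
	{
		++ count;
		curr = curr->Next;         // follow the intrusive linked list
	}
	log_fmt( "body has %d entries\n", count );
}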
@@ -252,7 +252,8 @@ struct CodeSpecifiers
#if ! GEN_C_LIKE_CPP
	Using_Code( CodeSpecifiers );
	bool       append( Specifier spec )            { return specifiers_append(* this, spec); }
	s32        has( Specifier spec )               { return specifiers_has(* this, spec); }
	bool       has( Specifier spec )               { return specifiers_has(* this, spec); }
	s32        index_of( Specifier spec )          { return specifiers_index_of(* this, spec); }
	s32        remove( Specifier to_remove )       { return specifiers_remove(* this, to_remove); }
	StrBuilder to_strbuilder()                     { return specifiers_to_strbuilder(* this ); }
	void       to_strbuilder( StrBuilder& result ) { return specifiers_to_strbuilder_ref(* this, & result); }
@@ -1073,11 +1074,12 @@ forceinline bool has_entries (CodeParams params ) {
forceinline StrBuilder to_strbuilder(CodeParams params )                     { return params_to_strbuilder(params); }
forceinline void       to_strbuilder(CodeParams params, StrBuilder& result ) { return params_to_strbuilder_ref(params, & result); }

forceinline bool       append       (CodeSpecifiers specifiers, Specifier spec)       { return specifiers_append(specifiers, spec); }
forceinline s32        has          (CodeSpecifiers specifiers, Specifier spec)       { return specifiers_has(specifiers, spec); }
forceinline s32        remove       (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
forceinline StrBuilder to_strbuilder(CodeSpecifiers specifiers)                       { return specifiers_to_strbuilder(specifiers); }
forceinline void       to_strbuilder(CodeSpecifiers specifiers, StrBuilder& result)   { return specifiers_to_strbuilder_ref(specifiers, & result); }
forceinline bool       append             (CodeSpecifiers specifiers, Specifier spec)       { return specifiers_append(specifiers, spec); }
forceinline bool       has                (CodeSpecifiers specifiers, Specifier spec)       { return specifiers_has(specifiers, spec); }
forceinline s32        specifiers_index_of(CodeSpecifiers specifiers, Specifier spec)       { return specifiers_index_of(specifiers, spec); }
forceinline s32        remove             (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
forceinline StrBuilder to_strbuilder      (CodeSpecifiers specifiers)                       { return specifiers_to_strbuilder(specifiers); }
forceinline void       to_strbuilder      (CodeSpecifiers specifiers, StrBuilder& result)   { return specifiers_to_strbuilder_ref(specifiers, & result); }

forceinline void       add_interface (CodeStruct self, CodeTypename interface) { return struct_add_interface(self, interface); }
forceinline StrBuilder to_strbuilder (CodeStruct self)                         { return struct_to_strbuilder(self); }
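With the has wrapper now returning bool and an index query exposed, a typical call site looks like the following sketch (illustrative, not from the commit; Spec_Static and Spec_Inline are assumed Specifier values):

// Illustrative only: querying specifiers through the updated API.
CodeSpecifiers specs = def_specifier( Spec_Static ); // Spec_Static assumed to exist as a Specifier value
specifiers_append( specs, Spec_Inline );             // Spec_Inline likewise an assumed value

if ( specifiers_has( specs, Spec_Inline ) )           // returns bool now, was s32
{
	s32 idx = specifiers_index_of( specs, Spec_Inline );
	log_fmt( "inline specifier at index %d\n", idx );
}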
@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "interface.hpp"
#include "constants.hpp"
#endif

#pragma region Serialization
@@ -38,7 +38,7 @@ void body_to_strbuilder_ref( CodeBody body, StrBuilder* result )
	{
		code_to_strbuilder_ref(curr, result);
		// strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
		++curr;
		curr = curr->Next;
	}
}
@@ -3,8 +3,8 @@
#include "code_serialization.cpp"
#endif

internal void parser_init();
internal void parser_deinit();
internal void parser_init(Context* ctx);
internal void parser_deinit(Context* ctx);

internal
void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
@@ -71,6 +71,14 @@ void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size,
	return nullptr;
}

internal
void fallback_logger(LogEntry entry)
{
	GEN_ASSERT(entry.msg.Len > 0);
	GEN_ASSERT(entry.msg.Ptr);
	log_fmt("%S: %S", loglevel_to_str(entry.level), entry.msg);
}

internal
void define_constants()
{
@@ -283,6 +291,19 @@ void init(Context* ctx)
		ctx->InitSize_Fallback_Allocator_Bucket_Size = megabytes(8);
	}

	if (ctx->InitSize_StrCacheTable == 0)
	{
		ctx->InitSize_StrCacheTable = kilobytes(8);
	}
	if (ctx->InitSize_MacrosTable == 0)
	{
		ctx->InitSize_MacrosTable = kilobytes(8);
	}

	if (ctx->Logger == nullptr) {
		ctx->Logger = & fallback_logger;
	}

	// Override the current context (user has to put it back if unwanted).
	_ctx = ctx;

@@ -298,7 +319,7 @@ void init(Context* ctx)
	}
	// Setup the code pool and code entries arena.
	{
		Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, sizeof(AST) );
		Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, size_of(AST) );
		if ( code_pool.PhysicalStart == nullptr )
			GEN_FATAL( "gen::init: Failed to initialize the code pool" );
		array_append( ctx->CodePools, code_pool );
@@ -311,18 +332,18 @@ void init(Context* ctx)
	}
	// Setup the hash tables
	{
		ctx->StrCache = hashtable_init(StrCached, ctx->Allocator_DyanmicContainers);
		ctx->StrCache = hashtable_init_reserve(StrCached, ctx->Allocator_DyanmicContainers, ctx->InitSize_StrCacheTable);
		if ( ctx->StrCache.Entries == nullptr )
			GEN_FATAL( "gen::init: Failed to initialize the StringCache");

		ctx->Macros = hashtable_init(Macro, ctx->Allocator_DyanmicContainers);
		ctx->Macros = hashtable_init_reserve(Macro, ctx->Allocator_DyanmicContainers, ctx->InitSize_MacrosTable);
		if (ctx->Macros.Hashes == nullptr || ctx->Macros.Entries == nullptr) {
			GEN_FATAL( "gen::init: Failed to initialize the PreprocessMacros table" );
		}
	}

	define_constants();
	parser_init();
	parser_init(ctx);

	++ context_counter;
}
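A minimal initialization sketch using the new reserve knobs and logger hook introduced above (illustrative, not from the commit; field names come from the diff, the gen:: qualification assumes the C++ build):

// Illustrative only: overriding the new table-reserve sizes before init().
gen::Context ctx = {};
ctx.InitSize_StrCacheTable = kilobytes(16); // default set by init() is kilobytes(8)
ctx.InitSize_MacrosTable   = kilobytes(16);
// ctx.Logger left null: init() installs fallback_logger.
gen::init( & ctx );

// ... generate code ...

gen::deinit( & ctx );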
@@ -371,7 +392,7 @@ void deinit(Context* ctx)
		while ( left--, left );
		array_free( ctx->Fallback_AllocatorBuckets);
	}
	parser_deinit();
	parser_deinit(ctx);

	if (_ctx == ctx)
		_ctx = nullptr;
@@ -15,24 +15,6 @@
\▓▓▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓
*/

#if 0
enum LogLevel : u32
{
	Info,
	Warning,
	Panic,
};

struct LogEntry
{
	Str   msg;
	u32   line_num;
	void* data;
};

typedef void LoggerCallback(LogEntry entry);
#endif

// Note(Ed): This is subject to heavily change
// with upcoming changes to the library's fallback (default) allocations strategy;
// and major changes to lexer/parser context usage.
@@ -64,9 +46,16 @@ struct Context
	u32 InitSize_LexerTokens;
	u32 SizePer_StringArena;

	u32 InitSize_StrCacheTable;
	u32 InitSize_MacrosTable;

	// TODO(Ed): Symbol Table
	// Keep track of all resolved symbols (naemspaced identifiers)

	// Logging

	LoggerProc* Logger;

	// Parser

	// Used by the lexer to persistently treat all these identifiers as preprocessor defines.
@@ -89,9 +78,6 @@ struct Context

	StringTable StrCache;

	// TODO(Ed): This needs to be just handled by a parser context
	Array(Token) Lexer_Tokens;

	// TODO(Ed): Active parse context vs a parse result need to be separated conceptually
	ParseContext parser;

@@ -104,6 +90,37 @@
// An implicit context interface will be provided instead as wrapper procedures as convience.
GEN_API extern Context* _ctx;

// TODO(Ed): Swap all usage of this with logger_fmt (then rename logger_fmt to log_fmt)
inline
ssize log_fmt(char const* fmt, ...)
{
	ssize res;
	va_list va;

	va_start(va, fmt);
	res = c_str_fmt_out_va(fmt, va);
	va_end(va);

	return res;
}

inline
void logger_fmt(Context* ctx, LogLevel level, char const* fmt, ...)
{
	local_persist thread_local
	PrintF_Buffer buf = struct_zero_init();

	va_list va;
	va_start(va, fmt);
	ssize res = c_str_fmt_va(buf, GEN_PRINTF_MAXLEN, fmt, va) -1;
	va_end(va);

	StrBuilder msg = strbuilder_make_length(ctx->Allocator_Temp, buf, res);

	LogEntry entry = { strbuilder_to_str(msg), level };
	ctx->Logger(entry);
}

// Initialize the library. There first ctx initialized must exist for lifetime of other contextes that come after as its the one that
GEN_API void init(Context* ctx);

@@ -114,7 +131,7 @@ GEN_API void deinit(Context* ctx);
// Retrieves the active context (not usually needed, but here in case...)
GEN_API Context* get_context();

// Clears the allocations, but doesn't free the memoery, then calls init() again.
// Clears the allocations, but doesn't free the memory, then calls init() again.
// Ease of use.
GEN_API void reset(Context* ctx);
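Continuing the sketch from init(), a user-supplied logger can be routed through the new LoggerProc / logger_fmt path shown above (illustrative, not from the commit; my_logger is hypothetical):

// Illustrative only: a custom logger hooked into the Context.
void my_logger( gen::LogEntry entry )
{
	gen::log_fmt( "[%S] %S\n", gen::loglevel_to_str( entry.level ), entry.msg );
}

// ... before gen::init( & ctx ):
ctx.Logger = & my_logger;

// Later, emit a message through the context-aware formatter:
gen::logger_fmt( & ctx, gen::LL_Warning, "generation finished with %d warnings", 2 );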
@@ -334,37 +351,33 @@ forceinline CodeBody def_union_body ( s32 num, Code* codes )

#pragma region Parsing

#if 0
struct StackNode
struct ParseStackNode
{
	StackNode*      Prev;
	ParseStackNode* prev;

	Token Start;
	Token Name;       // The name of the AST node (if parsed)
	Str   FailedProc; // The name of the procedure that failed
};
// Stack nodes are allocated the error's allocator

struct Error
{
	StrBuilder message;
	StackNode* context_stack;
	TokenSlice tokens;
	Token*     start;
	Str        name;      // The name of the AST node (if parsed)
	Str        proc_name; // The name of the procedure
	Code       code_rel;  // Relevant AST node
	// TODO(Ed): When an error occurs, the parse stack is not released and instead the scope is left dangling.
};

struct ParseInfo
{
	Arena FileMem;
	Arena TokMem;
	Arena CodeMem;

	FileContents FileContent;
	Array<Token> Tokens;
	Array<Error> Errors;
	// Errors are allocated to a dedicated general arena.
	ParseMessage* messages;
	LexedInfo     lexed;
	Code          result;
};

CodeBody parse_file( Str path );
#endif
struct ParseOpts
{
	AllocatorInfo backing_msgs;
	AllocatorInfo backing_tokens;
	AllocatorInfo backing_ast;
};

ParseInfo wip_parse_str( LexedInfo lexed, ParseOpts* opts GEN_PARAM_DEFAULT );

GEN_API CodeClass       parse_class      ( Str class_def );
GEN_API CodeConstructor parse_constructor( Str constructor_def );
@@ -395,9 +408,10 @@ GEN_API ssize token_fmt_va( char* buf, usize buf_size, s32 num_tokens, va_list v
//! Do not use directly. Use the token_fmt macro instead.
Str token_fmt_impl( ssize, ... );

GEN_API Code untyped_str( Str content);
GEN_API Code untyped_str      ( Str content);
GEN_API Code untyped_fmt      ( char const* fmt, ... );
GEN_API Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... );
GEN_API Code untyped_toks     ( TokenSlice tokens );

#pragma endregion Untyped text

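The staged entry point declared above can be exercised roughly as follows (illustrative sketch only; lex is the internal lexer shown later in this compare, and ParseInfo's final field set is still in flux per the TODOs):

// Illustrative only: lex first, then hand the tokens to the WIP parser entry point.
Str source = { "struct Point { int x; int y; };", sizeof("struct Point { int x; int y; };") - 1 };

LexedInfo lexed  = lex( & ctx, source );            // ctx: the initialized Context from the earlier sketches
ParseInfo parsed = wip_parse_str( lexed, nullptr ); // opts defaulted via GEN_PARAM_DEFAULT
// parsed.lexed holds the token slice; the result/message fields are still being finalized upstream.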
@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "gen/etoktype.cpp"
#include "gen/etoktype.hpp"
#include "interface.upfront.cpp"
#include "lexer.cpp"
#include "parser.cpp"
@@ -8,29 +8,70 @@

// Publically Exposed Interface

ParseInfo wip_parse_str(LexedInfo lexed, ParseOpts* opts)
{
	// TODO(Ed): Lift this.
	Context* ctx = _ctx;

	if (lexed.tokens.num == 0 && lexed.tokens.ptr == nullptr) {
		check_parse_args(lexed.text);
		lexed = lex(ctx, lexed.text);
	}
	ParseInfo info = struct_zero(ParseInfo);
	info.lexed = lexed;

	// TODO(Ed): ParseInfo should be set to the parser context.

	ctx->parser        = struct_zero(ParseContext);
	ctx->parser.tokens = lexed.tokens;

	ParseStackNode scope = NullScope;
	parser_push(& ctx->parser, & scope);

	CodeBody result = parse_global_nspace(ctx,CT_Global_Body);

	parser_pop(& ctx->parser);
	return info;
}

CodeClass parse_class( Str def )
{
	// TODO(Ed): Lift this.
	Context* ctx = _ctx;

	check_parse_args( def );

	TokArray toks = lex( def );
	if ( toks.Arr == nullptr )
	ctx->parser = struct_zero(ParseContext);

	LexedInfo lexed = lex(ctx, def);
	ctx->parser.tokens = lexed.tokens;
	if ( ctx->parser.tokens.ptr == nullptr )
		return InvalidCode;

	_ctx->parser.Tokens = toks;
	push_scope();
	CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class, parser_not_inplace_def );
	parser_pop(& _ctx->parser);
	ParseStackNode scope = NullScope;
	parser_push(& ctx->parser, & scope);
	CodeClass result = (CodeClass) parse_class_struct( ctx, Tok_Decl_Class, parser_not_inplace_def );
	parser_pop(& ctx->parser);
	return result;
}

CodeConstructor parse_constructor( Str def )
|
||||
CodeConstructor parse_constructor(Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
ParseStackNode scope = NullScope;
|
||||
parser_push(& ctx->parser, & scope);
|
||||
|
||||
// TODO(Ed): Constructors can have prefix attributes
|
||||
|
||||
CodeSpecifiers specifiers = NullCode;
|
||||
@ -57,8 +98,8 @@ CodeConstructor parse_constructor( Str def )
|
||||
break;
|
||||
|
||||
default :
|
||||
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(_ctx->parser) );
|
||||
parser_pop(& _ctx->parser);
|
||||
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(& ctx->parser, ctx->Allocator_Temp) );
|
||||
parser_pop(& ctx->parser);
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
@ -71,247 +112,337 @@ CodeConstructor parse_constructor( Str def )
|
||||
eat( currtok.Type );
|
||||
}
|
||||
|
||||
if ( NumSpecifiers )
|
||||
{
|
||||
if ( NumSpecifiers ) {
|
||||
specifiers = def_specifiers_arr( NumSpecifiers, specs_found );
|
||||
// <specifiers> ...
|
||||
}
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
CodeConstructor result = parser_parse_constructor( specifiers );
|
||||
CodeConstructor result = parser_parse_constructor(ctx, specifiers);
|
||||
parser_pop(& ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeDefine parse_define( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeDefine result = parser_parse_define();
|
||||
parser_pop(& _ctx->parser);
|
||||
ParseStackNode scope = NullScope;
|
||||
parser_push(& ctx->parser, & scope);
|
||||
CodeDefine result = parser_parse_define(ctx);
|
||||
parser_pop(& ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeDestructor parse_destructor( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
// TODO(Ed): Destructors can have prefix attributes
|
||||
// TODO(Ed): Destructors can have virtual
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
CodeDestructor result = parser_parse_destructor(NullCode);
|
||||
CodeDestructor result = parser_parse_destructor(ctx, NullCode);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeEnum parse_enum( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
{
|
||||
parser_pop(& _ctx->parser);
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr ) {
|
||||
return InvalidCode;
|
||||
}
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_enum( parser_not_inplace_def);
|
||||
return parser_parse_enum(ctx, parser_not_inplace_def);
|
||||
}
|
||||
|
||||
CodeBody parse_export_body( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_export_body();
|
||||
return parser_parse_export_body(ctx);
|
||||
}
|
||||
|
||||
CodeExtern parse_extern_link( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_extern_link();
|
||||
return parser_parse_extern_link(ctx);
|
||||
}
|
||||
|
||||
CodeFriend parse_friend( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_friend();
|
||||
return parser_parse_friend(ctx);
|
||||
}
|
||||
|
||||
CodeFn parse_function( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return (CodeFn) parser_parse_function();
|
||||
return (CodeFn) parser_parse_function(ctx);
|
||||
}
|
||||
|
||||
CodeBody parse_global_body( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeBody result = parse_global_nspace( CT_Global_Body );
|
||||
parser_pop(& _ctx->parser);
|
||||
ParseStackNode scope = NullScope;
|
||||
parser_push(& ctx->parser, & scope);
|
||||
CodeBody result = parse_global_nspace(ctx, CT_Global_Body );
|
||||
parser_pop(& ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeNS parse_namespace( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_namespace();
|
||||
return parser_parse_namespace(ctx);
|
||||
}
|
||||
|
||||
CodeOperator parse_operator( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return (CodeOperator) parser_parse_operator();
|
||||
return (CodeOperator) parser_parse_operator(ctx);
|
||||
}
|
||||
|
||||
CodeOpCast parse_operator_cast( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_operator_cast(NullCode);
|
||||
return parser_parse_operator_cast(ctx, NullCode);
|
||||
}
|
||||
|
||||
CodeStruct parse_struct( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
push_scope();
|
||||
CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct, parser_not_inplace_def );
|
||||
parser_pop(& _ctx->parser);
|
||||
ParseStackNode scope = NullScope;
|
||||
parser_push(& ctx->parser, & scope);
|
||||
CodeStruct result = (CodeStruct) parse_class_struct( ctx, Tok_Decl_Struct, parser_not_inplace_def );
|
||||
parser_pop(& ctx->parser);
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeTemplate parse_template( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_template();
|
||||
return parser_parse_template(ctx);
|
||||
}
|
||||
|
||||
CodeTypename parse_type( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_type( parser_not_from_template, nullptr);
|
||||
return parser_parse_type( ctx, parser_not_from_template, nullptr);
|
||||
}
|
||||
|
||||
CodeTypedef parse_typedef( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_typedef();
|
||||
return parser_parse_typedef(ctx);
|
||||
}
|
||||
|
||||
CodeUnion parse_union( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_union( parser_not_inplace_def);
|
||||
return parser_parse_union(ctx, parser_not_inplace_def);
|
||||
}
|
||||
|
||||
CodeUsing parse_using( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_using();
|
||||
return parser_parse_using(ctx);
|
||||
}
|
||||
|
||||
CodeVar parse_variable( Str def )
|
||||
{
|
||||
// TODO(Ed): Lift this.
|
||||
Context* ctx = _ctx;
|
||||
|
||||
check_parse_args( def );
|
||||
|
||||
TokArray toks = lex( def );
|
||||
if ( toks.Arr == nullptr )
|
||||
ctx->parser = struct_zero(ParseContext);
|
||||
|
||||
LexedInfo lexed = lex(ctx, def);
|
||||
ctx->parser.tokens = lexed.tokens;
|
||||
if ( ctx->parser.tokens.ptr == nullptr )
|
||||
return InvalidCode;
|
||||
|
||||
_ctx->parser.Tokens = toks;
|
||||
return parser_parse_variable();
|
||||
return parser_parse_variable(ctx);
|
||||
}
|
||||
|
||||
// Undef helper macros
|
||||
@ -326,6 +457,7 @@ CodeVar parse_variable( Str def )
|
||||
#undef left
|
||||
#undef check
|
||||
#undef push_scope
|
||||
#undef NullScope
|
||||
#undef def_assign
|
||||
|
||||
// Here for C Variant
|
||||
|
@@ -176,3 +176,16 @@ Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... )

	return result;
}

Code untyped_toks( TokenSlice tokens )
{
	if ( tokens.num == 0 ) {
		log_failure( "untyped_toks: empty token slice" );
		return InvalidCode;
	}
	Code
	result              = make_code();
	result->Type        = CT_Untyped;
	result->ContentToks = tokens;
	return result;
}
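A short usage sketch for the new untyped_toks (illustrative only; reuses the lexed value from the earlier sketches rather than anything defined in this commit):

// Illustrative only: wrap an already-lexed token slice as an untyped Code node.
Code raw = untyped_toks( lexed.tokens );
if ( raw->Type == CT_Untyped )
	log_fmt( "wrapped %d tokens\n", lexed.tokens.num );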
@ -473,8 +473,10 @@ CodeComment def_comment( Str content )
|
||||
return (CodeComment) result;
|
||||
}
|
||||
|
||||
CodeConstructor def_constructor( Opts_def_constructor p )
|
||||
CodeConstructor def_constructor( Opts_def_constructor opt )
|
||||
{
|
||||
Opts_def_constructor p = get_optional(opt);
|
||||
|
||||
if ( p.params && p.params->Type != CT_Parameters ) {
|
||||
log_failure("gen::def_constructor: params must be of Parameters type - %s", code_debug_str((Code)p.params));
|
||||
GEN_DEBUG_TRAP();
|
||||
@ -510,8 +512,10 @@ CodeConstructor def_constructor( Opts_def_constructor p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeClass def_class( Str name, Opts_def_struct p )
|
||||
CodeClass def_class( Str name, Opts_def_struct opt )
|
||||
{
|
||||
Opts_def_struct p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_class, name ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -561,8 +565,10 @@ CodeClass def_class( Str name, Opts_def_struct p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeDefine def_define( Str name, MacroType type, Opts_def_define p )
|
||||
CodeDefine def_define( Str name, MacroType type, Opts_def_define opt )
|
||||
{
|
||||
Opts_def_define p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_define, name ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -585,8 +591,10 @@ CodeDefine def_define( Str name, MacroType type, Opts_def_define p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeDestructor def_destructor( Opts_def_destructor p )
|
||||
CodeDestructor def_destructor( Opts_def_destructor opt )
|
||||
{
|
||||
Opts_def_destructor p = get_optional(opt);
|
||||
|
||||
if ( p.specifiers && p.specifiers->Type != CT_Specifiers ) {
|
||||
log_failure( "gen::def_destructor: specifiers was not a 'Specifiers' type: %s", code_debug_str(p.specifiers) );
|
||||
GEN_DEBUG_TRAP();
|
||||
@ -619,8 +627,10 @@ CodeDestructor def_destructor( Opts_def_destructor p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeEnum def_enum( Str name, Opts_def_enum p )
|
||||
CodeEnum def_enum( Str name, Opts_def_enum opt )
|
||||
{
|
||||
Opts_def_enum p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_enum, name ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -742,8 +752,10 @@ CodeFriend def_friend( Code declaration )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeFn def_function( Str name, Opts_def_function p )
|
||||
CodeFn def_function( Str name, Opts_def_function opt )
|
||||
{
|
||||
Opts_def_function p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_function, name )) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -802,8 +814,10 @@ CodeFn def_function( Str name, Opts_def_function p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeInclude def_include( Str path, Opts_def_include p )
|
||||
CodeInclude def_include( Str path, Opts_def_include opt )
|
||||
{
|
||||
Opts_def_include p = get_optional(opt);
|
||||
|
||||
if ( path.Len <= 0 || path.Ptr == nullptr ) {
|
||||
log_failure( "gen::def_include: Invalid path provided - %d" );
|
||||
GEN_DEBUG_TRAP();
|
||||
@ -821,8 +835,10 @@ CodeInclude def_include( Str path, Opts_def_include p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeModule def_module( Str name, Opts_def_module p )
|
||||
CodeModule def_module( Str name, Opts_def_module opt )
|
||||
{
|
||||
Opts_def_module p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_module, name )) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -835,8 +851,10 @@ CodeModule def_module( Str name, Opts_def_module p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace p )
|
||||
CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace opt )
|
||||
{
|
||||
Opts_def_namespace p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_namespace, name )) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -859,8 +877,10 @@ CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator p )
|
||||
CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator opt )
|
||||
{
|
||||
Opts_def_operator p = get_optional(opt);
|
||||
|
||||
if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
|
||||
log_failure( "gen::def_operator: PlatformAttributes was provided but its not of attributes type: %s", code_debug_str(p.attributes) );
|
||||
GEN_DEBUG_TRAP();
|
||||
@ -926,8 +946,10 @@ CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast p )
|
||||
CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast opt )
|
||||
{
|
||||
Opts_def_operator_cast p = get_optional(opt);
|
||||
|
||||
if ( ! null_check( def_operator_cast, type )) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -959,8 +981,10 @@ CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeParams def_param( CodeTypename type, Str name, Opts_def_param p )
|
||||
CodeParams def_param( CodeTypename type, Str name, Opts_def_param opt )
|
||||
{
|
||||
Opts_def_param p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_param, name ) || ! null_check( def_param, type ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1034,8 +1058,10 @@ CodeSpecifiers def_specifier( Specifier spec )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeStruct def_struct( Str name, Opts_def_struct p )
|
||||
CodeStruct def_struct( Str name, Opts_def_struct opt )
|
||||
{
|
||||
Opts_def_struct p = get_optional(opt);
|
||||
|
||||
if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
|
||||
log_failure( "gen::def_struct: attributes was not a `PlatformAttributes` type - %s", code_debug_str(cast(Code, p.attributes)) );
|
||||
GEN_DEBUG_TRAP();
|
||||
@ -1076,8 +1102,10 @@ CodeStruct def_struct( Str name, Opts_def_struct p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template p )
|
||||
CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template opt )
|
||||
{
|
||||
Opts_def_template p = get_optional(opt);
|
||||
|
||||
if ( ! null_check( def_template, declaration ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1108,8 +1136,10 @@ CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_templat
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeTypename def_type( Str name, Opts_def_type p )
|
||||
CodeTypename def_type( Str name, Opts_def_type opt )
|
||||
{
|
||||
Opts_def_type p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_type, name )) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1143,8 +1173,10 @@ CodeTypename def_type( Str name, Opts_def_type p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef p )
|
||||
CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef opt )
|
||||
{
|
||||
Opts_def_typedef p = get_optional(opt);
|
||||
|
||||
if ( ! null_check( def_typedef, type ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1206,8 +1238,10 @@ CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeUnion def_union( Str name, CodeBody body, Opts_def_union p )
|
||||
CodeUnion def_union( Str name, CodeBody body, Opts_def_union opt )
|
||||
{
|
||||
Opts_def_union p = get_optional(opt);
|
||||
|
||||
if ( ! null_check( def_union, body ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1233,8 +1267,10 @@ CodeUnion def_union( Str name, CodeBody body, Opts_def_union p )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeUsing def_using( Str name, CodeTypename type, Opts_def_using p )
|
||||
CodeUsing def_using( Str name, CodeTypename type, Opts_def_using opt )
|
||||
{
|
||||
Opts_def_using p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_using, name ) || null_check( def_using, type ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
@ -1274,8 +1310,10 @@ CodeUsing def_using_namespace( Str name )
|
||||
return result;
|
||||
}
|
||||
|
||||
CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable p )
|
||||
CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable opt )
|
||||
{
|
||||
Opts_def_variable p = get_optional(opt);
|
||||
|
||||
if ( ! name_check( def_variable, name ) || ! null_check( def_variable, type ) ) {
|
||||
GEN_DEBUG_TRAP();
|
||||
return InvalidCode;
|
||||
|
@ -1,7 +1,7 @@
|
||||
#ifdef INTELLISENSE_DIRECTIVES
|
||||
#pragma once
|
||||
#include "interface.upfront.cpp"
|
||||
#include "gen/etoktype.cpp"
|
||||
#include "gen/etoktype.hpp"
|
||||
#endif
|
||||
|
||||
StrBuilder tok_to_strbuilder(Token tok)
|
||||
@ -17,55 +17,55 @@ StrBuilder tok_to_strbuilder(Token tok)
|
||||
return result;
|
||||
}
|
||||
|
||||
bool lex__eat( TokArray* self, TokType type );
|
||||
bool lex__eat(Context* ctx, ParseContext* self, TokType type );
|
||||
|
||||
Token* lex_current(TokArray* self, bool skip_formatting )
|
||||
Token* lex_current(ParseContext* self, bool skip_formatting )
|
||||
{
|
||||
if ( skip_formatting )
|
||||
{
|
||||
while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
|
||||
self->Idx++;
|
||||
while ( self->tokens.ptr[self->token_id].Type == Tok_NewLine || self->tokens.ptr[self->token_id].Type == Tok_Comment )
|
||||
self->token_id++;
|
||||
}
|
||||
return & self->Arr[self->Idx];
|
||||
return & self->tokens.ptr[self->token_id];
|
||||
}
|
||||
|
||||
Token* lex_peek(TokArray self, bool skip_formatting)
|
||||
Token* lex_peek(ParseContext const* self, bool skip_formatting)
|
||||
{
|
||||
s32 idx = self.Idx;
|
||||
s32 idx = self->token_id;
|
||||
if ( skip_formatting )
|
||||
{
|
||||
while ( self.Arr[idx].Type == Tok_NewLine )
|
||||
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
|
||||
idx++;
|
||||
|
||||
return & self.Arr[idx];
|
||||
return & self->tokens.ptr[idx];
|
||||
}
|
||||
return & self.Arr[idx];
|
||||
return & self->tokens.ptr[idx];
|
||||
}
|
||||
|
||||
Token* lex_previous(TokArray self, bool skip_formatting)
|
||||
Token* lex_previous(ParseContext const* self, bool skip_formatting)
|
||||
{
|
||||
s32 idx = self.Idx;
|
||||
s32 idx = self->token_id;
|
||||
if ( skip_formatting )
|
||||
{
|
||||
while ( self.Arr[idx].Type == Tok_NewLine )
|
||||
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
|
||||
idx --;
|
||||
|
||||
return & self.Arr[idx];
|
||||
return & self->tokens.ptr[idx];
|
||||
}
|
||||
return & self.Arr[idx - 1];
|
||||
return & self->tokens.ptr[idx - 1];
|
||||
}
|
||||
|
||||
Token* lex_next(TokArray self, bool skip_formatting)
|
||||
Token* lex_next(ParseContext const* self, bool skip_formatting)
|
||||
{
|
||||
s32 idx = self.Idx;
|
||||
s32 idx = self->token_id;
|
||||
if ( skip_formatting )
|
||||
{
|
||||
while ( self.Arr[idx].Type == Tok_NewLine )
|
||||
while ( self->tokens.ptr[idx].Type == Tok_NewLine )
|
||||
idx++;
|
||||
|
||||
return & self.Arr[idx + 1];
|
||||
return & self->tokens.ptr[idx + 1];
|
||||
}
|
||||
return & self.Arr[idx + 1];
|
||||
return & self->tokens.ptr[idx + 1];
|
||||
}
|
||||
|
||||
enum
|
||||
@ -137,7 +137,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
);
|
||||
// GEN_DEBUG_TRAP();
|
||||
}
|
||||
array_append( _ctx->Lexer_Tokens, name );
|
||||
array_append(ctx->tokens, name);
|
||||
|
||||
if ( ctx->left && (* ctx->scanner) == '(' )
|
||||
{
|
||||
@ -152,7 +152,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
}
|
||||
|
||||
Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess };
|
||||
array_append( _ctx->Lexer_Tokens, opening_paren );
|
||||
array_append(ctx->tokens, opening_paren);
|
||||
move_forward();
|
||||
|
||||
Token last_parameter = {};
|
||||
@ -168,7 +168,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
move_forward();
|
||||
move_forward();
|
||||
|
||||
array_append(_ctx->Lexer_Tokens, parameter);
|
||||
array_append(ctx->tokens, parameter);
|
||||
skip_whitespace();
|
||||
last_parameter = parameter;
|
||||
|
||||
@ -202,7 +202,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
move_forward();
|
||||
parameter.Text.Len++;
|
||||
}
|
||||
array_append(_ctx->Lexer_Tokens, parameter);
|
||||
array_append(ctx->tokens, parameter);
|
||||
skip_whitespace();
|
||||
last_parameter = parameter;
|
||||
}
|
||||
@ -229,7 +229,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
return Lex_ReturnNull;
|
||||
}
|
||||
Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess };
|
||||
array_append(_ctx->Lexer_Tokens, comma);
|
||||
array_append(ctx->tokens, comma);
|
||||
move_forward();
|
||||
}
|
||||
|
||||
@ -243,7 +243,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
||||
return Lex_ReturnNull;
|
||||
}
|
||||
Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess };
|
||||
array_append(_ctx->Lexer_Tokens, closing_paren);
|
||||
array_append(ctx->tokens, closing_paren);
|
||||
move_forward();
|
||||
}
|
||||
else if ( registered_macro && macro_is_functional( * registered_macro) ) {
|
||||
@ -268,7 +268,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
{
|
||||
char const* hash = ctx->scanner;
|
||||
Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
|
||||
array_append( _ctx->Lexer_Tokens, hash_tok );
|
||||
array_append(ctx->tokens, hash_tok);
|
||||
|
||||
move_forward();
|
||||
skip_whitespace();
|
||||
@ -344,14 +344,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
|
||||
ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
|
||||
ctx->token.Text.Ptr = hash;
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
return Lex_Continue; // Skip found token, its all handled here.
|
||||
}
|
||||
|
||||
if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
|
||||
{
|
||||
ctx->token.Flags |= TF_Preprocess_Cond;
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
end_line();
|
||||
return Lex_Continue;
|
||||
}
|
||||
@ -360,7 +360,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
ctx->token.Flags |= TF_Preprocess_Cond;
|
||||
}
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
|
||||
skip_whitespace();
|
||||
|
||||
@ -411,7 +411,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
move_forward();
|
||||
}
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, preprocess_content );
|
||||
array_append(ctx->tokens, preprocess_content);
|
||||
return Lex_Continue; // Skip found token, its all handled here.
|
||||
}
|
||||
|
||||
@ -475,14 +475,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
|
||||
preprocess_content.Text.Len++;
|
||||
}
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, preprocess_content );
|
||||
array_append(ctx->tokens, preprocess_content);
|
||||
return Lex_Continue; // Skip found token, its all handled here.
|
||||
}
|
||||
|
||||
void lex_found_token( LexContext* ctx )
|
||||
{
|
||||
if ( ctx->token.Type != Tok_Invalid ) {
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -508,7 +508,7 @@ void lex_found_token( LexContext* ctx )
|
||||
}
|
||||
|
||||
ctx->token.Type = type;
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
return;
|
||||
}
|
||||
if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
|
||||
@ -517,13 +517,13 @@ void lex_found_token( LexContext* ctx )
|
||||
{
|
||||
ctx->token.Type = type;
|
||||
ctx->token.Flags |= TF_Specifier;
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
return;
|
||||
}
|
||||
if ( type != Tok_Invalid )
|
||||
{
|
||||
ctx->token.Type = type;
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
return;
|
||||
}
|
||||
|
||||
@ -561,50 +561,41 @@ void lex_found_token( LexContext* ctx )
|
||||
ctx->token.Type = Tok_Identifier;
|
||||
}
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, ctx->token );
|
||||
array_append(ctx->tokens, ctx->token);
|
||||
}
|
||||
|
||||
// TODO(Ed): We should dynamically allocate the lexer's array in Allocator_DyanmicContainers.
|
||||
|
||||
// TODO(Ed): We need to to attempt to recover from a lex failure?
|
||||
|
||||
neverinline
|
||||
// TokArray lex( Array<Token> tokens, Str content )
|
||||
TokArray lex( Str content )
|
||||
LexedInfo lex(Context* lib_ctx, Str content)
|
||||
{
|
||||
LexContext c; LexContext* ctx = & c;
|
||||
LexedInfo info = struct_zero(LexedInfo);
|
||||
|
||||
LexContext c = struct_zero(LexContext); LexContext* ctx = & c;
|
||||
c.content = content;
|
||||
c.left = content.Len;
|
||||
c.scanner = content.Ptr;
|
||||
c.line = 1;
|
||||
c.column = 1;
|
||||
c.tokens = array_init_reserve(Token, lib_ctx->Allocator_DyanmicContainers, lib_ctx->InitSize_LexerTokens );
|
||||
|
||||
char const* word = c.scanner;
|
||||
s32 word_length = 0;
|
||||
|
||||
c.line = 1;
|
||||
c.column = 1;
|
||||
// TODO(Ed): Re-implement to new constraints:
|
||||
// 1. Ability to continue on error
|
||||
// 2. Return a lexed info.
|
||||
|
||||
skip_whitespace();
|
||||
if ( c.left <= 0 )
|
||||
{
|
||||
if ( c.left <= 0 ) {
|
||||
log_failure( "gen::lex: no tokens found (only whitespace provided)" );
|
||||
TokArray null_array = {};
|
||||
return null_array;
|
||||
return info;
|
||||
}
|
||||
|
||||
array_clear(_ctx->Lexer_Tokens);
|
||||
|
||||
b32 preprocess_args = true;
|
||||
|
||||
while (c.left )
|
||||
{
|
||||
#if 0
|
||||
if (Tokens.num())
|
||||
{
|
||||
log_fmt("\nLastTok: %SB", Tokens.back().to_strbuilder());
|
||||
}
|
||||
#endif
|
||||
|
||||
{
|
||||
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
|
||||
c.token = thanks_c;
|
||||
}
|
||||
c.token = struct_init(Token) { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
|
||||
|
||||
bool is_define = false;
|
||||
|
||||
@ -623,7 +614,7 @@ TokArray lex( Str content )
|
||||
c.token.Type = Tok_NewLine;
|
||||
c.token.Text.Len++;
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
array_append(c.tokens, c.token);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
@ -662,7 +653,7 @@ TokArray lex( Str content )
|
||||
c.token.Text.Len++;
|
||||
move_forward();
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
array_append(c.tokens, c.token);
|
||||
}
|
||||
}
|
||||
continue;
|
||||
@ -670,8 +661,7 @@ TokArray lex( Str content )
|
||||
|
||||
case Lex_ReturnNull:
|
||||
{
|
||||
TokArray tok_array = {};
|
||||
return tok_array;
|
||||
return info;
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -1119,7 +1109,7 @@ TokArray lex( Str content )
|
||||
move_forward();
|
||||
c.token.Text.Len++;
|
||||
}
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
array_append(c.tokens, c.token);
|
||||
continue;
|
||||
}
|
||||
else if ( (* ctx->scanner) == '*' )
|
||||
@ -1155,7 +1145,7 @@ TokArray lex( Str content )
|
||||
move_forward();
|
||||
c.token.Text.Len++;
|
||||
}
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
array_append(c.tokens, c.token);
|
||||
// end_line();
|
||||
continue;
|
||||
}
|
||||
@ -1243,14 +1233,14 @@ TokArray lex( Str content )
|
||||
}
|
||||
else
|
||||
{
|
||||
s32 start = max( 0, array_num(_ctx->Lexer_Tokens) - 100 );
|
||||
s32 start = max( 0, array_num(c.tokens) - 100 );
|
||||
log_fmt("\n%d\n", start);
|
||||
for ( s32 idx = start; idx < array_num(_ctx->Lexer_Tokens); idx++ )
|
||||
for ( s32 idx = start; idx < array_num(c.tokens); idx++ )
|
||||
{
|
||||
log_fmt( "Token %d Type: %s : %.*s\n"
|
||||
, idx
|
||||
, toktype_to_str( _ctx->Lexer_Tokens[ idx ].Type ).Ptr
|
||||
, _ctx->Lexer_Tokens[ idx ].Text.Len, _ctx->Lexer_Tokens[ idx ].Text.Ptr
|
||||
, toktype_to_str( c.tokens[ idx ].Type ).Ptr
|
||||
, c.tokens[ idx ].Text.Len, c.tokens[ idx ].Text.Ptr
|
||||
);
|
||||
}
|
||||
|
||||
@ -1266,7 +1256,7 @@ TokArray lex( Str content )
|
||||
FoundToken:
|
||||
{
|
||||
lex_found_token( ctx );
|
||||
TokType last_type = array_back(_ctx->Lexer_Tokens)->Type;
|
||||
TokType last_type = array_back(c.tokens)->Type;
|
||||
if ( last_type == Tok_Preprocess_Macro_Stmt || last_type == Tok_Preprocess_Macro_Expr )
|
||||
{
|
||||
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
|
||||
@ -1281,21 +1271,22 @@ TokArray lex( Str content )
|
||||
c.token.Text.Len++;
|
||||
move_forward();
|
||||
|
||||
array_append( _ctx->Lexer_Tokens, c.token );
|
||||
array_append(c.tokens, c.token);
|
||||
continue;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if ( array_num(_ctx->Lexer_Tokens) == 0 ) {
|
||||
if ( array_num(c.tokens) == 0 ) {
|
||||
log_failure( "Failed to lex any tokens" );
|
||||
TokArray tok_array = {};
|
||||
return tok_array;
|
||||
return info;
|
||||
}
|
||||
|
||||
TokArray result = { _ctx->Lexer_Tokens, 0 };
|
||||
return result;
|
||||
|
||||
info.messages = c.messages;
|
||||
info.text = content;
|
||||
info.tokens = struct_init(TokenSlice) { pcast(Token*, c.tokens), scast(s32, array_num(c.tokens)) };
|
||||
return info;
|
||||
}
|
||||
|
||||
#undef move_forward
|
||||
|
File diff suppressed because it is too large
@@ -1,7 +1,7 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "types.hpp"
#include "gen/ecode.hpp"
#include "gen/ecodetypes.hpp"
#include "gen/eoperator.hpp"
#include "gen/especifier.hpp"
#include "gen/etoktype.hpp"
@@ -91,36 +91,78 @@ bool tok_is_end_definition(Token tok) {

StrBuilder tok_to_strbuilder(Token tok);

struct TokenSlice
{
	Token* ptr;
	s32    num;

#if GEN_COMPILER_CPP
	forceinline operator Token* () const { return ptr; }
	forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
#endif
};

forceinline
Str token_range_to_str(Token start, Token end)
{
	Str result = {
		start.Text.Ptr,
		(scast(sptr, rcast(uptr, end.Text.Ptr)) + end.Text.Len) - scast(sptr, rcast(uptr, start.Text.Ptr))
	};
	return result;
}

struct TokArray
{
	Array(Token) Arr;
	s32          Idx;
};

typedef struct LexerMessage LexerMessage;
struct LexerMessage
{
	LexerMessage* next;
	Str           content;
	LogLevel      level;
};

struct LexContext
{
	LexerMessage* messages;
	Str           content;
	s32           left;
	char const*   scanner;
	s32           line;
	s32           column;
	// StringTable defines;
	Token         token;
	Array(Token)  tokens;
};

struct StackNode
struct LexedInfo
{
	StackNode*    Prev;
	LexerMessage* messages;
	Str           text;
	TokenSlice    tokens;
};

	Token* Start;
	Str    Name;     // The name of the AST node (if parsed)
	Str    ProcName; // The name of the procedure
typedef struct ParseStackNode ParseStackNode;

typedef struct ParseMessage ParseMessage;
struct ParseMessage
{
	ParseMessage*   Next;
	ParseStackNode* Scope;
	Str             Content;
	LogLevel        Level;
};

struct ParseContext
{
	TokArray        Tokens;
	StackNode*      Scope;
	ParseMessage*   messages;
	ParseStackNode* scope;
	// TokArray     Tokens;
	TokenSlice      tokens;
	s32             token_id;
};

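A small sketch of the new TokenSlice in use (illustrative only; assumes the C++ build for operator[] and a lexed result as in the earlier sketches):

// Illustrative only: indexing a TokenSlice and recovering the covered source text.
TokenSlice toks = lexed.tokens;
if ( toks.num > 1 )
{
	Token first = toks[0];
	Token last  = toks[toks.num - 1];
	Str   span  = token_range_to_str( first, last ); // text from the first token through the last
	log_fmt( "lexed span: %S\n", span );
}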
enum MacroType : u16
|
||||
@ -168,26 +210,36 @@ Str macrotype_to_str( MacroType type )
|
||||
|
||||
enum EMacroFlags : u16
|
||||
{
|
||||
MF_Functional = bit(0), // Macro has parameters (args expected to be passed)
|
||||
MF_Expects_Body = bit(1), // Expects to assign a braced scope to its body.
|
||||
// Macro has parameters (args expected to be passed)
|
||||
MF_Functional = bit(0),
|
||||
|
||||
// Expects to assign a braced scope to its body.
|
||||
MF_Expects_Body = bit(1),
|
||||
|
||||
// lex__eat wil treat this macro as an identifier if the parser attempts to consume it as one.
|
||||
// ^^^ This is a kludge because we don't support push/pop macro pragmas rn.
|
||||
// This is a kludge because we don't support push/pop macro pragmas rn.
|
||||
MF_Allow_As_Identifier = bit(2),
|
||||
|
||||
// When parsing identifiers, it will allow the consumption of the macro parameters (as its expected to be a part of constructing the identifier)
|
||||
// Example of a decarator macro from stb_sprintf.h:
|
||||
// STBSP__PUBLICDEC int STB_SPRINTF_DECORATE(sprintf)(char* buf, char const *fmt, ...) STBSP__ATTRIBUTE_FORMAT(2,3);
|
||||
// ^^ STB_SPRINTF_DECORATE is decorating sprintf
|
||||
MF_Identifier_Decorator = bit(3),
|
||||
|
||||
// lex__eat wil treat this macro as an attribute if the parser attempts to consume it as one.
|
||||
// ^^^ This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc)
|
||||
// This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc)
|
||||
// TODO(Ed): We can keep the MF_Allow_As_Attribute flag for macros, however, we need to add the ability of AST_Attributes to chain themselves.
|
||||
// Its thats already a thing in the standard language anyway
|
||||
// & it would allow UE_DEPRECATED, (UE_PROPERTY / UE_FUNCTION) to chain themselves as attributes of a resolved member function/variable definition
|
||||
MF_Allow_As_Attribute = bit(3),
|
||||
MF_Allow_As_Attribute = bit(4),
|
||||
|
||||
// When a macro is encountered after attributes and specifiers while parsing a function, or variable:
|
||||
// It will consume the macro and treat it as resolving the definition. (Yes this is for Unreal Engine)
|
||||
// It will consume the macro and treat it as resolving the definition.
|
||||
// (MUST BE OF MT_Statement TYPE)
|
||||
MF_Allow_As_Definition = bit(4),
|
||||
MF_Allow_As_Definition = bit(5),
|
||||
|
||||
MF_Allow_As_Specifier = bit(5), // Created for Unreal's PURE_VIRTUAL
|
||||
// Created for Unreal's PURE_VIRTUAL
|
||||
MF_Allow_As_Specifier = bit(6),
|
||||
|
||||
MF_Null = 0,
|
||||
MF_UnderlyingType = GEN_U16_MAX,
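
A minimal sketch of composing and testing these flags; the `flags` variable is hypothetical, since the macro registration API itself is not part of this hunk:

    u16 flags = MF_Functional | MF_Allow_As_Identifier; // functional macro that may also be consumed as an identifier
    b32 is_functional = ( flags & MF_Functional   ) != 0; // true
    b32 expects_body  = ( flags & MF_Expects_Body ) != 0; // false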

@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "../gen.hpp"
#include "interface.hpp"
#endif

#pragma region StaticData

@ -1,6 +1,18 @@
#ifdef INTELLISENSE_DIRECTIVES
#pragma once
#include "header_start.hpp"
#include "dependencies/platform.hpp"
#include "dependencies/macros.hpp"
#include "dependencies/basic_types.hpp"
#include "dependencies/debug.hpp"
#include "dependencies/memory.hpp"
#include "dependencies/string_ops.hpp"
#include "dependencies/printing.hpp"
#include "dependencies/containers.hpp"
#include "dependencies/hashing.hpp"
#include "dependencies/strings.hpp"
#include "dependencies/filesystem.hpp"
#include "dependencies/timing.hpp"
#include "dependencies/parsing.hpp"
#endif

/*

@ -19,7 +31,38 @@

*/

using LogFailType = ssize(*)(char const*, ...);
enum LogLevel //: u32
{
    LL_Null,
    LL_Note,
    LL_Warning,
    LL_Error,
    LL_Fatal,
    LL_UnderlyingType = GEN_U32_MAX,
};
typedef enum LogLevel LogLevel;

Str loglevel_to_str(LogLevel level)
{
    local_persist
    Str lookup[] = {
        { "Null",    sizeof("Null")    - 1 },
        { "Note",    sizeof("Note")    - 1 },
        { "Warning", sizeof("Warning") - 1 },
        { "Error",   sizeof("Error")   - 1 },
        { "Fatal",   sizeof("Fatal")   - 1 },
    };
    return lookup[level];
}

typedef struct LogEntry LogEntry;
struct LogEntry
{
    Str      msg;
    LogLevel level;
};

typedef void LoggerProc(LogEntry entry);
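
A minimal sketch of a user-supplied logging hook built from the pieces above; how the library registers and invokes the LoggerProc is not shown in this diff:

    void console_logger( LogEntry entry )
    {
        // loglevel_to_str and log_fmt live in this same dependency layer;
        // %S is the library's format specifier for Str.
        log_fmt( "[%S] %S\n", loglevel_to_str( entry.level ), entry.msg );
    }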

// By default this library will either crash or exit if an error is detected while generating codes.
// Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur.

@ -17,6 +17,39 @@ template <class TType> struct RemovePtr<TType*> { typedef TType Type; };

template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;

#pragma region Slice
#if 0
#define Slice(Type) Slice<Type>

template<class Type> struct Slice;

template<class Type>
Type* slice_get(Slice<Type> self, ssize id) {
    GEN_ASSERT(id > -1);
    GEN_ASSERT(id < self.len);
    return & self.ptr[id];
}

template<class Type>
struct Slice
{
    Type* ptr;
    ssize len;

#if GEN_COMPILER_CPP
    forceinline operator Type* () const { return ptr; }
    forceinline Type& operator[]( ssize index ) const { return ptr[index]; }

    forceinline Type* begin() { return ptr; }
    forceinline Type* end()   { return ptr + len; }
#endif

#if ! GEN_C_LIKE_CPP && GEN_COMPILER_CPP
    forceinline Type& back() { return ptr[len - 1]; }
#endif
};
#endif
#pragma endregion Slice

#pragma region Array
#define Array(Type) Array<Type>

@ -26,10 +59,8 @@ template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;

struct ArrayHeader;

#if GEN_COMPILER_CPP
template<class Type> struct Array;
#    define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
#endif
template<class Type> struct Array;
#define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType

usize array_grow_formula(ssize value);

@ -59,12 +90,12 @@ struct ArrayHeader {
    usize Num;
};

#if GEN_COMPILER_CPP
template<class Type>
struct Array
{
    Type* Data;

#if ! GEN_C_LIKE_CPP
#pragma region Member Mapping
    forceinline static Array init(AllocatorInfo allocator)                         { return array_init<Type>(allocator); }
    forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); }

@ -88,6 +119,7 @@ struct Array
    forceinline bool resize(usize num)                { return array_resize<Type>(this, num); }
    forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); }
#pragma endregion Member Mapping
#endif

    forceinline operator Type*()             { return Data; }
    forceinline operator Type const*() const { return Data; }

@ -99,9 +131,8 @@ struct Array

    using DataType = Type;
};
#endif
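
A short usage sketch of the C++ wrapper, using only members visible in this hunk; heap() is the allocator helper that appears later in this diff, and some_token is a placeholder value:

    Array<Token> tokens = array_init_reserve<Token>( heap(), 8 );
    tokens.resize( 1 );      // forwards to array_resize
    tokens[0] = some_token;  // operator Type*() lets the wrapper index like a raw pointer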

#if GEN_COMPILER_CPP && 0
#if 0
template<class Type> bool append(Array<Type>& array, Array<Type> other)           { return append( & array, other ); }
template<class Type> bool append(Array<Type>& array, Type value)                  { return append( & array, value ); }
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); }

@ -1,9 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#    pragma once
#    include "dependencies/platform.hpp"
#    include "dependencies/macros.hpp"
#    include "basic_types.hpp"
#    include "macros.hpp"
#endif

#pragma region Debug

@ -187,7 +187,7 @@ struct FileContents
{
    AllocatorInfo allocator;
    void*         data;
    ssize         size;
    ssize         size;
};

constexpr b32 file_zero_terminate = true;

@ -198,21 +198,16 @@
#ifndef forceinline
#    if GEN_COMPILER_MSVC
#        define forceinline __forceinline
#        define neverinline __declspec( noinline )
#    elif GEN_COMPILER_GCC
#        define forceinline inline __attribute__((__always_inline__))
#        define neverinline __attribute__( ( __noinline__ ) )
#    elif GEN_COMPILER_CLANG
#        if __has_attribute(__always_inline__)
#            define forceinline inline __attribute__((__always_inline__))
#            define neverinline __attribute__( ( __noinline__ ) )
#        else
#            define forceinline
#            define neverinline
#        endif
#    else
#        define forceinline
#        define neverinline
#    endif
#endif

@ -303,10 +298,28 @@
#    define GEN_PARAM_DEFAULT
#endif

#if GEN_COMPILER_CPP
#define struct_init(type, value) {value}
#else
#define struct_init(type, value) {value}
#ifndef struct_init
#    if GEN_COMPILER_CPP
#        define struct_init(type)
#    else
#        define struct_init(type) (type)
#    endif
#endif

#ifndef struct_zero
#    if GEN_COMPILER_CPP
#        define struct_zero(type) {}
#    else
#        define struct_zero(type) (type) {0}
#    endif
#endif

#ifndef struct_zero_init
#    if GEN_COMPILER_CPP
#        define struct_zero_init() {}
#    else
#        define struct_zero_init() {0}
#    endif
#endif
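
An illustrative expansion of the new macros; Arena, TokArray, and PrintF_Buffer are just stand-in aggregate types from elsewhere in the library:

    Arena    arena = struct_zero(Arena);                   // C++: {}              C: (Arena){0}
    TokArray toks  = struct_init(TokArray) { nullptr, 0 }; // C++: { nullptr, 0 }  C: (TokArray){ nullptr, 0 }
    PrintF_Buffer buf = struct_zero_init();                // C++: {}              C: {0}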

#if 0
@ -319,4 +332,12 @@
#    define GEN_OPITMIZE_MAPPINGS_END
#endif

#ifndef get_optional
#    if GEN_COMPILER_C
#        define get_optional(opt) opt ? *opt : (typeof(*opt)){0}
#    else
#        define get_optional(opt) opt
#    endif
#endif
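
A sketch of the intended C-side use; Opts and the function are hypothetical. Per the library-generation changes later in this diff, the C interface passes optional parameters as pointers while the C++ interface takes them by value, so the C++ branch is a plain passthrough:

    void example( Opts* p_opts )  // C interface: optional options passed by pointer
    {
        Opts opts = get_optional( p_opts );  // C expansion: p_opts ? *p_opts : (Opts){0}
        // ... read opts fields without null checks ...
    }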

#pragma endregion Macros

@ -134,12 +134,6 @@ GEN_API void* heap_allocator_proc( void* allocator_data, AllocType type, ssize s
//! The heap allocator backed by operating system's memory manager.
constexpr AllocatorInfo heap( void ) { AllocatorInfo allocator = { heap_allocator_proc, nullptr }; return allocator; }

//! Helper to allocate memory using heap allocator.
#define malloc( sz ) alloc( heap(), sz )

//! Helper to free memory allocated by heap allocator.
#define mfree( ptr ) free( heap(), ptr )

struct VirtualMemory
{
    void* data;

@ -185,6 +179,8 @@ void arena_check (Arena* arena);
void  arena_free           (Arena* arena);
ssize arena_size_remaining (Arena* arena, ssize alignment);

// TODO(Ed): Add arena_pos, arena_pop, and arena_pop_to

struct Arena
{
    AllocatorInfo Backing;

@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#    pragma once
#    include "strbuilder_ops.cpp"
#    include "string_ops.cpp"
#endif

#pragma region Printing

@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES
#    pragma once
#    include "strbuilder_ops.hpp"
#    include "string_ops.hpp"
#endif

#pragma region Printing

@ -26,17 +26,4 @@ GEN_API ssize c_str_fmt_file_va ( FileInfo* f, char const* fmt, va_list va );
constexpr
char const* Msg_Invalid_Value = "INVALID VALUE PROVIDED";

inline
ssize log_fmt(char const* fmt, ...)
{
    ssize res;
    va_list va;

    va_start(va, fmt);
    res = c_str_fmt_out_va(fmt, va);
    va_end(va);

    return res;
}

#pragma endregion Printing

@ -320,7 +320,7 @@ inline
StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...)
{
    local_persist thread_local
    PrintF_Buffer buf = struct_init(PrintF_Buffer, {0});
    PrintF_Buffer buf = struct_zero_init();

    va_list va;
    va_start(va, fmt);

@ -11,9 +11,6 @@
#include "helpers/push_ignores.inline.hpp"
#include "components/header_start.hpp"

// Has container defines pushed
#include "gen.dep.hpp"

GEN_NS_BEGIN

#include "components/types.hpp"

@ -1159,6 +1159,8 @@ R"(#define <interface_name>( code ) _Generic( (code), \
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
Str new_name    = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();

opt_param->ValueType->Specs = def_specifier(Spec_Ptr);

// Resolve define's arguments
b32 has_args = fn->Params->NumEntries > 1;
StrBuilder params_str = StrBuilder::make_reserve(_ctx->Allocator_Temp, 32);

@ -1172,10 +1174,10 @@ R"(#define <interface_name>( code ) _Generic( (code), \
}
char const* tmpl_fn_macro = nullptr;
if (params_str.length() > 0 ) {
    tmpl_fn_macro = "#define <def_name>( <params> ... ) <def__name>( <params> (<opts_type>) { __VA_ARGS__ } )\n";
    tmpl_fn_macro = "#define <def_name>( <params> ... ) <def__name>( <params> & (<opts_type>) { __VA_ARGS__ } )\n";
}
else {
    tmpl_fn_macro = "#define <def_name>( ... ) <def__name>( (<opts_type>) { __VA_ARGS__ } )\n";
    tmpl_fn_macro = "#define <def_name>( ... ) <def__name>( & (<opts_type>) { __VA_ARGS__ } )\n";
}
Code fn_macro = untyped_str(token_fmt(
    "def_name", fn->Name

@ -1504,6 +1506,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
    Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
    Str new_name    = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
    fn->Name = cache_str(new_name);
    opt_param->ValueType->Specs = def_specifier(Spec_Ptr);
}
src_upfront.append(fn);
}

@ -53,6 +53,7 @@ word enum_underlying, gen_enum_underlying
word nullptr, gen_nullptr
word struct_init, gen_struct_init
word hash, gen_hash
word txt, gen_txt

// Basic Types

@ -410,6 +411,8 @@ namespace var_, gen_var_

word _ctx, gen__ctx

word get_context, gen_get_context

word init, gen_init
word deinit, gen_deinit
word reset, gen_reset

@ -532,7 +535,7 @@ namespace Lexer_, gen_Lexer_
word LexContext, gen_LexContext
word lex, gen_lex

word StackNode, gen_StackNode
word ParseStackNode, gen_ParseStackNode
word ParseContext, gen_ParseContext

// namespace parse_, gen_parse_

@ -404,7 +404,7 @@ if ( $vendor -match "msvc" )
    $flag_optimize_intrinsics         = '/Oi'
    $flag_optimized_debug_forceinline = '/d2Obforceinline'
    $flag_optimized_debug             = '/Zo'
    $flag_
    # $flag_
    # $flag_out_name                  = '/OUT:'
    $flag_path_interm                 = '/Fo'
    $flag_path_debug                  = '/Fd'