Mirror of https://github.com/Ed94/gencpp.git (synced 2025-06-16 11:41:46 -07:00)

Compare commits: v0.25-Alph ... dev (19 commits)

Commits (SHA1):
822a3b07dc, 685bba36d5, 346e8e3305, bfc754e66c, 84f4fc5ae9, ad5cb6597b, 1c7dd4ab32,
1e7fdcec16, 2ed36506b1, 790087aa3c, 441a46daaa, 26623075ad, 7ea90ef349, 6d531fdf97,
3b81eea688, 844d431e1c, 727b54c341, ef72d27f3e, 75b1d42cca
.gitignore (vendored): 2 changes

@@ -44,3 +44,5 @@ test/c_library/gen
 test/cpp_library/gen
 
 !scripts/helpers/refactor.exe
+
+# ai/**
@@ -3,7 +3,7 @@
 # include "helpers/push_ignores.inline.hpp"
 # include "components/header_start.hpp"
 # include "components/types.hpp"
-# include "components/gen/ecode.hpp"
+# include "components/gen/ecodetypes.hpp"
 # include "components/gen/eoperator.hpp"
 # include "components/gen/especifier.hpp"
 # include "components/ast.hpp"

@@ -3,7 +3,7 @@
 # include "helpers/push_ignores.inline.hpp"
 # include "components/header_start.hpp"
 # include "components/types.hpp"
-# include "components/gen/ecode.hpp"
+# include "components/gen/ecodetypes.hpp"
 # include "components/gen/eoperator.hpp"
 # include "components/gen/especifier.hpp"
 # include "components/ast.hpp"
@@ -1,9 +1,6 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
-#include "types.hpp"
-#include "gen/ecode.hpp"
-#include "gen/eoperator.hpp"
-#include "gen/especifier.hpp"
+#include "parser_types.hpp"
 #endif
 
 /*

@@ -406,7 +403,8 @@ struct AST
 Code PostNameMacro; // Only used with parameters for specifically UE_REQUIRES (Thanks Unreal)
 };
 };
 StrCached Content; // Attributes, Comment, Execution, Include
+TokenSlice ContentToks; // TODO(Ed): Use a token slice for content
 struct {
 Specifier ArrSpecs[AST_ArrSpecs_Cap]; // Specifiers
 Code NextSpecs; // Specifiers; If ArrSpecs is full, then NextSpecs is used.

@@ -422,7 +420,7 @@ struct AST
 Code Next;
 Code Back;
 };
-Token* Token; // Reference to starting token, only available if it was derived from parsing.
+Token* Token; // Reference to starting token, only available if it was derived from parsing. // TODO(Ed): Change this to a token slice.
 Code Parent;
 CodeType Type;
 // CodeFlag CodeFlags;
@@ -38,13 +38,13 @@ void body_to_strbuilder_export( CodeBody body, StrBuilder* result )
 GEN_ASSERT(result != nullptr);
 strbuilder_append_fmt( result, "export\n{\n" );
 
-Code curr = cast(Code, body);
+Code curr = body->Front;
 s32 left = body->NumEntries;
 while ( left-- )
 {
 code_to_strbuilder_ref(curr, result);
 // strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
-++curr;
+curr = curr->Next;
 }
 
 strbuilder_append_fmt( result, "};\n" );
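
Note on the loop change above: body entries are now walked through their intrusive Next links rather than by incrementing a Code pointer. A minimal caller-side sketch of the same traversal, using only fields that appear in this diff (Front, NumEntries, Next); illustrative, not library code:

    void visit_body( CodeBody body )
    {
        Code curr = body->Front;       // first entry in the body
        s32  left = body->NumEntries;  // entry count stored on the body
        while ( left-- )
        {
            // inspect or serialize curr here
            curr = curr->Next;         // follow the linked list, as the new loop does
        }
    }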
@@ -252,7 +252,8 @@ struct CodeSpecifiers
 #if ! GEN_C_LIKE_CPP
 Using_Code( CodeSpecifiers );
 bool append( Specifier spec ) { return specifiers_append(* this, spec); }
-s32 has( Specifier spec ) { return specifiers_has(* this, spec); }
+bool has( Specifier spec ) { return specifiers_has(* this, spec); }
+s32 index_of( Specifier spec ) { return specifiers_index_of(* this, spec); }
 s32 remove( Specifier to_remove ) { return specifiers_remove(* this, to_remove); }
 StrBuilder to_strbuilder() { return specifiers_to_strbuilder(* this ); }
 void to_strbuilder( StrBuilder& result ) { return specifiers_to_strbuilder_ref(* this, & result); }

@@ -1073,11 +1074,12 @@ forceinline bool has_entries (CodeParams params ) {
 forceinline StrBuilder to_strbuilder(CodeParams params ) { return params_to_strbuilder(params); }
 forceinline void to_strbuilder(CodeParams params, StrBuilder& result ) { return params_to_strbuilder_ref(params, & result); }
 
 forceinline bool append (CodeSpecifiers specifiers, Specifier spec) { return specifiers_append(specifiers, spec); }
-forceinline s32 has (CodeSpecifiers specifiers, Specifier spec) { return specifiers_has(specifiers, spec); }
-forceinline s32 remove (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
-forceinline StrBuilder to_strbuilder(CodeSpecifiers specifiers) { return specifiers_to_strbuilder(specifiers); }
-forceinline void to_strbuilder(CodeSpecifiers specifiers, StrBuilder& result) { return specifiers_to_strbuilder_ref(specifiers, & result); }
+forceinline bool has (CodeSpecifiers specifiers, Specifier spec) { return specifiers_has(specifiers, spec); }
+forceinline s32 index_of (CodeSpecifiers specifiers, Specifier spec) { return specifiers_index_of(specifiers, spec); }
+forceinline s32 remove (CodeSpecifiers specifiers, Specifier to_remove ) { return specifiers_remove(specifiers, to_remove); }
+forceinline StrBuilder to_strbuilder (CodeSpecifiers specifiers) { return specifiers_to_strbuilder(specifiers); }
+forceinline void to_strbuilder (CodeSpecifiers specifiers, StrBuilder& result) { return specifiers_to_strbuilder_ref(specifiers, & result); }
 
 forceinline void add_interface (CodeStruct self, CodeTypename interface) { return struct_add_interface(self, interface); }
 forceinline StrBuilder to_strbuilder (CodeStruct self) { return struct_to_strbuilder(self); }
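
Note on the CodeSpecifiers changes above: has() now answers as a bool, while the position query moves to the new index_of(). A short usage sketch against the updated wrappers (Spec_Inline stands in for an assumed example Specifier value):

    CodeSpecifiers specs = def_specifier( Spec_Inline );
    if ( specs.has( Spec_Inline ) )                 // bool: present or not
    {
        s32 at = specs.index_of( Spec_Inline );     // s32: position within the specifier list
        specs.remove( Spec_Inline );
    }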
@@ -1,6 +1,6 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
-#include "interface.hpp"
+#include "constants.hpp"
 #endif
 
 #pragma region Serialization

@@ -38,7 +38,7 @@ void body_to_strbuilder_ref( CodeBody body, StrBuilder* result )
 {
 code_to_strbuilder_ref(curr, result);
 // strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
-++curr;
+curr = curr->Next;
 }
 }
 
@@ -3,8 +3,8 @@
 #include "code_serialization.cpp"
 #endif
 
-internal void parser_init();
-internal void parser_deinit();
+internal void parser_init(Context* ctx);
+internal void parser_deinit(Context* ctx);
 
 internal
 void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
@@ -71,6 +71,14 @@ void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size,
 return nullptr;
 }
 
+internal
+void fallback_logger(LogEntry entry)
+{
+GEN_ASSERT(entry.msg.Len > 0);
+GEN_ASSERT(entry.msg.Ptr);
+log_fmt("%S: %S", loglevel_to_str(entry.level), entry.msg);
+}
+
 internal
 void define_constants()
 {
@@ -283,6 +291,19 @@ void init(Context* ctx)
 ctx->InitSize_Fallback_Allocator_Bucket_Size = megabytes(8);
 }
 
+if (ctx->InitSize_StrCacheTable == 0)
+{
+ctx->InitSize_StrCacheTable = kilobytes(8);
+}
+if (ctx->InitSize_MacrosTable == 0)
+{
+ctx->InitSize_MacrosTable = kilobytes(8);
+}
+
+if (ctx->Logger == nullptr) {
+ctx->Logger = & fallback_logger;
+}
+
 // Override the current context (user has to put it back if unwanted).
 _ctx = ctx;
 
@@ -298,7 +319,7 @@ void init(Context* ctx)
 }
 // Setup the code pool and code entries arena.
 {
-Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, sizeof(AST) );
+Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, size_of(AST) );
 if ( code_pool.PhysicalStart == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the code pool" );
 array_append( ctx->CodePools, code_pool );
@@ -311,18 +332,18 @@ void init(Context* ctx)
 }
 // Setup the hash tables
 {
-ctx->StrCache = hashtable_init(StrCached, ctx->Allocator_DyanmicContainers);
+ctx->StrCache = hashtable_init_reserve(StrCached, ctx->Allocator_DyanmicContainers, ctx->InitSize_StrCacheTable);
 if ( ctx->StrCache.Entries == nullptr )
 GEN_FATAL( "gen::init: Failed to initialize the StringCache");
 
-ctx->Macros = hashtable_init(Macro, ctx->Allocator_DyanmicContainers);
+ctx->Macros = hashtable_init_reserve(Macro, ctx->Allocator_DyanmicContainers, ctx->InitSize_MacrosTable);
 if (ctx->Macros.Hashes == nullptr || ctx->Macros.Entries == nullptr) {
 GEN_FATAL( "gen::init: Failed to initialize the PreprocessMacros table" );
 }
 }
 
 define_constants();
-parser_init();
+parser_init(ctx);
 
 ++ context_counter;
 }
@@ -371,7 +392,7 @@ void deinit(Context* ctx)
 while ( left--, left );
 array_free( ctx->Fallback_AllocatorBuckets);
 }
-parser_deinit();
+parser_deinit(ctx);
 
 if (_ctx == ctx)
 _ctx = nullptr;
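
Note on the init()/deinit() changes above: the new reserve sizes default to kilobytes(8) when left at zero, and a null Logger is replaced with fallback_logger. A minimal configuration sketch using only the fields this diff touches (the chosen sizes are illustrative):

    gen::Context ctx = {};
    ctx.InitSize_StrCacheTable = kilobytes(16); // 0 would fall back to kilobytes(8)
    ctx.InitSize_MacrosTable   = kilobytes(8);
    ctx.Logger                 = nullptr;       // leave null to get fallback_logger
    gen::init( & ctx );
    // ... generate code ...
    gen::deinit( & ctx );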
@@ -15,24 +15,6 @@
 \▓▓▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓
 */
 
-#if 0
-enum LogLevel : u32
-{
-Info,
-Warning,
-Panic,
-};
-
-struct LogEntry
-{
-Str msg;
-u32 line_num;
-void* data;
-};
-
-typedef void LoggerCallback(LogEntry entry);
-#endif
-
 // Note(Ed): This is subject to heavily change
 // with upcoming changes to the library's fallback (default) allocations strategy;
 // and major changes to lexer/parser context usage.
@@ -64,9 +46,16 @@ struct Context
 u32 InitSize_LexerTokens;
 u32 SizePer_StringArena;
 
+u32 InitSize_StrCacheTable;
+u32 InitSize_MacrosTable;
+
 // TODO(Ed): Symbol Table
 // Keep track of all resolved symbols (naemspaced identifiers)
 
+// Logging
+
+LoggerProc* Logger;
+
 // Parser
 
 // Used by the lexer to persistently treat all these identifiers as preprocessor defines.
@@ -89,9 +78,6 @@ struct Context
 
 StringTable StrCache;
 
-// TODO(Ed): This needs to be just handled by a parser context
-Array(Token) Lexer_Tokens;
-
 // TODO(Ed): Active parse context vs a parse result need to be separated conceptually
 ParseContext parser;
 
@@ -104,6 +90,37 @@ struct Context
 // An implicit context interface will be provided instead as wrapper procedures as convience.
 GEN_API extern Context* _ctx;
 
+// TODO(Ed): Swap all usage of this with logger_fmt (then rename logger_fmt to log_fmt)
+inline
+ssize log_fmt(char const* fmt, ...)
+{
+ssize res;
+va_list va;
+
+va_start(va, fmt);
+res = c_str_fmt_out_va(fmt, va);
+va_end(va);
+
+return res;
+}
+
+inline
+void logger_fmt(Context* ctx, LogLevel level, char const* fmt, ...)
+{
+local_persist thread_local
+PrintF_Buffer buf = struct_zero_init();
+
+va_list va;
+va_start(va, fmt);
+ssize res = c_str_fmt_va(buf, GEN_PRINTF_MAXLEN, fmt, va) -1;
+va_end(va);
+
+StrBuilder msg = strbuilder_make_length(ctx->Allocator_Temp, buf, res);
+
+LogEntry entry = { strbuilder_to_str(msg), level };
+ctx->Logger(entry);
+}
+
 // Initialize the library. There first ctx initialized must exist for lifetime of other contextes that come after as its the one that
 GEN_API void init(Context* ctx);
 
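
Note on the logging hooks above: a sketch of installing a custom LoggerProc and routing messages through logger_fmt. The LogEntry fields follow the fallback_logger added earlier in this diff; LL_Info stands in for whichever LogLevel value the library actually defines (an assumption, not confirmed here):

    void my_logger( gen::LogEntry entry )
    {
        // entry.msg is a Str; mirror fallback_logger's formatting
        gen::log_fmt( "[meta] %S: %S\n", gen::loglevel_to_str( entry.level ), entry.msg );
    }

    ctx.Logger = & my_logger;                                  // picked up by init()
    gen::logger_fmt( & ctx, LL_Info, "emitted %d files", 3 );  // formatted via Allocator_Temp, then dispatched to ctx.Logger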
@@ -114,7 +131,7 @@ GEN_API void deinit(Context* ctx);
 // Retrieves the active context (not usually needed, but here in case...)
 GEN_API Context* get_context();
 
-// Clears the allocations, but doesn't free the memoery, then calls init() again.
+// Clears the allocations, but doesn't free the memory, then calls init() again.
 // Ease of use.
 GEN_API void reset(Context* ctx);
 
@@ -334,37 +351,33 @@ forceinline CodeBody def_union_body ( s32 num, Code* codes )
 
 #pragma region Parsing
 
-#if 0
-struct StackNode
+struct ParseStackNode
 {
-StackNode* Prev;
+ParseStackNode* prev;
 
-Token Start;
-Token Name; // The name of the AST node (if parsed)
-Str FailedProc; // The name of the procedure that failed
-};
-// Stack nodes are allocated the error's allocator
-
-struct Error
-{
-StrBuilder message;
-StackNode* context_stack;
+TokenSlice tokens;
+Token* start;
+Str name; // The name of the AST node (if parsed)
+Str proc_name; // The name of the procedure
+Code code_rel; // Relevant AST node
+// TODO(Ed): When an error occurs, the parse stack is not released and instead the scope is left dangling.
 };
 
 struct ParseInfo
 {
-Arena FileMem;
-Arena TokMem;
-Arena CodeMem;
-
-FileContents FileContent;
-Array<Token> Tokens;
-Array<Error> Errors;
-// Errors are allocated to a dedicated general arena.
+ParseMessage* messages;
+LexedInfo lexed;
+Code result;
 };
 
-CodeBody parse_file( Str path );
-#endif
+struct ParseOpts
+{
+AllocatorInfo backing_msgs;
+AllocatorInfo backing_tokens;
+AllocatorInfo backing_ast;
+};
+
+ParseInfo wip_parse_str( LexedInfo lexed, ParseOpts* opts GEN_PARAM_DEFAULT );
 
 GEN_API CodeClass parse_class ( Str class_def );
 GEN_API CodeConstructor parse_constructor ( Str constructor_def );
|
|||||||
//! Do not use directly. Use the token_fmt macro instead.
|
//! Do not use directly. Use the token_fmt macro instead.
|
||||||
Str token_fmt_impl( ssize, ... );
|
Str token_fmt_impl( ssize, ... );
|
||||||
|
|
||||||
GEN_API Code untyped_str( Str content);
|
GEN_API Code untyped_str ( Str content);
|
||||||
GEN_API Code untyped_fmt ( char const* fmt, ... );
|
GEN_API Code untyped_fmt ( char const* fmt, ... );
|
||||||
GEN_API Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... );
|
GEN_API Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... );
|
||||||
|
GEN_API Code untyped_toks ( TokenSlice tokens );
|
||||||
|
|
||||||
#pragma endregion Untyped text
|
#pragma endregion Untyped text
|
||||||
|
|
||||||
|
@@ -1,6 +1,6 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
-#include "gen/etoktype.cpp"
+#include "gen/etoktype.hpp"
 #include "interface.upfront.cpp"
 #include "lexer.cpp"
 #include "parser.cpp"
@@ -8,29 +8,70 @@
 
 // Publically Exposed Interface
 
+ParseInfo wip_parse_str(LexedInfo lexed, ParseOpts* opts)
+{
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
+if (lexed.tokens.num == 0 && lexed.tokens.ptr == nullptr) {
+check_parse_args(lexed.text);
+lexed = lex(ctx, lexed.text);
+}
+ParseInfo info = struct_zero(ParseInfo);
+info.lexed = lexed;
+
+// TODO(Ed): ParseInfo should be set to the parser context.
+
+ctx->parser = struct_zero(ParseContext);
+ctx->parser.tokens = lexed.tokens;
+
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+
+CodeBody result = parse_global_nspace(ctx,CT_Global_Body);
+
+parser_pop(& ctx->parser);
+return info;
+}
+
 CodeClass parse_class( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-push_scope();
-CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class, parser_not_inplace_def );
-parser_pop(& _ctx->parser);
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+CodeClass result = (CodeClass) parse_class_struct( ctx, Tok_Decl_Class, parser_not_inplace_def );
+parser_pop(& ctx->parser);
 return result;
 }
 
-CodeConstructor parse_constructor( Str def )
+CodeConstructor parse_constructor(Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+
 // TODO(Ed): Constructors can have prefix attributes
 
 CodeSpecifiers specifiers = NullCode;
@@ -57,8 +98,8 @@ CodeConstructor parse_constructor( Str def )
 break;
 
 default :
-log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(_ctx->parser) );
-parser_pop(& _ctx->parser);
+log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(& ctx->parser, ctx->Allocator_Temp) );
+parser_pop(& ctx->parser);
 return InvalidCode;
 }
 
@@ -71,247 +112,337 @@ CodeConstructor parse_constructor( Str def )
 eat( currtok.Type );
 }
 
-if ( NumSpecifiers )
-{
+if ( NumSpecifiers ) {
 specifiers = def_specifiers_arr( NumSpecifiers, specs_found );
 // <specifiers> ...
 }
 
-_ctx->parser.Tokens = toks;
-CodeConstructor result = parser_parse_constructor( specifiers );
+CodeConstructor result = parser_parse_constructor(ctx, specifiers);
+parser_pop(& ctx->parser);
 return result;
 }
 
 CodeDefine parse_define( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-push_scope();
-CodeDefine result = parser_parse_define();
-parser_pop(& _ctx->parser);
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+CodeDefine result = parser_parse_define(ctx);
+parser_pop(& ctx->parser);
 return result;
 }
 
 CodeDestructor parse_destructor( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
 // TODO(Ed): Destructors can have prefix attributes
 // TODO(Ed): Destructors can have virtual
 
-_ctx->parser.Tokens = toks;
-CodeDestructor result = parser_parse_destructor(NullCode);
+CodeDestructor result = parser_parse_destructor(ctx, NullCode);
 return result;
 }
 
 CodeEnum parse_enum( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
-{
-parser_pop(& _ctx->parser);
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr ) {
 return InvalidCode;
 }
 
-_ctx->parser.Tokens = toks;
-return parser_parse_enum( parser_not_inplace_def);
+return parser_parse_enum(ctx, parser_not_inplace_def);
 }
 
 CodeBody parse_export_body( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_export_body();
+return parser_parse_export_body(ctx);
 }
 
 CodeExtern parse_extern_link( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_extern_link();
+return parser_parse_extern_link(ctx);
 }
 
 CodeFriend parse_friend( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_friend();
+return parser_parse_friend(ctx);
 }
 
 CodeFn parse_function( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return (CodeFn) parser_parse_function();
+return (CodeFn) parser_parse_function(ctx);
 }
 
 CodeBody parse_global_body( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-push_scope();
-CodeBody result = parse_global_nspace( CT_Global_Body );
-parser_pop(& _ctx->parser);
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+CodeBody result = parse_global_nspace(ctx, CT_Global_Body );
+parser_pop(& ctx->parser);
 return result;
 }
 
 CodeNS parse_namespace( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_namespace();
+return parser_parse_namespace(ctx);
 }
 
 CodeOperator parse_operator( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return (CodeOperator) parser_parse_operator();
+return (CodeOperator) parser_parse_operator(ctx);
 }
 
 CodeOpCast parse_operator_cast( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_operator_cast(NullCode);
+return parser_parse_operator_cast(ctx, NullCode);
 }
 
 CodeStruct parse_struct( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-push_scope();
-CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct, parser_not_inplace_def );
-parser_pop(& _ctx->parser);
+ParseStackNode scope = NullScope;
+parser_push(& ctx->parser, & scope);
+CodeStruct result = (CodeStruct) parse_class_struct( ctx, Tok_Decl_Struct, parser_not_inplace_def );
+parser_pop(& ctx->parser);
 return result;
 }
 
 CodeTemplate parse_template( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_template();
+return parser_parse_template(ctx);
 }
 
 CodeTypename parse_type( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_type( parser_not_from_template, nullptr);
+return parser_parse_type( ctx, parser_not_from_template, nullptr);
 }
 
 CodeTypedef parse_typedef( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_typedef();
+return parser_parse_typedef(ctx);
 }
 
 CodeUnion parse_union( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_union( parser_not_inplace_def);
+return parser_parse_union(ctx, parser_not_inplace_def);
 }
 
 CodeUsing parse_using( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_using();
+return parser_parse_using(ctx);
 }
 
 CodeVar parse_variable( Str def )
 {
+// TODO(Ed): Lift this.
+Context* ctx = _ctx;
+
 check_parse_args( def );
 
-TokArray toks = lex( def );
-if ( toks.Arr == nullptr )
+ctx->parser = struct_zero(ParseContext);
+LexedInfo lexed = lex(ctx, def);
+ctx->parser.tokens = lexed.tokens;
+if ( ctx->parser.tokens.ptr == nullptr )
 return InvalidCode;
 
-_ctx->parser.Tokens = toks;
-return parser_parse_variable();
+return parser_parse_variable(ctx);
 }
 
 // Undef helper macros
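
Note on the refactor above: the public parse_* signatures are unchanged; only the internal plumbing now threads the explicit Context and a per-call ParseStackNode scope. Caller code keeps working as before (the snippets passed to the parsers are illustrative):

    // Failures still come back as InvalidCode, via the ctx->parser.tokens.ptr == nullptr guards above.
    gen::CodeStruct vec2 = gen::parse_struct( txt("struct Vec2 { float x; float y; };") );
    gen::CodeEnum   axis = gen::parse_enum  ( txt("enum Axis { X, Y, Z };") );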
@@ -326,6 +457,7 @@ CodeVar parse_variable( Str def )
 #undef left
 #undef check
 #undef push_scope
+#undef NullScope
 #undef def_assign
 
 // Here for C Variant
@@ -176,3 +176,16 @@ Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... )
 
 return result;
 }
+
+Code untyped_toks( TokenSlice tokens )
+{
+if ( tokens.num == 0 ) {
+log_failure( "untyped_toks: empty token slice" );
+return InvalidCode;
+}
+Code
+result = make_code();
+result->Type = CT_Untyped;
+result->ContentToks = tokens;
+return result;
+}
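
Note on untyped_toks above: it is the token-slice counterpart to untyped_str, storing the slice in the new ContentToks field instead of cached text. A call-shape sketch, assuming a TokenSlice obtained from the lexer as wired elsewhere in this diff (lex() may not be part of the public surface; this is illustrative only):

    gen::LexedInfo lexed = lex( ctx, txt("GEN_API void do_thing( int );") );
    gen::Code raw = gen::untyped_toks( lexed.tokens ); // CT_Untyped node carrying ContentToks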
@@ -473,8 +473,10 @@ CodeComment def_comment( Str content )
 return (CodeComment) result;
 }
 
-CodeConstructor def_constructor( Opts_def_constructor p )
+CodeConstructor def_constructor( Opts_def_constructor opt )
 {
+Opts_def_constructor p = get_optional(opt);
+
 if ( p.params && p.params->Type != CT_Parameters ) {
 log_failure("gen::def_constructor: params must be of Parameters type - %s", code_debug_str((Code)p.params));
 GEN_DEBUG_TRAP();

@@ -510,8 +512,10 @@ CodeConstructor def_constructor( Opts_def_constructor p )
 return result;
 }
 
-CodeClass def_class( Str name, Opts_def_struct p )
+CodeClass def_class( Str name, Opts_def_struct opt )
 {
+Opts_def_struct p = get_optional(opt);
+
 if ( ! name_check( def_class, name ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -561,8 +565,10 @@ CodeClass def_class( Str name, Opts_def_struct p )
 return result;
 }
 
-CodeDefine def_define( Str name, MacroType type, Opts_def_define p )
+CodeDefine def_define( Str name, MacroType type, Opts_def_define opt )
 {
+Opts_def_define p = get_optional(opt);
+
 if ( ! name_check( def_define, name ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -585,8 +591,10 @@ CodeDefine def_define( Str name, MacroType type, Opts_def_define p )
 return result;
 }
 
-CodeDestructor def_destructor( Opts_def_destructor p )
+CodeDestructor def_destructor( Opts_def_destructor opt )
 {
+Opts_def_destructor p = get_optional(opt);
+
 if ( p.specifiers && p.specifiers->Type != CT_Specifiers ) {
 log_failure( "gen::def_destructor: specifiers was not a 'Specifiers' type: %s", code_debug_str(p.specifiers) );
 GEN_DEBUG_TRAP();

@@ -619,8 +627,10 @@ CodeDestructor def_destructor( Opts_def_destructor p )
 return result;
 }
 
-CodeEnum def_enum( Str name, Opts_def_enum p )
+CodeEnum def_enum( Str name, Opts_def_enum opt )
 {
+Opts_def_enum p = get_optional(opt);
+
 if ( ! name_check( def_enum, name ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -742,8 +752,10 @@ CodeFriend def_friend( Code declaration )
 return result;
 }
 
-CodeFn def_function( Str name, Opts_def_function p )
+CodeFn def_function( Str name, Opts_def_function opt )
 {
+Opts_def_function p = get_optional(opt);
+
 if ( ! name_check( def_function, name )) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -802,8 +814,10 @@ CodeFn def_function( Str name, Opts_def_function p )
 return result;
 }
 
-CodeInclude def_include( Str path, Opts_def_include p )
+CodeInclude def_include( Str path, Opts_def_include opt )
 {
+Opts_def_include p = get_optional(opt);
+
 if ( path.Len <= 0 || path.Ptr == nullptr ) {
 log_failure( "gen::def_include: Invalid path provided - %d" );
 GEN_DEBUG_TRAP();

@@ -821,8 +835,10 @@ CodeInclude def_include( Str path, Opts_def_include p )
 return result;
 }
 
-CodeModule def_module( Str name, Opts_def_module p )
+CodeModule def_module( Str name, Opts_def_module opt )
 {
+Opts_def_module p = get_optional(opt);
+
 if ( ! name_check( def_module, name )) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -835,8 +851,10 @@ CodeModule def_module( Str name, Opts_def_module p )
 return result;
 }
 
-CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace p )
+CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace opt )
 {
+Opts_def_namespace p = get_optional(opt);
+
 if ( ! name_check( def_namespace, name )) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -859,8 +877,10 @@ CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace p )
 return result;
 }
 
-CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator p )
+CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator opt )
 {
+Opts_def_operator p = get_optional(opt);
+
 if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
 log_failure( "gen::def_operator: PlatformAttributes was provided but its not of attributes type: %s", code_debug_str(p.attributes) );
 GEN_DEBUG_TRAP();

@@ -926,8 +946,10 @@ CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator p )
 return result;
 }
 
-CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast p )
+CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast opt )
 {
+Opts_def_operator_cast p = get_optional(opt);
+
 if ( ! null_check( def_operator_cast, type )) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -959,8 +981,10 @@ CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast p )
 return result;
 }
 
-CodeParams def_param( CodeTypename type, Str name, Opts_def_param p )
+CodeParams def_param( CodeTypename type, Str name, Opts_def_param opt )
 {
+Opts_def_param p = get_optional(opt);
+
 if ( ! name_check( def_param, name ) || ! null_check( def_param, type ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1034,8 +1058,10 @@ CodeSpecifiers def_specifier( Specifier spec )
 return result;
 }
 
-CodeStruct def_struct( Str name, Opts_def_struct p )
+CodeStruct def_struct( Str name, Opts_def_struct opt )
 {
+Opts_def_struct p = get_optional(opt);
+
 if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
 log_failure( "gen::def_struct: attributes was not a `PlatformAttributes` type - %s", code_debug_str(cast(Code, p.attributes)) );
 GEN_DEBUG_TRAP();

@@ -1076,8 +1102,10 @@ CodeStruct def_struct( Str name, Opts_def_struct p )
 return result;
 }
 
-CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template p )
+CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template opt )
 {
+Opts_def_template p = get_optional(opt);
+
 if ( ! null_check( def_template, declaration ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1108,8 +1136,10 @@ CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_templat
 return result;
 }
 
-CodeTypename def_type( Str name, Opts_def_type p )
+CodeTypename def_type( Str name, Opts_def_type opt )
 {
+Opts_def_type p = get_optional(opt);
+
 if ( ! name_check( def_type, name )) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1143,8 +1173,10 @@ CodeTypename def_type( Str name, Opts_def_type p )
 return result;
 }
 
-CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef p )
+CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef opt )
 {
+Opts_def_typedef p = get_optional(opt);
+
 if ( ! null_check( def_typedef, type ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1206,8 +1238,10 @@ CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef p )
 return result;
 }
 
-CodeUnion def_union( Str name, CodeBody body, Opts_def_union p )
+CodeUnion def_union( Str name, CodeBody body, Opts_def_union opt )
 {
+Opts_def_union p = get_optional(opt);
+
 if ( ! null_check( def_union, body ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1233,8 +1267,10 @@ CodeUnion def_union( Str name, CodeBody body, Opts_def_union p )
 return result;
 }
 
-CodeUsing def_using( Str name, CodeTypename type, Opts_def_using p )
+CodeUsing def_using( Str name, CodeTypename type, Opts_def_using opt )
 {
+Opts_def_using p = get_optional(opt);
+
 if ( ! name_check( def_using, name ) || null_check( def_using, type ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;

@@ -1274,8 +1310,10 @@ CodeUsing def_using_namespace( Str name )
 return result;
 }
 
-CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable p )
+CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable opt )
 {
+Opts_def_variable p = get_optional(opt);
+
 if ( ! name_check( def_variable, name ) || ! null_check( def_variable, type ) ) {
 GEN_DEBUG_TRAP();
 return InvalidCode;
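
Note on the def_* changes above: each definition constructor now receives its options as opt and normalizes them with get_optional(opt) before the existing validation runs; from the caller's side the options struct is filled and passed as before. A sketch using only members referenced in the guards above (txt() assumed for Str literals):

    gen::CodeTypename t_int  = gen::def_type( txt("int") );
    gen::CodeParams   params = gen::def_param( t_int, txt("x") );

    gen::Opts_def_constructor opts = {};
    opts.params = params;                           // checked against CT_Parameters by def_constructor
    gen::CodeConstructor ctor = gen::def_constructor( opts );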
@@ -1,12 +1,12 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
 #include "interface.upfront.cpp"
-#include "gen/etoktype.cpp"
+#include "gen/etoktype.hpp"
 #endif
 
-StrBuilder tok_to_strbuilder(Token tok)
+StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok)
 {
-StrBuilder result = strbuilder_make_reserve( _ctx->Allocator_Temp, kilobytes(4) );
+StrBuilder result = strbuilder_make_reserve( ainfo, kilobytes(4) );
 Str type_str = toktype_to_str( tok.Type );
 
 strbuilder_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
@@ -17,55 +17,55 @@ StrBuilder tok_to_strbuilder(Token tok)
 return result;
 }
 
-bool lex__eat( TokArray* self, TokType type );
+bool lex__eat(Context* ctx, ParseContext* self, TokType type );
 
-Token* lex_current(TokArray* self, bool skip_formatting )
+Token* lex_current(ParseContext* self, bool skip_formatting )
 {
 if ( skip_formatting )
 {
-while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
-self->Idx++;
+while ( self->tokens.ptr[self->token_id].Type == Tok_NewLine || self->tokens.ptr[self->token_id].Type == Tok_Comment )
+self->token_id++;
 }
-return & self->Arr[self->Idx];
+return & self->tokens.ptr[self->token_id];
 }
 
-Token* lex_peek(TokArray self, bool skip_formatting)
+Token* lex_peek(ParseContext const* self, bool skip_formatting)
 {
-s32 idx = self.Idx;
+s32 idx = self->token_id;
 if ( skip_formatting )
 {
-while ( self.Arr[idx].Type == Tok_NewLine )
+while ( self->tokens.ptr[idx].Type == Tok_NewLine )
 idx++;
 
-return & self.Arr[idx];
+return & self->tokens.ptr[idx];
 }
-return & self.Arr[idx];
+return & self->tokens.ptr[idx];
 }
 
-Token* lex_previous(TokArray self, bool skip_formatting)
+Token* lex_previous(ParseContext const* self, bool skip_formatting)
 {
-s32 idx = self.Idx;
+s32 idx = self->token_id;
 if ( skip_formatting )
 {
-while ( self.Arr[idx].Type == Tok_NewLine )
+while ( self->tokens.ptr[idx].Type == Tok_NewLine )
 idx --;
 
-return & self.Arr[idx];
+return & self->tokens.ptr[idx];
 }
-return & self.Arr[idx - 1];
+return & self->tokens.ptr[idx - 1];
 }
 
-Token* lex_next(TokArray self, bool skip_formatting)
+Token* lex_next(ParseContext const* self, bool skip_formatting)
 {
-s32 idx = self.Idx;
+s32 idx = self->token_id;
 if ( skip_formatting )
 {
-while ( self.Arr[idx].Type == Tok_NewLine )
+while ( self->tokens.ptr[idx].Type == Tok_NewLine )
 idx++;
 
-return & self.Arr[idx + 1];
+return & self->tokens.ptr[idx + 1];
 }
-return & self.Arr[idx + 1];
+return & self->tokens.ptr[idx + 1];
 }
 
 enum
@ -137,7 +137,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
);
|
);
|
||||||
// GEN_DEBUG_TRAP();
|
// GEN_DEBUG_TRAP();
|
||||||
}
|
}
|
||||||
array_append( _ctx->Lexer_Tokens, name );
|
array_append(ctx->tokens, name);
|
||||||
|
|
||||||
if ( ctx->left && (* ctx->scanner) == '(' )
|
if ( ctx->left && (* ctx->scanner) == '(' )
|
||||||
{
|
{
|
||||||
@ -152,7 +152,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
}
|
}
|
||||||
|
|
||||||
Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess };
|
Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess };
|
||||||
array_append( _ctx->Lexer_Tokens, opening_paren );
|
array_append(ctx->tokens, opening_paren);
|
||||||
move_forward();
|
move_forward();
|
||||||
|
|
||||||
Token last_parameter = {};
|
Token last_parameter = {};
|
||||||
@ -168,7 +168,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
move_forward();
|
move_forward();
|
||||||
move_forward();
|
move_forward();
|
||||||
|
|
||||||
array_append(_ctx->Lexer_Tokens, parameter);
|
array_append(ctx->tokens, parameter);
|
||||||
skip_whitespace();
|
skip_whitespace();
|
||||||
last_parameter = parameter;
|
last_parameter = parameter;
|
||||||
|
|
||||||
@ -202,7 +202,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
move_forward();
|
move_forward();
|
||||||
parameter.Text.Len++;
|
parameter.Text.Len++;
|
||||||
}
|
}
|
||||||
array_append(_ctx->Lexer_Tokens, parameter);
|
array_append(ctx->tokens, parameter);
|
||||||
skip_whitespace();
|
skip_whitespace();
|
||||||
last_parameter = parameter;
|
last_parameter = parameter;
|
||||||
}
|
}
|
||||||
@ -229,7 +229,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
return Lex_ReturnNull;
|
return Lex_ReturnNull;
|
||||||
}
|
}
|
||||||
Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess };
|
Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess };
|
||||||
array_append(_ctx->Lexer_Tokens, comma);
|
array_append(ctx->tokens, comma);
|
||||||
move_forward();
|
move_forward();
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -243,7 +243,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
|
|||||||
return Lex_ReturnNull;
|
return Lex_ReturnNull;
|
||||||
}
|
}
|
||||||
Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess };
|
Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess };
|
||||||
array_append(_ctx->Lexer_Tokens, closing_paren);
|
array_append(ctx->tokens, closing_paren);
|
||||||
move_forward();
|
move_forward();
|
||||||
}
|
}
|
||||||
else if ( registered_macro && macro_is_functional( * registered_macro) ) {
|
else if ( registered_macro && macro_is_functional( * registered_macro) ) {
|
||||||
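To make the flow above concrete, this is roughly what lex_preprocessor_define appends to ctx->tokens for a functional define, based only on the branches shown in this hunk; the exact token types used for the name and parameters are not spelled out here, so treat it as an approximation:

// Input:  #define ADD( a, b )  ...
//
// Appended (approximately, per the code above):
//   macro name token   "ADD"
//   Tok_Paren_Open      "("
//   parameter token     "a"
//   Tok_Comma           ","
//   parameter token     "b"
//   Tok_Paren_Close     ")"
// The define's replacement body is handled by the surrounding directive lexing.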
@ -268,7 +268,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 {
 	char const* hash = ctx->scanner;
 	Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
-	array_append( _ctx->Lexer_Tokens, hash_tok );
+	array_append(ctx->tokens, hash_tok);

 	move_forward();
 	skip_whitespace();
@ -344,14 +344,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )

 		ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
 		ctx->token.Text.Ptr = hash;
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		return Lex_Continue; // Skip found token, its all handled here.
 	}

 	if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
 	{
 		ctx->token.Flags |= TF_Preprocess_Cond;
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		end_line();
 		return Lex_Continue;
 	}
@ -360,7 +360,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 		ctx->token.Flags |= TF_Preprocess_Cond;
 	}

-	array_append( _ctx->Lexer_Tokens, ctx->token );
+	array_append(ctx->tokens, ctx->token);

 	skip_whitespace();

@ -379,7 +379,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )

 		if ( (* ctx->scanner) != '"' && (* ctx->scanner) != '<' )
 		{
-			StrBuilder directive_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );
+			StrBuilder directive_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );

 			log_failure( "gen::Parser::lex: Expected '\"' or '<' after #include, not '%c' (%d, %d)\n%s"
 				, (* ctx->scanner)
@ -411,7 +411,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 			move_forward();
 		}

-		array_append( _ctx->Lexer_Tokens, preprocess_content );
+		array_append(ctx->tokens, preprocess_content);
 		return Lex_Continue; // Skip found token, its all handled here.
 	}

@ -446,8 +446,8 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 			}
 			else
 			{
-				StrBuilder directive_str = strbuilder_make_length( _ctx->Allocator_Temp, ctx->token.Text.Ptr, ctx->token.Text.Len );
-				StrBuilder content_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );
+				StrBuilder directive_str = strbuilder_make_length( ctx->allocator_temp, ctx->token.Text.Ptr, ctx->token.Text.Len );
+				StrBuilder content_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );

 				log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)"
 					" in preprocessor directive '%s' (%d, %d)\n%s"
@ -475,14 +475,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 		preprocess_content.Text.Len++;
 	}

-	array_append( _ctx->Lexer_Tokens, preprocess_content );
+	array_append(ctx->tokens, preprocess_content);
 	return Lex_Continue; // Skip found token, its all handled here.
 }

 void lex_found_token( LexContext* ctx )
 {
 	if ( ctx->token.Type != Tok_Invalid ) {
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		return;
 	}

@ -508,7 +508,7 @@ void lex_found_token( LexContext* ctx )
 		}

 		ctx->token.Type = type;
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		return;
 	}
 	if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
@ -517,13 +517,13 @@ void lex_found_token( LexContext* ctx )
 	{
 		ctx->token.Type = type;
 		ctx->token.Flags |= TF_Specifier;
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		return;
 	}
 	if ( type != Tok_Invalid )
 	{
 		ctx->token.Type = type;
-		array_append( _ctx->Lexer_Tokens, ctx->token );
+		array_append(ctx->tokens, ctx->token);
 		return;
 	}

@ -561,50 +561,42 @@ void lex_found_token( LexContext* ctx )
 		ctx->token.Type = Tok_Identifier;
 	}

-	array_append( _ctx->Lexer_Tokens, ctx->token );
+	array_append(ctx->tokens, ctx->token);
 }

+// TODO(Ed): We should dynamically allocate the lexer's array in Allocator_DyanmicContainers.

 // TODO(Ed): We need to to attempt to recover from a lex failure?

 neverinline
-// TokArray lex( Array<Token> tokens, Str content )
-TokArray lex( Str content )
+LexedInfo lex(Context* lib_ctx, Str content)
 {
-	LexContext c; LexContext* ctx = & c;
-	c.content = content;
-	c.left = content.Len;
-	c.scanner = content.Ptr;
-
-	char const* word = c.scanner;
-	s32 word_length = 0;
-
-	c.line = 1;
-	c.column = 1;
+	LexedInfo info = struct_zero(LexedInfo);
+
+	LexContext c = struct_zero(LexContext); LexContext* ctx = & c;
+	c.allocator_temp = lib_ctx->Allocator_Temp;
+	c.content = content;
+	c.left = content.Len;
+	c.scanner = content.Ptr;
+	c.line = 1;
+	c.column = 1;
+	c.tokens = array_init_reserve(Token, lib_ctx->Allocator_DyanmicContainers, lib_ctx->InitSize_LexerTokens );
+
+	// TODO(Ed): Re-implement to new constraints:
+	// 1. Ability to continue on error
+	// 2. Return a lexed info.

 	skip_whitespace();
-	if ( c.left <= 0 )
-	{
+	if ( c.left <= 0 ) {
 		log_failure( "gen::lex: no tokens found (only whitespace provided)" );
-		TokArray null_array = {};
-		return null_array;
+		return info;
 	}

-	array_clear(_ctx->Lexer_Tokens);

 	b32 preprocess_args = true;

 	while (c.left )
 	{
-#if 0
-		if (Tokens.num())
-		{
-			log_fmt("\nLastTok: %SB", Tokens.back().to_strbuilder());
-		}
-#endif
-
-		{
-			Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
-			c.token = thanks_c;
-		}
+		c.token = struct_init(Token) { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };

 		bool is_define = false;

@ -623,7 +615,7 @@ TokArray lex( Str content )
 				c.token.Type = Tok_NewLine;
 				c.token.Text.Len++;

-				array_append( _ctx->Lexer_Tokens, c.token );
+				array_append(c.tokens, c.token);
 				continue;
 			}
 		}
@ -662,7 +654,7 @@ TokArray lex( Str content )
 					c.token.Text.Len++;
 					move_forward();

-					array_append( _ctx->Lexer_Tokens, c.token );
+					array_append(c.tokens, c.token);
 				}
 			}
 			continue;
@ -670,8 +662,7 @@ TokArray lex( Str content )

 			case Lex_ReturnNull:
 			{
-				TokArray tok_array = {};
-				return tok_array;
+				return info;
 			}
 		}
 	}
@ -698,7 +689,7 @@ TokArray lex( Str content )
 			}
 			else
 			{
-				StrBuilder context_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) );
+				StrBuilder context_str = strbuilder_fmt_buf( lib_ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) );

 				log_failure( "gen::lex: invalid varadic argument, expected '...' got '..%c' (%d, %d)\n%s", (* ctx->scanner), c.line, c.column, context_str );
 			}
@ -1119,7 +1110,7 @@ TokArray lex( Str content )
 				move_forward();
 				c.token.Text.Len++;
 			}
-			array_append( _ctx->Lexer_Tokens, c.token );
+			array_append(c.tokens, c.token);
 			continue;
 		}
 		else if ( (* ctx->scanner) == '*' )
@ -1155,7 +1146,7 @@ TokArray lex( Str content )
 				move_forward();
 				c.token.Text.Len++;
 			}
-			array_append( _ctx->Lexer_Tokens, c.token );
+			array_append(c.tokens, c.token);
 			// end_line();
 			continue;
 		}
@ -1243,14 +1234,14 @@ TokArray lex( Str content )
 		}
 		else
 		{
-			s32 start = max( 0, array_num(_ctx->Lexer_Tokens) - 100 );
+			s32 start = max( 0, array_num(c.tokens) - 100 );
 			log_fmt("\n%d\n", start);
-			for ( s32 idx = start; idx < array_num(_ctx->Lexer_Tokens); idx++ )
+			for ( s32 idx = start; idx < array_num(c.tokens); idx++ )
 			{
 				log_fmt( "Token %d Type: %s : %.*s\n"
 					, idx
-					, toktype_to_str( _ctx->Lexer_Tokens[ idx ].Type ).Ptr
-					, _ctx->Lexer_Tokens[ idx ].Text.Len, _ctx->Lexer_Tokens[ idx ].Text.Ptr
+					, toktype_to_str( c.tokens[ idx ].Type ).Ptr
+					, c.tokens[ idx ].Text.Len, c.tokens[ idx ].Text.Ptr
 				);
 			}

@ -1266,7 +1257,7 @@ TokArray lex( Str content )
 		FoundToken:
 		{
 			lex_found_token( ctx );
-			TokType last_type = array_back(_ctx->Lexer_Tokens)->Type;
+			TokType last_type = array_back(c.tokens)->Type;
 			if ( last_type == Tok_Preprocess_Macro_Stmt || last_type == Tok_Preprocess_Macro_Expr )
 			{
 				Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
@ -1281,21 +1272,22 @@ TokArray lex( Str content )
 				c.token.Text.Len++;
 				move_forward();

-				array_append( _ctx->Lexer_Tokens, c.token );
+				array_append(c.tokens, c.token);
 				continue;
 			}
 		}
 	}

-	if ( array_num(_ctx->Lexer_Tokens) == 0 ) {
+	if ( array_num(c.tokens) == 0 ) {
 		log_failure( "Failed to lex any tokens" );
-		TokArray tok_array = {};
-		return tok_array;
+		return info;
 	}

-	TokArray result = { _ctx->Lexer_Tokens, 0 };
-	return result;
+	info.messages = c.messages;
+	info.text = content;
+	info.tokens = struct_init(TokenSlice) { pcast(Token*, c.tokens), scast(s32, array_num(c.tokens)) };
+	return info;
 }

 #undef move_forward
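A minimal sketch of driving the reworked entry point, assuming the Context, LexedInfo, and LexerMessage shapes introduced in this changeset; the library init call, the txt() literal helper, and the logging format details are assumptions, not part of the diff:

// Illustrative sketch only (not part of this changeset).
Context ctx = {};
init(& ctx);                                     // assumed library init entry point

Str source      = txt("#define VALUE 1\nint foo;");
LexedInfo lexed = lex(& ctx, source);            // lex now takes the Context explicitly

for (s32 idx = 0; idx < lexed.tokens.num; ++idx) {
	Token tok = lexed.tokens.ptr[idx];
	// ... dispatch on tok.Type, read source text via tok.Text ...
}
for (LexerMessage* msg = lexed.messages; msg != nullptr; msg = msg->next) {
	Str level = loglevel_to_str(msg->level);
	log_fmt("lex %.*s: %.*s\n", (int)level.Len, level.Ptr, (int)msg->content.Len, msg->content.Ptr);
}
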
(File diff suppressed because it is too large; not shown in this compare view.)
@ -1,7 +1,7 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
 #include "types.hpp"
-#include "gen/ecode.hpp"
+#include "gen/ecodetypes.hpp"
 #include "gen/eoperator.hpp"
 #include "gen/especifier.hpp"
 #include "gen/etoktype.hpp"
@ -89,7 +89,28 @@ bool tok_is_end_definition(Token tok) {
 	return bitfield_is_set( u32, tok.Flags, TF_EndDefinition );
 }

-StrBuilder tok_to_strbuilder(Token tok);
+StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok);
+
+struct TokenSlice
+{
+	Token* ptr;
+	s32 num;
+
+#if GEN_COMPILER_CPP
+	forceinline operator Token* () const { return ptr; }
+	forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
+#endif
+};
+
+forceinline
+Str token_range_to_str(Token start, Token end)
+{
+	Str result = {
+		start.Text.Ptr,
+		(scast(sptr, rcast(uptr, end.Text.Ptr)) + end.Text.Len) - scast(sptr, rcast(uptr, start.Text.Ptr))
+	};
+	return result;
+}

|
struct TokArray
|
||||||
{
|
{
|
||||||
@ -97,30 +118,52 @@ struct TokArray
|
|||||||
s32 Idx;
|
s32 Idx;
|
||||||
};
|
};
|
||||||
|
|
||||||
|
typedef struct LexerMessage LexerMessage;
|
||||||
|
struct LexerMessage
|
||||||
|
{
|
||||||
|
LexerMessage* next;
|
||||||
|
Str content;
|
||||||
|
LogLevel level;
|
||||||
|
};
|
||||||
|
|
||||||
struct LexContext
|
struct LexContext
|
||||||
{
|
{
|
||||||
|
AllocatorInfo allocator_temp;
|
||||||
|
LexerMessage* messages;
|
||||||
Str content;
|
Str content;
|
||||||
s32 left;
|
s32 left;
|
||||||
char const* scanner;
|
char const* scanner;
|
||||||
s32 line;
|
s32 line;
|
||||||
s32 column;
|
s32 column;
|
||||||
// StringTable defines;
|
|
||||||
Token token;
|
Token token;
|
||||||
|
Array(Token) tokens;
|
||||||
};
|
};
|
||||||
|
|
||||||
struct StackNode
|
struct LexedInfo
|
||||||
{
|
{
|
||||||
StackNode* Prev;
|
LexerMessage* messages;
|
||||||
|
Str text;
|
||||||
|
TokenSlice tokens;
|
||||||
|
};
|
||||||
|
|
||||||
Token* Start;
|
typedef struct ParseStackNode ParseStackNode;
|
||||||
Str Name; // The name of the AST node (if parsed)
|
|
||||||
Str ProcName; // The name of the procedure
|
typedef struct ParseMessage ParseMessage;
|
||||||
|
struct ParseMessage
|
||||||
|
{
|
||||||
|
ParseMessage* Next;
|
||||||
|
ParseStackNode* Scope;
|
||||||
|
Str Content;
|
||||||
|
LogLevel Level;
|
||||||
};
|
};
|
||||||
|
|
||||||
struct ParseContext
|
struct ParseContext
|
||||||
{
|
{
|
||||||
TokArray Tokens;
|
ParseMessage* messages;
|
||||||
StackNode* Scope;
|
ParseStackNode* scope;
|
||||||
|
// TokArray Tokens;
|
||||||
|
TokenSlice tokens;
|
||||||
|
s32 token_id;
|
||||||
};
|
};
|
||||||
|
|
||||||
enum MacroType : u16
|
enum MacroType : u16
|
||||||
@ -168,26 +211,36 @@ Str macrotype_to_str( MacroType type )
|
|||||||
|
|
||||||
enum EMacroFlags : u16
|
enum EMacroFlags : u16
|
||||||
{
|
{
|
||||||
MF_Functional = bit(0), // Macro has parameters (args expected to be passed)
|
// Macro has parameters (args expected to be passed)
|
||||||
MF_Expects_Body = bit(1), // Expects to assign a braced scope to its body.
|
MF_Functional = bit(0),
|
||||||
|
|
||||||
|
// Expects to assign a braced scope to its body.
|
||||||
|
MF_Expects_Body = bit(1),
|
||||||
|
|
||||||
// lex__eat wil treat this macro as an identifier if the parser attempts to consume it as one.
|
// lex__eat wil treat this macro as an identifier if the parser attempts to consume it as one.
|
||||||
// ^^^ This is a kludge because we don't support push/pop macro pragmas rn.
|
// This is a kludge because we don't support push/pop macro pragmas rn.
|
||||||
MF_Allow_As_Identifier = bit(2),
|
MF_Allow_As_Identifier = bit(2),
|
||||||
|
|
||||||
|
// When parsing identifiers, it will allow the consumption of the macro parameters (as its expected to be a part of constructing the identifier)
|
||||||
|
// Example of a decarator macro from stb_sprintf.h:
|
||||||
|
// STBSP__PUBLICDEC int STB_SPRINTF_DECORATE(sprintf)(char* buf, char const *fmt, ...) STBSP__ATTRIBUTE_FORMAT(2,3);
|
||||||
|
// ^^ STB_SPRINTF_DECORATE is decorating sprintf
|
||||||
|
MF_Identifier_Decorator = bit(3),
|
||||||
|
|
||||||
// lex__eat wil treat this macro as an attribute if the parser attempts to consume it as one.
|
// lex__eat wil treat this macro as an attribute if the parser attempts to consume it as one.
|
||||||
// ^^^ This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc)
|
// This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc)
|
||||||
// TODO(Ed): We can keep the MF_Allow_As_Attribute flag for macros, however, we need to add the ability of AST_Attributes to chain themselves.
|
// TODO(Ed): We can keep the MF_Allow_As_Attribute flag for macros, however, we need to add the ability of AST_Attributes to chain themselves.
|
||||||
// Its thats already a thing in the standard language anyway
|
// Its thats already a thing in the standard language anyway
|
||||||
// & it would allow UE_DEPRECATED, (UE_PROPERTY / UE_FUNCTION) to chain themselves as attributes of a resolved member function/variable definition
|
// & it would allow UE_DEPRECATED, (UE_PROPERTY / UE_FUNCTION) to chain themselves as attributes of a resolved member function/variable definition
|
||||||
MF_Allow_As_Attribute = bit(3),
|
MF_Allow_As_Attribute = bit(4),
|
||||||
|
|
||||||
// When a macro is encountered after attributes and specifiers while parsing a function, or variable:
|
// When a macro is encountered after attributes and specifiers while parsing a function, or variable:
|
||||||
// It will consume the macro and treat it as resolving the definition. (Yes this is for Unreal Engine)
|
// It will consume the macro and treat it as resolving the definition.
|
||||||
// (MUST BE OF MT_Statement TYPE)
|
// (MUST BE OF MT_Statement TYPE)
|
||||||
MF_Allow_As_Definition = bit(4),
|
MF_Allow_As_Definition = bit(5),
|
||||||
|
|
||||||
MF_Allow_As_Specifier = bit(5), // Created for Unreal's PURE_VIRTUAL
|
// Created for Unreal's PURE_VIRTUAL
|
||||||
|
MF_Allow_As_Specifier = bit(6),
|
||||||
|
|
||||||
MF_Null = 0,
|
MF_Null = 0,
|
||||||
MF_UnderlyingType = GEN_U16_MAX,
|
MF_UnderlyingType = GEN_U16_MAX,
|
||||||
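The flag bits above are meant to be combined per registered macro. A sketch of composing and testing them with bitfield_is_set, used the same way as in tok_is_end_definition earlier; how a flag set gets attached to a registered macro is outside this diff and assumed here:

// Illustrative sketch only (not part of this changeset).
u16 flags = MF_Functional | MF_Allow_As_Identifier;

if ( bitfield_is_set( u16, flags, MF_Functional ) ) {
	// the lexer/parser expects '(' plus arguments after the macro's name
}
if ( bitfield_is_set( u16, flags, MF_Allow_As_Identifier ) ) {
	// lex__eat may still consume the macro where an identifier is expected
}
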
@ -1,6 +1,6 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
-#include "../gen.hpp"
+#include "interface.hpp"
 #endif

 #pragma region StaticData
@ -1,6 +1,18 @@
 #ifdef INTELLISENSE_DIRECTIVES
 #pragma once
-#include "header_start.hpp"
+#include "dependencies/platform.hpp"
+#include "dependencies/macros.hpp"
+#include "dependencies/basic_types.hpp"
+#include "dependencies/debug.hpp"
+#include "dependencies/memory.hpp"
+#include "dependencies/string_ops.hpp"
+#include "dependencies/printing.hpp"
+#include "dependencies/containers.hpp"
+#include "dependencies/hashing.hpp"
+#include "dependencies/strings.hpp"
+#include "dependencies/filesystem.hpp"
+#include "dependencies/timing.hpp"
+#include "dependencies/parsing.hpp"
 #endif

 /*
@ -19,7 +31,38 @@

 */

-using LogFailType = ssize(*)(char const*, ...);
+enum LogLevel //: u32
+{
+	LL_Null,
+	LL_Note,
+	LL_Warning,
+	LL_Error,
+	LL_Fatal,
+	LL_UnderlyingType = GEN_U32_MAX,
+};
+typedef enum LogLevel LogLevel;
+
+Str loglevel_to_str(LogLevel level)
+{
+	local_persist
+	Str lookup[] = {
+		{ "Null", sizeof("Null") - 1 },
+		{ "Note", sizeof("Note") - 1 },
+		{ "Warning", sizeof("Info") - 1 },
+		{ "Error", sizeof("Error") - 1 },
+		{ "Fatal", sizeof("Fatal") - 1 },
+	};
+	return lookup[level];
+}
+
+typedef struct LogEntry LogEntry;
+struct LogEntry
+{
+	Str msg;
+	LogLevel level;
+};
+
+typedef void LoggerProc(LogEntry entry);

 // By default this library will either crash or exit if an error is detected while generating codes.
 // Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur.
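With LogEntry, LogLevel, and the LoggerProc signature introduced above, a custom log sink can be written as below; how the callback is registered with the library context is not shown in this diff and is assumed:

// Illustrative sketch only (not part of this changeset).
void console_logger(LogEntry entry)
{
	Str level = loglevel_to_str(entry.level);
	log_fmt("[%.*s] %.*s\n", (int)level.Len, level.Ptr, (int)entry.msg.Len, entry.msg.Ptr);
}
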
|
@ -17,6 +17,39 @@ template <class TType> struct RemovePtr<TType*> { typedef TType Type; };
|
|||||||
|
|
||||||
template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
|
template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
|
||||||
|
|
||||||
|
#pragma region Slice
|
||||||
|
#if 0
|
||||||
|
#define Slice(Type) Slice<Type>
|
||||||
|
|
||||||
|
template<class Type> struct Slice;
|
||||||
|
|
||||||
|
template<class Type>
|
||||||
|
Type* slice_get(Slice<Type> self, ssize id) {
|
||||||
|
GEN_ASSERT(id > -1);
|
||||||
|
GEN_ASSERT(id < self.len);
|
||||||
|
return self.ptr[id];
|
||||||
|
}
|
||||||
|
|
||||||
|
template<class Type>
|
||||||
|
struct Slice
|
||||||
|
{
|
||||||
|
Type* ptr;
|
||||||
|
ssize len;
|
||||||
|
|
||||||
|
#if GEN_COMPILER_CPP
|
||||||
|
forceinline operator Token* () const { return ptr; }
|
||||||
|
forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
|
||||||
|
|
||||||
|
forceinline Type* begin() { return ptr; }
|
||||||
|
forceinline Type* end() { return ptr + len; }
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#if ! GEN_C_LIKE_CPP && GEN_COMPILER_CPP
|
||||||
|
forceinline Type& back() { return ptr[len - 1]; }
|
||||||
|
#endif
|
||||||
|
};
|
||||||
|
#endif
|
||||||
|
#pragma endregion Slice
|
||||||
|
|
||||||
#pragma region Array
|
#pragma region Array
|
||||||
#define Array(Type) Array<Type>
|
#define Array(Type) Array<Type>
|
||||||
@ -26,10 +59,8 @@ template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
|
|||||||
|
|
||||||
struct ArrayHeader;
|
struct ArrayHeader;
|
||||||
|
|
||||||
#if GEN_COMPILER_CPP
|
template<class Type> struct Array;
|
||||||
template<class Type> struct Array;
|
#define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
|
||||||
# define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
|
|
||||||
#endif
|
|
||||||
|
|
||||||
usize array_grow_formula(ssize value);
|
usize array_grow_formula(ssize value);
|
||||||
|
|
||||||
@ -59,12 +90,12 @@ struct ArrayHeader {
|
|||||||
usize Num;
|
usize Num;
|
||||||
};
|
};
|
||||||
|
|
||||||
#if GEN_COMPILER_CPP
|
|
||||||
template<class Type>
|
template<class Type>
|
||||||
struct Array
|
struct Array
|
||||||
{
|
{
|
||||||
Type* Data;
|
Type* Data;
|
||||||
|
|
||||||
|
#if ! GEN_C_LIKE_CPP
|
||||||
#pragma region Member Mapping
|
#pragma region Member Mapping
|
||||||
forceinline static Array init(AllocatorInfo allocator) { return array_init<Type>(allocator); }
|
forceinline static Array init(AllocatorInfo allocator) { return array_init<Type>(allocator); }
|
||||||
forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); }
|
forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); }
|
||||||
@ -88,6 +119,7 @@ struct Array
|
|||||||
forceinline bool resize(usize num) { return array_resize<Type>(this, num); }
|
forceinline bool resize(usize num) { return array_resize<Type>(this, num); }
|
||||||
forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); }
|
forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); }
|
||||||
#pragma endregion Member Mapping
|
#pragma endregion Member Mapping
|
||||||
|
#endif
|
||||||
|
|
||||||
forceinline operator Type*() { return Data; }
|
forceinline operator Type*() { return Data; }
|
||||||
forceinline operator Type const*() const { return Data; }
|
forceinline operator Type const*() const { return Data; }
|
||||||
@ -99,9 +131,8 @@ struct Array
|
|||||||
|
|
||||||
using DataType = Type;
|
using DataType = Type;
|
||||||
};
|
};
|
||||||
#endif
|
|
||||||
|
|
||||||
#if GEN_COMPILER_CPP && 0
|
#if 0
|
||||||
template<class Type> bool append(Array<Type>& array, Array<Type> other) { return append( & array, other ); }
|
template<class Type> bool append(Array<Type>& array, Array<Type> other) { return append( & array, other ); }
|
||||||
template<class Type> bool append(Array<Type>& array, Type value) { return append( & array, value ); }
|
template<class Type> bool append(Array<Type>& array, Type value) { return append( & array, value ); }
|
||||||
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); }
|
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); }
|
||||||
|
@ -1,9 +1,6 @@
|
|||||||
#ifdef INTELLISENSE_DIRECTIVES
|
#ifdef INTELLISENSE_DIRECTIVES
|
||||||
# pragma once
|
# pragma once
|
||||||
# include "dependencies/platform.hpp"
|
|
||||||
# include "dependencies/macros.hpp"
|
|
||||||
# include "basic_types.hpp"
|
# include "basic_types.hpp"
|
||||||
# include "macros.hpp"
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#pragma region Debug
|
#pragma region Debug
|
||||||
|
@ -187,7 +187,7 @@ struct FileContents
|
|||||||
{
|
{
|
||||||
AllocatorInfo allocator;
|
AllocatorInfo allocator;
|
||||||
void* data;
|
void* data;
|
||||||
ssize size;
|
ssize size;
|
||||||
};
|
};
|
||||||
|
|
||||||
constexpr b32 file_zero_terminate = true;
|
constexpr b32 file_zero_terminate = true;
|
||||||
|
@ -198,21 +198,16 @@
|
|||||||
#ifndef forceinline
|
#ifndef forceinline
|
||||||
# if GEN_COMPILER_MSVC
|
# if GEN_COMPILER_MSVC
|
||||||
# define forceinline __forceinline
|
# define forceinline __forceinline
|
||||||
# define neverinline __declspec( noinline )
|
|
||||||
# elif GEN_COMPILER_GCC
|
# elif GEN_COMPILER_GCC
|
||||||
# define forceinline inline __attribute__((__always_inline__))
|
# define forceinline inline __attribute__((__always_inline__))
|
||||||
# define neverinline __attribute__( ( __noinline__ ) )
|
|
||||||
# elif GEN_COMPILER_CLANG
|
# elif GEN_COMPILER_CLANG
|
||||||
# if __has_attribute(__always_inline__)
|
# if __has_attribute(__always_inline__)
|
||||||
# define forceinline inline __attribute__((__always_inline__))
|
# define forceinline inline __attribute__((__always_inline__))
|
||||||
# define neverinline __attribute__( ( __noinline__ ) )
|
|
||||||
# else
|
# else
|
||||||
# define forceinline
|
# define forceinline
|
||||||
# define neverinline
|
|
||||||
# endif
|
# endif
|
||||||
# else
|
# else
|
||||||
# define forceinline
|
# define forceinline
|
||||||
# define neverinline
|
|
||||||
# endif
|
# endif
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
@ -303,10 +298,28 @@
|
|||||||
# define GEN_PARAM_DEFAULT
|
# define GEN_PARAM_DEFAULT
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
-#if GEN_COMPILER_CPP
-#define struct_init(type, value) {value}
-#else
-#define struct_init(type, value) {value}
+#ifndef struct_init
+# if GEN_COMPILER_CPP
+# define struct_init(type)
+# else
+# define struct_init(type) (type)
+# endif
+#endif
+
+#ifndef struct_zero
+# if GEN_COMPILER_CPP
+# define struct_zero(type) {}
+# else
+# define struct_zero(type) (type) {0}
+# endif
+#endif
+
+#ifndef struct_zero_init
+# if GEN_COMPILER_CPP
+# define struct_zero_init() {}
+# else
+# define struct_zero_init() {0}
+# endif
 #endif

 #if 0
@ -319,4 +332,12 @@
 # define GEN_OPITMIZE_MAPPINGS_END
 #endif
+
+#ifndef get_optional
+# if GEN_COMPILER_C
+# define get_optional(opt) opt ? *opt : (typeof(*opt)){0}
+# else
+# define get_optional(opt) opt
+# endif
+#endif

 #pragma endregion Macros
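These helpers exist so the same initializer syntax compiles both as C (compound literals) and as C++ (brace initialization). A sketch of the intended usage, mirroring how the lexer rewrite uses them; the local variable names and the Opts_some_proc type are hypothetical:

// Illustrative sketch only (not part of this changeset).
Token tok      = struct_init(Token) { { scanner, 0 }, Tok_Invalid, line, column, TF_Null }; // braced init in C++, (Token){...} in C
LexContext ctx = struct_zero(LexContext);                                                   // {} in C++, (LexContext){0} in C

// get_optional: in C, dereference an optional pointer argument (zero value if null);
// in C++, pass the argument through unchanged.
Opts_some_proc opts = get_optional(opt);
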
|
@ -134,12 +134,6 @@ GEN_API void* heap_allocator_proc( void* allocator_data, AllocType type, ssize s
|
|||||||
//! The heap allocator backed by operating system's memory manager.
|
//! The heap allocator backed by operating system's memory manager.
|
||||||
constexpr AllocatorInfo heap( void ) { AllocatorInfo allocator = { heap_allocator_proc, nullptr }; return allocator; }
|
constexpr AllocatorInfo heap( void ) { AllocatorInfo allocator = { heap_allocator_proc, nullptr }; return allocator; }
|
||||||
|
|
||||||
//! Helper to allocate memory using heap allocator.
|
|
||||||
#define malloc( sz ) alloc( heap(), sz )
|
|
||||||
|
|
||||||
//! Helper to free memory allocated by heap allocator.
|
|
||||||
#define mfree( ptr ) free( heap(), ptr )
|
|
||||||
|
|
||||||
struct VirtualMemory
|
struct VirtualMemory
|
||||||
{
|
{
|
||||||
void* data;
|
void* data;
|
||||||
@ -185,6 +179,8 @@ void arena_check (Arena* arena);
|
|||||||
void arena_free (Arena* arena);
|
void arena_free (Arena* arena);
|
||||||
ssize arena_size_remaining(Arena* arena, ssize alignment);
|
ssize arena_size_remaining(Arena* arena, ssize alignment);
|
||||||
|
|
||||||
|
// TODO(Ed): Add arena_pos, arena_pop, and arena_pop_to
|
||||||
|
|
||||||
struct Arena
|
struct Arena
|
||||||
{
|
{
|
||||||
AllocatorInfo Backing;
|
AllocatorInfo Backing;
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
#ifdef INTELLISENSE_DIRECTIVES
|
#ifdef INTELLISENSE_DIRECTIVES
|
||||||
# pragma once
|
# pragma once
|
||||||
# include "strbuilder_ops.cpp"
|
# include "string_ops.cpp"
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#pragma region Printing
|
#pragma region Printing
|
||||||
|
@ -1,6 +1,6 @@
|
|||||||
#ifdef INTELLISENSE_DIRECTIVES
|
#ifdef INTELLISENSE_DIRECTIVES
|
||||||
# pragma once
|
# pragma once
|
||||||
# include "strbuilder_ops.hpp"
|
# include "string_ops.hpp"
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
#pragma region Printing
|
#pragma region Printing
|
||||||
@ -26,17 +26,4 @@ GEN_API ssize c_str_fmt_file_va ( FileInfo* f, char const* fmt, va_list va );
|
|||||||
constexpr
|
constexpr
|
||||||
char const* Msg_Invalid_Value = "INVALID VALUE PROVIDED";
|
char const* Msg_Invalid_Value = "INVALID VALUE PROVIDED";
|
||||||
|
|
||||||
inline
|
|
||||||
ssize log_fmt(char const* fmt, ...)
|
|
||||||
{
|
|
||||||
ssize res;
|
|
||||||
va_list va;
|
|
||||||
|
|
||||||
va_start(va, fmt);
|
|
||||||
res = c_str_fmt_out_va(fmt, va);
|
|
||||||
va_end(va);
|
|
||||||
|
|
||||||
return res;
|
|
||||||
}
|
|
||||||
|
|
||||||
#pragma endregion Printing
|
#pragma endregion Printing
|
||||||
|
@ -320,7 +320,7 @@ inline
|
|||||||
StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...)
|
StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...)
|
||||||
{
|
{
|
||||||
local_persist thread_local
|
local_persist thread_local
|
||||||
PrintF_Buffer buf = struct_init(PrintF_Buffer, {0});
|
PrintF_Buffer buf = struct_zero_init();
|
||||||
|
|
||||||
va_list va;
|
va_list va;
|
||||||
va_start(va, fmt);
|
va_start(va, fmt);
|
||||||
|
@ -11,9 +11,6 @@
|
|||||||
#include "helpers/push_ignores.inline.hpp"
|
#include "helpers/push_ignores.inline.hpp"
|
||||||
#include "components/header_start.hpp"
|
#include "components/header_start.hpp"
|
||||||
|
|
||||||
// Has container defines pushed
|
|
||||||
#include "gen.dep.hpp"
|
|
||||||
|
|
||||||
GEN_NS_BEGIN
|
GEN_NS_BEGIN
|
||||||
|
|
||||||
#include "components/types.hpp"
|
#include "components/types.hpp"
|
||||||
|
@ -1159,6 +1159,8 @@ R"(#define <interface_name>( code ) _Generic( (code), \
|
|||||||
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
|
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
|
||||||
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
|
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
|
||||||
|
|
||||||
|
opt_param->ValueType->Specs = def_specifier(Spec_Ptr);
|
||||||
|
|
||||||
// Resolve define's arguments
|
// Resolve define's arguments
|
||||||
b32 has_args = fn->Params->NumEntries > 1;
|
b32 has_args = fn->Params->NumEntries > 1;
|
||||||
StrBuilder params_str = StrBuilder::make_reserve(_ctx->Allocator_Temp, 32);
|
StrBuilder params_str = StrBuilder::make_reserve(_ctx->Allocator_Temp, 32);
|
||||||
@ -1172,10 +1174,10 @@ R"(#define <interface_name>( code ) _Generic( (code), \
|
|||||||
}
|
}
|
||||||
char const* tmpl_fn_macro = nullptr;
|
char const* tmpl_fn_macro = nullptr;
|
||||||
if (params_str.length() > 0 ) {
|
if (params_str.length() > 0 ) {
|
||||||
tmpl_fn_macro= "#define <def_name>( <params> ... ) <def__name>( <params> (<opts_type>) { __VA_ARGS__ } )\n";
|
tmpl_fn_macro= "#define <def_name>( <params> ... ) <def__name>( <params> & (<opts_type>) { __VA_ARGS__ } )\n";
|
||||||
}
|
}
|
||||||
else {
|
else {
|
||||||
tmpl_fn_macro= "#define <def_name>( ... ) <def__name>( (<opts_type>) { __VA_ARGS__ } )\n";
|
tmpl_fn_macro= "#define <def_name>( ... ) <def__name>( & (<opts_type>) { __VA_ARGS__ } )\n";
|
||||||
}
|
}
|
||||||
Code fn_macro = untyped_str(token_fmt(
|
Code fn_macro = untyped_str(token_fmt(
|
||||||
"def_name", fn->Name
|
"def_name", fn->Name
|
||||||
@ -1504,6 +1506,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
|
|||||||
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
|
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
|
||||||
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
|
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
|
||||||
fn->Name = cache_str(new_name);
|
fn->Name = cache_str(new_name);
|
||||||
|
opt_param->ValueType->Specs = def_specifier(Spec_Ptr);
|
||||||
}
|
}
|
||||||
src_upfront.append(fn);
|
src_upfront.append(fn);
|
||||||
}
|
}
|
||||||
|
@ -53,6 +53,7 @@ word enum_underlying, gen_enum_underlying
|
|||||||
word nullptr, gen_nullptr
|
word nullptr, gen_nullptr
|
||||||
word struct_init, gen_struct_init
|
word struct_init, gen_struct_init
|
||||||
word hash, gen_hash
|
word hash, gen_hash
|
||||||
|
word txt, gen_txt
|
||||||
|
|
||||||
// Basic Types
|
// Basic Types
|
||||||
|
|
||||||
@ -410,6 +411,8 @@ namespace var_, gen_var_
|
|||||||
|
|
||||||
word _ctx, gen__ctx
|
word _ctx, gen__ctx
|
||||||
|
|
||||||
|
word get_context, gen_get_context
|
||||||
|
|
||||||
word init, gen_init
|
word init, gen_init
|
||||||
word deinit, gen_deinit
|
word deinit, gen_deinit
|
||||||
word reset, gen_reset
|
word reset, gen_reset
|
||||||
@ -532,7 +535,7 @@ namespace Lexer_, gen_Lexer_
|
|||||||
word LexContext, gen_LexContext
|
word LexContext, gen_LexContext
|
||||||
word lex, gen_lex
|
word lex, gen_lex
|
||||||
|
|
||||||
word StackNode, gen_StackNode
|
word ParseStackNode, gen_ParseStackNode
|
||||||
word ParseContext, gen_ParseContext
|
word ParseContext, gen_ParseContext
|
||||||
|
|
||||||
// namespace parse_, gen_parse_
|
// namespace parse_, gen_parse_
|
||||||
|
@ -132,6 +132,7 @@ if ( $vendor -match "clang" )
|
|||||||
$flag_all_c = @('-x', 'c')
|
$flag_all_c = @('-x', 'c')
|
||||||
$flag_c11 = '-std=c11'
|
$flag_c11 = '-std=c11'
|
||||||
$flag_all_cpp = '-x c++'
|
$flag_all_cpp = '-x c++'
|
||||||
|
$flag_charset_utf8 = '-utf-8'
|
||||||
$flag_compile = '-c'
|
$flag_compile = '-c'
|
||||||
$flag_color_diagnostics = '-fcolor-diagnostics'
|
$flag_color_diagnostics = '-fcolor-diagnostics'
|
||||||
$flag_no_color_diagnostics = '-fno-color-diagnostics'
|
$flag_no_color_diagnostics = '-fno-color-diagnostics'
|
||||||
@ -371,6 +372,7 @@ if ( $vendor -match "msvc" )
|
|||||||
$flag_all_c = '/TC'
|
$flag_all_c = '/TC'
|
||||||
$flag_c11 = '/std:c11'
|
$flag_c11 = '/std:c11'
|
||||||
$flag_all_cpp = '/TP'
|
$flag_all_cpp = '/TP'
|
||||||
|
$flag_charset_utf8 = '/utf-8'
|
||||||
$flag_compile = '/c'
|
$flag_compile = '/c'
|
||||||
$flag_debug = '/Zi'
|
$flag_debug = '/Zi'
|
||||||
$flag_define = '/D'
|
$flag_define = '/D'
|
||||||
@ -404,7 +406,7 @@ if ( $vendor -match "msvc" )
|
|||||||
$flag_optimize_intrinsics = '/Oi'
|
$flag_optimize_intrinsics = '/Oi'
|
||||||
$flag_optimized_debug_forceinline = '/d2Obforceinline'
|
$flag_optimized_debug_forceinline = '/d2Obforceinline'
|
||||||
$flag_optimized_debug = '/Zo'
|
$flag_optimized_debug = '/Zo'
|
||||||
$flag_
|
# $flag_
|
||||||
# $flag_out_name = '/OUT:'
|
# $flag_out_name = '/OUT:'
|
||||||
$flag_path_interm = '/Fo'
|
$flag_path_interm = '/Fo'
|
||||||
$flag_path_debug = '/Fd'
|
$flag_path_debug = '/Fd'
|
||||||
|