2 Commits

Author   SHA1         Message                Date
Ed_      9ff5908e40   Update LICENSE         2025-10-31 22:18:21 -04:00
Ed_      2ad164dc39   CodeSpecifiers fixes   2025-04-02 21:19:15 -04:00
30 changed files with 1083 additions and 1562 deletions

2
.gitignore vendored
View File

@@ -44,5 +44,3 @@ test/c_library/gen
test/cpp_library/gen test/cpp_library/gen
!scripts/helpers/refactor.exe !scripts/helpers/refactor.exe
# ai/**

52
LICENSE
View File

@@ -1,44 +1,26 @@
-BSD 3-Clause License
-
-Copyright (c) 2023, Edward R. Gonzalez
-
-Redistribution and use in source and binary forms, with or without
-modification, are permitted provided that the following conditions are met:
-
-1. Redistributions of source code must retain the above copyright notice, this
-   list of conditions and the following disclaimer.
-
-2. Redistributions in binary form must reproduce the above copyright notice,
-   this list of conditions and the following disclaimer in the documentation
-   and/or other materials provided with the distribution.
-
-3. Neither the name of the copyright holder nor the names of its
-   contributors may be used to endorse or promote products derived from
-   this software without specific prior written permission.
-
-THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
-AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
-IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
-DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
-FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
-DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
-SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
-CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
-OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
-OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+MIT License
+
+Copyright (c) 2025 Edward R. Gonzalez
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
+AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
+OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
+SOFTWARE.
 
 Source URL: https://github.com/Ed94/gencpp
 
 Acknowledgements
 * The dependencies for gencpp source are derived from the zpl library: https://github.com/zpl-c/zpl
-
-Special thanks to:
-* The Handmade Community.
-* Casey Muratori, Ginger Bill (Bill Hall), Mr. 4th (Allen Webster), Ryan Fluery: Influnced conceptually how to handle staged metaprograming.
-* Jonathan Blow: Jai's metaprogramming influenced the design of this library.
-* My friends for putting up with discord spam on this library.

View File

@@ -3,7 +3,7 @@
# include "helpers/push_ignores.inline.hpp" # include "helpers/push_ignores.inline.hpp"
# include "components/header_start.hpp" # include "components/header_start.hpp"
# include "components/types.hpp" # include "components/types.hpp"
# include "components/gen/ecodetypes.hpp" # include "components/gen/ecode.hpp"
# include "components/gen/eoperator.hpp" # include "components/gen/eoperator.hpp"
# include "components/gen/especifier.hpp" # include "components/gen/especifier.hpp"
# include "components/ast.hpp" # include "components/ast.hpp"

View File

@@ -3,7 +3,7 @@
# include "helpers/push_ignores.inline.hpp" # include "helpers/push_ignores.inline.hpp"
# include "components/header_start.hpp" # include "components/header_start.hpp"
# include "components/types.hpp" # include "components/types.hpp"
# include "components/gen/ecodetypes.hpp" # include "components/gen/ecode.hpp"
# include "components/gen/eoperator.hpp" # include "components/gen/eoperator.hpp"
# include "components/gen/especifier.hpp" # include "components/gen/especifier.hpp"
# include "components/ast.hpp" # include "components/ast.hpp"

View File

@@ -1,6 +1,9 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "parser_types.hpp" #include "types.hpp"
#include "gen/ecode.hpp"
#include "gen/eoperator.hpp"
#include "gen/especifier.hpp"
#endif #endif
/* /*
@@ -404,7 +407,6 @@ struct AST
}; };
}; };
StrCached Content; // Attributes, Comment, Execution, Include StrCached Content; // Attributes, Comment, Execution, Include
TokenSlice ContentToks; // TODO(Ed): Use a token slice for content
struct { struct {
Specifier ArrSpecs[AST_ArrSpecs_Cap]; // Specifiers Specifier ArrSpecs[AST_ArrSpecs_Cap]; // Specifiers
Code NextSpecs; // Specifiers; If ArrSpecs is full, then NextSpecs is used. Code NextSpecs; // Specifiers; If ArrSpecs is full, then NextSpecs is used.
@@ -420,7 +422,7 @@ struct AST
Code Next; Code Next;
Code Back; Code Back;
}; };
Token* Token; // Reference to starting token, only available if it was derived from parsing. // TODO(Ed): Change this to a token slice. Token* Token; // Reference to starting token, only available if it was derived from parsing.
Code Parent; Code Parent;
CodeType Type; CodeType Type;
// CodeFlag CodeFlags; // CodeFlag CodeFlags;
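The ArrSpecs/NextSpecs pair in the hunk above is a fixed-capacity inline array that chains to an overflow node once it fills. Below is a minimal, self-contained sketch of that storage pattern; the types, capacity, and allocation are illustrative stand-ins, not gencpp's actual AST layout.

```cpp
#include <cstdio>
#include <initializer_list>

enum Specifier { Spec_Const, Spec_Static, Spec_Inline };

constexpr int SpecsCap = 2; // tiny on purpose so the overflow path is exercised

struct SpecNode
{
    Specifier arr[SpecsCap];  // inline storage, used first
    int       num  = 0;
    SpecNode* next = nullptr; // chained node, only touched once 'arr' is full
};

// Append a specifier, spilling into a chained node when the inline array is full.
void append_spec(SpecNode* node, Specifier spec)
{
    while (node->num == SpecsCap)
    {
        if (node->next == nullptr)
            node->next = new SpecNode(); // gencpp would pull this from its code pool instead
        node = node->next;
    }
    node->arr[node->num++] = spec;
}

int main()
{
    SpecNode head;
    for (Specifier s : { Spec_Static, Spec_Inline, Spec_Const })
        append_spec(&head, s);

    for (SpecNode* n = &head; n != nullptr; n = n->next)
        for (int i = 0; i < n->num; ++i)
            std::printf("spec %d\n", int(n->arr[i]));
    // (the chained node is leaked here; a real implementation frees or pools it)
}
```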

View File

@@ -38,13 +38,13 @@ void body_to_strbuilder_export( CodeBody body, StrBuilder* result )
GEN_ASSERT(result != nullptr); GEN_ASSERT(result != nullptr);
strbuilder_append_fmt( result, "export\n{\n" ); strbuilder_append_fmt( result, "export\n{\n" );
Code curr = body->Front; Code curr = cast(Code, body);
s32 left = body->NumEntries; s32 left = body->NumEntries;
while ( left-- ) while ( left-- )
{ {
code_to_strbuilder_ref(curr, result); code_to_strbuilder_ref(curr, result);
// strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) ); // strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
curr = curr->Next; ++curr;
} }
strbuilder_append_fmt( result, "};\n" ); strbuilder_append_fmt( result, "};\n" );
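This hunk swaps the body walk between following an intrusive Front/Next link and simply incrementing the entry pointer, which only works when entries are laid out contiguously. A hedged sketch of the two traversal styles, using a stand-in node type rather than gencpp's Code handles:

```cpp
#include <cstdio>

// Stand-in node; gencpp's Code/AST handles carry much more than this.
struct Node
{
    int   Value = 0;
    Node* Next  = nullptr;
};

// Linked-list style body: walk Front/Next for NumEntries nodes.
void print_linked(Node* front, int num_entries)
{
    Node* curr = front;
    while (num_entries-- > 0)
    {
        std::printf("%d\n", curr->Value);
        curr = curr->Next; // follow the intrusive link
    }
}

// Contiguous style body: entries live back-to-back, so advancing the pointer is enough.
void print_contiguous(Node* first, int num_entries)
{
    Node* curr = first;
    while (num_entries-- > 0)
    {
        std::printf("%d\n", curr->Value);
        ++curr; // the next entry is simply the next slot
    }
}

int main()
{
    Node a{1}, b{2}, c{3};
    a.Next = &b; b.Next = &c;
    print_linked(&a, 3);

    Node block[3] = { {10}, {20}, {30} };
    print_contiguous(block, 3);
}
```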

View File

@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "constants.hpp" #include "interface.hpp"
#endif #endif
#pragma region Serialization #pragma region Serialization
@@ -38,7 +38,7 @@ void body_to_strbuilder_ref( CodeBody body, StrBuilder* result )
{ {
code_to_strbuilder_ref(curr, result); code_to_strbuilder_ref(curr, result);
// strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) ); // strbuilder_append_fmt( result, "%SB", code_to_strbuilder(curr) );
curr = curr->Next; ++curr;
} }
} }

View File

@@ -3,8 +3,8 @@
#include "code_serialization.cpp" #include "code_serialization.cpp"
#endif #endif
internal void parser_init(Context* ctx); internal void parser_init();
internal void parser_deinit(Context* ctx); internal void parser_deinit();
internal internal
void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags ) void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size, ssize alignment, void* old_memory, ssize old_size, u64 flags )
@@ -71,14 +71,6 @@ void* fallback_allocator_proc( void* allocator_data, AllocType type, ssize size,
return nullptr; return nullptr;
} }
internal
void fallback_logger(LogEntry entry)
{
GEN_ASSERT(entry.msg.Len > 0);
GEN_ASSERT(entry.msg.Ptr);
log_fmt("%S: %S", loglevel_to_str(entry.level), entry.msg);
}
internal internal
void define_constants() void define_constants()
{ {
@@ -291,19 +283,6 @@ void init(Context* ctx)
ctx->InitSize_Fallback_Allocator_Bucket_Size = megabytes(8); ctx->InitSize_Fallback_Allocator_Bucket_Size = megabytes(8);
} }
if (ctx->InitSize_StrCacheTable == 0)
{
ctx->InitSize_StrCacheTable = kilobytes(8);
}
if (ctx->InitSize_MacrosTable == 0)
{
ctx->InitSize_MacrosTable = kilobytes(8);
}
if (ctx->Logger == nullptr) {
ctx->Logger = & fallback_logger;
}
// Override the current context (user has to put it back if unwanted). // Override the current context (user has to put it back if unwanted).
_ctx = ctx; _ctx = ctx;
@@ -319,7 +298,7 @@ void init(Context* ctx)
} }
// Setup the code pool and code entries arena. // Setup the code pool and code entries arena.
{ {
Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, size_of(AST) ); Pool code_pool = pool_init( ctx->Allocator_Pool, ctx->CodePool_NumBlocks, sizeof(AST) );
if ( code_pool.PhysicalStart == nullptr ) if ( code_pool.PhysicalStart == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the code pool" ); GEN_FATAL( "gen::init: Failed to initialize the code pool" );
array_append( ctx->CodePools, code_pool ); array_append( ctx->CodePools, code_pool );
@@ -332,18 +311,18 @@ void init(Context* ctx)
} }
// Setup the hash tables // Setup the hash tables
{ {
ctx->StrCache = hashtable_init_reserve(StrCached, ctx->Allocator_DyanmicContainers, ctx->InitSize_StrCacheTable); ctx->StrCache = hashtable_init(StrCached, ctx->Allocator_DyanmicContainers);
if ( ctx->StrCache.Entries == nullptr ) if ( ctx->StrCache.Entries == nullptr )
GEN_FATAL( "gen::init: Failed to initialize the StringCache"); GEN_FATAL( "gen::init: Failed to initialize the StringCache");
ctx->Macros = hashtable_init_reserve(Macro, ctx->Allocator_DyanmicContainers, ctx->InitSize_MacrosTable); ctx->Macros = hashtable_init(Macro, ctx->Allocator_DyanmicContainers);
if (ctx->Macros.Hashes == nullptr || ctx->Macros.Entries == nullptr) { if (ctx->Macros.Hashes == nullptr || ctx->Macros.Entries == nullptr) {
GEN_FATAL( "gen::init: Failed to initialize the PreprocessMacros table" ); GEN_FATAL( "gen::init: Failed to initialize the PreprocessMacros table" );
} }
} }
define_constants(); define_constants();
parser_init(ctx); parser_init();
++ context_counter; ++ context_counter;
} }
@@ -392,7 +371,7 @@ void deinit(Context* ctx)
while ( left--, left ); while ( left--, left );
array_free( ctx->Fallback_AllocatorBuckets); array_free( ctx->Fallback_AllocatorBuckets);
} }
parser_deinit(ctx); parser_deinit();
if (_ctx == ctx) if (_ctx == ctx)
_ctx = nullptr; _ctx = nullptr;
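One side of the init() hunks above fills any sizing knob the caller left at zero with a library default before the pools and tables are allocated (megabytes(8) for the fallback allocator bucket, kilobytes(8) for the string-cache and macro tables). A minimal sketch of that "zero means use the default" idiom, using a trimmed stand-in Context rather than the library's real one:

```cpp
#include <cstddef>
#include <cstdio>

constexpr size_t kilobytes(size_t n) { return n * 1024; }
constexpr size_t megabytes(size_t n) { return n * 1024 * 1024; }

// Trimmed-down stand-in for the library's Context; only the sizing knobs shown in the hunk.
struct Context
{
    size_t InitSize_Fallback_Allocator_Bucket_Size = 0;
    size_t InitSize_StrCacheTable                  = 0;
    size_t InitSize_MacrosTable                    = 0;
};

// init(): any knob left at zero by the user gets a library default before allocation happens.
void init(Context* ctx)
{
    if (ctx->InitSize_Fallback_Allocator_Bucket_Size == 0)
        ctx->InitSize_Fallback_Allocator_Bucket_Size = megabytes(8);
    if (ctx->InitSize_StrCacheTable == 0)
        ctx->InitSize_StrCacheTable = kilobytes(8);
    if (ctx->InitSize_MacrosTable == 0)
        ctx->InitSize_MacrosTable = kilobytes(8);

    // ...tables would then be reserved with the resolved sizes,
    // as the hashtable_init_reserve calls do in one version of the hunk.
}

int main()
{
    Context ctx = {};
    ctx.InitSize_StrCacheTable = kilobytes(64); // caller overrides one knob, leaves the rest at 0
    init(&ctx);
    std::printf("str cache: %zu bytes, macros: %zu bytes\n",
                ctx.InitSize_StrCacheTable, ctx.InitSize_MacrosTable);
}
```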

View File

@@ -15,6 +15,24 @@
\▓▓▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓ \▓▓▓▓▓▓▓\▓▓ \▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓ \▓▓▓▓▓▓▓
*/ */
#if 0
enum LogLevel : u32
{
Info,
Warning,
Panic,
};
struct LogEntry
{
Str msg;
u32 line_num;
void* data;
};
typedef void LoggerCallback(LogEntry entry);
#endif
// Note(Ed): This is subject to heavily change // Note(Ed): This is subject to heavily change
// with upcoming changes to the library's fallback (default) allocations strategy; // with upcoming changes to the library's fallback (default) allocations strategy;
// and major changes to lexer/parser context usage. // and major changes to lexer/parser context usage.
@@ -46,16 +64,9 @@ struct Context
u32 InitSize_LexerTokens; u32 InitSize_LexerTokens;
u32 SizePer_StringArena; u32 SizePer_StringArena;
u32 InitSize_StrCacheTable;
u32 InitSize_MacrosTable;
// TODO(Ed): Symbol Table // TODO(Ed): Symbol Table
// Keep track of all resolved symbols (namespaced identifiers) // Keep track of all resolved symbols (namespaced identifiers)
// Logging
LoggerProc* Logger;
// Parser // Parser
// Used by the lexer to persistently treat all these identifiers as preprocessor defines. // Used by the lexer to persistently treat all these identifiers as preprocessor defines.
@@ -78,6 +89,9 @@ struct Context
StringTable StrCache; StringTable StrCache;
// TODO(Ed): This needs to be just handled by a parser context
Array(Token) Lexer_Tokens;
// TODO(Ed): Active parse context vs a parse result need to be separated conceptually // TODO(Ed): Active parse context vs a parse result need to be separated conceptually
ParseContext parser; ParseContext parser;
@@ -90,37 +104,6 @@ struct Context
// An implicit context interface will be provided instead as wrapper procedures, as a convenience. // An implicit context interface will be provided instead as wrapper procedures, as a convenience.
GEN_API extern Context* _ctx; GEN_API extern Context* _ctx;
// TODO(Ed): Swap all usage of this with logger_fmt (then rename logger_fmt to log_fmt)
inline
ssize log_fmt(char const* fmt, ...)
{
ssize res;
va_list va;
va_start(va, fmt);
res = c_str_fmt_out_va(fmt, va);
va_end(va);
return res;
}
inline
void logger_fmt(Context* ctx, LogLevel level, char const* fmt, ...)
{
local_persist thread_local
PrintF_Buffer buf = struct_zero_init();
va_list va;
va_start(va, fmt);
ssize res = c_str_fmt_va(buf, GEN_PRINTF_MAXLEN, fmt, va) -1;
va_end(va);
StrBuilder msg = strbuilder_make_length(ctx->Allocator_Temp, buf, res);
LogEntry entry = { strbuilder_to_str(msg), level };
ctx->Logger(entry);
}
// Initialize the library. There first ctx initialized must exist for lifetime of other contextes that come after as its the one that // Initialize the library. There first ctx initialized must exist for lifetime of other contextes that come after as its the one that
GEN_API void init(Context* ctx); GEN_API void init(Context* ctx);
@@ -131,7 +114,7 @@ GEN_API void deinit(Context* ctx);
// Retrieves the active context (not usually needed, but here in case...) // Retrieves the active context (not usually needed, but here in case...)
GEN_API Context* get_context(); GEN_API Context* get_context();
// Clears the allocations, but doesn't free the memory, then calls init() again. // Clears the allocations, but doesn't free the memoery, then calls init() again.
// Ease of use. // Ease of use.
GEN_API void reset(Context* ctx); GEN_API void reset(Context* ctx);
@@ -351,33 +334,37 @@ forceinline CodeBody def_union_body ( s32 num, Code* codes )
#pragma region Parsing #pragma region Parsing
struct ParseStackNode #if 0
struct StackNode
{ {
ParseStackNode* prev; StackNode* Prev;
TokenSlice tokens; Token Start;
Token* start; Token Name; // The name of the AST node (if parsed)
Str name; // The name of the AST node (if parsed) Str FailedProc; // The name of the procedure that failed
Str proc_name; // The name of the procedure };
Code code_rel; // Relevant AST node // Stack nodes are allocated the error's allocator
// TODO(Ed): When an error occurs, the parse stack is not released and instead the scope is left dangling.
struct Error
{
StrBuilder message;
StackNode* context_stack;
}; };
struct ParseInfo struct ParseInfo
{ {
ParseMessage* messages; Arena FileMem;
LexedInfo lexed; Arena TokMem;
Code result; Arena CodeMem;
FileContents FileContent;
Array<Token> Tokens;
Array<Error> Errors;
// Errors are allocated to a dedicated general arena.
}; };
struct ParseOpts CodeBody parse_file( Str path );
{ #endif
AllocatorInfo backing_msgs;
AllocatorInfo backing_tokens;
AllocatorInfo backing_ast;
};
ParseInfo wip_parse_str( LexedInfo lexed, ParseOpts* opts GEN_PARAM_DEFAULT );
GEN_API CodeClass parse_class ( Str class_def ); GEN_API CodeClass parse_class ( Str class_def );
GEN_API CodeConstructor parse_constructor ( Str constructor_def ); GEN_API CodeConstructor parse_constructor ( Str constructor_def );
@@ -411,7 +398,6 @@ Str token_fmt_impl( ssize, ... );
GEN_API Code untyped_str( Str content); GEN_API Code untyped_str( Str content);
GEN_API Code untyped_fmt ( char const* fmt, ... ); GEN_API Code untyped_fmt ( char const* fmt, ... );
GEN_API Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... ); GEN_API Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... );
GEN_API Code untyped_toks ( TokenSlice tokens );
#pragma endregion Untyped text #pragma endregion Untyped text
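One version in these hunks wires a pluggable logging callback into the Context: a LoggerProc pointer, a fallback_logger used when none is installed, and logger_fmt that formats into a temporary buffer before dispatching a LogEntry. A self-contained sketch of that callback shape, with simplified LogEntry fields and a fixed-size buffer standing in for the library's allocator-backed one:

```cpp
#include <cstdarg>
#include <cstdio>

// Simplified stand-ins for the LogLevel/LogEntry/LoggerProc trio shown in the hunks.
enum LogLevel : unsigned { LL_Info, LL_Warning, LL_Panic };

struct LogEntry
{
    char const* msg;
    LogLevel    level;
};

typedef void LoggerProc(LogEntry entry);

// Default sink used when the user did not install one.
static void fallback_logger(LogEntry entry)
{
    char const* level_str[] = { "Info", "Warning", "Panic" };
    std::printf("%s: %s\n", level_str[entry.level], entry.msg);
}

struct Context
{
    LoggerProc* Logger = nullptr;
};

// Format into a local buffer and hand the finished entry to whichever sink is installed.
static void logger_fmt(Context* ctx, LogLevel level, char const* fmt, ...)
{
    char buf[1024];
    va_list va;
    va_start(va, fmt);
    std::vsnprintf(buf, sizeof(buf), fmt, va);
    va_end(va);

    LoggerProc* sink = ctx->Logger ? ctx->Logger : fallback_logger;
    sink({ buf, level });
}

int main()
{
    Context ctx;
    logger_fmt(&ctx, LL_Warning, "parsed %d of %d entries", 3, 5);

    ctx.Logger = [](LogEntry e) { std::fprintf(stderr, "[custom] %s\n", e.msg); };
    logger_fmt(&ctx, LL_Info, "custom sink installed");
}
```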

View File

@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "gen/etoktype.hpp" #include "gen/etoktype.cpp"
#include "interface.upfront.cpp" #include "interface.upfront.cpp"
#include "lexer.cpp" #include "lexer.cpp"
#include "parser.cpp" #include "parser.cpp"
@@ -8,70 +8,29 @@
// Publically Exposed Interface // Publically Exposed Interface
ParseInfo wip_parse_str(LexedInfo lexed, ParseOpts* opts)
{
// TODO(Ed): Lift this.
Context* ctx = _ctx;
if (lexed.tokens.num == 0 && lexed.tokens.ptr == nullptr) {
check_parse_args(lexed.text);
lexed = lex(ctx, lexed.text);
}
ParseInfo info = struct_zero(ParseInfo);
info.lexed = lexed;
// TODO(Ed): ParseInfo should be set to the parser context.
ctx->parser = struct_zero(ParseContext);
ctx->parser.tokens = lexed.tokens;
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
CodeBody result = parse_global_nspace(ctx,CT_Global_Body);
parser_pop(& ctx->parser);
return info;
}
CodeClass parse_class( Str def ) CodeClass parse_class( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
ParseStackNode scope = NullScope; _ctx->parser.Tokens = toks;
parser_push(& ctx->parser, & scope); push_scope();
CodeClass result = (CodeClass) parse_class_struct( ctx, Tok_Decl_Class, parser_not_inplace_def ); CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class, parser_not_inplace_def );
parser_pop(& ctx->parser); parser_pop(& _ctx->parser);
return result; return result;
} }
CodeConstructor parse_constructor( Str def ) CodeConstructor parse_constructor( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
ParseStackNode scope = NullScope;
parser_push(& ctx->parser, & scope);
// TODO(Ed): Constructors can have prefix attributes // TODO(Ed): Constructors can have prefix attributes
CodeSpecifiers specifiers = NullCode; CodeSpecifiers specifiers = NullCode;
@@ -98,8 +57,8 @@ CodeConstructor parse_constructor(Str def )
break; break;
default : default :
log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(& ctx->parser, ctx->Allocator_Temp) ); log_failure( "Invalid specifier %s for variable\n%S", spec_to_str( spec ), parser_to_strbuilder(_ctx->parser) );
parser_pop(& ctx->parser); parser_pop(& _ctx->parser);
return InvalidCode; return InvalidCode;
} }
@@ -112,337 +71,247 @@ CodeConstructor parse_constructor(Str def )
eat( currtok.Type ); eat( currtok.Type );
} }
if ( NumSpecifiers ) { if ( NumSpecifiers )
{
specifiers = def_specifiers_arr( NumSpecifiers, specs_found ); specifiers = def_specifiers_arr( NumSpecifiers, specs_found );
// <specifiers> ... // <specifiers> ...
} }
CodeConstructor result = parser_parse_constructor(ctx, specifiers); _ctx->parser.Tokens = toks;
parser_pop(& ctx->parser); CodeConstructor result = parser_parse_constructor( specifiers );
return result; return result;
} }
CodeDefine parse_define( Str def ) CodeDefine parse_define( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
ParseStackNode scope = NullScope; _ctx->parser.Tokens = toks;
parser_push(& ctx->parser, & scope); push_scope();
CodeDefine result = parser_parse_define(ctx); CodeDefine result = parser_parse_define();
parser_pop(& ctx->parser); parser_pop(& _ctx->parser);
return result; return result;
} }
CodeDestructor parse_destructor( Str def ) CodeDestructor parse_destructor( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
// TODO(Ed): Destructors can have prefix attributes // TODO(Ed): Destructors can have prefix attributes
// TODO(Ed): Destructors can have virtual // TODO(Ed): Destructors can have virtual
CodeDestructor result = parser_parse_destructor(ctx, NullCode); _ctx->parser.Tokens = toks;
CodeDestructor result = parser_parse_destructor(NullCode);
return result; return result;
} }
CodeEnum parse_enum( Str def ) CodeEnum parse_enum( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def); {
ctx->parser.tokens = lexed.tokens; parser_pop(& _ctx->parser);
if ( ctx->parser.tokens.ptr == nullptr ) {
return InvalidCode; return InvalidCode;
} }
return parser_parse_enum(ctx, parser_not_inplace_def); _ctx->parser.Tokens = toks;
return parser_parse_enum( parser_not_inplace_def);
} }
CodeBody parse_export_body( Str def ) CodeBody parse_export_body( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_export_body(ctx); _ctx->parser.Tokens = toks;
return parser_parse_export_body();
} }
CodeExtern parse_extern_link( Str def ) CodeExtern parse_extern_link( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_extern_link(ctx); _ctx->parser.Tokens = toks;
return parser_parse_extern_link();
} }
CodeFriend parse_friend( Str def ) CodeFriend parse_friend( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_friend(ctx); _ctx->parser.Tokens = toks;
return parser_parse_friend();
} }
CodeFn parse_function( Str def ) CodeFn parse_function( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return (CodeFn) parser_parse_function(ctx); _ctx->parser.Tokens = toks;
return (CodeFn) parser_parse_function();
} }
CodeBody parse_global_body( Str def ) CodeBody parse_global_body( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
ParseStackNode scope = NullScope; _ctx->parser.Tokens = toks;
parser_push(& ctx->parser, & scope); push_scope();
CodeBody result = parse_global_nspace(ctx, CT_Global_Body ); CodeBody result = parse_global_nspace( CT_Global_Body );
parser_pop(& ctx->parser); parser_pop(& _ctx->parser);
return result; return result;
} }
CodeNS parse_namespace( Str def ) CodeNS parse_namespace( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_namespace(ctx); _ctx->parser.Tokens = toks;
return parser_parse_namespace();
} }
CodeOperator parse_operator( Str def ) CodeOperator parse_operator( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return (CodeOperator) parser_parse_operator(ctx); _ctx->parser.Tokens = toks;
return (CodeOperator) parser_parse_operator();
} }
CodeOpCast parse_operator_cast( Str def ) CodeOpCast parse_operator_cast( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_operator_cast(ctx, NullCode); _ctx->parser.Tokens = toks;
return parser_parse_operator_cast(NullCode);
} }
CodeStruct parse_struct( Str def ) CodeStruct parse_struct( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
ParseStackNode scope = NullScope; _ctx->parser.Tokens = toks;
parser_push(& ctx->parser, & scope); push_scope();
CodeStruct result = (CodeStruct) parse_class_struct( ctx, Tok_Decl_Struct, parser_not_inplace_def ); CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct, parser_not_inplace_def );
parser_pop(& ctx->parser); parser_pop(& _ctx->parser);
return result; return result;
} }
CodeTemplate parse_template( Str def ) CodeTemplate parse_template( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_template(ctx); _ctx->parser.Tokens = toks;
return parser_parse_template();
} }
CodeTypename parse_type( Str def ) CodeTypename parse_type( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_type( ctx, parser_not_from_template, nullptr); _ctx->parser.Tokens = toks;
return parser_parse_type( parser_not_from_template, nullptr);
} }
CodeTypedef parse_typedef( Str def ) CodeTypedef parse_typedef( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_typedef(ctx); _ctx->parser.Tokens = toks;
return parser_parse_typedef();
} }
CodeUnion parse_union( Str def ) CodeUnion parse_union( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_union(ctx, parser_not_inplace_def); _ctx->parser.Tokens = toks;
return parser_parse_union( parser_not_inplace_def);
} }
CodeUsing parse_using( Str def ) CodeUsing parse_using( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_using(ctx); _ctx->parser.Tokens = toks;
return parser_parse_using();
} }
CodeVar parse_variable( Str def ) CodeVar parse_variable( Str def )
{ {
// TODO(Ed): Lift this.
Context* ctx = _ctx;
check_parse_args( def ); check_parse_args( def );
ctx->parser = struct_zero(ParseContext); TokArray toks = lex( def );
if ( toks.Arr == nullptr )
LexedInfo lexed = lex(ctx, def);
ctx->parser.tokens = lexed.tokens;
if ( ctx->parser.tokens.ptr == nullptr )
return InvalidCode; return InvalidCode;
return parser_parse_variable(ctx); _ctx->parser.Tokens = toks;
return parser_parse_variable();
} }
// Undef helper macros // Undef helper macros
@@ -457,7 +326,6 @@ CodeVar parse_variable( Str def )
#undef left #undef left
#undef check #undef check
#undef push_scope #undef push_scope
#undef NullScope
#undef def_assign #undef def_assign
// Here for C Variant // Here for C Variant
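Both versions of the parse_* wrappers in this file follow the same basic shape: validate the input string, lex it, bail out with InvalidCode if the lexer produced nothing, hand the tokens to the parser state, then call the matching parser_parse_* routine. A compressed sketch of that shape with stubbed stand-ins; the types and helpers here are simplified placeholders, not the library's real definitions:

```cpp
#include <cstdio>
#include <cstring>

// Stubbed stand-ins so the wrapper shape compiles on its own; the real lexer and
// parser live in lexer.cpp / parser.cpp and operate on gencpp's Token/Code types.
struct TokArray { char const* Arr; int Idx; };
struct Code     { char const* repr;        };

static const Code InvalidCode = { nullptr };

static bool     check_parse_args(char const* def) { return def && std::strlen(def) > 0; }
static TokArray lex(char const* def)              { return { def, 0 }; }
static Code     parser_parse_enum(TokArray toks)  { return { toks.Arr }; }

// The shape every parse_* wrapper follows:
// validate input -> lex -> bail on lexer failure -> install tokens -> call the real parser.
Code parse_enum(char const* def)
{
    if (!check_parse_args(def))
        return InvalidCode;

    TokArray toks = lex(def);
    if (toks.Arr == nullptr)
        return InvalidCode;

    return parser_parse_enum(toks);
}

int main()
{
    Code c = parse_enum("enum Colors { Red, Green, Blue };");
    std::printf("parsed: %s\n", c.repr ? c.repr : "(invalid)");
}
```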

View File

@@ -176,16 +176,3 @@ Code untyped_token_fmt( s32 num_tokens, char const* fmt, ... )
return result; return result;
} }
Code untyped_toks( TokenSlice tokens )
{
if ( tokens.num == 0 ) {
log_failure( "untyped_toks: empty token slice" );
return InvalidCode;
}
Code
result = make_code();
result->Type = CT_Untyped;
result->ContentToks = tokens;
return result;
}

View File

@@ -473,10 +473,8 @@ CodeComment def_comment( Str content )
return (CodeComment) result; return (CodeComment) result;
} }
CodeConstructor def_constructor( Opts_def_constructor opt ) CodeConstructor def_constructor( Opts_def_constructor p )
{ {
Opts_def_constructor p = get_optional(opt);
if ( p.params && p.params->Type != CT_Parameters ) { if ( p.params && p.params->Type != CT_Parameters ) {
log_failure("gen::def_constructor: params must be of Parameters type - %s", code_debug_str((Code)p.params)); log_failure("gen::def_constructor: params must be of Parameters type - %s", code_debug_str((Code)p.params));
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
@@ -512,10 +510,8 @@ CodeConstructor def_constructor( Opts_def_constructor opt )
return result; return result;
} }
CodeClass def_class( Str name, Opts_def_struct opt ) CodeClass def_class( Str name, Opts_def_struct p )
{ {
Opts_def_struct p = get_optional(opt);
if ( ! name_check( def_class, name ) ) { if ( ! name_check( def_class, name ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -565,10 +561,8 @@ CodeClass def_class( Str name, Opts_def_struct opt )
return result; return result;
} }
CodeDefine def_define( Str name, MacroType type, Opts_def_define opt ) CodeDefine def_define( Str name, MacroType type, Opts_def_define p )
{ {
Opts_def_define p = get_optional(opt);
if ( ! name_check( def_define, name ) ) { if ( ! name_check( def_define, name ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -591,10 +585,8 @@ CodeDefine def_define( Str name, MacroType type, Opts_def_define opt )
return result; return result;
} }
CodeDestructor def_destructor( Opts_def_destructor opt ) CodeDestructor def_destructor( Opts_def_destructor p )
{ {
Opts_def_destructor p = get_optional(opt);
if ( p.specifiers && p.specifiers->Type != CT_Specifiers ) { if ( p.specifiers && p.specifiers->Type != CT_Specifiers ) {
log_failure( "gen::def_destructor: specifiers was not a 'Specifiers' type: %s", code_debug_str(p.specifiers) ); log_failure( "gen::def_destructor: specifiers was not a 'Specifiers' type: %s", code_debug_str(p.specifiers) );
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
@@ -627,10 +619,8 @@ CodeDestructor def_destructor( Opts_def_destructor opt )
return result; return result;
} }
CodeEnum def_enum( Str name, Opts_def_enum opt ) CodeEnum def_enum( Str name, Opts_def_enum p )
{ {
Opts_def_enum p = get_optional(opt);
if ( ! name_check( def_enum, name ) ) { if ( ! name_check( def_enum, name ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -752,10 +742,8 @@ CodeFriend def_friend( Code declaration )
return result; return result;
} }
CodeFn def_function( Str name, Opts_def_function opt ) CodeFn def_function( Str name, Opts_def_function p )
{ {
Opts_def_function p = get_optional(opt);
if ( ! name_check( def_function, name )) { if ( ! name_check( def_function, name )) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -814,10 +802,8 @@ CodeFn def_function( Str name, Opts_def_function opt )
return result; return result;
} }
CodeInclude def_include( Str path, Opts_def_include opt ) CodeInclude def_include( Str path, Opts_def_include p )
{ {
Opts_def_include p = get_optional(opt);
if ( path.Len <= 0 || path.Ptr == nullptr ) { if ( path.Len <= 0 || path.Ptr == nullptr ) {
log_failure( "gen::def_include: Invalid path provided - %d" ); log_failure( "gen::def_include: Invalid path provided - %d" );
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
@@ -835,10 +821,8 @@ CodeInclude def_include( Str path, Opts_def_include opt )
return result; return result;
} }
CodeModule def_module( Str name, Opts_def_module opt ) CodeModule def_module( Str name, Opts_def_module p )
{ {
Opts_def_module p = get_optional(opt);
if ( ! name_check( def_module, name )) { if ( ! name_check( def_module, name )) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -851,10 +835,8 @@ CodeModule def_module( Str name, Opts_def_module opt )
return result; return result;
} }
CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace opt ) CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace p )
{ {
Opts_def_namespace p = get_optional(opt);
if ( ! name_check( def_namespace, name )) { if ( ! name_check( def_namespace, name )) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -877,10 +859,8 @@ CodeNS def_namespace( Str name, CodeBody body, Opts_def_namespace opt )
return result; return result;
} }
CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator opt ) CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator p )
{ {
Opts_def_operator p = get_optional(opt);
if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) { if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
log_failure( "gen::def_operator: PlatformAttributes was provided but its not of attributes type: %s", code_debug_str(p.attributes) ); log_failure( "gen::def_operator: PlatformAttributes was provided but its not of attributes type: %s", code_debug_str(p.attributes) );
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
@@ -946,10 +926,8 @@ CodeOperator def_operator( Operator op, Str nspace, Opts_def_operator opt )
return result; return result;
} }
CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast opt ) CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast p )
{ {
Opts_def_operator_cast p = get_optional(opt);
if ( ! null_check( def_operator_cast, type )) { if ( ! null_check( def_operator_cast, type )) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -981,10 +959,8 @@ CodeOpCast def_operator_cast( CodeTypename type, Opts_def_operator_cast opt )
return result; return result;
} }
CodeParams def_param( CodeTypename type, Str name, Opts_def_param opt ) CodeParams def_param( CodeTypename type, Str name, Opts_def_param p )
{ {
Opts_def_param p = get_optional(opt);
if ( ! name_check( def_param, name ) || ! null_check( def_param, type ) ) { if ( ! name_check( def_param, name ) || ! null_check( def_param, type ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1058,10 +1034,8 @@ CodeSpecifiers def_specifier( Specifier spec )
return result; return result;
} }
CodeStruct def_struct( Str name, Opts_def_struct opt ) CodeStruct def_struct( Str name, Opts_def_struct p )
{ {
Opts_def_struct p = get_optional(opt);
if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) { if ( p.attributes && p.attributes->Type != CT_PlatformAttributes ) {
log_failure( "gen::def_struct: attributes was not a `PlatformAttributes` type - %s", code_debug_str(cast(Code, p.attributes)) ); log_failure( "gen::def_struct: attributes was not a `PlatformAttributes` type - %s", code_debug_str(cast(Code, p.attributes)) );
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
@@ -1102,10 +1076,8 @@ CodeStruct def_struct( Str name, Opts_def_struct opt )
return result; return result;
} }
CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template opt ) CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_template p )
{ {
Opts_def_template p = get_optional(opt);
if ( ! null_check( def_template, declaration ) ) { if ( ! null_check( def_template, declaration ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1136,10 +1108,8 @@ CodeTemplate def_template( CodeParams params, Code declaration, Opts_def_templat
return result; return result;
} }
CodeTypename def_type( Str name, Opts_def_type opt ) CodeTypename def_type( Str name, Opts_def_type p )
{ {
Opts_def_type p = get_optional(opt);
if ( ! name_check( def_type, name )) { if ( ! name_check( def_type, name )) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1173,10 +1143,8 @@ CodeTypename def_type( Str name, Opts_def_type opt )
return result; return result;
} }
CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef opt ) CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef p )
{ {
Opts_def_typedef p = get_optional(opt);
if ( ! null_check( def_typedef, type ) ) { if ( ! null_check( def_typedef, type ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1238,10 +1206,8 @@ CodeTypedef def_typedef( Str name, Code type, Opts_def_typedef opt )
return result; return result;
} }
CodeUnion def_union( Str name, CodeBody body, Opts_def_union opt ) CodeUnion def_union( Str name, CodeBody body, Opts_def_union p )
{ {
Opts_def_union p = get_optional(opt);
if ( ! null_check( def_union, body ) ) { if ( ! null_check( def_union, body ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1267,10 +1233,8 @@ CodeUnion def_union( Str name, CodeBody body, Opts_def_union opt )
return result; return result;
} }
CodeUsing def_using( Str name, CodeTypename type, Opts_def_using opt ) CodeUsing def_using( Str name, CodeTypename type, Opts_def_using p )
{ {
Opts_def_using p = get_optional(opt);
if ( ! name_check( def_using, name ) || null_check( def_using, type ) ) { if ( ! name_check( def_using, name ) || null_check( def_using, type ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
@@ -1310,10 +1274,8 @@ CodeUsing def_using_namespace( Str name )
return result; return result;
} }
CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable opt ) CodeVar def_variable( CodeTypename type, Str name, Opts_def_variable p )
{ {
Opts_def_variable p = get_optional(opt);
if ( ! name_check( def_variable, name ) || ! null_check( def_variable, type ) ) { if ( ! name_check( def_variable, name ) || ! null_check( def_variable, type ) ) {
GEN_DEBUG_TRAP(); GEN_DEBUG_TRAP();
return InvalidCode; return InvalidCode;
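Both sides of the def_* hunks pass optional parameters through a per-function options struct (one wraps it in get_optional, the other takes the struct directly), so callers can pass {} or set only the fields they care about. A generic sketch of that idiom follows; the function and option names here are invented for illustration and do not match gencpp's actual signatures:

```cpp
#include <cstdio>
#include <string>

// Illustrative options struct in the style of the Opts_def_* parameters above.
struct Opts_sketch
{
    std::string parent;           // base class to derive from, if any
    bool        is_final = false; // mark the struct final
};

// Callers that don't care about options rely on the defaulted argument;
// callers that do can set just the fields they need (designated initializers, C++20).
std::string def_struct_sketch(std::string name, Opts_sketch p = {})
{
    std::string result = "struct " + name;
    if (p.is_final)        result += " final";
    if (!p.parent.empty()) result += " : public " + p.parent;
    return result + " {};";
}

int main()
{
    std::printf("%s\n", def_struct_sketch("Foo").c_str());
    std::printf("%s\n", def_struct_sketch("Bar", { .parent = "Foo" }).c_str());
}
```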

View File

@@ -1,12 +1,12 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "interface.upfront.cpp" #include "interface.upfront.cpp"
#include "gen/etoktype.hpp" #include "gen/etoktype.cpp"
#endif #endif
StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok) StrBuilder tok_to_strbuilder(Token tok)
{ {
StrBuilder result = strbuilder_make_reserve( ainfo, kilobytes(4) ); StrBuilder result = strbuilder_make_reserve( _ctx->Allocator_Temp, kilobytes(4) );
Str type_str = toktype_to_str( tok.Type ); Str type_str = toktype_to_str( tok.Type );
strbuilder_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s" strbuilder_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
@@ -17,55 +17,55 @@ StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok)
return result; return result;
} }
bool lex__eat(Context* ctx, ParseContext* self, TokType type ); bool lex__eat( TokArray* self, TokType type );
Token* lex_current(ParseContext* self, bool skip_formatting ) Token* lex_current(TokArray* self, bool skip_formatting )
{ {
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self->tokens.ptr[self->token_id].Type == Tok_NewLine || self->tokens.ptr[self->token_id].Type == Tok_Comment ) while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
self->token_id++; self->Idx++;
} }
return & self->tokens.ptr[self->token_id]; return & self->Arr[self->Idx];
} }
Token* lex_peek(ParseContext const* self, bool skip_formatting) Token* lex_peek(TokArray self, bool skip_formatting)
{ {
s32 idx = self->token_id; s32 idx = self.Idx;
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self->tokens.ptr[idx].Type == Tok_NewLine ) while ( self.Arr[idx].Type == Tok_NewLine )
idx++; idx++;
return & self->tokens.ptr[idx]; return & self.Arr[idx];
} }
return & self->tokens.ptr[idx]; return & self.Arr[idx];
} }
Token* lex_previous(ParseContext const* self, bool skip_formatting) Token* lex_previous(TokArray self, bool skip_formatting)
{ {
s32 idx = self->token_id; s32 idx = self.Idx;
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self->tokens.ptr[idx].Type == Tok_NewLine ) while ( self.Arr[idx].Type == Tok_NewLine )
idx --; idx --;
return & self->tokens.ptr[idx]; return & self.Arr[idx];
} }
return & self->tokens.ptr[idx - 1]; return & self.Arr[idx - 1];
} }
Token* lex_next(ParseContext const* self, bool skip_formatting) Token* lex_next(TokArray self, bool skip_formatting)
{ {
s32 idx = self->token_id; s32 idx = self.Idx;
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self->tokens.ptr[idx].Type == Tok_NewLine ) while ( self.Arr[idx].Type == Tok_NewLine )
idx++; idx++;
return & self->tokens.ptr[idx + 1]; return & self.Arr[idx + 1];
} }
return & self->tokens.ptr[idx + 1]; return & self.Arr[idx + 1];
} }
enum enum
@@ -137,7 +137,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
); );
// GEN_DEBUG_TRAP(); // GEN_DEBUG_TRAP();
} }
array_append(ctx->tokens, name); array_append( _ctx->Lexer_Tokens, name );
if ( ctx->left && (* ctx->scanner) == '(' ) if ( ctx->left && (* ctx->scanner) == '(' )
{ {
@@ -152,7 +152,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
} }
Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess }; Token opening_paren = { { ctx->scanner, 1 }, Tok_Paren_Open, ctx->line, ctx->column, TF_Preprocess };
array_append(ctx->tokens, opening_paren); array_append( _ctx->Lexer_Tokens, opening_paren );
move_forward(); move_forward();
Token last_parameter = {}; Token last_parameter = {};
@@ -168,7 +168,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
move_forward(); move_forward();
move_forward(); move_forward();
array_append(ctx->tokens, parameter); array_append(_ctx->Lexer_Tokens, parameter);
skip_whitespace(); skip_whitespace();
last_parameter = parameter; last_parameter = parameter;
@@ -202,7 +202,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
move_forward(); move_forward();
parameter.Text.Len++; parameter.Text.Len++;
} }
array_append(ctx->tokens, parameter); array_append(_ctx->Lexer_Tokens, parameter);
skip_whitespace(); skip_whitespace();
last_parameter = parameter; last_parameter = parameter;
} }
@@ -229,7 +229,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
return Lex_ReturnNull; return Lex_ReturnNull;
} }
Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess }; Token comma = { { ctx->scanner, 1 }, Tok_Comma, ctx->line, ctx->column, TF_Preprocess };
array_append(ctx->tokens, comma); array_append(_ctx->Lexer_Tokens, comma);
move_forward(); move_forward();
} }
@@ -243,7 +243,7 @@ s32 lex_preprocessor_define( LexContext* ctx )
return Lex_ReturnNull; return Lex_ReturnNull;
} }
Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess }; Token closing_paren = { { ctx->scanner, 1 }, Tok_Paren_Close, ctx->line, ctx->column, TF_Preprocess };
array_append(ctx->tokens, closing_paren); array_append(_ctx->Lexer_Tokens, closing_paren);
move_forward(); move_forward();
} }
else if ( registered_macro && macro_is_functional( * registered_macro) ) { else if ( registered_macro && macro_is_functional( * registered_macro) ) {
@@ -268,7 +268,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
{ {
char const* hash = ctx->scanner; char const* hash = ctx->scanner;
Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess }; Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
array_append(ctx->tokens, hash_tok); array_append( _ctx->Lexer_Tokens, hash_tok );
move_forward(); move_forward();
skip_whitespace(); skip_whitespace();
@@ -344,14 +344,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash; ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
ctx->token.Text.Ptr = hash; ctx->token.Text.Ptr = hash;
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
return Lex_Continue; // Skip found token, its all handled here. return Lex_Continue; // Skip found token, its all handled here.
} }
if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf ) if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
{ {
ctx->token.Flags |= TF_Preprocess_Cond; ctx->token.Flags |= TF_Preprocess_Cond;
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
end_line(); end_line();
return Lex_Continue; return Lex_Continue;
} }
@@ -360,7 +360,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Flags |= TF_Preprocess_Cond; ctx->token.Flags |= TF_Preprocess_Cond;
} }
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
skip_whitespace(); skip_whitespace();
@@ -379,7 +379,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( (* ctx->scanner) != '"' && (* ctx->scanner) != '<' ) if ( (* ctx->scanner) != '"' && (* ctx->scanner) != '<' )
{ {
StrBuilder directive_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr ); StrBuilder directive_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );
log_failure( "gen::Parser::lex: Expected '\"' or '<' after #include, not '%c' (%d, %d)\n%s" log_failure( "gen::Parser::lex: Expected '\"' or '<' after #include, not '%c' (%d, %d)\n%s"
, (* ctx->scanner) , (* ctx->scanner)
@@ -411,7 +411,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
move_forward(); move_forward();
} }
array_append(ctx->tokens, preprocess_content); array_append( _ctx->Lexer_Tokens, preprocess_content );
return Lex_Continue; // Skip found token, its all handled here. return Lex_Continue; // Skip found token, its all handled here.
} }
@@ -446,8 +446,8 @@ s32 lex_preprocessor_directive( LexContext* ctx )
} }
else else
{ {
StrBuilder directive_str = strbuilder_make_length( ctx->allocator_temp, ctx->token.Text.Ptr, ctx->token.Text.Len ); StrBuilder directive_str = strbuilder_make_length( _ctx->Allocator_Temp, ctx->token.Text.Ptr, ctx->token.Text.Len );
StrBuilder content_str = strbuilder_fmt_buf( ctx->allocator_temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr ); StrBuilder content_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );
log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)" log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)"
" in preprocessor directive '%s' (%d, %d)\n%s" " in preprocessor directive '%s' (%d, %d)\n%s"
@@ -475,14 +475,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
preprocess_content.Text.Len++; preprocess_content.Text.Len++;
} }
array_append(ctx->tokens, preprocess_content); array_append( _ctx->Lexer_Tokens, preprocess_content );
return Lex_Continue; // Skip found token, its all handled here. return Lex_Continue; // Skip found token, its all handled here.
} }
void lex_found_token( LexContext* ctx ) void lex_found_token( LexContext* ctx )
{ {
if ( ctx->token.Type != Tok_Invalid ) { if ( ctx->token.Type != Tok_Invalid ) {
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
return; return;
} }
@@ -508,7 +508,7 @@ void lex_found_token( LexContext* ctx )
} }
ctx->token.Type = type; ctx->token.Type = type;
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
return; return;
} }
if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas) if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
@@ -517,13 +517,13 @@ void lex_found_token( LexContext* ctx )
{ {
ctx->token.Type = type; ctx->token.Type = type;
ctx->token.Flags |= TF_Specifier; ctx->token.Flags |= TF_Specifier;
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
return; return;
} }
if ( type != Tok_Invalid ) if ( type != Tok_Invalid )
{ {
ctx->token.Type = type; ctx->token.Type = type;
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
return; return;
} }
@@ -561,42 +561,50 @@ void lex_found_token( LexContext* ctx )
ctx->token.Type = Tok_Identifier; ctx->token.Type = Tok_Identifier;
} }
array_append(ctx->tokens, ctx->token); array_append( _ctx->Lexer_Tokens, ctx->token );
} }
// TODO(Ed): We should dynamically allocate the lexer's array in Allocator_DyanmicContainers.
// TODO(Ed): We need to to attempt to recover from a lex failure? // TODO(Ed): We need to to attempt to recover from a lex failure?
neverinline neverinline
LexedInfo lex(Context* lib_ctx, Str content) // TokArray lex( Array<Token> tokens, Str content )
TokArray lex( Str content )
{ {
LexedInfo info = struct_zero(LexedInfo); LexContext c; LexContext* ctx = & c;
LexContext c = struct_zero(LexContext); LexContext* ctx = & c;
c.allocator_temp = lib_ctx->Allocator_Temp;
c.content = content; c.content = content;
c.left = content.Len; c.left = content.Len;
c.scanner = content.Ptr; c.scanner = content.Ptr;
char const* word = c.scanner;
s32 word_length = 0;
c.line = 1; c.line = 1;
c.column = 1; c.column = 1;
c.tokens = array_init_reserve(Token, lib_ctx->Allocator_DyanmicContainers, lib_ctx->InitSize_LexerTokens );
// TODO(Ed): Re-implement to new constraints:
// 1. Ability to continue on error
// 2. Return a lexed info.
skip_whitespace(); skip_whitespace();
if ( c.left <= 0 ) { if ( c.left <= 0 )
{
log_failure( "gen::lex: no tokens found (only whitespace provided)" ); log_failure( "gen::lex: no tokens found (only whitespace provided)" );
return info; TokArray null_array = {};
return null_array;
} }
array_clear(_ctx->Lexer_Tokens);
b32 preprocess_args = true; b32 preprocess_args = true;
while (c.left ) while (c.left )
{ {
c.token = struct_init(Token) { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null }; #if 0
if (Tokens.num())
{
log_fmt("\nLastTok: %SB", Tokens.back().to_strbuilder());
}
#endif
{
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
c.token = thanks_c;
}
bool is_define = false; bool is_define = false;
@@ -615,7 +623,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
c.token.Type = Tok_NewLine; c.token.Type = Tok_NewLine;
c.token.Text.Len++; c.token.Text.Len++;
array_append(c.tokens, c.token); array_append( _ctx->Lexer_Tokens, c.token );
continue; continue;
} }
} }
@@ -654,7 +662,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
c.token.Text.Len++; c.token.Text.Len++;
move_forward(); move_forward();
array_append(c.tokens, c.token); array_append( _ctx->Lexer_Tokens, c.token );
} }
} }
continue; continue;
@@ -662,7 +670,8 @@ LexedInfo lex(Context* lib_ctx, Str content)
case Lex_ReturnNull: case Lex_ReturnNull:
{ {
return info; TokArray tok_array = {};
return tok_array;
} }
} }
} }
@@ -689,7 +698,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
} }
else else
{ {
StrBuilder context_str = strbuilder_fmt_buf( lib_ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) ); StrBuilder context_str = strbuilder_fmt_buf( _ctx->Allocator_Temp, "%s", c.scanner, min( 100, c.left ) );
log_failure( "gen::lex: invalid varadic argument, expected '...' got '..%c' (%d, %d)\n%s", (* ctx->scanner), c.line, c.column, context_str ); log_failure( "gen::lex: invalid varadic argument, expected '...' got '..%c' (%d, %d)\n%s", (* ctx->scanner), c.line, c.column, context_str );
} }
@@ -1110,7 +1119,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
move_forward(); move_forward();
c.token.Text.Len++; c.token.Text.Len++;
} }
array_append(c.tokens, c.token); array_append( _ctx->Lexer_Tokens, c.token );
continue; continue;
} }
else if ( (* ctx->scanner) == '*' ) else if ( (* ctx->scanner) == '*' )
@@ -1146,7 +1155,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
move_forward(); move_forward();
c.token.Text.Len++; c.token.Text.Len++;
} }
array_append(c.tokens, c.token); array_append( _ctx->Lexer_Tokens, c.token );
// end_line(); // end_line();
continue; continue;
} }
@@ -1234,14 +1243,14 @@ LexedInfo lex(Context* lib_ctx, Str content)
} }
else else
{ {
s32 start = max( 0, array_num(c.tokens) - 100 ); s32 start = max( 0, array_num(_ctx->Lexer_Tokens) - 100 );
log_fmt("\n%d\n", start); log_fmt("\n%d\n", start);
for ( s32 idx = start; idx < array_num(c.tokens); idx++ ) for ( s32 idx = start; idx < array_num(_ctx->Lexer_Tokens); idx++ )
{ {
log_fmt( "Token %d Type: %s : %.*s\n" log_fmt( "Token %d Type: %s : %.*s\n"
, idx , idx
, toktype_to_str( c.tokens[ idx ].Type ).Ptr , toktype_to_str( _ctx->Lexer_Tokens[ idx ].Type ).Ptr
, c.tokens[ idx ].Text.Len, c.tokens[ idx ].Text.Ptr , _ctx->Lexer_Tokens[ idx ].Text.Len, _ctx->Lexer_Tokens[ idx ].Text.Ptr
); );
} }
@@ -1257,7 +1266,7 @@ LexedInfo lex(Context* lib_ctx, Str content)
FoundToken: FoundToken:
{ {
lex_found_token( ctx ); lex_found_token( ctx );
TokType last_type = array_back(c.tokens)->Type; TokType last_type = array_back(_ctx->Lexer_Tokens)->Type;
if ( last_type == Tok_Preprocess_Macro_Stmt || last_type == Tok_Preprocess_Macro_Expr ) if ( last_type == Tok_Preprocess_Macro_Stmt || last_type == Tok_Preprocess_Macro_Expr )
{ {
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null }; Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
@@ -1272,22 +1281,21 @@ LexedInfo lex(Context* lib_ctx, Str content)
c.token.Text.Len++; c.token.Text.Len++;
move_forward(); move_forward();
array_append(c.tokens, c.token); array_append( _ctx->Lexer_Tokens, c.token );
continue; continue;
} }
} }
} }
} }
if ( array_num(c.tokens) == 0 ) { if ( array_num(_ctx->Lexer_Tokens) == 0 ) {
log_failure( "Failed to lex any tokens" ); log_failure( "Failed to lex any tokens" );
return info; TokArray tok_array = {};
return tok_array;
} }
info.messages = c.messages; TokArray result = { _ctx->Lexer_Tokens, 0 };
info.text = content; return result;
info.tokens = struct_init(TokenSlice) { pcast(Token*, c.tokens), scast(s32, array_num(c.tokens)) };
return info;
} }
#undef move_forward #undef move_forward
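Note: the hunks above move the lexer's token storage from the local c.tokens array (left column) to the library context's Lexer_Tokens (right column), and the LexedInfo result (messages, source text, TokenSlice) collapses into a bare TokArray. A minimal sketch of what each shape means for a caller, assuming the right column's lex takes only the content string and that TokArray's array field is named Arr (neither is visible in this diff):

    // Left column: the tokens travel with the returned value.
    LexedInfo info = lex( lib_ctx, content );
    for ( s32 i = 0; i < info.tokens.num; i++ )
        log_fmt( "%.*s\n", info.tokens[ i ].Text.Len, info.tokens[ i ].Text.Ptr );

    // Right column: the tokens stay in _ctx->Lexer_Tokens; the return value is a cursor over them.
    TokArray toks = lex( content ); // assumed signature, not shown in this diff
    for ( s32 i = 0; i < array_num( toks.Arr ); i++ )
        log_fmt( "%.*s\n", toks.Arr[ i ].Text.Len, toks.Arr[ i ].Text.Ptr );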
File diff suppressed because it is too large.
@@ -1,7 +1,7 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "types.hpp" #include "types.hpp"
#include "gen/ecodetypes.hpp" #include "gen/ecode.hpp"
#include "gen/eoperator.hpp" #include "gen/eoperator.hpp"
#include "gen/especifier.hpp" #include "gen/especifier.hpp"
#include "gen/etoktype.hpp" #include "gen/etoktype.hpp"
@@ -89,28 +89,7 @@ bool tok_is_end_definition(Token tok) {
return bitfield_is_set( u32, tok.Flags, TF_EndDefinition ); return bitfield_is_set( u32, tok.Flags, TF_EndDefinition );
} }
StrBuilder tok_to_strbuilder(AllocatorInfo ainfo, Token tok); StrBuilder tok_to_strbuilder(Token tok);
struct TokenSlice
{
Token* ptr;
s32 num;
#if GEN_COMPILER_CPP
forceinline operator Token* () const { return ptr; }
forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
#endif
};
forceinline
Str token_range_to_str(Token start, Token end)
{
Str result = {
start.Text.Ptr,
(scast(sptr, rcast(uptr, end.Text.Ptr)) + end.Text.Len) - scast(sptr, rcast(uptr, start.Text.Ptr))
};
return result;
}
struct TokArray struct TokArray
{ {
@@ -118,52 +97,30 @@ struct TokArray
s32 Idx; s32 Idx;
}; };
typedef struct LexerMessage LexerMessage;
struct LexerMessage
{
LexerMessage* next;
Str content;
LogLevel level;
};
struct LexContext struct LexContext
{ {
AllocatorInfo allocator_temp;
LexerMessage* messages;
Str content; Str content;
s32 left; s32 left;
char const* scanner; char const* scanner;
s32 line; s32 line;
s32 column; s32 column;
// StringTable defines;
Token token; Token token;
Array(Token) tokens;
}; };
struct LexedInfo struct StackNode
{ {
LexerMessage* messages; StackNode* Prev;
Str text;
TokenSlice tokens;
};
typedef struct ParseStackNode ParseStackNode; Token* Start;
Str Name; // The name of the AST node (if parsed)
typedef struct ParseMessage ParseMessage; Str ProcName; // The name of the procedure
struct ParseMessage
{
ParseMessage* Next;
ParseStackNode* Scope;
Str Content;
LogLevel Level;
}; };
struct ParseContext struct ParseContext
{ {
ParseMessage* messages; TokArray Tokens;
ParseStackNode* scope; StackNode* Scope;
// TokArray Tokens;
TokenSlice tokens;
s32 token_id;
}; };
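Note: the right column's StackNode is an intrusive stack with one node per parse procedure, so producing an error trace is just a walk over Prev. A sketch (the parser variable name is illustrative):

    for ( StackNode* node = parser.Scope; node != nullptr; node = node->Prev )
        log_fmt( "%.*s\n", node->ProcName.Len, node->ProcName.Ptr );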
enum MacroType : u16 enum MacroType : u16
@@ -211,36 +168,26 @@ Str macrotype_to_str( MacroType type )
enum EMacroFlags : u16 enum EMacroFlags : u16
{ {
// Macro has parameters (args expected to be passed) MF_Functional = bit(0), // Macro has parameters (args expected to be passed)
MF_Functional = bit(0), MF_Expects_Body = bit(1), // Expects to assign a braced scope to its body.
// Expects to assign a braced scope to its body.
MF_Expects_Body = bit(1),
// lex__eat wil treat this macro as an identifier if the parser attempts to consume it as one. // lex__eat wil treat this macro as an identifier if the parser attempts to consume it as one.
// This is a kludge because we don't support push/pop macro pragmas rn. // ^^^ This is a kludge because we don't support push/pop macro pragmas rn.
MF_Allow_As_Identifier = bit(2), MF_Allow_As_Identifier = bit(2),
// When parsing identifiers, it will allow the consumption of the macro parameters (as its expected to be a part of constructing the identifier)
// Example of a decarator macro from stb_sprintf.h:
// STBSP__PUBLICDEC int STB_SPRINTF_DECORATE(sprintf)(char* buf, char const *fmt, ...) STBSP__ATTRIBUTE_FORMAT(2,3);
// ^^ STB_SPRINTF_DECORATE is decorating sprintf
MF_Identifier_Decorator = bit(3),
// lex__eat wil treat this macro as an attribute if the parser attempts to consume it as one. // lex__eat wil treat this macro as an attribute if the parser attempts to consume it as one.
// This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc) // ^^^ This a kludge because unreal has a macro that behaves as both a 'statement' and an attribute (UE_DEPRECATED, PRAGMA_ENABLE_DEPRECATION_WARNINGS, etc)
// TODO(Ed): We can keep the MF_Allow_As_Attribute flag for macros, however, we need to add the ability of AST_Attributes to chain themselves. // TODO(Ed): We can keep the MF_Allow_As_Attribute flag for macros, however, we need to add the ability of AST_Attributes to chain themselves.
// Its thats already a thing in the standard language anyway // Its thats already a thing in the standard language anyway
// & it would allow UE_DEPRECATED, (UE_PROPERTY / UE_FUNCTION) to chain themselves as attributes of a resolved member function/variable definition // & it would allow UE_DEPRECATED, (UE_PROPERTY / UE_FUNCTION) to chain themselves as attributes of a resolved member function/variable definition
MF_Allow_As_Attribute = bit(4), MF_Allow_As_Attribute = bit(3),
// When a macro is encountered after attributes and specifiers while parsing a function, or variable: // When a macro is encountered after attributes and specifiers while parsing a function, or variable:
// It will consume the macro and treat it as resolving the definition. // It will consume the macro and treat it as resolving the definition. (Yes this is for Unreal Engine)
// (MUST BE OF MT_Statement TYPE) // (MUST BE OF MT_Statement TYPE)
MF_Allow_As_Definition = bit(5), MF_Allow_As_Definition = bit(4),
// Created for Unreal's PURE_VIRTUAL MF_Allow_As_Specifier = bit(5), // Created for Unreal's PURE_VIRTUAL
MF_Allow_As_Specifier = bit(6),
MF_Null = 0, MF_Null = 0,
MF_UnderlyingType = GEN_U16_MAX, MF_UnderlyingType = GEN_U16_MAX,
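Note: with the renumbering on the right (MF_Identifier_Decorator removed and the later bits shifted down), flag checks still go through the same bitfield helper used for token flags above, assuming it accepts u16 the same way it accepts u32 there. A small sketch:

    u16 flags = scast(u16, MF_Functional | MF_Allow_As_Identifier);
    b32 is_functional = bitfield_is_set( u16, flags, MF_Functional );         // set
    b32 as_attribute  = bitfield_is_set( u16, flags, MF_Allow_As_Attribute ); // not set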
@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "interface.hpp" #include "../gen.hpp"
#endif #endif
#pragma region StaticData #pragma region StaticData
@@ -1,18 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
#pragma once #pragma once
#include "dependencies/platform.hpp" #include "header_start.hpp"
#include "dependencies/macros.hpp"
#include "dependencies/basic_types.hpp"
#include "dependencies/debug.hpp"
#include "dependencies/memory.hpp"
#include "dependencies/string_ops.hpp"
#include "dependencies/printing.hpp"
#include "dependencies/containers.hpp"
#include "dependencies/hashing.hpp"
#include "dependencies/strings.hpp"
#include "dependencies/filesystem.hpp"
#include "dependencies/timing.hpp"
#include "dependencies/parsing.hpp"
#endif #endif
/* /*
@@ -31,38 +19,7 @@
*/ */
enum LogLevel //: u32 using LogFailType = ssize(*)(char const*, ...);
{
LL_Null,
LL_Note,
LL_Warning,
LL_Error,
LL_Fatal,
LL_UnderlyingType = GEN_U32_MAX,
};
typedef enum LogLevel LogLevel;
Str loglevel_to_str(LogLevel level)
{
local_persist
Str lookup[] = {
{ "Null", sizeof("Null") - 1 },
{ "Note", sizeof("Note") - 1 },
{ "Warning", sizeof("Info") - 1 },
{ "Error", sizeof("Error") - 1 },
{ "Fatal", sizeof("Fatal") - 1 },
};
return lookup[level];
}
typedef struct LogEntry LogEntry;
struct LogEntry
{
Str msg;
LogLevel level;
};
typedef void LoggerProc(LogEntry entry);
// By default this library will either crash or exit if an error is detected while generating codes. // By default this library will either crash or exit if an error is detected while generating codes.
// Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur. // Even if set to not use GEN_FATAL, GEN_FATAL will still be used for memory failures as the library is unusable when they occur.
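Note: on the right, the LogLevel/LogEntry/LoggerProc machinery is dropped and failure reporting narrows back to a printf-style function pointer; the log_fmt forwarder added to printing.hpp later in this diff matches that signature. Sketch:

    LogFailType log_fail = log_fmt;           // ssize (*)(char const*, ...)
    log_fail( "Failed to lex any tokens\n" );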
@@ -17,39 +17,6 @@ template <class TType> struct RemovePtr<TType*> { typedef TType Type; };
template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type; template <class TType> using TRemovePtr = typename RemovePtr<TType>::Type;
#pragma region Slice
#if 0
#define Slice(Type) Slice<Type>
template<class Type> struct Slice;
template<class Type>
Type* slice_get(Slice<Type> self, ssize id) {
GEN_ASSERT(id > -1);
GEN_ASSERT(id < self.len);
return self.ptr[id];
}
template<class Type>
struct Slice
{
Type* ptr;
ssize len;
#if GEN_COMPILER_CPP
forceinline operator Token* () const { return ptr; }
forceinline Token& operator[]( ssize index ) const { return ptr[index]; }
forceinline Type* begin() { return ptr; }
forceinline Type* end() { return ptr + len; }
#endif
#if ! GEN_C_LIKE_CPP && GEN_COMPILER_CPP
forceinline Type& back() { return ptr[len - 1]; }
#endif
};
#endif
#pragma endregion Slice
#pragma region Array #pragma region Array
#define Array(Type) Array<Type> #define Array(Type) Array<Type>
@@ -59,8 +26,10 @@ struct Slice
struct ArrayHeader; struct ArrayHeader;
#if GEN_COMPILER_CPP
template<class Type> struct Array; template<class Type> struct Array;
# define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType # define get_array_underlying_type(array) typename TRemovePtr<typeof(array)>:: DataType
#endif
usize array_grow_formula(ssize value); usize array_grow_formula(ssize value);
@@ -90,12 +59,12 @@ struct ArrayHeader {
usize Num; usize Num;
}; };
#if GEN_COMPILER_CPP
template<class Type> template<class Type>
struct Array struct Array
{ {
Type* Data; Type* Data;
#if ! GEN_C_LIKE_CPP
#pragma region Member Mapping #pragma region Member Mapping
forceinline static Array init(AllocatorInfo allocator) { return array_init<Type>(allocator); } forceinline static Array init(AllocatorInfo allocator) { return array_init<Type>(allocator); }
forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); } forceinline static Array init_reserve(AllocatorInfo allocator, ssize capacity) { return array_init_reserve<Type>(allocator, capacity); }
@@ -119,7 +88,6 @@ struct Array
forceinline bool resize(usize num) { return array_resize<Type>(this, num); } forceinline bool resize(usize num) { return array_resize<Type>(this, num); }
forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); } forceinline bool set_capacity(usize new_capacity) { return array_set_capacity<Type>(this, new_capacity); }
#pragma endregion Member Mapping #pragma endregion Member Mapping
#endif
forceinline operator Type*() { return Data; } forceinline operator Type*() { return Data; }
forceinline operator Type const*() const { return Data; } forceinline operator Type const*() const { return Data; }
@@ -131,8 +99,9 @@ struct Array
using DataType = Type; using DataType = Type;
}; };
#endif
#if 0 #if GEN_COMPILER_CPP && 0
template<class Type> bool append(Array<Type>& array, Array<Type> other) { return append( & array, other ); } template<class Type> bool append(Array<Type>& array, Array<Type> other) { return append( & array, other ); }
template<class Type> bool append(Array<Type>& array, Type value) { return append( & array, value ); } template<class Type> bool append(Array<Type>& array, Type value) { return append( & array, value ); }
template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); } template<class Type> bool append(Array<Type>& array, Type* items, usize item_num) { return append( & array, items, item_num ); }
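Note: whichever side of the GEN_COMPILER_CPP / GEN_C_LIKE_CPP guards applies, the C++ build sees Array<Type> together with its member mapping, and both spellings resolve to the same free functions. A sketch using only calls visible elsewhere in this diff:

    Array(Token) tokens = array_init_reserve<Token>( heap(), 64 );
    Token tok = {};
    array_append( tokens, tok );  // free-function style, as in the lexer hunks
    tokens.set_capacity( 128 );   // member mapping, forwards to array_set_capacity
    s32 count = scast(s32, array_num( tokens ));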
@@ -1,6 +1,9 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
# pragma once # pragma once
# include "dependencies/platform.hpp"
# include "dependencies/macros.hpp"
# include "basic_types.hpp" # include "basic_types.hpp"
# include "macros.hpp"
#endif #endif
#pragma region Debug #pragma region Debug
@@ -198,16 +198,21 @@
#ifndef forceinline #ifndef forceinline
# if GEN_COMPILER_MSVC # if GEN_COMPILER_MSVC
# define forceinline __forceinline # define forceinline __forceinline
# define neverinline __declspec( noinline )
# elif GEN_COMPILER_GCC # elif GEN_COMPILER_GCC
# define forceinline inline __attribute__((__always_inline__)) # define forceinline inline __attribute__((__always_inline__))
# define neverinline __attribute__( ( __noinline__ ) )
# elif GEN_COMPILER_CLANG # elif GEN_COMPILER_CLANG
# if __has_attribute(__always_inline__) # if __has_attribute(__always_inline__)
# define forceinline inline __attribute__((__always_inline__)) # define forceinline inline __attribute__((__always_inline__))
# define neverinline __attribute__( ( __noinline__ ) )
# else # else
# define forceinline # define forceinline
# define neverinline
# endif # endif
# else # else
# define forceinline # define forceinline
# define neverinline
# endif # endif
#endif #endif
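Note: the right column pairs every forceinline definition with a neverinline counterpart (defined to nothing where the attribute is unavailable). Typical use is keeping cold paths out of hot callers; an illustrative declaration:

    neverinline void report_lex_failure( char const* fmt, ... );
    forceinline bool is_eol( char ch ) { return ch == '\n'; }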
@@ -298,28 +303,10 @@
# define GEN_PARAM_DEFAULT # define GEN_PARAM_DEFAULT
#endif #endif
#ifndef struct_init
#if GEN_COMPILER_CPP #if GEN_COMPILER_CPP
# define struct_init(type) #define struct_init(type, value) {value}
#else #else
# define struct_init(type) (type) #define struct_init(type, value) {value}
# endif
#endif
#ifndef struct_zero
# if GEN_COMPILER_CPP
# define struct_zero(type) {}
# else
# define struct_zero(type) (type) {0}
# endif
#endif
#ifndef struct_zero_init
# if GEN_COMPILER_CPP
# define struct_zero_init() {}
# else
# define struct_zero_init() {0}
# endif
#endif #endif
#if 0 #if 0
@@ -332,12 +319,4 @@
# define GEN_OPITMIZE_MAPPINGS_END # define GEN_OPITMIZE_MAPPINGS_END
#endif #endif
#ifndef get_optional
# if GEN_COMPILER_C
# define get_optional(opt) opt ? *opt : (typeof(*opt)){0}
# else
# define get_optional(opt) opt
# endif
#endif
#pragma endregion Macros #pragma endregion Macros
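Note: the left column's family (zero-argument struct_init plus struct_zero, struct_zero_init, and get_optional) collapses into the single two-argument struct_init on the right. The strbuilder_fmt_buf hunk later in this diff shows the migration in place:

    PrintF_Buffer buf_old = struct_zero_init();              // left column: {} in C++, {0} in C
    PrintF_Buffer buf_new = struct_init(PrintF_Buffer, {0}); // right column: expands to { {0} }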
@@ -134,6 +134,12 @@ GEN_API void* heap_allocator_proc( void* allocator_data, AllocType type, ssize s
//! The heap allocator backed by operating system's memory manager. //! The heap allocator backed by operating system's memory manager.
constexpr AllocatorInfo heap( void ) { AllocatorInfo allocator = { heap_allocator_proc, nullptr }; return allocator; } constexpr AllocatorInfo heap( void ) { AllocatorInfo allocator = { heap_allocator_proc, nullptr }; return allocator; }
//! Helper to allocate memory using heap allocator.
#define malloc( sz ) alloc( heap(), sz )
//! Helper to free memory allocated by heap allocator.
#define mfree( ptr ) free( heap(), ptr )
struct VirtualMemory struct VirtualMemory
{ {
void* data; void* data;
@@ -179,8 +185,6 @@ void arena_check (Arena* arena);
void arena_free (Arena* arena); void arena_free (Arena* arena);
ssize arena_size_remaining(Arena* arena, ssize alignment); ssize arena_size_remaining(Arena* arena, ssize alignment);
// TODO(Ed): Add arena_pos, arena_pop, and arena_pop_to
struct Arena struct Arena
{ {
AllocatorInfo Backing; AllocatorInfo Backing;
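Note: the malloc/mfree defines added on the right are thin wrappers over the allocator interface and shadow the C standard names for any translation unit that includes this header:

    void* scratch = malloc( 256 ); // expands to alloc( heap(), 256 )
    // ... use scratch ...
    mfree( scratch );              // expands to free( heap(), scratch )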
@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
# pragma once # pragma once
# include "string_ops.cpp" # include "strbuilder_ops.cpp"
#endif #endif
#pragma region Printing #pragma region Printing
@@ -1,6 +1,6 @@
#ifdef INTELLISENSE_DIRECTIVES #ifdef INTELLISENSE_DIRECTIVES
# pragma once # pragma once
# include "string_ops.hpp" # include "strbuilder_ops.hpp"
#endif #endif
#pragma region Printing #pragma region Printing
@@ -26,4 +26,17 @@ GEN_API ssize c_str_fmt_file_va ( FileInfo* f, char const* fmt, va_list va );
constexpr constexpr
char const* Msg_Invalid_Value = "INVALID VALUE PROVIDED"; char const* Msg_Invalid_Value = "INVALID VALUE PROVIDED";
inline
ssize log_fmt(char const* fmt, ...)
{
ssize res;
va_list va;
va_start(va, fmt);
res = c_str_fmt_out_va(fmt, va);
va_end(va);
return res;
}
#pragma endregion Printing #pragma endregion Printing
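Note: log_fmt is a plain varargs forwarder to c_str_fmt_out_va, so it takes the same directives already used for the token dump in the lexer hunks (with idx and tok in scope as in that loop):

    log_fmt( "Token %d Type: %s : %.*s\n", idx, toktype_to_str( tok.Type ).Ptr, tok.Text.Len, tok.Text.Ptr );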
@@ -320,7 +320,7 @@ inline
StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...) StrBuilder strbuilder_fmt_buf(AllocatorInfo allocator, char const* fmt, ...)
{ {
local_persist thread_local local_persist thread_local
PrintF_Buffer buf = struct_zero_init(); PrintF_Buffer buf = struct_init(PrintF_Buffer, {0});
va_list va; va_list va;
va_start(va, fmt); va_start(va, fmt);
@@ -11,6 +11,9 @@
#include "helpers/push_ignores.inline.hpp" #include "helpers/push_ignores.inline.hpp"
#include "components/header_start.hpp" #include "components/header_start.hpp"
// Has container defines pushed
#include "gen.dep.hpp"
GEN_NS_BEGIN GEN_NS_BEGIN
#include "components/types.hpp" #include "components/types.hpp"
@@ -1159,8 +1159,6 @@ R"(#define <interface_name>( code ) _Generic( (code), \
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len }; Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str(); Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
opt_param->ValueType->Specs = def_specifier(Spec_Ptr);
// Resolve define's arguments // Resolve define's arguments
b32 has_args = fn->Params->NumEntries > 1; b32 has_args = fn->Params->NumEntries > 1;
StrBuilder params_str = StrBuilder::make_reserve(_ctx->Allocator_Temp, 32); StrBuilder params_str = StrBuilder::make_reserve(_ctx->Allocator_Temp, 32);
@@ -1174,10 +1172,10 @@ R"(#define <interface_name>( code ) _Generic( (code), \
} }
char const* tmpl_fn_macro = nullptr; char const* tmpl_fn_macro = nullptr;
if (params_str.length() > 0 ) { if (params_str.length() > 0 ) {
tmpl_fn_macro= "#define <def_name>( <params> ... ) <def__name>( <params> & (<opts_type>) { __VA_ARGS__ } )\n"; tmpl_fn_macro= "#define <def_name>( <params> ... ) <def__name>( <params> (<opts_type>) { __VA_ARGS__ } )\n";
} }
else { else {
tmpl_fn_macro= "#define <def_name>( ... ) <def__name>( & (<opts_type>) { __VA_ARGS__ } )\n"; tmpl_fn_macro= "#define <def_name>( ... ) <def__name>( (<opts_type>) { __VA_ARGS__ } )\n";
} }
Code fn_macro = untyped_str(token_fmt( Code fn_macro = untyped_str(token_fmt(
"def_name", fn->Name "def_name", fn->Name
@@ -1506,7 +1504,6 @@ R"(#define <interface_name>( code ) _Generic( (code), \
Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len }; Str actual_name = { fn->Name.Ptr + prefix.Len, fn->Name.Len - prefix.Len };
Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str(); Str new_name = StrBuilder::fmt_buf(_ctx->Allocator_Temp, "def__%S", actual_name ).to_str();
fn->Name = cache_str(new_name); fn->Name = cache_str(new_name);
opt_param->ValueType->Specs = def_specifier(Spec_Ptr);
} }
src_upfront.append(fn); src_upfront.append(fn);
} }
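Note: together with dropping the Spec_Ptr assignment on opt_param, the template change means the generated convenience macros pass the options struct by value instead of by address. For a hypothetical def_variable wrapper (the def__ name and opts type name are illustrative of the prefix handling above), the emitted macro goes from the left form to the right form:

    #define def_variable( ... ) def__variable( & (Opts_def_variable) { __VA_ARGS__ } ) // left column
    #define def_variable( ... ) def__variable(   (Opts_def_variable) { __VA_ARGS__ } ) // right column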
@@ -53,7 +53,6 @@ word enum_underlying, gen_enum_underlying
word nullptr, gen_nullptr word nullptr, gen_nullptr
word struct_init, gen_struct_init word struct_init, gen_struct_init
word hash, gen_hash word hash, gen_hash
word txt, gen_txt
// Basic Types // Basic Types
@@ -411,8 +410,6 @@ namespace var_, gen_var_
word _ctx, gen__ctx word _ctx, gen__ctx
word get_context, gen_get_context
word init, gen_init word init, gen_init
word deinit, gen_deinit word deinit, gen_deinit
word reset, gen_reset word reset, gen_reset
@@ -535,7 +532,7 @@ namespace Lexer_, gen_Lexer_
word LexContext, gen_LexContext word LexContext, gen_LexContext
word lex, gen_lex word lex, gen_lex
word ParseStackNode, gen_ParseStackNode word StackNode, gen_StackNode
word ParseContext, gen_ParseContext word ParseContext, gen_ParseContext
// namespace parse_, gen_parse_ // namespace parse_, gen_parse_
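Note: as I read this mapping format, each word A, B entry is a whole-word rename applied by the refactor pass when emitting the gen_ prefixed C library, so dropping txt and get_context here presumably reflects those identifiers no longer existing on that side. Illustrative effect of the StackNode entry:

    // before the refactor pass:
    StackNode* node;
    // after the refactor pass (as shipped in the C library):
    gen_StackNode* node;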
@@ -132,7 +132,6 @@ if ( $vendor -match "clang" )
$flag_all_c = @('-x', 'c') $flag_all_c = @('-x', 'c')
$flag_c11 = '-std=c11' $flag_c11 = '-std=c11'
$flag_all_cpp = '-x c++' $flag_all_cpp = '-x c++'
$flag_charset_utf8 = '-utf-8'
$flag_compile = '-c' $flag_compile = '-c'
$flag_color_diagnostics = '-fcolor-diagnostics' $flag_color_diagnostics = '-fcolor-diagnostics'
$flag_no_color_diagnostics = '-fno-color-diagnostics' $flag_no_color_diagnostics = '-fno-color-diagnostics'
@@ -372,7 +371,6 @@ if ( $vendor -match "msvc" )
$flag_all_c = '/TC' $flag_all_c = '/TC'
$flag_c11 = '/std:c11' $flag_c11 = '/std:c11'
$flag_all_cpp = '/TP' $flag_all_cpp = '/TP'
$flag_charset_utf8 = '/utf-8'
$flag_compile = '/c' $flag_compile = '/c'
$flag_debug = '/Zi' $flag_debug = '/Zi'
$flag_define = '/D' $flag_define = '/D'
@@ -406,7 +404,7 @@ if ( $vendor -match "msvc" )
$flag_optimize_intrinsics = '/Oi' $flag_optimize_intrinsics = '/Oi'
$flag_optimized_debug_forceinline = '/d2Obforceinline' $flag_optimized_debug_forceinline = '/d2Obforceinline'
$flag_optimized_debug = '/Zo' $flag_optimized_debug = '/Zo'
# $flag_ $flag_
# $flag_out_name = '/OUT:' # $flag_out_name = '/OUT:'
$flag_path_interm = '/Fo' $flag_path_interm = '/Fo'
$flag_path_debug = '/Fd' $flag_path_debug = '/Fd'