reduce TokType enum to C-compatible

This commit is contained in:
Edward R. Gonzalez 2024-12-03 09:50:30 -05:00
parent a7c9dad9fd
commit d45908fb32
5 changed files with 930 additions and 935 deletions

View File

@ -6,114 +6,113 @@
// This file was generated automatially by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp) // This file was generated automatially by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp)
GEN_NS_PARSER_BEGIN GEN_NS_PARSER_BEGIN
#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )
namespace ETokType typedef TokType_Def TokType;
{
#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Attribute_API_Import, "GEN_API_Import_Code" )
enum Type : u32 enum TokType_Def : u32
{ {
Invalid, Tok_Invalid,
Access_Private, Tok_Access_Private,
Access_Protected, Tok_Access_Protected,
Access_Public, Tok_Access_Public,
Access_MemberSymbol, Tok_Access_MemberSymbol,
Access_StaticSymbol, Tok_Access_StaticSymbol,
Ampersand, Tok_Ampersand,
Ampersand_DBL, Tok_Ampersand_DBL,
Assign_Classifer, Tok_Assign_Classifer,
Attribute_Open, Tok_Attribute_Open,
Attribute_Close, Tok_Attribute_Close,
BraceCurly_Open, Tok_BraceCurly_Open,
BraceCurly_Close, Tok_BraceCurly_Close,
BraceSquare_Open, Tok_BraceSquare_Open,
BraceSquare_Close, Tok_BraceSquare_Close,
Capture_Start, Tok_Capture_Start,
Capture_End, Tok_Capture_End,
Comment, Tok_Comment,
Comment_End, Tok_Comment_End,
Comment_Start, Tok_Comment_Start,
Char, Tok_Char,
Comma, Tok_Comma,
Decl_Class, Tok_Decl_Class,
Decl_GNU_Attribute, Tok_Decl_GNU_Attribute,
Decl_MSVC_Attribute, Tok_Decl_MSVC_Attribute,
Decl_Enum, Tok_Decl_Enum,
Decl_Extern_Linkage, Tok_Decl_Extern_Linkage,
Decl_Friend, Tok_Decl_Friend,
Decl_Module, Tok_Decl_Module,
Decl_Namespace, Tok_Decl_Namespace,
Decl_Operator, Tok_Decl_Operator,
Decl_Struct, Tok_Decl_Struct,
Decl_Template, Tok_Decl_Template,
Decl_Typedef, Tok_Decl_Typedef,
Decl_Using, Tok_Decl_Using,
Decl_Union, Tok_Decl_Union,
Identifier, Tok_Identifier,
Module_Import, Tok_Module_Import,
Module_Export, Tok_Module_Export,
NewLine, Tok_NewLine,
Number, Tok_Number,
Operator, Tok_Operator,
Preprocess_Hash, Tok_Preprocess_Hash,
Preprocess_Define, Tok_Preprocess_Define,
Preprocess_If, Tok_Preprocess_If,
Preprocess_IfDef, Tok_Preprocess_IfDef,
Preprocess_IfNotDef, Tok_Preprocess_IfNotDef,
Preprocess_ElIf, Tok_Preprocess_ElIf,
Preprocess_Else, Tok_Preprocess_Else,
Preprocess_EndIf, Tok_Preprocess_EndIf,
Preprocess_Include, Tok_Preprocess_Include,
Preprocess_Pragma, Tok_Preprocess_Pragma,
Preprocess_Content, Tok_Preprocess_Content,
Preprocess_Macro, Tok_Preprocess_Macro,
Preprocess_Unsupported, Tok_Preprocess_Unsupported,
Spec_Alignas, Tok_Spec_Alignas,
Spec_Const, Tok_Spec_Const,
Spec_Consteval, Tok_Spec_Consteval,
Spec_Constexpr, Tok_Spec_Constexpr,
Spec_Constinit, Tok_Spec_Constinit,
Spec_Explicit, Tok_Spec_Explicit,
Spec_Extern, Tok_Spec_Extern,
Spec_Final, Tok_Spec_Final,
Spec_ForceInline, Tok_Spec_ForceInline,
Spec_Global, Tok_Spec_Global,
Spec_Inline, Tok_Spec_Inline,
Spec_Internal_Linkage, Tok_Spec_Internal_Linkage,
Spec_LocalPersist, Tok_Spec_LocalPersist,
Spec_Mutable, Tok_Spec_Mutable,
Spec_NeverInline, Tok_Spec_NeverInline,
Spec_Override, Tok_Spec_Override,
Spec_Static, Tok_Spec_Static,
Spec_ThreadLocal, Tok_Spec_ThreadLocal,
Spec_Volatile, Tok_Spec_Volatile,
Spec_Virtual, Tok_Spec_Virtual,
Star, Tok_Star,
Statement_End, Tok_Statement_End,
StaticAssert, Tok_StaticAssert,
String, Tok_String,
Type_Typename, Tok_Type_Typename,
Type_Unsigned, Tok_Type_Unsigned,
Type_Signed, Tok_Type_Signed,
Type_Short, Tok_Type_Short,
Type_Long, Tok_Type_Long,
Type_bool, Tok_Type_bool,
Type_char, Tok_Type_char,
Type_int, Tok_Type_int,
Type_double, Tok_Type_double,
Type_MS_int8, Tok_Type_MS_int8,
Type_MS_int16, Tok_Type_MS_int16,
Type_MS_int32, Tok_Type_MS_int32,
Type_MS_int64, Tok_Type_MS_int64,
Type_MS_W64, Tok_Type_MS_W64,
Varadic_Argument, Tok_Varadic_Argument,
__Attributes_Start, Tok___Attributes_Start,
Attribute_API_Export, Tok_Attribute_API_Export,
Attribute_API_Import, Tok_Attribute_API_Import,
NumTokens Tok_NumTokens
}; };
inline StrC to_str( Type type ) inline StrC to_str( TokType type )
{ {
local_persist StrC lookup[] { local_persist StrC lookup[] {
{ sizeof( "__invalid__" ), "__invalid__" }, { sizeof( "__invalid__" ), "__invalid__" },
@ -217,24 +216,21 @@ namespace ETokType
return lookup[type]; return lookup[type];
} }
inline Type to_type( StrC str ) inline TokType to_type( StrC str )
{ {
local_persist u32 keymap[NumTokens]; local_persist u32 keymap[Tok_NumTokens];
do_once_start for ( u32 index = 0; index < NumTokens; index++ ) do_once_start for ( u32 index = 0; index < Tok_NumTokens; index++ )
{ {
StrC enum_str = to_str( (Type)index ); StrC enum_str = to_str( (TokType)index );
keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 ); keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 );
} }
do_once_end u32 hash = crc32( str.Ptr, str.Len ); do_once_end u32 hash = crc32( str.Ptr, str.Len );
for ( u32 index = 0; index < NumTokens; index++ ) for ( u32 index = 0; index < Tok_NumTokens; index++ )
{ {
if ( keymap[index] == hash ) if ( keymap[index] == hash )
return (Type)index; return (TokType)index;
} }
return Invalid; return Tok_Invalid;
} }
} // namespace ETokType
using TokType = ETokType::Type;
GEN_NS_PARSER_END GEN_NS_PARSER_END

View File

@ -19,7 +19,7 @@ CodeClass parse_class( StrC def )
Context.Tokens = toks; Context.Tokens = toks;
push_scope(); push_scope();
CodeClass result = (CodeClass) parse_class_struct( parser::TokType::Decl_Class ); CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class );
pop(& Context); pop(& Context);
return result; return result;
} }
@ -235,7 +235,7 @@ CodeStruct parse_struct( StrC def )
Context.Tokens = toks; Context.Tokens = toks;
push_scope(); push_scope();
CodeStruct result = (CodeStruct) parse_class_struct( TokType::Decl_Struct ); CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct );
pop(& Context); pop(& Context);
return result; return result;
} }

View File

@ -33,7 +33,7 @@ struct Token
u32 Flags; u32 Flags;
}; };
constexpr Token NullToken { nullptr, 0, TokType::Invalid, false, 0, TF_Null }; constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null };
AccessSpec to_access_specifier(Token tok) AccessSpec to_access_specifier(Token tok)
{ {
@ -47,7 +47,7 @@ StrC to_str(Token tok)
bool is_valid( Token tok ) bool is_valid( Token tok )
{ {
return tok.Text && tok.Length && tok.Type != TokType::Invalid; return tok.Text && tok.Length && tok.Type != Tok_Invalid;
} }
bool is_access_operator(Token tok) bool is_access_operator(Token tok)
@ -94,7 +94,7 @@ String to_string(Token tok)
{ {
String result = string_make_reserve( GlobalAllocator, kilobytes(4) ); String result = string_make_reserve( GlobalAllocator, kilobytes(4) );
StrC type_str = ETokType::to_str( tok.Type ); StrC type_str = to_str( tok.Type );
append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s" append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
, tok.Line, tok.Column , tok.Line, tok.Column
@ -117,7 +117,7 @@ Token* current(TokArray* self, bool skip_formatting )
{ {
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self->Arr[self->Idx].Type == TokType::NewLine || self->Arr[self->Idx].Type == TokType::Comment ) while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
self->Idx++; self->Idx++;
} }
@ -130,7 +130,7 @@ Token* previous(TokArray self, bool skip_formatting)
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self.Arr[idx].Type == TokType::NewLine ) while ( self.Arr[idx].Type == Tok_NewLine )
idx --; idx --;
return & self.Arr[idx]; return & self.Arr[idx];
@ -145,7 +145,7 @@ Token* next(TokArray self, bool skip_formatting)
if ( skip_formatting ) if ( skip_formatting )
{ {
while ( self.Arr[idx].Type == TokType::NewLine ) while ( self.Arr[idx].Type == Tok_NewLine )
idx++; idx++;
return & self.Arr[idx + 1]; return & self.Arr[idx + 1];
@ -221,7 +221,7 @@ forceinline
s32 lex_preprocessor_directive( LexContext* ctx ) s32 lex_preprocessor_directive( LexContext* ctx )
{ {
char const* hash = ctx->scanner; char const* hash = ctx->scanner;
append( & Tokens, { hash, 1, TokType::Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } ); append( & Tokens, { hash, 1, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } );
move_forward(); move_forward();
SkipWhitespace(); SkipWhitespace();
@ -233,12 +233,12 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Length++; ctx->token.Length++;
} }
ctx->token.Type = ETokType::to_type( to_str(ctx->token) ); ctx->token.Type = to_type( to_str(ctx->token) );
bool is_preprocessor = ctx->token.Type >= TokType::Preprocess_Define && ctx->token.Type <= TokType::Preprocess_Pragma; bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma;
if ( ! is_preprocessor ) if ( ! is_preprocessor )
{ {
ctx->token.Type = TokType::Preprocess_Unsupported; ctx->token.Type = Tok_Preprocess_Unsupported;
// Its an unsupported directive, skip it // Its an unsupported directive, skip it
s32 within_string = false; s32 within_string = false;
@ -301,14 +301,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
return Lex_Continue; // Skip found token, its all handled here. return Lex_Continue; // Skip found token, its all handled here.
} }
if ( ctx->token.Type == TokType::Preprocess_Else || ctx->token.Type == TokType::Preprocess_EndIf ) if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
{ {
ctx->token.Flags |= TF_Preprocess_Cond; ctx->token.Flags |= TF_Preprocess_Cond;
append( & Tokens, ctx->token ); append( & Tokens, ctx->token );
end_line(); end_line();
return Lex_Continue; return Lex_Continue;
} }
else if ( ctx->token.Type >= TokType::Preprocess_If && ctx->token.Type <= TokType::Preprocess_ElIf ) else if ( ctx->token.Type >= Tok_Preprocess_If && ctx->token.Type <= Tok_Preprocess_ElIf )
{ {
ctx->token.Flags |= TF_Preprocess_Cond; ctx->token.Flags |= TF_Preprocess_Cond;
} }
@ -317,9 +317,9 @@ s32 lex_preprocessor_directive( LexContext* ctx )
SkipWhitespace(); SkipWhitespace();
if ( ctx->token.Type == TokType::Preprocess_Define ) if ( ctx->token.Type == Tok_Preprocess_Define )
{ {
Token name = { ctx->scanner, 0, TokType::Identifier, ctx->line, ctx->column, TF_Preprocess }; Token name = { ctx->scanner, 0, Tok_Identifier, ctx->line, ctx->column, TF_Preprocess };
name.Text = ctx->scanner; name.Text = ctx->scanner;
name.Length = 1; name.Length = 1;
@ -343,11 +343,11 @@ s32 lex_preprocessor_directive( LexContext* ctx )
set(& ctx->defines, key, to_str(name) ); set(& ctx->defines, key, to_str(name) );
} }
Token preprocess_content = { ctx->scanner, 0, TokType::Preprocess_Content, ctx->line, ctx->column, TF_Preprocess }; Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
if ( ctx->token.Type == TokType::Preprocess_Include ) if ( ctx->token.Type == Tok_Preprocess_Include )
{ {
preprocess_content.Type = TokType::String; preprocess_content.Type = Tok_String;
if ( current != '"' && current != '<' ) if ( current != '"' && current != '<' )
{ {
@ -452,31 +452,31 @@ s32 lex_preprocessor_directive( LexContext* ctx )
forceinline forceinline
void lex_found_token( LexContext* ctx ) void lex_found_token( LexContext* ctx )
{ {
if ( ctx->token.Type != TokType::Invalid ) if ( ctx->token.Type != Tok_Invalid )
{ {
append( & Tokens, ctx->token ); append( & Tokens, ctx->token );
return; return;
} }
TokType type = ETokType::to_type( to_str(ctx->token) ); TokType type = to_type( to_str(ctx->token) );
if (type <= TokType::Access_Public && type >= TokType::Access_Private ) if (type <= Tok_Access_Public && type >= Tok_Access_Private )
{ {
ctx->token.Flags |= TF_AccessSpecifier; ctx->token.Flags |= TF_AccessSpecifier;
} }
if ( type > TokType::__Attributes_Start ) if ( type > Tok___Attributes_Start )
{ {
ctx->token.Flags |= TF_Attribute; ctx->token.Flags |= TF_Attribute;
} }
if ( type == ETokType::Decl_Extern_Linkage ) if ( type == Tok_Decl_Extern_Linkage )
{ {
SkipWhitespace(); SkipWhitespace();
if ( current != '"' ) if ( current != '"' )
{ {
type = ETokType::Spec_Extern; type = Tok_Spec_Extern;
ctx->token.Flags |= TF_Specifier; ctx->token.Flags |= TF_Specifier;
} }
@ -485,9 +485,9 @@ void lex_found_token( LexContext* ctx )
return; return;
} }
if ( ( type <= TokType::Star && type >= TokType::Spec_Alignas) if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
|| type == TokType::Ampersand || type == Tok_Ampersand
|| type == TokType::Ampersand_DBL ) || type == Tok_Ampersand_DBL )
{ {
ctx->token.Type = type; ctx->token.Type = type;
ctx->token.Flags |= TF_Specifier; ctx->token.Flags |= TF_Specifier;
@ -496,7 +496,7 @@ void lex_found_token( LexContext* ctx )
} }
if ( type != TokType::Invalid ) if ( type != Tok_Invalid )
{ {
ctx->token.Type = type; ctx->token.Type = type;
append( & Tokens, ctx->token ); append( & Tokens, ctx->token );
@ -512,7 +512,7 @@ void lex_found_token( LexContext* ctx )
StrC* define = get(ctx->defines, key ); StrC* define = get(ctx->defines, key );
if ( define ) if ( define )
{ {
ctx->token.Type = TokType::Preprocess_Macro; ctx->token.Type = Tok_Preprocess_Macro;
// Want to ignore any arguments the define may have as they can be execution expressions. // Want to ignore any arguments the define may have as they can be execution expressions.
if ( ctx->left && current == '(' ) if ( ctx->left && current == '(' )
@ -548,7 +548,7 @@ void lex_found_token( LexContext* ctx )
} }
else else
{ {
ctx->token.Type = TokType::Identifier; ctx->token.Type = Tok_Identifier;
} }
append( & Tokens, ctx->token ); append( & Tokens, ctx->token );
@ -607,7 +607,7 @@ TokArray lex( StrC content )
} }
#endif #endif
c.token = { c.scanner, 0, TokType::Invalid, c.line, c.column, TF_Null }; c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
bool is_define = false; bool is_define = false;
@ -623,7 +623,7 @@ TokArray lex( StrC content )
{ {
move_forward(); move_forward();
c.token.Type = TokType::NewLine; c.token.Type = Tok_NewLine;
c.token.Length++; c.token.Length++;
append( & Tokens, c.token ); append( & Tokens, c.token );
@ -655,7 +655,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Access_MemberSymbol; c.token.Type = Tok_Access_MemberSymbol;
c.token.Flags = TF_AccessOperator; c.token.Flags = TF_AccessOperator;
if (c.left) { if (c.left) {
@ -668,7 +668,7 @@ TokArray lex( StrC content )
if( current == '.' ) if( current == '.' )
{ {
c.token.Length = 3; c.token.Length = 3;
c.token.Type = TokType::Varadic_Argument; c.token.Type = Tok_Varadic_Argument;
c.token.Flags = TF_Null; c.token.Flags = TF_Null;
move_forward(); move_forward();
} }
@ -686,7 +686,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Ampersand; c.token.Type = Tok_Ampersand;
c.token.Flags |= TF_Operator; c.token.Flags |= TF_Operator;
c.token.Flags |= TF_Specifier; c.token.Flags |= TF_Specifier;
@ -696,7 +696,7 @@ TokArray lex( StrC content )
if ( current == '&' ) // && if ( current == '&' ) // &&
{ {
c.token.Length = 2; c.token.Length = 2;
c.token.Type = TokType::Ampersand_DBL; c.token.Type = Tok_Ampersand_DBL;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -708,9 +708,9 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Assign_Classifer; c.token.Type = Tok_Assign_Classifer;
// Can be either a classifier (ParentType, Bitfield width), or ternary else // Can be either a classifier (ParentType, Bitfield width), or ternary else
// token.Type = TokType::Colon; // token.Type = Tok_Colon;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -718,7 +718,7 @@ TokArray lex( StrC content )
if ( current == ':' ) if ( current == ':' )
{ {
move_forward(); move_forward();
c.token.Type = TokType::Access_StaticSymbol; c.token.Type = Tok_Access_StaticSymbol;
c.token.Length++; c.token.Length++;
} }
goto FoundToken; goto FoundToken;
@ -727,7 +727,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::BraceCurly_Open; c.token.Type = Tok_BraceCurly_Open;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -737,7 +737,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::BraceCurly_Close; c.token.Type = Tok_BraceCurly_Close;
c.token.Flags = TF_EndDefinition; c.token.Flags = TF_EndDefinition;
if (c.left) if (c.left)
@ -750,7 +750,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::BraceSquare_Open; c.token.Type = Tok_BraceSquare_Open;
if ( c.left ) if ( c.left )
{ {
move_forward(); move_forward();
@ -758,7 +758,7 @@ TokArray lex( StrC content )
if ( current == ']' ) if ( current == ']' )
{ {
c.token.Length = 2; c.token.Length = 2;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
move_forward(); move_forward();
} }
} }
@ -768,7 +768,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::BraceSquare_Close; c.token.Type = Tok_BraceSquare_Close;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -778,7 +778,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Capture_Start; c.token.Type = Tok_Capture_Start;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -788,7 +788,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Capture_End; c.token.Type = Tok_Capture_End;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -798,7 +798,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Char; c.token.Type = Tok_Char;
c.token.Flags = TF_Literal; c.token.Flags = TF_Literal;
move_forward(); move_forward();
@ -832,7 +832,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Comma; c.token.Type = Tok_Comma;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
if (c.left) if (c.left)
@ -843,7 +843,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Star; c.token.Type = Tok_Star;
c.token.Flags |= TF_Specifier; c.token.Flags |= TF_Specifier;
c.token.Flags |= TF_Operator; c.token.Flags |= TF_Operator;
@ -854,7 +854,7 @@ TokArray lex( StrC content )
{ {
c.token.Length++; c.token.Length++;
c.token.Flags |= TF_Assign; c.token.Flags |= TF_Assign;
// c.token.Type = TokType::Assign_Multiply; // c.token.Type = Tok_Assign_Multiply;
if ( c.left ) if ( c.left )
move_forward(); move_forward();
@ -866,7 +866,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Statement_End; c.token.Type = Tok_Statement_End;
c.token.Flags = TF_EndDefinition; c.token.Flags = TF_EndDefinition;
if (c.left) if (c.left)
@ -879,7 +879,7 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::String; c.token.Type = Tok_String;
c.token.Flags |= TF_Literal; c.token.Flags |= TF_Literal;
move_forward(); move_forward();
@ -913,8 +913,8 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
// c.token.Type = TokType::Ternary; // c.token.Type = Tok_Ternary;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
if (c.left) if (c.left)
@ -926,8 +926,8 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
// c.token.Type = TokType::Assign; // c.token.Type = Tok_Assign;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
c.token.Flags |= TF_Assign; c.token.Flags |= TF_Assign;
@ -947,44 +947,44 @@ TokArray lex( StrC content )
} }
case '+': case '+':
{ {
// c.token.Type = TokType::Add // c.token.Type = Tok_Add
} }
case '%': case '%':
{ {
// c.token.Type = TokType::Modulo; // c.token.Type = Tok_Modulo;
} }
case '^': case '^':
{ {
// c.token.Type = TokType::B_XOr; // c.token.Type = Tok_B_XOr;
} }
case '~': case '~':
{ {
// c.token.Type = TokType::Unary_Not; // c.token.Type = Tok_Unary_Not;
} }
case '!': case '!':
{ {
// c.token.Type = TokType::L_Not; // c.token.Type = Tok_L_Not;
} }
case '<': case '<':
{ {
// c.token.Type = TokType::Lesser; // c.token.Type = Tok_Lesser;
} }
case '>': case '>':
{ {
// c.token.Type = TokType::Greater; // c.token.Type = Tok_Greater;
} }
case '|': case '|':
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
// token.Type = TokType::L_Or; // token.Type = Tok_L_Or;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -994,7 +994,7 @@ TokArray lex( StrC content )
c.token.Length++; c.token.Length++;
c.token.Flags |= TF_Assign; c.token.Flags |= TF_Assign;
// token.Flags |= TokFlags::Assignment; // token.Flags |= TokFlags::Assignment;
// token.Type = TokType::Assign_L_Or; // token.Type = Tok_Assign_L_Or;
if (c.left) if (c.left)
move_forward(); move_forward();
@ -1014,8 +1014,8 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
// token.Type = TokType::Subtract; // token.Type = Tok_Subtract;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
if ( c.left ) if ( c.left )
{ {
@ -1024,13 +1024,13 @@ TokArray lex( StrC content )
if ( current == '>' ) if ( current == '>' )
{ {
c.token.Length++; c.token.Length++;
// token.Type = TokType::Access_PointerToMemberSymbol; // token.Type = Tok_Access_PointerToMemberSymbol;
c.token.Flags |= TF_AccessOperator; c.token.Flags |= TF_AccessOperator;
move_forward(); move_forward();
if ( current == '*' ) if ( current == '*' )
{ {
// token.Type = TokType::Access_PointerToMemberOfPointerSymbol; // token.Type = Tok_Access_PointerToMemberOfPointerSymbol;
c.token.Length++; c.token.Length++;
move_forward(); move_forward();
} }
@ -1038,7 +1038,7 @@ TokArray lex( StrC content )
else if ( current == '=' ) else if ( current == '=' )
{ {
c.token.Length++; c.token.Length++;
// token.Type = TokType::Assign_Subtract; // token.Type = Tok_Assign_Subtract;
c.token.Flags |= TF_Assign; c.token.Flags |= TF_Assign;
if (c.left) if (c.left)
@ -1058,8 +1058,8 @@ TokArray lex( StrC content )
{ {
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Operator; c.token.Type = Tok_Operator;
// token.Type = TokType::Divide; // token.Type = Tok_Divide;
c.token.Flags = TF_Operator; c.token.Flags = TF_Operator;
move_forward(); move_forward();
@ -1074,7 +1074,7 @@ TokArray lex( StrC content )
} }
else if ( current == '/' ) else if ( current == '/' )
{ {
c.token.Type = TokType::Comment; c.token.Type = Tok_Comment;
c.token.Length = 2; c.token.Length = 2;
c.token.Flags = TF_Null; c.token.Flags = TF_Null;
move_forward(); move_forward();
@ -1100,7 +1100,7 @@ TokArray lex( StrC content )
} }
else if ( current == '*' ) else if ( current == '*' )
{ {
c.token.Type = TokType::Comment; c.token.Type = Tok_Comment;
c.token.Length = 2; c.token.Length = 2;
c.token.Flags = TF_Null; c.token.Flags = TF_Null;
move_forward(); move_forward();
@ -1160,7 +1160,7 @@ TokArray lex( StrC content )
c.token.Text = c.scanner; c.token.Text = c.scanner;
c.token.Length = 1; c.token.Length = 1;
c.token.Type = TokType::Number; c.token.Type = Tok_Number;
c.token.Flags = TF_Literal; c.token.Flags = TF_Literal;
move_forward(); move_forward();
@ -1230,7 +1230,7 @@ TokArray lex( StrC content )
{ {
log_fmt( "Token %d Type: %s : %.*s\n" log_fmt( "Token %d Type: %s : %.*s\n"
, idx , idx
, ETokType::to_str( Tokens[ idx ].Type ).Ptr , to_str( Tokens[ idx ].Type ).Ptr
, Tokens[ idx ].Length, Tokens[ idx ].Text , Tokens[ idx ].Length, Tokens[ idx ].Text
); );
} }

File diff suppressed because it is too large Load Diff

View File

@ -249,7 +249,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
char const* enum_str = enum_strs[idx].string; char const* enum_str = enum_strs[idx].string;
char const* entry_to_str = enum_str_strs [idx].string; char const* entry_to_str = enum_str_strs [idx].string;
append_fmt( & enum_entries, "%s,\n", enum_str ); append_fmt( & enum_entries, "Tok_%s,\n", enum_str );
append_fmt( & to_str_entries, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str); append_fmt( & to_str_entries, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
} }
@ -258,9 +258,9 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
char const* attribute_str = attribute_strs[idx].string; char const* attribute_str = attribute_strs[idx].string;
char const* entry_to_str = attribute_str_strs [idx].string; char const* entry_to_str = attribute_str_strs [idx].string;
append_fmt( & attribute_entries, "Attribute_%s,\n", attribute_str ); append_fmt( & attribute_entries, "Tok_Attribute_%s,\n", attribute_str );
append_fmt( & to_str_attributes, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str); append_fmt( & to_str_attributes, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
append_fmt( & attribute_define_entries, "Entry( Attribute_%s, \"%s\" )", attribute_str, entry_to_str ); append_fmt( & attribute_define_entries, "Entry( Tok_Attribute_%s, \"%s\" )", attribute_str, entry_to_str );
if ( idx < num(attribute_strs) - 1 ) if ( idx < num(attribute_strs) - 1 )
append( & attribute_define_entries, " \\\n"); append( & attribute_define_entries, " \\\n");
@ -275,11 +275,11 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
// We cannot parse this enum, it has Attribute names as enums // We cannot parse this enum, it has Attribute names as enums
CodeEnum enum_code = parse_enum(token_fmt("entries", (StrC)enum_entries, "attribute_toks", (StrC)attribute_entries, stringize( CodeEnum enum_code = parse_enum(token_fmt("entries", (StrC)enum_entries, "attribute_toks", (StrC)attribute_entries, stringize(
enum Type : u32 enum TokType_Def : u32
{ {
<entries> <entries>
<attribute_toks> <attribute_toks>
NumTokens Tok_NumTokens
}; };
))); )));
@ -291,7 +291,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
#undef do_once_end #undef do_once_end
CodeFn to_str = parse_function(token_fmt("entries", (StrC)to_str_entries, "attribute_toks", (StrC)to_str_attributes, stringize( CodeFn to_str = parse_function(token_fmt("entries", (StrC)to_str_entries, "attribute_toks", (StrC)to_str_attributes, stringize(
inline inline
StrC to_str( Type type ) StrC to_str( TokType type )
{ {
local_persist local_persist
StrC lookup[] { StrC lookup[] {
@ -305,14 +305,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
CodeFn to_type = parse_function( token_fmt( "entries", (StrC)to_str_entries, stringize( CodeFn to_type = parse_function( token_fmt( "entries", (StrC)to_str_entries, stringize(
inline inline
Type to_type( StrC str ) TokType to_type( StrC str )
{ {
local_persist local_persist
u32 keymap[ NumTokens ]; u32 keymap[ Tok_NumTokens ];
do_once_start do_once_start
for ( u32 index = 0; index < NumTokens; index++ ) for ( u32 index = 0; index < Tok_NumTokens; index++ )
{ {
StrC enum_str = to_str( (Type)index ); StrC enum_str = to_str( (TokType)index );
// We subtract 1 to remove the null terminator // We subtract 1 to remove the null terminator
// This is because the tokens lexed are not null terminated. // This is because the tokens lexed are not null terminated.
@ -322,13 +322,13 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
u32 hash = crc32( str.Ptr, str.Len ); u32 hash = crc32( str.Ptr, str.Len );
for ( u32 index = 0; index < NumTokens; index++ ) for ( u32 index = 0; index < Tok_NumTokens; index++ )
{ {
if ( keymap[index] == hash ) if ( keymap[index] == hash )
return (Type)index; return (TokType)index;
} }
return Invalid; return Tok_Invalid;
} }
))); )));
#pragma pop_macro("local_persist") #pragma pop_macro("local_persist")
@ -336,15 +336,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
#pragma pop_macro("do_once_end") #pragma pop_macro("do_once_end")
//CodeNS nspace = def_namespace( name(ETokType), def_namespace_body( args( attribute_entires_def, enum_code, to_str, to_type ) ) ); //CodeNS nspace = def_namespace( name(ETokType), def_namespace_body( args( attribute_entires_def, enum_code, to_str, to_type ) ) );
CodeUsing td_toktype = def_using( name(TokType), def_type( name(ETokType::Type) ) ); CodeTypedef td_toktype = parse_typedef( code( typedef TokType_Def TokType; ));
return def_global_body( args( return def_global_body( args(
untyped_str(txt("GEN_NS_PARSER_BEGIN\n")),
attribute_entires_def, attribute_entires_def,
td_toktype,
enum_code, enum_code,
to_str, to_str,
td_toktype, to_type
untyped_str(txt("GEN_NS_PARSER_END\n"))
)); ));
} }