reduce TokType enum to c-compatible

This commit is contained in:
Edward R. Gonzalez 2024-12-03 09:50:30 -05:00
parent a7c9dad9fd
commit d45908fb32
5 changed files with 930 additions and 935 deletions

View File

@ -6,235 +6,231 @@
// This file was generated automatically by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp)
GEN_NS_PARSER_BEGIN
#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )
namespace ETokType
typedef TokType_Def TokType;
enum TokType_Def : u32
{
#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Attribute_API_Import, "GEN_API_Import_Code" )
Tok_Invalid,
Tok_Access_Private,
Tok_Access_Protected,
Tok_Access_Public,
Tok_Access_MemberSymbol,
Tok_Access_StaticSymbol,
Tok_Ampersand,
Tok_Ampersand_DBL,
Tok_Assign_Classifer,
Tok_Attribute_Open,
Tok_Attribute_Close,
Tok_BraceCurly_Open,
Tok_BraceCurly_Close,
Tok_BraceSquare_Open,
Tok_BraceSquare_Close,
Tok_Capture_Start,
Tok_Capture_End,
Tok_Comment,
Tok_Comment_End,
Tok_Comment_Start,
Tok_Char,
Tok_Comma,
Tok_Decl_Class,
Tok_Decl_GNU_Attribute,
Tok_Decl_MSVC_Attribute,
Tok_Decl_Enum,
Tok_Decl_Extern_Linkage,
Tok_Decl_Friend,
Tok_Decl_Module,
Tok_Decl_Namespace,
Tok_Decl_Operator,
Tok_Decl_Struct,
Tok_Decl_Template,
Tok_Decl_Typedef,
Tok_Decl_Using,
Tok_Decl_Union,
Tok_Identifier,
Tok_Module_Import,
Tok_Module_Export,
Tok_NewLine,
Tok_Number,
Tok_Operator,
Tok_Preprocess_Hash,
Tok_Preprocess_Define,
Tok_Preprocess_If,
Tok_Preprocess_IfDef,
Tok_Preprocess_IfNotDef,
Tok_Preprocess_ElIf,
Tok_Preprocess_Else,
Tok_Preprocess_EndIf,
Tok_Preprocess_Include,
Tok_Preprocess_Pragma,
Tok_Preprocess_Content,
Tok_Preprocess_Macro,
Tok_Preprocess_Unsupported,
Tok_Spec_Alignas,
Tok_Spec_Const,
Tok_Spec_Consteval,
Tok_Spec_Constexpr,
Tok_Spec_Constinit,
Tok_Spec_Explicit,
Tok_Spec_Extern,
Tok_Spec_Final,
Tok_Spec_ForceInline,
Tok_Spec_Global,
Tok_Spec_Inline,
Tok_Spec_Internal_Linkage,
Tok_Spec_LocalPersist,
Tok_Spec_Mutable,
Tok_Spec_NeverInline,
Tok_Spec_Override,
Tok_Spec_Static,
Tok_Spec_ThreadLocal,
Tok_Spec_Volatile,
Tok_Spec_Virtual,
Tok_Star,
Tok_Statement_End,
Tok_StaticAssert,
Tok_String,
Tok_Type_Typename,
Tok_Type_Unsigned,
Tok_Type_Signed,
Tok_Type_Short,
Tok_Type_Long,
Tok_Type_bool,
Tok_Type_char,
Tok_Type_int,
Tok_Type_double,
Tok_Type_MS_int8,
Tok_Type_MS_int16,
Tok_Type_MS_int32,
Tok_Type_MS_int64,
Tok_Type_MS_W64,
Tok_Varadic_Argument,
Tok___Attributes_Start,
Tok_Attribute_API_Export,
Tok_Attribute_API_Import,
Tok_NumTokens
};
enum Type : u32
{
Invalid,
Access_Private,
Access_Protected,
Access_Public,
Access_MemberSymbol,
Access_StaticSymbol,
Ampersand,
Ampersand_DBL,
Assign_Classifer,
Attribute_Open,
Attribute_Close,
BraceCurly_Open,
BraceCurly_Close,
BraceSquare_Open,
BraceSquare_Close,
Capture_Start,
Capture_End,
Comment,
Comment_End,
Comment_Start,
Char,
Comma,
Decl_Class,
Decl_GNU_Attribute,
Decl_MSVC_Attribute,
Decl_Enum,
Decl_Extern_Linkage,
Decl_Friend,
Decl_Module,
Decl_Namespace,
Decl_Operator,
Decl_Struct,
Decl_Template,
Decl_Typedef,
Decl_Using,
Decl_Union,
Identifier,
Module_Import,
Module_Export,
NewLine,
Number,
Operator,
Preprocess_Hash,
Preprocess_Define,
Preprocess_If,
Preprocess_IfDef,
Preprocess_IfNotDef,
Preprocess_ElIf,
Preprocess_Else,
Preprocess_EndIf,
Preprocess_Include,
Preprocess_Pragma,
Preprocess_Content,
Preprocess_Macro,
Preprocess_Unsupported,
Spec_Alignas,
Spec_Const,
Spec_Consteval,
Spec_Constexpr,
Spec_Constinit,
Spec_Explicit,
Spec_Extern,
Spec_Final,
Spec_ForceInline,
Spec_Global,
Spec_Inline,
Spec_Internal_Linkage,
Spec_LocalPersist,
Spec_Mutable,
Spec_NeverInline,
Spec_Override,
Spec_Static,
Spec_ThreadLocal,
Spec_Volatile,
Spec_Virtual,
Star,
Statement_End,
StaticAssert,
String,
Type_Typename,
Type_Unsigned,
Type_Signed,
Type_Short,
Type_Long,
Type_bool,
Type_char,
Type_int,
Type_double,
Type_MS_int8,
Type_MS_int16,
Type_MS_int32,
Type_MS_int64,
Type_MS_W64,
Varadic_Argument,
__Attributes_Start,
Attribute_API_Export,
Attribute_API_Import,
NumTokens
inline StrC to_str( TokType type )
{
local_persist StrC lookup[] {
{ sizeof( "__invalid__" ), "__invalid__" },
{ sizeof( "private" ), "private" },
{ sizeof( "protected" ), "protected" },
{ sizeof( "public" ), "public" },
{ sizeof( "." ), "." },
{ sizeof( "::" ), "::" },
{ sizeof( "&" ), "&" },
{ sizeof( "&&" ), "&&" },
{ sizeof( ":" ), ":" },
{ sizeof( "[[" ), "[[" },
{ sizeof( "]]" ), "]]" },
{ sizeof( "{" ), "{" },
{ sizeof( "}" ), "}" },
{ sizeof( "[" ), "[" },
{ sizeof( "]" ), "]" },
{ sizeof( "(" ), "(" },
{ sizeof( ")" ), ")" },
{ sizeof( "__comment__" ), "__comment__" },
{ sizeof( "__comment_end__" ), "__comment_end__" },
{ sizeof( "__comment_start__" ), "__comment_start__" },
{ sizeof( "__character__" ), "__character__" },
{ sizeof( "," ), "," },
{ sizeof( "class" ), "class" },
{ sizeof( "__attribute__" ), "__attribute__" },
{ sizeof( "__declspec" ), "__declspec" },
{ sizeof( "enum" ), "enum" },
{ sizeof( "extern" ), "extern" },
{ sizeof( "friend" ), "friend" },
{ sizeof( "module" ), "module" },
{ sizeof( "namespace" ), "namespace" },
{ sizeof( "operator" ), "operator" },
{ sizeof( "struct" ), "struct" },
{ sizeof( "template" ), "template" },
{ sizeof( "typedef" ), "typedef" },
{ sizeof( "using" ), "using" },
{ sizeof( "union" ), "union" },
{ sizeof( "__identifier__" ), "__identifier__" },
{ sizeof( "import" ), "import" },
{ sizeof( "export" ), "export" },
{ sizeof( "__new_line__" ), "__new_line__" },
{ sizeof( "__number__" ), "__number__" },
{ sizeof( "__operator__" ), "__operator__" },
{ sizeof( "#" ), "#" },
{ sizeof( "define" ), "define" },
{ sizeof( "if" ), "if" },
{ sizeof( "ifdef" ), "ifdef" },
{ sizeof( "ifndef" ), "ifndef" },
{ sizeof( "elif" ), "elif" },
{ sizeof( "else" ), "else" },
{ sizeof( "endif" ), "endif" },
{ sizeof( "include" ), "include" },
{ sizeof( "pragma" ), "pragma" },
{ sizeof( "__macro_content__" ), "__macro_content__" },
{ sizeof( "__macro__" ), "__macro__" },
{ sizeof( "__unsupported__" ), "__unsupported__" },
{ sizeof( "alignas" ), "alignas" },
{ sizeof( "const" ), "const" },
{ sizeof( "consteval" ), "consteval" },
{ sizeof( "constexpr" ), "constexpr" },
{ sizeof( "constinit" ), "constinit" },
{ sizeof( "explicit" ), "explicit" },
{ sizeof( "extern" ), "extern" },
{ sizeof( "final" ), "final" },
{ sizeof( "forceinline" ), "forceinline" },
{ sizeof( "global" ), "global" },
{ sizeof( "inline" ), "inline" },
{ sizeof( "internal" ), "internal" },
{ sizeof( "local_persist" ), "local_persist" },
{ sizeof( "mutable" ), "mutable" },
{ sizeof( "neverinline" ), "neverinline" },
{ sizeof( "override" ), "override" },
{ sizeof( "static" ), "static" },
{ sizeof( "thread_local" ), "thread_local" },
{ sizeof( "volatile" ), "volatile" },
{ sizeof( "virtual" ), "virtual" },
{ sizeof( "*" ), "*" },
{ sizeof( ";" ), ";" },
{ sizeof( "static_assert" ), "static_assert" },
{ sizeof( "__string__" ), "__string__" },
{ sizeof( "typename" ), "typename" },
{ sizeof( "unsigned" ), "unsigned" },
{ sizeof( "signed" ), "signed" },
{ sizeof( "short" ), "short" },
{ sizeof( "long" ), "long" },
{ sizeof( "bool" ), "bool" },
{ sizeof( "char" ), "char" },
{ sizeof( "int" ), "int" },
{ sizeof( "double" ), "double" },
{ sizeof( "__int8" ), "__int8" },
{ sizeof( "__int16" ), "__int16" },
{ sizeof( "__int32" ), "__int32" },
{ sizeof( "__int64" ), "__int64" },
{ sizeof( "_W64" ), "_W64" },
{ sizeof( "..." ), "..." },
{ sizeof( "__attrib_start__" ), "__attrib_start__" },
{ sizeof( "GEN_API_Export_Code" ), "GEN_API_Export_Code" },
{ sizeof( "GEN_API_Import_Code" ), "GEN_API_Import_Code" },
};
return lookup[type];
}
inline StrC to_str( Type type )
inline TokType to_type( StrC str )
{
local_persist u32 keymap[Tok_NumTokens];
do_once_start for ( u32 index = 0; index < Tok_NumTokens; index++ )
{
local_persist StrC lookup[] {
{ sizeof( "__invalid__" ), "__invalid__" },
{ sizeof( "private" ), "private" },
{ sizeof( "protected" ), "protected" },
{ sizeof( "public" ), "public" },
{ sizeof( "." ), "." },
{ sizeof( "::" ), "::" },
{ sizeof( "&" ), "&" },
{ sizeof( "&&" ), "&&" },
{ sizeof( ":" ), ":" },
{ sizeof( "[[" ), "[[" },
{ sizeof( "]]" ), "]]" },
{ sizeof( "{" ), "{" },
{ sizeof( "}" ), "}" },
{ sizeof( "[" ), "[" },
{ sizeof( "]" ), "]" },
{ sizeof( "(" ), "(" },
{ sizeof( ")" ), ")" },
{ sizeof( "__comment__" ), "__comment__" },
{ sizeof( "__comment_end__" ), "__comment_end__" },
{ sizeof( "__comment_start__" ), "__comment_start__" },
{ sizeof( "__character__" ), "__character__" },
{ sizeof( "," ), "," },
{ sizeof( "class" ), "class" },
{ sizeof( "__attribute__" ), "__attribute__" },
{ sizeof( "__declspec" ), "__declspec" },
{ sizeof( "enum" ), "enum" },
{ sizeof( "extern" ), "extern" },
{ sizeof( "friend" ), "friend" },
{ sizeof( "module" ), "module" },
{ sizeof( "namespace" ), "namespace" },
{ sizeof( "operator" ), "operator" },
{ sizeof( "struct" ), "struct" },
{ sizeof( "template" ), "template" },
{ sizeof( "typedef" ), "typedef" },
{ sizeof( "using" ), "using" },
{ sizeof( "union" ), "union" },
{ sizeof( "__identifier__" ), "__identifier__" },
{ sizeof( "import" ), "import" },
{ sizeof( "export" ), "export" },
{ sizeof( "__new_line__" ), "__new_line__" },
{ sizeof( "__number__" ), "__number__" },
{ sizeof( "__operator__" ), "__operator__" },
{ sizeof( "#" ), "#" },
{ sizeof( "define" ), "define" },
{ sizeof( "if" ), "if" },
{ sizeof( "ifdef" ), "ifdef" },
{ sizeof( "ifndef" ), "ifndef" },
{ sizeof( "elif" ), "elif" },
{ sizeof( "else" ), "else" },
{ sizeof( "endif" ), "endif" },
{ sizeof( "include" ), "include" },
{ sizeof( "pragma" ), "pragma" },
{ sizeof( "__macro_content__" ), "__macro_content__" },
{ sizeof( "__macro__" ), "__macro__" },
{ sizeof( "__unsupported__" ), "__unsupported__" },
{ sizeof( "alignas" ), "alignas" },
{ sizeof( "const" ), "const" },
{ sizeof( "consteval" ), "consteval" },
{ sizeof( "constexpr" ), "constexpr" },
{ sizeof( "constinit" ), "constinit" },
{ sizeof( "explicit" ), "explicit" },
{ sizeof( "extern" ), "extern" },
{ sizeof( "final" ), "final" },
{ sizeof( "forceinline" ), "forceinline" },
{ sizeof( "global" ), "global" },
{ sizeof( "inline" ), "inline" },
{ sizeof( "internal" ), "internal" },
{ sizeof( "local_persist" ), "local_persist" },
{ sizeof( "mutable" ), "mutable" },
{ sizeof( "neverinline" ), "neverinline" },
{ sizeof( "override" ), "override" },
{ sizeof( "static" ), "static" },
{ sizeof( "thread_local" ), "thread_local" },
{ sizeof( "volatile" ), "volatile" },
{ sizeof( "virtual" ), "virtual" },
{ sizeof( "*" ), "*" },
{ sizeof( ";" ), ";" },
{ sizeof( "static_assert" ), "static_assert" },
{ sizeof( "__string__" ), "__string__" },
{ sizeof( "typename" ), "typename" },
{ sizeof( "unsigned" ), "unsigned" },
{ sizeof( "signed" ), "signed" },
{ sizeof( "short" ), "short" },
{ sizeof( "long" ), "long" },
{ sizeof( "bool" ), "bool" },
{ sizeof( "char" ), "char" },
{ sizeof( "int" ), "int" },
{ sizeof( "double" ), "double" },
{ sizeof( "__int8" ), "__int8" },
{ sizeof( "__int16" ), "__int16" },
{ sizeof( "__int32" ), "__int32" },
{ sizeof( "__int64" ), "__int64" },
{ sizeof( "_W64" ), "_W64" },
{ sizeof( "..." ), "..." },
{ sizeof( "__attrib_start__" ), "__attrib_start__" },
{ sizeof( "GEN_API_Export_Code" ), "GEN_API_Export_Code" },
{ sizeof( "GEN_API_Import_Code" ), "GEN_API_Import_Code" },
};
return lookup[type];
StrC enum_str = to_str( (TokType)index );
keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 );
}
inline Type to_type( StrC str )
do_once_end u32 hash = crc32( str.Ptr, str.Len );
for ( u32 index = 0; index < Tok_NumTokens; index++ )
{
local_persist u32 keymap[NumTokens];
do_once_start for ( u32 index = 0; index < NumTokens; index++ )
{
StrC enum_str = to_str( (Type)index );
keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 );
}
do_once_end u32 hash = crc32( str.Ptr, str.Len );
for ( u32 index = 0; index < NumTokens; index++ )
{
if ( keymap[index] == hash )
return (Type)index;
}
return Invalid;
if ( keymap[index] == hash )
return (TokType)index;
}
return Tok_Invalid;
}
} // namespace ETokType
using TokType = ETokType::Type;
GEN_NS_PARSER_END

View File

@ -19,7 +19,7 @@ CodeClass parse_class( StrC def )
Context.Tokens = toks;
push_scope();
CodeClass result = (CodeClass) parse_class_struct( parser::TokType::Decl_Class );
CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class );
pop(& Context);
return result;
}
@ -235,7 +235,7 @@ CodeStruct parse_struct( StrC def )
Context.Tokens = toks;
push_scope();
CodeStruct result = (CodeStruct) parse_class_struct( TokType::Decl_Struct );
CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct );
pop(& Context);
return result;
}

View File

@ -33,7 +33,7 @@ struct Token
u32 Flags;
};
constexpr Token NullToken { nullptr, 0, TokType::Invalid, false, 0, TF_Null };
constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null };
AccessSpec to_access_specifier(Token tok)
{
@ -47,7 +47,7 @@ StrC to_str(Token tok)
bool is_valid( Token tok )
{
return tok.Text && tok.Length && tok.Type != TokType::Invalid;
return tok.Text && tok.Length && tok.Type != Tok_Invalid;
}
bool is_access_operator(Token tok)
@ -94,7 +94,7 @@ String to_string(Token tok)
{
String result = string_make_reserve( GlobalAllocator, kilobytes(4) );
StrC type_str = ETokType::to_str( tok.Type );
StrC type_str = to_str( tok.Type );
append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
, tok.Line, tok.Column
@ -117,7 +117,7 @@ Token* current(TokArray* self, bool skip_formatting )
{
if ( skip_formatting )
{
while ( self->Arr[self->Idx].Type == TokType::NewLine || self->Arr[self->Idx].Type == TokType::Comment )
while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
self->Idx++;
}
@ -130,7 +130,7 @@ Token* previous(TokArray self, bool skip_formatting)
if ( skip_formatting )
{
while ( self.Arr[idx].Type == TokType::NewLine )
while ( self.Arr[idx].Type == Tok_NewLine )
idx --;
return & self.Arr[idx];
@ -145,7 +145,7 @@ Token* next(TokArray self, bool skip_formatting)
if ( skip_formatting )
{
while ( self.Arr[idx].Type == TokType::NewLine )
while ( self.Arr[idx].Type == Tok_NewLine )
idx++;
return & self.Arr[idx + 1];
@ -221,7 +221,7 @@ forceinline
s32 lex_preprocessor_directive( LexContext* ctx )
{
char const* hash = ctx->scanner;
append( & Tokens, { hash, 1, TokType::Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } );
append( & Tokens, { hash, 1, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } );
move_forward();
SkipWhitespace();
@ -233,12 +233,12 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Length++;
}
ctx->token.Type = ETokType::to_type( to_str(ctx->token) );
ctx->token.Type = to_type( to_str(ctx->token) );
bool is_preprocessor = ctx->token.Type >= TokType::Preprocess_Define && ctx->token.Type <= TokType::Preprocess_Pragma;
bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma;
if ( ! is_preprocessor )
{
ctx->token.Type = TokType::Preprocess_Unsupported;
ctx->token.Type = Tok_Preprocess_Unsupported;
// Its an unsupported directive, skip it
s32 within_string = false;
@ -301,14 +301,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
return Lex_Continue; // Skip found token, its all handled here.
}
if ( ctx->token.Type == TokType::Preprocess_Else || ctx->token.Type == TokType::Preprocess_EndIf )
if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
{
ctx->token.Flags |= TF_Preprocess_Cond;
append( & Tokens, ctx->token );
end_line();
return Lex_Continue;
}
else if ( ctx->token.Type >= TokType::Preprocess_If && ctx->token.Type <= TokType::Preprocess_ElIf )
else if ( ctx->token.Type >= Tok_Preprocess_If && ctx->token.Type <= Tok_Preprocess_ElIf )
{
ctx->token.Flags |= TF_Preprocess_Cond;
}
@ -317,9 +317,9 @@ s32 lex_preprocessor_directive( LexContext* ctx )
SkipWhitespace();
if ( ctx->token.Type == TokType::Preprocess_Define )
if ( ctx->token.Type == Tok_Preprocess_Define )
{
Token name = { ctx->scanner, 0, TokType::Identifier, ctx->line, ctx->column, TF_Preprocess };
Token name = { ctx->scanner, 0, Tok_Identifier, ctx->line, ctx->column, TF_Preprocess };
name.Text = ctx->scanner;
name.Length = 1;
@ -343,11 +343,11 @@ s32 lex_preprocessor_directive( LexContext* ctx )
set(& ctx->defines, key, to_str(name) );
}
Token preprocess_content = { ctx->scanner, 0, TokType::Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
if ( ctx->token.Type == TokType::Preprocess_Include )
if ( ctx->token.Type == Tok_Preprocess_Include )
{
preprocess_content.Type = TokType::String;
preprocess_content.Type = Tok_String;
if ( current != '"' && current != '<' )
{
@ -452,31 +452,31 @@ s32 lex_preprocessor_directive( LexContext* ctx )
forceinline
void lex_found_token( LexContext* ctx )
{
if ( ctx->token.Type != TokType::Invalid )
if ( ctx->token.Type != Tok_Invalid )
{
append( & Tokens, ctx->token );
return;
}
TokType type = ETokType::to_type( to_str(ctx->token) );
TokType type = to_type( to_str(ctx->token) );
if (type <= TokType::Access_Public && type >= TokType::Access_Private )
if (type <= Tok_Access_Public && type >= Tok_Access_Private )
{
ctx->token.Flags |= TF_AccessSpecifier;
}
if ( type > TokType::__Attributes_Start )
if ( type > Tok___Attributes_Start )
{
ctx->token.Flags |= TF_Attribute;
}
if ( type == ETokType::Decl_Extern_Linkage )
if ( type == Tok_Decl_Extern_Linkage )
{
SkipWhitespace();
if ( current != '"' )
{
type = ETokType::Spec_Extern;
type = Tok_Spec_Extern;
ctx->token.Flags |= TF_Specifier;
}
@ -485,9 +485,9 @@ void lex_found_token( LexContext* ctx )
return;
}
if ( ( type <= TokType::Star && type >= TokType::Spec_Alignas)
|| type == TokType::Ampersand
|| type == TokType::Ampersand_DBL )
if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
|| type == Tok_Ampersand
|| type == Tok_Ampersand_DBL )
{
ctx->token.Type = type;
ctx->token.Flags |= TF_Specifier;
@ -496,7 +496,7 @@ void lex_found_token( LexContext* ctx )
}
if ( type != TokType::Invalid )
if ( type != Tok_Invalid )
{
ctx->token.Type = type;
append( & Tokens, ctx->token );
@ -512,7 +512,7 @@ void lex_found_token( LexContext* ctx )
StrC* define = get(ctx->defines, key );
if ( define )
{
ctx->token.Type = TokType::Preprocess_Macro;
ctx->token.Type = Tok_Preprocess_Macro;
// Want to ignore any arguments the define may have as they can be execution expressions.
if ( ctx->left && current == '(' )
@ -548,7 +548,7 @@ void lex_found_token( LexContext* ctx )
}
else
{
ctx->token.Type = TokType::Identifier;
ctx->token.Type = Tok_Identifier;
}
append( & Tokens, ctx->token );
@ -607,7 +607,7 @@ TokArray lex( StrC content )
}
#endif
c.token = { c.scanner, 0, TokType::Invalid, c.line, c.column, TF_Null };
c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
bool is_define = false;
@ -623,7 +623,7 @@ TokArray lex( StrC content )
{
move_forward();
c.token.Type = TokType::NewLine;
c.token.Type = Tok_NewLine;
c.token.Length++;
append( & Tokens, c.token );
@ -655,7 +655,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Access_MemberSymbol;
c.token.Type = Tok_Access_MemberSymbol;
c.token.Flags = TF_AccessOperator;
if (c.left) {
@ -668,7 +668,7 @@ TokArray lex( StrC content )
if( current == '.' )
{
c.token.Length = 3;
c.token.Type = TokType::Varadic_Argument;
c.token.Type = Tok_Varadic_Argument;
c.token.Flags = TF_Null;
move_forward();
}
@ -686,7 +686,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Ampersand;
c.token.Type = Tok_Ampersand;
c.token.Flags |= TF_Operator;
c.token.Flags |= TF_Specifier;
@ -696,7 +696,7 @@ TokArray lex( StrC content )
if ( current == '&' ) // &&
{
c.token.Length = 2;
c.token.Type = TokType::Ampersand_DBL;
c.token.Type = Tok_Ampersand_DBL;
if (c.left)
move_forward();
@ -708,9 +708,9 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Assign_Classifer;
c.token.Type = Tok_Assign_Classifer;
// Can be either a classifier (ParentType, Bitfield width), or ternary else
// token.Type = TokType::Colon;
// token.Type = Tok_Colon;
if (c.left)
move_forward();
@ -718,7 +718,7 @@ TokArray lex( StrC content )
if ( current == ':' )
{
move_forward();
c.token.Type = TokType::Access_StaticSymbol;
c.token.Type = Tok_Access_StaticSymbol;
c.token.Length++;
}
goto FoundToken;
@ -727,7 +727,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::BraceCurly_Open;
c.token.Type = Tok_BraceCurly_Open;
if (c.left)
move_forward();
@ -737,7 +737,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::BraceCurly_Close;
c.token.Type = Tok_BraceCurly_Close;
c.token.Flags = TF_EndDefinition;
if (c.left)
@ -750,7 +750,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::BraceSquare_Open;
c.token.Type = Tok_BraceSquare_Open;
if ( c.left )
{
move_forward();
@ -758,7 +758,7 @@ TokArray lex( StrC content )
if ( current == ']' )
{
c.token.Length = 2;
c.token.Type = TokType::Operator;
c.token.Type = Tok_Operator;
move_forward();
}
}
@ -768,7 +768,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::BraceSquare_Close;
c.token.Type = Tok_BraceSquare_Close;
if (c.left)
move_forward();
@ -778,7 +778,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Capture_Start;
c.token.Type = Tok_Capture_Start;
if (c.left)
move_forward();
@ -788,7 +788,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Capture_End;
c.token.Type = Tok_Capture_End;
if (c.left)
move_forward();
@ -798,7 +798,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Char;
c.token.Type = Tok_Char;
c.token.Flags = TF_Literal;
move_forward();
@ -832,7 +832,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Comma;
c.token.Type = Tok_Comma;
c.token.Flags = TF_Operator;
if (c.left)
@ -843,7 +843,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Star;
c.token.Type = Tok_Star;
c.token.Flags |= TF_Specifier;
c.token.Flags |= TF_Operator;
@ -854,7 +854,7 @@ TokArray lex( StrC content )
{
c.token.Length++;
c.token.Flags |= TF_Assign;
// c.token.Type = TokType::Assign_Multiply;
// c.token.Type = Tok_Assign_Multiply;
if ( c.left )
move_forward();
@ -866,7 +866,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Statement_End;
c.token.Type = Tok_Statement_End;
c.token.Flags = TF_EndDefinition;
if (c.left)
@ -879,7 +879,7 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::String;
c.token.Type = Tok_String;
c.token.Flags |= TF_Literal;
move_forward();
@ -913,8 +913,8 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Operator;
// c.token.Type = TokType::Ternary;
c.token.Type = Tok_Operator;
// c.token.Type = Tok_Ternary;
c.token.Flags = TF_Operator;
if (c.left)
@ -926,8 +926,8 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Operator;
// c.token.Type = TokType::Assign;
c.token.Type = Tok_Operator;
// c.token.Type = Tok_Assign;
c.token.Flags = TF_Operator;
c.token.Flags |= TF_Assign;
@ -947,44 +947,44 @@ TokArray lex( StrC content )
}
case '+':
{
// c.token.Type = TokType::Add
// c.token.Type = Tok_Add
}
case '%':
{
// c.token.Type = TokType::Modulo;
// c.token.Type = Tok_Modulo;
}
case '^':
{
// c.token.Type = TokType::B_XOr;
// c.token.Type = Tok_B_XOr;
}
case '~':
{
// c.token.Type = TokType::Unary_Not;
// c.token.Type = Tok_Unary_Not;
}
case '!':
{
// c.token.Type = TokType::L_Not;
// c.token.Type = Tok_L_Not;
}
case '<':
{
// c.token.Type = TokType::Lesser;
// c.token.Type = Tok_Lesser;
}
case '>':
{
// c.token.Type = TokType::Greater;
// c.token.Type = Tok_Greater;
}
case '|':
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Operator;
c.token.Type = Tok_Operator;
c.token.Flags = TF_Operator;
// token.Type = TokType::L_Or;
// token.Type = Tok_L_Or;
if (c.left)
move_forward();
@ -994,7 +994,7 @@ TokArray lex( StrC content )
c.token.Length++;
c.token.Flags |= TF_Assign;
// token.Flags |= TokFlags::Assignment;
// token.Type = TokType::Assign_L_Or;
// token.Type = Tok_Assign_L_Or;
if (c.left)
move_forward();
@ -1014,8 +1014,8 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Operator;
// token.Type = TokType::Subtract;
c.token.Type = Tok_Operator;
// token.Type = Tok_Subtract;
c.token.Flags = TF_Operator;
if ( c.left )
{
@ -1024,13 +1024,13 @@ TokArray lex( StrC content )
if ( current == '>' )
{
c.token.Length++;
// token.Type = TokType::Access_PointerToMemberSymbol;
// token.Type = Tok_Access_PointerToMemberSymbol;
c.token.Flags |= TF_AccessOperator;
move_forward();
if ( current == '*' )
{
// token.Type = TokType::Access_PointerToMemberOfPointerSymbol;
// token.Type = Tok_Access_PointerToMemberOfPointerSymbol;
c.token.Length++;
move_forward();
}
@ -1038,7 +1038,7 @@ TokArray lex( StrC content )
else if ( current == '=' )
{
c.token.Length++;
// token.Type = TokType::Assign_Subtract;
// token.Type = Tok_Assign_Subtract;
c.token.Flags |= TF_Assign;
if (c.left)
@ -1058,8 +1058,8 @@ TokArray lex( StrC content )
{
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Operator;
// token.Type = TokType::Divide;
c.token.Type = Tok_Operator;
// token.Type = Tok_Divide;
c.token.Flags = TF_Operator;
move_forward();
@ -1074,7 +1074,7 @@ TokArray lex( StrC content )
}
else if ( current == '/' )
{
c.token.Type = TokType::Comment;
c.token.Type = Tok_Comment;
c.token.Length = 2;
c.token.Flags = TF_Null;
move_forward();
@ -1100,7 +1100,7 @@ TokArray lex( StrC content )
}
else if ( current == '*' )
{
c.token.Type = TokType::Comment;
c.token.Type = Tok_Comment;
c.token.Length = 2;
c.token.Flags = TF_Null;
move_forward();
@ -1160,7 +1160,7 @@ TokArray lex( StrC content )
c.token.Text = c.scanner;
c.token.Length = 1;
c.token.Type = TokType::Number;
c.token.Type = Tok_Number;
c.token.Flags = TF_Literal;
move_forward();
@ -1230,7 +1230,7 @@ TokArray lex( StrC content )
{
log_fmt( "Token %d Type: %s : %.*s\n"
, idx
, ETokType::to_str( Tokens[ idx ].Type ).Ptr
, to_str( Tokens[ idx ].Type ).Ptr
, Tokens[ idx ].Length, Tokens[ idx ].Text
);
}

File diff suppressed because it is too large Load Diff

View File

@ -249,7 +249,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
char const* enum_str = enum_strs[idx].string;
char const* entry_to_str = enum_str_strs [idx].string;
append_fmt( & enum_entries, "%s,\n", enum_str );
append_fmt( & enum_entries, "Tok_%s,\n", enum_str );
append_fmt( & to_str_entries, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
}
@ -258,9 +258,9 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
char const* attribute_str = attribute_strs[idx].string;
char const* entry_to_str = attribute_str_strs [idx].string;
append_fmt( & attribute_entries, "Attribute_%s,\n", attribute_str );
append_fmt( & attribute_entries, "Tok_Attribute_%s,\n", attribute_str );
append_fmt( & to_str_attributes, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
append_fmt( & attribute_define_entries, "Entry( Attribute_%s, \"%s\" )", attribute_str, entry_to_str );
append_fmt( & attribute_define_entries, "Entry( Tok_Attribute_%s, \"%s\" )", attribute_str, entry_to_str );
if ( idx < num(attribute_strs) - 1 )
append( & attribute_define_entries, " \\\n");
@ -275,11 +275,11 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
// We cannot parse this enum, it has Attribute names as enums
CodeEnum enum_code = parse_enum(token_fmt("entries", (StrC)enum_entries, "attribute_toks", (StrC)attribute_entries, stringize(
enum Type : u32
enum TokType_Def : u32
{
<entries>
<attribute_toks>
NumTokens
Tok_NumTokens
};
)));
@ -291,7 +291,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
#undef do_once_end
CodeFn to_str = parse_function(token_fmt("entries", (StrC)to_str_entries, "attribute_toks", (StrC)to_str_attributes, stringize(
inline
StrC to_str( Type type )
StrC to_str( TokType type )
{
local_persist
StrC lookup[] {
@ -305,14 +305,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
CodeFn to_type = parse_function( token_fmt( "entries", (StrC)to_str_entries, stringize(
inline
Type to_type( StrC str )
TokType to_type( StrC str )
{
local_persist
u32 keymap[ NumTokens ];
u32 keymap[ Tok_NumTokens ];
do_once_start
for ( u32 index = 0; index < NumTokens; index++ )
for ( u32 index = 0; index < Tok_NumTokens; index++ )
{
StrC enum_str = to_str( (Type)index );
StrC enum_str = to_str( (TokType)index );
// We subtract 1 to remove the null terminator
// This is because the tokens lexed are not null terminated.
@ -322,13 +322,13 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
u32 hash = crc32( str.Ptr, str.Len );
for ( u32 index = 0; index < NumTokens; index++ )
for ( u32 index = 0; index < Tok_NumTokens; index++ )
{
if ( keymap[index] == hash )
return (Type)index;
return (TokType)index;
}
return Invalid;
return Tok_Invalid;
}
)));
#pragma pop_macro("local_persist")
@ -336,15 +336,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
#pragma pop_macro("do_once_end")
//CodeNS nspace = def_namespace( name(ETokType), def_namespace_body( args( attribute_entires_def, enum_code, to_str, to_type ) ) );
CodeUsing td_toktype = def_using( name(TokType), def_type( name(ETokType::Type) ) );
CodeTypedef td_toktype = parse_typedef( code( typedef TokType_Def TokType; ));
return def_global_body( args(
untyped_str(txt("GEN_NS_PARSER_BEGIN\n")),
attribute_entires_def,
td_toktype,
enum_code,
to_str,
td_toktype,
untyped_str(txt("GEN_NS_PARSER_END\n"))
to_type
));
}