Mirror of https://github.com/Ed94/gencpp.git (synced 2024-12-22 15:54:45 -08:00)

Commit d45908fb32 (parent a7c9dad9fd): reduce TokType enum to C-compatible
@@ -6,235 +6,231 @@
 // This file was generated automatially by gencpp's bootstrap.cpp (See: https://github.com/Ed94/gencpp)

 GEN_NS_PARSER_BEGIN

-namespace ETokType
-{
-    #define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Attribute_API_Import, "GEN_API_Import_Code" )
-
-    enum Type : u32
-    {
-        Invalid,
-        Access_Private,
-        Access_Protected,
-        Access_Public,
-        Access_MemberSymbol,
-        Access_StaticSymbol,
-        Ampersand,
-        Ampersand_DBL,
-        Assign_Classifer,
-        Attribute_Open,
-        Attribute_Close,
-        BraceCurly_Open,
-        BraceCurly_Close,
-        BraceSquare_Open,
-        BraceSquare_Close,
-        Capture_Start,
-        Capture_End,
-        Comment,
-        Comment_End,
-        Comment_Start,
-        Char,
-        Comma,
-        Decl_Class,
-        Decl_GNU_Attribute,
-        Decl_MSVC_Attribute,
-        Decl_Enum,
-        Decl_Extern_Linkage,
-        Decl_Friend,
-        Decl_Module,
-        Decl_Namespace,
-        Decl_Operator,
-        Decl_Struct,
-        Decl_Template,
-        Decl_Typedef,
-        Decl_Using,
-        Decl_Union,
-        Identifier,
-        Module_Import,
-        Module_Export,
-        NewLine,
-        Number,
-        Operator,
-        Preprocess_Hash,
-        Preprocess_Define,
-        Preprocess_If,
-        Preprocess_IfDef,
-        Preprocess_IfNotDef,
-        Preprocess_ElIf,
-        Preprocess_Else,
-        Preprocess_EndIf,
-        Preprocess_Include,
-        Preprocess_Pragma,
-        Preprocess_Content,
-        Preprocess_Macro,
-        Preprocess_Unsupported,
-        Spec_Alignas,
-        Spec_Const,
-        Spec_Consteval,
-        Spec_Constexpr,
-        Spec_Constinit,
-        Spec_Explicit,
-        Spec_Extern,
-        Spec_Final,
-        Spec_ForceInline,
-        Spec_Global,
-        Spec_Inline,
-        Spec_Internal_Linkage,
-        Spec_LocalPersist,
-        Spec_Mutable,
-        Spec_NeverInline,
-        Spec_Override,
-        Spec_Static,
-        Spec_ThreadLocal,
-        Spec_Volatile,
-        Spec_Virtual,
-        Star,
-        Statement_End,
-        StaticAssert,
-        String,
-        Type_Typename,
-        Type_Unsigned,
-        Type_Signed,
-        Type_Short,
-        Type_Long,
-        Type_bool,
-        Type_char,
-        Type_int,
-        Type_double,
-        Type_MS_int8,
-        Type_MS_int16,
-        Type_MS_int32,
-        Type_MS_int64,
-        Type_MS_W64,
-        Varadic_Argument,
-        __Attributes_Start,
-        Attribute_API_Export,
-        Attribute_API_Import,
-        NumTokens
-    };
+#define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )
+
+typedef TokType_Def TokType;
+
+enum TokType_Def : u32
+{
+    Tok_Invalid,
+    Tok_Access_Private,
+    Tok_Access_Protected,
+    Tok_Access_Public,
+    Tok_Access_MemberSymbol,
+    Tok_Access_StaticSymbol,
+    Tok_Ampersand,
+    Tok_Ampersand_DBL,
+    Tok_Assign_Classifer,
+    Tok_Attribute_Open,
+    Tok_Attribute_Close,
+    Tok_BraceCurly_Open,
+    Tok_BraceCurly_Close,
+    Tok_BraceSquare_Open,
+    Tok_BraceSquare_Close,
+    Tok_Capture_Start,
+    Tok_Capture_End,
+    Tok_Comment,
+    Tok_Comment_End,
+    Tok_Comment_Start,
+    Tok_Char,
+    Tok_Comma,
+    Tok_Decl_Class,
+    Tok_Decl_GNU_Attribute,
+    Tok_Decl_MSVC_Attribute,
+    Tok_Decl_Enum,
+    Tok_Decl_Extern_Linkage,
+    Tok_Decl_Friend,
+    Tok_Decl_Module,
+    Tok_Decl_Namespace,
+    Tok_Decl_Operator,
+    Tok_Decl_Struct,
+    Tok_Decl_Template,
+    Tok_Decl_Typedef,
+    Tok_Decl_Using,
+    Tok_Decl_Union,
+    Tok_Identifier,
+    Tok_Module_Import,
+    Tok_Module_Export,
+    Tok_NewLine,
+    Tok_Number,
+    Tok_Operator,
+    Tok_Preprocess_Hash,
+    Tok_Preprocess_Define,
+    Tok_Preprocess_If,
+    Tok_Preprocess_IfDef,
+    Tok_Preprocess_IfNotDef,
+    Tok_Preprocess_ElIf,
+    Tok_Preprocess_Else,
+    Tok_Preprocess_EndIf,
+    Tok_Preprocess_Include,
+    Tok_Preprocess_Pragma,
+    Tok_Preprocess_Content,
+    Tok_Preprocess_Macro,
+    Tok_Preprocess_Unsupported,
+    Tok_Spec_Alignas,
+    Tok_Spec_Const,
+    Tok_Spec_Consteval,
+    Tok_Spec_Constexpr,
+    Tok_Spec_Constinit,
+    Tok_Spec_Explicit,
+    Tok_Spec_Extern,
+    Tok_Spec_Final,
+    Tok_Spec_ForceInline,
+    Tok_Spec_Global,
+    Tok_Spec_Inline,
+    Tok_Spec_Internal_Linkage,
+    Tok_Spec_LocalPersist,
+    Tok_Spec_Mutable,
+    Tok_Spec_NeverInline,
+    Tok_Spec_Override,
+    Tok_Spec_Static,
+    Tok_Spec_ThreadLocal,
+    Tok_Spec_Volatile,
+    Tok_Spec_Virtual,
+    Tok_Star,
+    Tok_Statement_End,
+    Tok_StaticAssert,
+    Tok_String,
+    Tok_Type_Typename,
+    Tok_Type_Unsigned,
+    Tok_Type_Signed,
+    Tok_Type_Short,
+    Tok_Type_Long,
+    Tok_Type_bool,
+    Tok_Type_char,
+    Tok_Type_int,
+    Tok_Type_double,
+    Tok_Type_MS_int8,
+    Tok_Type_MS_int16,
+    Tok_Type_MS_int32,
+    Tok_Type_MS_int64,
+    Tok_Type_MS_W64,
+    Tok_Varadic_Argument,
+    Tok___Attributes_Start,
+    Tok_Attribute_API_Export,
+    Tok_Attribute_API_Import,
+    Tok_NumTokens
+};

-    inline StrC to_str( Type type )
+inline StrC to_str( TokType type )
 {
     local_persist StrC lookup[] {
         { sizeof( "__invalid__" ), "__invalid__" },
         { sizeof( "private" ), "private" },
         { sizeof( "protected" ), "protected" },
         { sizeof( "public" ), "public" },
         { sizeof( "." ), "." },
         { sizeof( "::" ), "::" },
         { sizeof( "&" ), "&" },
         { sizeof( "&&" ), "&&" },
         { sizeof( ":" ), ":" },
         { sizeof( "[[" ), "[[" },
         { sizeof( "]]" ), "]]" },
         { sizeof( "{" ), "{" },
         { sizeof( "}" ), "}" },
         { sizeof( "[" ), "[" },
         { sizeof( "]" ), "]" },
         { sizeof( "(" ), "(" },
         { sizeof( ")" ), ")" },
         { sizeof( "__comment__" ), "__comment__" },
         { sizeof( "__comment_end__" ), "__comment_end__" },
         { sizeof( "__comment_start__" ), "__comment_start__" },
         { sizeof( "__character__" ), "__character__" },
         { sizeof( "," ), "," },
         { sizeof( "class" ), "class" },
         { sizeof( "__attribute__" ), "__attribute__" },
         { sizeof( "__declspec" ), "__declspec" },
         { sizeof( "enum" ), "enum" },
         { sizeof( "extern" ), "extern" },
         { sizeof( "friend" ), "friend" },
         { sizeof( "module" ), "module" },
         { sizeof( "namespace" ), "namespace" },
         { sizeof( "operator" ), "operator" },
         { sizeof( "struct" ), "struct" },
         { sizeof( "template" ), "template" },
         { sizeof( "typedef" ), "typedef" },
         { sizeof( "using" ), "using" },
         { sizeof( "union" ), "union" },
         { sizeof( "__identifier__" ), "__identifier__" },
         { sizeof( "import" ), "import" },
         { sizeof( "export" ), "export" },
         { sizeof( "__new_line__" ), "__new_line__" },
         { sizeof( "__number__" ), "__number__" },
         { sizeof( "__operator__" ), "__operator__" },
         { sizeof( "#" ), "#" },
         { sizeof( "define" ), "define" },
         { sizeof( "if" ), "if" },
         { sizeof( "ifdef" ), "ifdef" },
         { sizeof( "ifndef" ), "ifndef" },
         { sizeof( "elif" ), "elif" },
         { sizeof( "else" ), "else" },
         { sizeof( "endif" ), "endif" },
         { sizeof( "include" ), "include" },
         { sizeof( "pragma" ), "pragma" },
         { sizeof( "__macro_content__" ), "__macro_content__" },
         { sizeof( "__macro__" ), "__macro__" },
         { sizeof( "__unsupported__" ), "__unsupported__" },
         { sizeof( "alignas" ), "alignas" },
         { sizeof( "const" ), "const" },
         { sizeof( "consteval" ), "consteval" },
         { sizeof( "constexpr" ), "constexpr" },
         { sizeof( "constinit" ), "constinit" },
         { sizeof( "explicit" ), "explicit" },
         { sizeof( "extern" ), "extern" },
         { sizeof( "final" ), "final" },
         { sizeof( "forceinline" ), "forceinline" },
         { sizeof( "global" ), "global" },
         { sizeof( "inline" ), "inline" },
         { sizeof( "internal" ), "internal" },
         { sizeof( "local_persist" ), "local_persist" },
         { sizeof( "mutable" ), "mutable" },
         { sizeof( "neverinline" ), "neverinline" },
         { sizeof( "override" ), "override" },
         { sizeof( "static" ), "static" },
         { sizeof( "thread_local" ), "thread_local" },
         { sizeof( "volatile" ), "volatile" },
         { sizeof( "virtual" ), "virtual" },
         { sizeof( "*" ), "*" },
         { sizeof( ";" ), ";" },
         { sizeof( "static_assert" ), "static_assert" },
         { sizeof( "__string__" ), "__string__" },
         { sizeof( "typename" ), "typename" },
         { sizeof( "unsigned" ), "unsigned" },
         { sizeof( "signed" ), "signed" },
         { sizeof( "short" ), "short" },
         { sizeof( "long" ), "long" },
         { sizeof( "bool" ), "bool" },
         { sizeof( "char" ), "char" },
         { sizeof( "int" ), "int" },
         { sizeof( "double" ), "double" },
         { sizeof( "__int8" ), "__int8" },
         { sizeof( "__int16" ), "__int16" },
         { sizeof( "__int32" ), "__int32" },
         { sizeof( "__int64" ), "__int64" },
         { sizeof( "_W64" ), "_W64" },
         { sizeof( "..." ), "..." },
         { sizeof( "__attrib_start__" ), "__attrib_start__" },
         { sizeof( "GEN_API_Export_Code" ), "GEN_API_Export_Code" },
         { sizeof( "GEN_API_Import_Code" ), "GEN_API_Import_Code" },
     };
     return lookup[type];
 }

-    inline Type to_type( StrC str )
+inline TokType to_type( StrC str )
 {
-    local_persist u32 keymap[NumTokens];
-    do_once_start for ( u32 index = 0; index < NumTokens; index++ )
+    local_persist u32 keymap[Tok_NumTokens];
+    do_once_start for ( u32 index = 0; index < Tok_NumTokens; index++ )
     {
-        StrC enum_str = to_str( (Type)index );
+        StrC enum_str = to_str( (TokType)index );
         keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 );
     }
     do_once_end u32 hash = crc32( str.Ptr, str.Len );
-    for ( u32 index = 0; index < NumTokens; index++ )
+    for ( u32 index = 0; index < Tok_NumTokens; index++ )
     {
         if ( keymap[index] == hash )
-            return (Type)index;
+            return (TokType)index;
     }
-    return Invalid;
+    return Tok_Invalid;
 }
-} // namespace ETokType
-
-using TokType = ETokType::Type;

 GEN_NS_PARSER_END
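Taken together, this hunk flattens the generated header: the namespace ETokType wrapper and the `using TokType = ETokType::Type;` alias disappear, every enumerator gains a Tok_ prefix, and the alias becomes a plain typedef. A minimal sketch of the resulting pattern (illustrative enumerators only, not gencpp's full list) shows why the new shape is consumable from C, where namespaces and ETokType::Type qualification do not exist:

    /* Sketch of the C-compatible enum pattern this commit adopts. The Tok_
       prefix replaces the namespace as the collision guard; the typedef
       replaces the C++ `using` alias. Enumerators here are illustrative. */
    enum TokType_Def
    {
        Tok_Invalid,
        Tok_Decl_Class,
        Tok_Decl_Struct,
        Tok_NumTokens
    };
    typedef enum TokType_Def TokType;

    /* Reads identically from C and C++: */
    static int tok_is_decl( TokType t ) { return t == Tok_Decl_Class || t == Tok_Decl_Struct; }

Note that the generated header spells the alias `typedef TokType_Def TokType;`, which is valid C++; a C translation unit would need the `enum` keyword as in the sketch above.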
@@ -19,7 +19,7 @@ CodeClass parse_class( StrC def )

     Context.Tokens = toks;
     push_scope();
-    CodeClass result = (CodeClass) parse_class_struct( parser::TokType::Decl_Class );
+    CodeClass result = (CodeClass) parse_class_struct( Tok_Decl_Class );
     pop(& Context);
     return result;
 }

@@ -235,7 +235,7 @@ CodeStruct parse_struct( StrC def )

     Context.Tokens = toks;
     push_scope();
-    CodeStruct result = (CodeStruct) parse_class_struct( TokType::Decl_Struct );
+    CodeStruct result = (CodeStruct) parse_class_struct( Tok_Decl_Struct );
     pop(& Context);
     return result;
 }
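These two hunks are representative of the rest of the commit: each call site swaps a scoped name for the prefixed one, and nothing else moves. They also show the shared dispatch, one parse_class_struct worker selected by token id. The call shape, taken directly from the hunks above with the surrounding setup omitted, is:

    // Both wrappers funnel into the same worker, selected by token id.
    CodeClass  as_class  = (CodeClass)  parse_class_struct( Tok_Decl_Class  );
    CodeStruct as_struct = (CodeStruct) parse_class_struct( Tok_Decl_Struct );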
@@ -33,7 +33,7 @@ struct Token
     u32 Flags;
 };

-constexpr Token NullToken { nullptr, 0, TokType::Invalid, false, 0, TF_Null };
+constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null };

 AccessSpec to_access_specifier(Token tok)
 {

@@ -47,7 +47,7 @@ StrC to_str(Token tok)

 bool is_valid( Token tok )
 {
-    return tok.Text && tok.Length && tok.Type != TokType::Invalid;
+    return tok.Text && tok.Length && tok.Type != Tok_Invalid;
 }

 bool is_access_operator(Token tok)

@@ -94,7 +94,7 @@ String to_string(Token tok)
 {
     String result = string_make_reserve( GlobalAllocator, kilobytes(4) );

-    StrC type_str = ETokType::to_str( tok.Type );
+    StrC type_str = to_str( tok.Type );

     append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
         , tok.Line, tok.Column

@@ -117,7 +117,7 @@ Token* current(TokArray* self, bool skip_formatting )
 {
     if ( skip_formatting )
     {
-        while ( self->Arr[self->Idx].Type == TokType::NewLine || self->Arr[self->Idx].Type == TokType::Comment )
+        while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
             self->Idx++;
     }

@@ -130,7 +130,7 @@ Token* previous(TokArray self, bool skip_formatting)

     if ( skip_formatting )
     {
-        while ( self.Arr[idx].Type == TokType::NewLine )
+        while ( self.Arr[idx].Type == Tok_NewLine )
             idx --;

     return & self.Arr[idx];

@@ -145,7 +145,7 @@ Token* next(TokArray self, bool skip_formatting)

     if ( skip_formatting )
     {
-        while ( self.Arr[idx].Type == TokType::NewLine )
+        while ( self.Arr[idx].Type == Tok_NewLine )
             idx++;

     return & self.Arr[idx + 1];
@@ -221,7 +221,7 @@ forceinline
 s32 lex_preprocessor_directive( LexContext* ctx )
 {
     char const* hash = ctx->scanner;
-    append( & Tokens, { hash, 1, TokType::Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } );
+    append( & Tokens, { hash, 1, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess } );

     move_forward();
     SkipWhitespace();

@@ -233,12 +233,12 @@ s32 lex_preprocessor_directive( LexContext* ctx )
         ctx->token.Length++;
     }

-    ctx->token.Type = ETokType::to_type( to_str(ctx->token) );
+    ctx->token.Type = to_type( to_str(ctx->token) );

-    bool is_preprocessor = ctx->token.Type >= TokType::Preprocess_Define && ctx->token.Type <= TokType::Preprocess_Pragma;
+    bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma;
     if ( ! is_preprocessor )
     {
-        ctx->token.Type = TokType::Preprocess_Unsupported;
+        ctx->token.Type = Tok_Preprocess_Unsupported;

         // Its an unsupported directive, skip it
         s32 within_string = false;

@@ -301,14 +301,14 @@ s32 lex_preprocessor_directive( LexContext* ctx )
         return Lex_Continue; // Skip found token, its all handled here.
     }

-    if ( ctx->token.Type == TokType::Preprocess_Else || ctx->token.Type == TokType::Preprocess_EndIf )
+    if ( ctx->token.Type == Tok_Preprocess_Else || ctx->token.Type == Tok_Preprocess_EndIf )
     {
         ctx->token.Flags |= TF_Preprocess_Cond;
         append( & Tokens, ctx->token );
         end_line();
         return Lex_Continue;
     }
-    else if ( ctx->token.Type >= TokType::Preprocess_If && ctx->token.Type <= TokType::Preprocess_ElIf )
+    else if ( ctx->token.Type >= Tok_Preprocess_If && ctx->token.Type <= Tok_Preprocess_ElIf )
     {
         ctx->token.Flags |= TF_Preprocess_Cond;
     }

@@ -317,9 +317,9 @@ s32 lex_preprocessor_directive( LexContext* ctx )

     SkipWhitespace();

-    if ( ctx->token.Type == TokType::Preprocess_Define )
+    if ( ctx->token.Type == Tok_Preprocess_Define )
     {
-        Token name = { ctx->scanner, 0, TokType::Identifier, ctx->line, ctx->column, TF_Preprocess };
+        Token name = { ctx->scanner, 0, Tok_Identifier, ctx->line, ctx->column, TF_Preprocess };

         name.Text = ctx->scanner;
         name.Length = 1;

@@ -343,11 +343,11 @@ s32 lex_preprocessor_directive( LexContext* ctx )
         set(& ctx->defines, key, to_str(name) );
     }

-    Token preprocess_content = { ctx->scanner, 0, TokType::Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
+    Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };

-    if ( ctx->token.Type == TokType::Preprocess_Include )
+    if ( ctx->token.Type == Tok_Preprocess_Include )
     {
-        preprocess_content.Type = TokType::String;
+        preprocess_content.Type = Tok_String;

         if ( current != '"' && current != '<' )
         {
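Note the pattern these hunks preserve: directive classification uses ordered range checks rather than per-token comparisons. That only works because the Tok_Preprocess_* enumerators are declared as contiguous runs in TokType_Def, so the rename must keep declaration order intact. A hedged sketch of the idiom (the helper names are mine, not gencpp's):

    // Range classification over contiguous enumerators, as the lexer does it.
    // Valid only while Tok_Preprocess_Define .. Tok_Preprocess_Pragma and
    // Tok_Preprocess_If .. Tok_Preprocess_ElIf remain contiguous in TokType_Def.
    inline bool tok_is_preprocessor( TokType type )
    {
        return type >= Tok_Preprocess_Define && type <= Tok_Preprocess_Pragma;
    }
    inline bool tok_is_preprocess_conditional( TokType type )
    {
        return type >= Tok_Preprocess_If && type <= Tok_Preprocess_ElIf;
    }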
@@ -452,31 +452,31 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 forceinline
 void lex_found_token( LexContext* ctx )
 {
-    if ( ctx->token.Type != TokType::Invalid )
+    if ( ctx->token.Type != Tok_Invalid )
     {
         append( & Tokens, ctx->token );
         return;
     }

-    TokType type = ETokType::to_type( to_str(ctx->token) );
+    TokType type = to_type( to_str(ctx->token) );

-    if (type <= TokType::Access_Public && type >= TokType::Access_Private )
+    if (type <= Tok_Access_Public && type >= Tok_Access_Private )
     {
         ctx->token.Flags |= TF_AccessSpecifier;
     }

-    if ( type > TokType::__Attributes_Start )
+    if ( type > Tok___Attributes_Start )
     {
         ctx->token.Flags |= TF_Attribute;
     }

-    if ( type == ETokType::Decl_Extern_Linkage )
+    if ( type == Tok_Decl_Extern_Linkage )
     {
         SkipWhitespace();

         if ( current != '"' )
         {
-            type = ETokType::Spec_Extern;
+            type = Tok_Spec_Extern;
             ctx->token.Flags |= TF_Specifier;
         }

@@ -485,9 +485,9 @@ void lex_found_token( LexContext* ctx )
         return;
     }

-    if ( ( type <= TokType::Star && type >= TokType::Spec_Alignas)
-        || type == TokType::Ampersand
-        || type == TokType::Ampersand_DBL )
+    if ( ( type <= Tok_Star && type >= Tok_Spec_Alignas)
+        || type == Tok_Ampersand
+        || type == Tok_Ampersand_DBL )
     {
         ctx->token.Type = type;
         ctx->token.Flags |= TF_Specifier;

@@ -496,7 +496,7 @@ void lex_found_token( LexContext* ctx )
     }

-    if ( type != TokType::Invalid )
+    if ( type != Tok_Invalid )
     {
         ctx->token.Type = type;
         append( & Tokens, ctx->token );

@@ -512,7 +512,7 @@ void lex_found_token( LexContext* ctx )
     StrC* define = get(ctx->defines, key );
     if ( define )
     {
-        ctx->token.Type = TokType::Preprocess_Macro;
+        ctx->token.Type = Tok_Preprocess_Macro;

         // Want to ignore any arguments the define may have as they can be execution expressions.
         if ( ctx->left && current == '(' )

@@ -548,7 +548,7 @@ void lex_found_token( LexContext* ctx )
     }
     else
     {
-        ctx->token.Type = TokType::Identifier;
+        ctx->token.Type = Tok_Identifier;
     }

     append( & Tokens, ctx->token );
@@ -607,7 +607,7 @@ TokArray lex( StrC content )
     }
 #endif

-    c.token = { c.scanner, 0, TokType::Invalid, c.line, c.column, TF_Null };
+    c.token = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };

     bool is_define = false;

@@ -623,7 +623,7 @@ TokArray lex( StrC content )
     {
         move_forward();

-        c.token.Type = TokType::NewLine;
+        c.token.Type = Tok_NewLine;
         c.token.Length++;

         append( & Tokens, c.token );

@@ -655,7 +655,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Access_MemberSymbol;
+        c.token.Type = Tok_Access_MemberSymbol;
         c.token.Flags = TF_AccessOperator;

         if (c.left) {

@@ -668,7 +668,7 @@ TokArray lex( StrC content )
         if( current == '.' )
         {
             c.token.Length = 3;
-            c.token.Type = TokType::Varadic_Argument;
+            c.token.Type = Tok_Varadic_Argument;
             c.token.Flags = TF_Null;
             move_forward();
         }

@@ -686,7 +686,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Ampersand;
+        c.token.Type = Tok_Ampersand;
         c.token.Flags |= TF_Operator;
         c.token.Flags |= TF_Specifier;

@@ -696,7 +696,7 @@ TokArray lex( StrC content )
         if ( current == '&' ) // &&
         {
             c.token.Length = 2;
-            c.token.Type = TokType::Ampersand_DBL;
+            c.token.Type = Tok_Ampersand_DBL;

             if (c.left)
                 move_forward();

@@ -708,9 +708,9 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Assign_Classifer;
+        c.token.Type = Tok_Assign_Classifer;
         // Can be either a classifier (ParentType, Bitfield width), or ternary else
-        // token.Type = TokType::Colon;
+        // token.Type = Tok_Colon;

         if (c.left)
             move_forward();

@@ -718,7 +718,7 @@ TokArray lex( StrC content )
         if ( current == ':' )
         {
             move_forward();
-            c.token.Type = TokType::Access_StaticSymbol;
+            c.token.Type = Tok_Access_StaticSymbol;
             c.token.Length++;
         }
         goto FoundToken;

@@ -727,7 +727,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::BraceCurly_Open;
+        c.token.Type = Tok_BraceCurly_Open;

         if (c.left)
             move_forward();

@@ -737,7 +737,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::BraceCurly_Close;
+        c.token.Type = Tok_BraceCurly_Close;
         c.token.Flags = TF_EndDefinition;

         if (c.left)

@@ -750,7 +750,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::BraceSquare_Open;
+        c.token.Type = Tok_BraceSquare_Open;
         if ( c.left )
         {
             move_forward();

@@ -758,7 +758,7 @@ TokArray lex( StrC content )
             if ( current == ']' )
             {
                 c.token.Length = 2;
-                c.token.Type = TokType::Operator;
+                c.token.Type = Tok_Operator;
                 move_forward();
             }
         }

@@ -768,7 +768,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::BraceSquare_Close;
+        c.token.Type = Tok_BraceSquare_Close;

         if (c.left)
             move_forward();

@@ -778,7 +778,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Capture_Start;
+        c.token.Type = Tok_Capture_Start;

         if (c.left)
             move_forward();

@@ -788,7 +788,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Capture_End;
+        c.token.Type = Tok_Capture_End;

         if (c.left)
             move_forward();

@@ -798,7 +798,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Char;
+        c.token.Type = Tok_Char;
         c.token.Flags = TF_Literal;

         move_forward();

@@ -832,7 +832,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Comma;
+        c.token.Type = Tok_Comma;
         c.token.Flags = TF_Operator;

         if (c.left)

@@ -843,7 +843,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Star;
+        c.token.Type = Tok_Star;
         c.token.Flags |= TF_Specifier;
         c.token.Flags |= TF_Operator;

@@ -854,7 +854,7 @@ TokArray lex( StrC content )
        {
            c.token.Length++;
            c.token.Flags |= TF_Assign;
-           // c.token.Type = TokType::Assign_Multiply;
+           // c.token.Type = Tok_Assign_Multiply;

            if ( c.left )
                move_forward();

@@ -866,7 +866,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Statement_End;
+        c.token.Type = Tok_Statement_End;
         c.token.Flags = TF_EndDefinition;

         if (c.left)

@@ -879,7 +879,7 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::String;
+        c.token.Type = Tok_String;
         c.token.Flags |= TF_Literal;

         move_forward();

@@ -913,8 +913,8 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Operator;
-        // c.token.Type = TokType::Ternary;
+        c.token.Type = Tok_Operator;
+        // c.token.Type = Tok_Ternary;
         c.token.Flags = TF_Operator;

         if (c.left)

@@ -926,8 +926,8 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Operator;
-        // c.token.Type = TokType::Assign;
+        c.token.Type = Tok_Operator;
+        // c.token.Type = Tok_Assign;
         c.token.Flags = TF_Operator;
         c.token.Flags |= TF_Assign;

@@ -947,44 +947,44 @@ TokArray lex( StrC content )
     }
     case '+':
     {
-        // c.token.Type = TokType::Add
+        // c.token.Type = Tok_Add

     }
     case '%':
     {
-        // c.token.Type = TokType::Modulo;
+        // c.token.Type = Tok_Modulo;

     }
     case '^':
     {
-        // c.token.Type = TokType::B_XOr;
+        // c.token.Type = Tok_B_XOr;
     }
     case '~':
     {
-        // c.token.Type = TokType::Unary_Not;
+        // c.token.Type = Tok_Unary_Not;

     }
     case '!':
     {
-        // c.token.Type = TokType::L_Not;
+        // c.token.Type = Tok_L_Not;
     }
     case '<':
     {
-        // c.token.Type = TokType::Lesser;
+        // c.token.Type = Tok_Lesser;

     }
     case '>':
     {
-        // c.token.Type = TokType::Greater;
+        // c.token.Type = Tok_Greater;

     }
     case '|':
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Operator;
+        c.token.Type = Tok_Operator;
         c.token.Flags = TF_Operator;
-        // token.Type = TokType::L_Or;
+        // token.Type = Tok_L_Or;

         if (c.left)
             move_forward();

@@ -994,7 +994,7 @@ TokArray lex( StrC content )
             c.token.Length++;
             c.token.Flags |= TF_Assign;
             // token.Flags |= TokFlags::Assignment;
-            // token.Type = TokType::Assign_L_Or;
+            // token.Type = Tok_Assign_L_Or;

             if (c.left)
                 move_forward();

@@ -1014,8 +1014,8 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Operator;
-        // token.Type = TokType::Subtract;
+        c.token.Type = Tok_Operator;
+        // token.Type = Tok_Subtract;
         c.token.Flags = TF_Operator;
         if ( c.left )
         {

@@ -1024,13 +1024,13 @@ TokArray lex( StrC content )
             if ( current == '>' )
             {
                 c.token.Length++;
-                // token.Type = TokType::Access_PointerToMemberSymbol;
+                // token.Type = Tok_Access_PointerToMemberSymbol;
                 c.token.Flags |= TF_AccessOperator;
                 move_forward();

                 if ( current == '*' )
                 {
-                    // token.Type = TokType::Access_PointerToMemberOfPointerSymbol;
+                    // token.Type = Tok_Access_PointerToMemberOfPointerSymbol;
                     c.token.Length++;
                     move_forward();
                 }

@@ -1038,7 +1038,7 @@ TokArray lex( StrC content )
             else if ( current == '=' )
             {
                 c.token.Length++;
-                // token.Type = TokType::Assign_Subtract;
+                // token.Type = Tok_Assign_Subtract;
                 c.token.Flags |= TF_Assign;

                 if (c.left)

@@ -1058,8 +1058,8 @@ TokArray lex( StrC content )
     {
         c.token.Text = c.scanner;
         c.token.Length = 1;
-        c.token.Type = TokType::Operator;
-        // token.Type = TokType::Divide;
+        c.token.Type = Tok_Operator;
+        // token.Type = Tok_Divide;
         c.token.Flags = TF_Operator;
         move_forward();

@@ -1074,7 +1074,7 @@ TokArray lex( StrC content )
     }
     else if ( current == '/' )
     {
-        c.token.Type = TokType::Comment;
+        c.token.Type = Tok_Comment;
         c.token.Length = 2;
         c.token.Flags = TF_Null;
         move_forward();

@@ -1100,7 +1100,7 @@ TokArray lex( StrC content )
     }
     else if ( current == '*' )
     {
-        c.token.Type = TokType::Comment;
+        c.token.Type = Tok_Comment;
         c.token.Length = 2;
         c.token.Flags = TF_Null;
         move_forward();

@@ -1160,7 +1160,7 @@ TokArray lex( StrC content )

     c.token.Text = c.scanner;
     c.token.Length = 1;
-    c.token.Type = TokType::Number;
+    c.token.Type = Tok_Number;
     c.token.Flags = TF_Literal;
     move_forward();

@@ -1230,7 +1230,7 @@ TokArray lex( StrC content )
     {
         log_fmt( "Token %d Type: %s : %.*s\n"
             , idx
-            , ETokType::to_str( Tokens[ idx ].Type ).Ptr
+            , to_str( Tokens[ idx ].Type ).Ptr
             , Tokens[ idx ].Length, Tokens[ idx ].Text
         );
     }
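For reference, to_type (whose behavior this commit leaves unchanged) resolves a lexeme back to its token id by hashing each token's canonical spelling once into a keymap, then comparing against the hash of the incoming string; it hashes Len - 1 bytes when building the map to drop the null terminator, since lexed tokens are not null-terminated. A self-contained sketch of the same strategy, with std::hash standing in for gencpp's crc32 (an assumption made only so the sketch compiles on its own):

    #include <cstdint>
    #include <functional>
    #include <string_view>

    enum Tok : uint32_t { Tok_Invalid, Tok_Decl_Class, Tok_Decl_Struct, Tok_NumToks };
    constexpr std::string_view TokSpelling[Tok_NumToks] = { "__invalid__", "class", "struct" };

    Tok to_type( std::string_view str )
    {
        // Built once on first call, mirroring the do_once_start/do_once_end guard.
        static uint64_t keymap[Tok_NumToks];
        static bool     built = false;
        if ( ! built )
        {
            for ( uint32_t idx = 0; idx < Tok_NumToks; idx++ )
                keymap[idx] = std::hash<std::string_view>{}( TokSpelling[idx] );
            built = true;
        }
        uint64_t hash = std::hash<std::string_view>{}( str );
        for ( uint32_t idx = 0; idx < Tok_NumToks; idx++ )
            if ( keymap[idx] == hash )
                return (Tok)idx;
        return Tok_Invalid;
    }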
[One file's diff was suppressed because it is too large.]
@@ -249,7 +249,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
         char const* enum_str = enum_strs[idx].string;
         char const* entry_to_str = enum_str_strs [idx].string;

-        append_fmt( & enum_entries, "%s,\n", enum_str );
+        append_fmt( & enum_entries, "Tok_%s,\n", enum_str );
         append_fmt( & to_str_entries, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
     }

@@ -258,9 +258,9 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
         char const* attribute_str = attribute_strs[idx].string;
         char const* entry_to_str = attribute_str_strs [idx].string;

-        append_fmt( & attribute_entries, "Attribute_%s,\n", attribute_str );
+        append_fmt( & attribute_entries, "Tok_Attribute_%s,\n", attribute_str );
         append_fmt( & to_str_attributes, "{ sizeof(\"%s\"), \"%s\" },\n", entry_to_str, entry_to_str);
-        append_fmt( & attribute_define_entries, "Entry( Attribute_%s, \"%s\" )", attribute_str, entry_to_str );
+        append_fmt( & attribute_define_entries, "Entry( Tok_Attribute_%s, \"%s\" )", attribute_str, entry_to_str );

         if ( idx < num(attribute_strs) - 1 )
             append( & attribute_define_entries, " \\\n");

@@ -275,11 +275,11 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )

     // We cannot parse this enum, it has Attribute names as enums
     CodeEnum enum_code = parse_enum(token_fmt("entries", (StrC)enum_entries, "attribute_toks", (StrC)attribute_entries, stringize(
-        enum Type : u32
+        enum TokType_Def : u32
         {
             <entries>
             <attribute_toks>
-            NumTokens
+            Tok_NumTokens
         };
     )));

@@ -291,7 +291,7 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
 #undef do_once_end
     CodeFn to_str = parse_function(token_fmt("entries", (StrC)to_str_entries, "attribute_toks", (StrC)to_str_attributes, stringize(
         inline
-        StrC to_str( Type type )
+        StrC to_str( TokType type )
         {
             local_persist
             StrC lookup[] {

@@ -305,14 +305,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )

     CodeFn to_type = parse_function( token_fmt( "entries", (StrC)to_str_entries, stringize(
         inline
-        Type to_type( StrC str )
+        TokType to_type( StrC str )
         {
             local_persist
-            u32 keymap[ NumTokens ];
+            u32 keymap[ Tok_NumTokens ];
             do_once_start
-            for ( u32 index = 0; index < NumTokens; index++ )
+            for ( u32 index = 0; index < Tok_NumTokens; index++ )
             {
-                StrC enum_str = to_str( (Type)index );
+                StrC enum_str = to_str( (TokType)index );

                 // We subtract 1 to remove the null terminator
                 // This is because the tokens lexed are not null terminated.

@@ -322,13 +322,13 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )

             u32 hash = crc32( str.Ptr, str.Len );

-            for ( u32 index = 0; index < NumTokens; index++ )
+            for ( u32 index = 0; index < Tok_NumTokens; index++ )
             {
                 if ( keymap[index] == hash )
-                    return (Type)index;
+                    return (TokType)index;
             }

-            return Invalid;
+            return Tok_Invalid;
         }
     )));
 #pragma pop_macro("local_persist")

@@ -336,15 +336,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path )
 #pragma pop_macro("do_once_end")

     //CodeNS nspace = def_namespace( name(ETokType), def_namespace_body( args( attribute_entires_def, enum_code, to_str, to_type ) ) );
-    CodeUsing td_toktype = def_using( name(TokType), def_type( name(ETokType::Type) ) );
+    CodeTypedef td_toktype = parse_typedef( code( typedef TokType_Def TokType; ));

     return def_global_body( args(
-        untyped_str(txt("GEN_NS_PARSER_BEGIN\n")),
         attribute_entires_def,
+        td_toktype,
         enum_code,
         to_str,
-        td_toktype,
-        to_type,
-        untyped_str(txt("GEN_NS_PARSER_END\n"))
+        to_type
     ));
 }
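The attribute_define_entries loop above emits GEN_DEFINE_ATTRIBUTE_TOKENS as an X-macro list, now with Tok_-prefixed ids. A consumer expands the list by defining Entry before each use; a minimal sketch of the pattern (the Entry expansions here are illustrative, not gencpp's own):

    #define GEN_DEFINE_ATTRIBUTE_TOKENS \
        Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) \
        Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )

    // Expansion 1: enumerators.
    #define Entry( tok, str ) tok,
    enum AttributeTok { GEN_DEFINE_ATTRIBUTE_TOKENS Attribute_Tok_Count };
    #undef Entry

    // Expansion 2: the matching display strings, kept in sync automatically.
    #define Entry( tok, str ) str,
    constexpr char const* AttributeTokStr[] = { GEN_DEFINE_ATTRIBUTE_TOKENS };
    #undef Entry

Because both expansions come from the same list, adding an attribute token to the generator keeps the enum and its strings aligned without manual bookkeeping.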