Mirror of https://github.com/Ed94/gencpp.git (synced 2024-12-22 07:44:45 -08:00)
TokType compiles for C library
Commit e6f30c7e1d (parent 6147912783)
@@ -1263,7 +1263,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
 #pragma endregion Print Dependencies
 
 #pragma region Print Components
-CodeBody etoktype = gen_etoktype( project_dir "enums/ETokType.csv", project_dir "enums/AttributeTokens.csv" );
+CodeBody etoktype = gen_etoktype( project_dir "enums/ETokType.csv", project_dir "enums/AttributeTokens.csv", helper_use_c_definition );
 
 header.print_fmt( "\nGEN_NS_BEGIN\n");
 

@@ -1283,12 +1283,12 @@ R"(#define <interface_name>( code ) _Generic( (code), \
 header.print_fmt( "#pragma region Interface\n" );
 header.print( src_interface );
 header.print( format_code_to_untyped(src_upfront) );
-// header.print_fmt( "\n#pragma region Parsing\n\n" );
-// header.print( format_code_to_untyped(parser_nspace) );
+header.print_fmt( "\n#pragma region Parsing\n\n" );
+header.print( format_code_to_untyped(etoktype) );
 // header.print( lexer );
 // header.print( parser );
 // header.print( parsing_interface );
-// header.print_fmt( "\n#pragma endregion Parsing\n" );
+header.print_fmt( "\n#pragma endregion Parsing\n" );
 // header.print( untyped );
 header.print_fmt( "\n#pragma endregion Interface\n\n");
 

@@ -8,7 +8,7 @@
 GEN_NS_PARSER_BEGIN
 #define GEN_DEFINE_ATTRIBUTE_TOKENS Entry( Tok_Attribute_API_Export, "GEN_API_Export_Code" ) Entry( Tok_Attribute_API_Import, "GEN_API_Import_Code" )
 
-enum TokType_Def : u32
+enum TokType : u32
 {
 Tok_Invalid,
 Tok_Access_Private,

@@ -109,9 +109,8 @@ enum TokType_Def : u32
 Tok_Attribute_API_Import,
 Tok_NumTokens
 };
-typedef enum TokType_Def TokType;
 
-inline StrC to_str( TokType type )
+inline StrC toktype_to_str( TokType type )
 {
 local_persist StrC lookup[] {
 { sizeof( "__invalid__" ), "__invalid__" },

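Editor's note: the renames in this hunk follow the pattern the C library needs. C has no function overloading, so the overloaded `to_str` / `to_toktype` helpers become uniquely named free functions. A minimal sketch of the resulting declarations, with an abbreviated enum and an assumed StrC layout inferred from the `{ sizeof("__invalid__"), "__invalid__" }` initializers above:

    #include <stddef.h>

    /* Abbreviated for the sketch; the real enum is generated from ETokType.csv. */
    typedef enum TokType { Tok_Invalid, Tok_NumTokens } TokType;

    /* Assumed layout: a length followed by a pointer, matching the lookup initializers. */
    typedef struct StrC { ptrdiff_t Len; char const* Ptr; } StrC;

    /* One distinct name per conversion instead of an overload set: */
    StrC    toktype_to_str ( TokType type ); /* was: StrC to_str( TokType )     */
    TokType strc_to_toktype( StrC str );     /* was: TokType to_toktype( StrC ) */
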
@@ -215,12 +214,12 @@ inline StrC to_str( TokType type )
 return lookup[type];
 }
 
-inline TokType to_toktype( StrC str )
+inline TokType strc_to_toktype( StrC str )
 {
 local_persist u32 keymap[Tok_NumTokens];
 do_once_start for ( u32 index = 0; index < Tok_NumTokens; index++ )
 {
-StrC enum_str = to_str( (TokType)index );
+StrC enum_str = toktype_to_str( (TokType)index );
 keymap[index] = crc32( enum_str.Ptr, enum_str.Len - 1 );
 }
 do_once_end u32 hash = crc32( str.Ptr, str.Len );

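Editor's note: the lookup in `strc_to_toktype` hashes every token name once, then matches the incoming lexeme by hash. A self-contained sketch of that technique, using illustrative token names and a plain bitwise CRC-32 (the real code uses gencpp's `crc32`, `local_persist`, and `do_once` helpers):

    #include <stdint.h>
    #include <stddef.h>
    #include <stdio.h>
    #include <string.h>

    typedef enum { Tok_Invalid, Tok_Access_Private, Tok_Access_Public, Tok_NumTokens } TokType;

    /* Illustrative names; the real table is generated from ETokType.csv. */
    static char const* toktype_names[Tok_NumTokens] = { "__invalid__", "private", "public" };

    static uint32_t crc32_of( char const* data, size_t len )
    {
        uint32_t crc = 0xFFFFFFFFu;
        for ( size_t i = 0; i < len; ++i ) {
            crc ^= (unsigned char)data[i];
            for ( int bit = 0; bit < 8; ++bit )
                crc = (crc >> 1) ^ ( 0xEDB88320u & (uint32_t)-(int32_t)(crc & 1u) );
        }
        return ~crc;
    }

    TokType strc_to_toktype_sketch( char const* str, size_t len )
    {
        /* Built once on first call; mirrors the local_persist + do_once_start/do_once_end guard. */
        static uint32_t keymap[Tok_NumTokens];
        static int      initialized = 0;
        if ( ! initialized ) {
            for ( uint32_t index = 0; index < Tok_NumTokens; index++ )
                keymap[index] = crc32_of( toktype_names[index], strlen( toktype_names[index] ) );
            /* The generated code hashes Len - 1 instead, because its entries store sizeof(literal),
               which counts a null terminator that lexed tokens do not carry. */
            initialized = 1;
        }
        uint32_t hash = crc32_of( str, len );
        for ( uint32_t index = 0; index < Tok_NumTokens; index++ )
            if ( keymap[index] == hash )
                return (TokType)index;
        return Tok_Invalid;
    }

    int main( void )
    {
        char const lexeme[] = "private";
        printf( "%d\n", strc_to_toktype_sketch( lexeme, sizeof(lexeme) - 1 ) ); /* 1 == Tok_Access_Private */
        return 0;
    }
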
@@ -94,7 +94,7 @@ String to_string(Token tok)
 {
 String result = string_make_reserve( GlobalAllocator, kilobytes(4) );
 
-StrC type_str = to_str( tok.Type );
+StrC type_str = toktype_to_str( tok.Type );
 
 string_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
 , tok.Line, tok.Column

@@ -249,7 +249,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
 ctx->token.Length++;
 }
 
-ctx->token.Type = to_toktype( to_str(ctx->token) );
+ctx->token.Type = strc_to_toktype( to_str(ctx->token) );
 
 bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma;
 if ( ! is_preprocessor )

@@ -475,7 +475,7 @@ void lex_found_token( LexContext* ctx )
 return;
 }
 
-TokType type = to_toktype( to_str(ctx->token) );
+TokType type = strc_to_toktype( to_str(ctx->token) );
 
 if (type <= Tok_Access_Public && type >= Tok_Access_Private )
 {

@@ -1270,7 +1270,7 @@ TokArray lex( StrC content )
 {
 log_fmt( "Token %d Type: %s : %.*s\n"
 , idx
-, to_str( Tokens[ idx ].Type ).Ptr
+, toktype_to_str( Tokens[ idx ].Type ).Ptr
 , Tokens[ idx ].Length, Tokens[ idx ].Text
 );
 }

@@ -115,7 +115,7 @@ bool __eat(TokArray* self, TokType type )
 {
 Token tok = * current( self, skip_formatting );
 log_failure( "Parse Error, TokArray::eat, Expected: ' %s ' not ' %.*s ' (%d, %d)`\n%s"
-, to_str(type).Ptr
+, toktype_to_str(type).Ptr
 , at_idx.Length, at_idx.Text
 , tok.Line
 , tok.Column

@@ -546,7 +546,7 @@ Code parse_array_decl()
 
 if ( currtok.Type != Tok_BraceSquare_Close )
 {
-log_failure( "%s: Error, expected ] in array declaration, not %s\n%s", to_str( currtok.Type ), to_string(Context) );
+log_failure( "%s: Error, expected ] in array declaration, not %s\n%s", toktype_to_str( currtok.Type ), to_string(Context) );
 pop(& Context);
 return InvalidCode;
 }

@@ -687,7 +687,7 @@ Code parse_class_struct( TokType which, bool inplace_def = false )
 {
 if ( which != Tok_Decl_Class && which != Tok_Decl_Struct )
 {
-log_failure( "Error, expected class or struct, not %s\n%s", to_str( which ), to_string(Context) );
+log_failure( "Error, expected class or struct, not %s\n%s", toktype_to_str( which ), to_string(Context) );
 return InvalidCode;
 }
 

@@ -1206,7 +1206,7 @@ Code parse_complicated_definition( TokType which )
 return result;
 }
 
-log_failure( "Unsupported or bad member definition after %s declaration\n%s", to_str(which), to_string(Context) );
+log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) );
 pop(& Context);
 return InvalidCode;
 }

@@ -1251,7 +1251,7 @@ Code parse_complicated_definition( TokType which )
 
 if ( ! ok_to_parse )
 {
-log_failure( "Unsupported or bad member definition after %s declaration\n%s", to_str(which), to_string(Context) );
+log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) );
 pop(& Context);
 return InvalidCode;
 }

@@ -1270,7 +1270,7 @@ Code parse_complicated_definition( TokType which )
 && ( tokens.Arr[idx - 4].Type != which))
 )
 {
-log_failure( "Unsupported or bad member definition after %s declaration\n%s", to_str(which), to_string(Context) );
+log_failure( "Unsupported or bad member definition after %s declaration\n%s", toktype_to_str(which), to_string(Context) );
 pop(& Context);
 return InvalidCode;
 }

@@ -1300,7 +1300,7 @@ Code parse_complicated_definition( TokType which )
 }
 else
 {
-log_failure( "Unsupported or bad member definition after %s declaration\n%S", to_str(which).Ptr, to_string(Context) );
+log_failure( "Unsupported or bad member definition after %s declaration\n%S", toktype_to_str(which).Ptr, to_string(Context) );
 pop(& Context);
 return InvalidCode;
 }

@@ -2075,7 +2075,7 @@ Token parse_identifier( bool* possible_member_function )
 return name;
 }
 
-log_failure( "Error, had a ~ operator after %S but not a destructor\n%s", to_str( prevtok.Type ), to_string(Context) );
+log_failure( "Error, had a ~ operator after %S but not a destructor\n%s", toktype_to_str( prevtok.Type ), to_string(Context) );
 pop(& Context);
 return { nullptr, 0, Tok_Invalid };
 }

@@ -2095,7 +2095,7 @@ Token parse_identifier( bool* possible_member_function )
 
 if ( currtok.Type != Tok_Identifier )
 {
-log_failure( "Error, expected static symbol identifier, not %s\n%s", to_str( currtok.Type ), to_string(Context) );
+log_failure( "Error, expected static symbol identifier, not %s\n%s", toktype_to_str( currtok.Type ), to_string(Context) );
 pop(& Context);
 return { nullptr, 0, Tok_Invalid };
 }

@@ -376,7 +376,7 @@ size_t gen_example_hash__P_long_long( long long val ) { return val * 2654435761u
 (selector_arg), \
 GEN_IF_MACRO_DEFINED_INCLUDE_THIS_SLOT( FunctionID__ARGS_SIG_1 ) \
 GEN_IF_MACRO_DEFINED_INCLUDE_THIS_SLOT( FunctionID__ARGS_SIG_2 ) \
-... \
+/* ... */ \
 GEN_IF_MACRO_DEFINED_INCLUDE_THIS_SLOT(FunctionID__ARGS_SIG_N ) \
 default: gen_generic_selection_fail \
 ) GEN_RESOLVED_FUNCTION_CALL( selector_arg, __VA_ARG__ )

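Editor's note: this hunk touches the generated `_Generic` selection macros. As a reminder of the mechanism, here is a minimal standalone sketch with made-up function names (not the generated gencpp macro): `_Generic` picks one association based on the static type of the argument, which is how the generated `<interface_name>( code )` macros dispatch to per-type functions in the C library.

    #include <stdio.h>
    #include <stddef.h>

    size_t example_hash_int      ( int val )       { return (size_t)val * 2654435761u; }
    size_t example_hash_long_long( long long val ) { return (size_t)val * 2654435761u; }

    /* Selects an association by the argument's static type, then calls it. */
    #define example_hash( val ) _Generic( (val), \
        int       : example_hash_int,            \
        long long : example_hash_long_long       \
    )( val )

    int main( void )
    {
        printf( "%zu\n", example_hash( 42 ) );   /* resolves to example_hash_int       */
        printf( "%zu\n", example_hash( 42LL ) ); /* resolves to example_hash_long_long */
        return 0;
    }
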
@@ -407,14 +407,30 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
 #pragma pop_macro("GEN_DEFINE_ATTRIBUTE_TOKENS")
 
 // We cannot parse this enum, it has Attribute names as enums
-CodeEnum enum_code = parse_enum(token_fmt("entries", string_to_strc(enum_entries), "attribute_toks", string_to_strc(attribute_entries), stringize(
-    enum TokType_Def : u32
-    {
-        <entries>
-        <attribute_toks>
-        Tok_NumTokens
-    };
-)));
+CodeEnum enum_code;
+if (use_c_definition)
+{
+    enum_code = parse_enum(token_fmt("entries", string_to_strc(enum_entries), "attribute_toks", string_to_strc(attribute_entries), stringize(
+        enum TokType
+        {
+            <entries>
+            <attribute_toks>
+            Tok_NumTokens,
+            Tok_UnderlyingType = GEN_U32_MAX
+        };
+    )));
+}
+else
+{
+    enum_code = parse_enum(token_fmt("entries", string_to_strc(enum_entries), "attribute_toks", string_to_strc(attribute_entries), stringize(
+        enum TokType : u32
+        {
+            <entries>
+            <attribute_toks>
+            Tok_NumTokens
+        };
+    )));
+}
 
 #pragma push_macro("local_persist")
 #pragma push_macro("do_once_start")

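Editor's note: the split above exists because the C++ definition relies on an enum-base (`: u32`), which standard C before C23 does not provide. The C branch instead appends a `GEN_U32_MAX` sentinel so the enumeration must span the full 32-bit range. A compilable sketch of the C-side shape, with a placeholder macro standing in for gencpp's constant:

    #define GEN_U32_MAX 0xFFFFFFFFu /* placeholder; the real constant comes from gencpp's basic types */

    enum TokType
    {
        Tok_Invalid,
        /* ... one enumerator per ETokType.csv / AttributeTokens.csv entry ... */
        Tok_NumTokens,

        /* Widens the enumeration to cover 32-bit values, standing in for the C++-only
           `enum TokType : u32` syntax (a common compiler extension before C23,
           standard behavior in C23). */
        Tok_UnderlyingType = GEN_U32_MAX
    };
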
@@ -424,10 +440,10 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
 #undef do_once_end
 CodeFn to_str = parse_function(token_fmt("entries", string_to_strc(to_str_entries), "attribute_toks", string_to_strc(to_str_attributes), stringize(
 inline
-StrC to_str( TokType type )
+StrC toktype_to_str( TokType type )
 {
 local_persist
-StrC lookup[] {
+StrC lookup[] = {
 <entries>
 <attribute_toks>
 };

@@ -438,14 +454,14 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
 
 CodeFn to_type = parse_function( token_fmt( "entries", string_to_strc(to_str_entries), stringize(
 inline
-TokType to_toktype( StrC str )
+TokType strc_to_toktype( StrC str )
 {
 local_persist
 u32 keymap[ Tok_NumTokens ];
 do_once_start
 for ( u32 index = 0; index < Tok_NumTokens; index++ )
 {
-StrC enum_str = to_str( (TokType)index );
+StrC enum_str = toktype_to_str( (TokType)index );
 
 // We subtract 1 to remove the null terminator
 // This is because the tokens lexed are not null terminated.

@@ -468,16 +484,19 @@ CodeBody gen_etoktype( char const* etok_path, char const* attr_path, bool use_c_
 #pragma pop_macro("do_once_start")
 #pragma pop_macro("do_once_end")
 
-//CodeNS nspace = def_namespace( name(ETokType), def_namespace_body( args( attribute_entires_def, enum_code, to_str, to_type ) ) );
-CodeTypedef td_toktype = parse_typedef( code( typedef enum TokType_Def TokType; ));
-
-return def_global_body( args(
-    attribute_entires_def,
-    enum_code,
-    td_toktype,
-    to_str,
-    to_type
-));
+CodeBody result = def_body(CT_Global_Body);
+body_append(result, untyped_str(txt("GEN_NS_PARSER_BEGIN\n\n")));
+body_append(result, attribute_entires_def);
+body_append(result, enum_code);
+if (use_c_definition)
+{
+    CodeTypedef td_toktype = parse_typedef( code( typedef enum TokType TokType; ));
+    body_append(result, td_toktype);
+}
+body_append(result, to_str);
+body_append(result, to_type);
+body_append(result, untyped_str(txt("\nGEN_NS_PARSER_END\n\n")));
+return result;
 }
 
 CodeBody gen_ast_inlines()

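Editor's note: with the tail of gen_etoktype rewritten above, the ETokType section of the C build comes out roughly in this shape. This is a sketch assembled from the body_append calls in the hunk, not verbatim generator output; the parser-namespace macros and the StrC forward declaration are assumptions for the sake of a self-contained snippet.

    #define GEN_NS_PARSER_BEGIN     /* assumed: expands to nothing in the C build */
    #define GEN_NS_PARSER_END       /* assumed: expands to nothing in the C build */
    typedef struct StrC StrC;       /* defined elsewhere in the generated header  */

    GEN_NS_PARSER_BEGIN

    enum TokType { Tok_Invalid /* ..., Tok_NumTokens, Tok_UnderlyingType = GEN_U32_MAX */ };
    typedef enum TokType TokType;   /* appended only when use_c_definition is set */

    StrC    toktype_to_str ( TokType type );   /* generated lookup-table conversion      */
    TokType strc_to_toktype( StrC str );       /* generated crc32 keymap lookup (above)  */

    GEN_NS_PARSER_END
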