12 Commits

20 changed files with 1060 additions and 252 deletions

View File

@ -147,3 +147,13 @@ struct ArrayHeader
## Building ## Building
See the [scripts directory](scripts/). See the [scripts directory](scripts/).
## Gallery
### Listing definitions in the Cuik Compiler
https://github.com/user-attachments/assets/2302240c-01f1-4e1b-a4b5-292eb3186648
### Unreal: Generating a UAttributeSet from a UDataTable
https://github.com/user-attachments/assets/2a07b743-825d-4f9f-beaf-3559e8748a4d

View File

@ -23,6 +23,7 @@ enum Specifier : u32
Spec_Ptr, Spec_Ptr,
Spec_Ref, Spec_Ref,
Spec_Register, Spec_Register,
Spec_Restrict,
Spec_RValue, Spec_RValue,
Spec_Static, Spec_Static,
Spec_Thread_Local, Spec_Thread_Local,
@ -56,6 +57,7 @@ inline Str spec_to_str( Specifier type )
{ "*", sizeof( "*" ) - 1 }, { "*", sizeof( "*" ) - 1 },
{ "&", sizeof( "&" ) - 1 }, { "&", sizeof( "&" ) - 1 },
{ "register", sizeof( "register" ) - 1 }, { "register", sizeof( "register" ) - 1 },
{ "restrict", sizeof( "restrict" ) - 1 },
{ "&&", sizeof( "&&" ) - 1 }, { "&&", sizeof( "&&" ) - 1 },
{ "static", sizeof( "static" ) - 1 }, { "static", sizeof( "static" ) - 1 },
{ "thread_local", sizeof( "thread_local" ) - 1 }, { "thread_local", sizeof( "thread_local" ) - 1 },
@ -72,16 +74,16 @@ inline Str spec_to_str( Specifier type )
inline bool spec_is_trailing( Specifier specifier ) inline bool spec_is_trailing( Specifier specifier )
{ {
switch (specifier) { switch ( specifier )
case Spec_Const: {
case Spec_Final: case Spec_Const :
case Spec_NoExceptions: case Spec_Final :
case Spec_Override: case Spec_NoExceptions :
case Spec_Pure: case Spec_Override :
case Spec_Volatile: case Spec_Pure :
case Spec_Volatile :
return true; return true;
default :
default:
return false; return false;
} }
} }
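For context on the hunks above: `Spec_Restrict` joins the specifier tables, and `spec_is_trailing` now enumerates the qualifiers that may trail a function signature. A minimal C++ sketch of such trailing specifiers (illustrative only; the type names are invented):

```cpp
// Trailing specifiers sit after the parameter list; these are the cases the
// reworked spec_is_trailing switch reports as trailing.
struct Base
{
    virtual int  size() const noexcept = 0;   // const, noexcept, pure
    virtual void reset() volatile;            // volatile-qualified member
    virtual ~Base() = default;
};

struct Derived final : Base
{
    int  size() const noexcept override { return 0; }   // override
    void reset() volatile override {}
};
```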

View File

@ -83,6 +83,7 @@ enum TokType : u32
Tok_Spec_Mutable, Tok_Spec_Mutable,
Tok_Spec_NeverInline, Tok_Spec_NeverInline,
Tok_Spec_Override, Tok_Spec_Override,
Tok_Spec_Restrict,
Tok_Spec_Static, Tok_Spec_Static,
Tok_Spec_ThreadLocal, Tok_Spec_ThreadLocal,
Tok_Spec_Volatile, Tok_Spec_Volatile,
@ -188,6 +189,7 @@ inline Str toktype_to_str( TokType type )
{ "mutable", sizeof( "mutable" ) - 1 }, { "mutable", sizeof( "mutable" ) - 1 },
{ "neverinline", sizeof( "neverinline" ) - 1 }, { "neverinline", sizeof( "neverinline" ) - 1 },
{ "override", sizeof( "override" ) - 1 }, { "override", sizeof( "override" ) - 1 },
{ "restrict", sizeof( "restrict" ) - 1 },
{ "static", sizeof( "static" ) - 1 }, { "static", sizeof( "static" ) - 1 },
{ "thread_local", sizeof( "thread_local" ) - 1 }, { "thread_local", sizeof( "thread_local" ) - 1 },
{ "volatile", sizeof( "volatile" ) - 1 }, { "volatile", sizeof( "volatile" ) - 1 },

View File

@ -327,8 +327,6 @@ forceinline CodeBody def_union_body ( s32 num, Code* codes )
#pragma region Parsing #pragma region Parsing
// TODO(Ed) : Implement the new parser API design.
#if 0 #if 0
struct StackNode struct StackNode
{ {

View File

@ -39,7 +39,7 @@ CodeConstructor parse_constructor( Str def )
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
b32 ignore_spec = false; b32 ignore_spec = false;

View File

@ -281,7 +281,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
ctx->token.Text.Len++; ctx->token.Text.Len++;
} }
ctx->token.Type = str_to_toktype( tok_to_str(ctx->token) ); ctx->token.Type = str_to_toktype( ctx->token.Text );
bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma; bool is_preprocessor = ctx->token.Type >= Tok_Preprocess_Define && ctx->token.Type <= Tok_Preprocess_Pragma;
if ( ! is_preprocessor ) if ( ! is_preprocessor )
@ -488,7 +488,7 @@ void lex_found_token( LexContext* ctx )
return; return;
} }
TokType type = str_to_toktype( tok_to_str(ctx->token) ); TokType type = str_to_toktype( ctx->token.Text );
if (type <= Tok_Access_Public && type >= Tok_Access_Private ) { if (type <= Tok_Access_Public && type >= Tok_Access_Private ) {
ctx->token.Flags |= TF_AccessSpecifier; ctx->token.Flags |= TF_AccessSpecifier;

View File

@ -533,7 +533,7 @@ Code parse_array_decl()
untyped_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr; untyped_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr;
Code array_expr = untyped_str( tok_to_str(untyped_tok) ); Code array_expr = untyped_str( untyped_tok.Text );
// [ <Content> // [ <Content>
if ( left == 0 ) if ( left == 0 )
@ -737,7 +737,7 @@ Code parse_class_struct( TokType which, bool inplace_def )
} }
Token parent_tok = parse_identifier(nullptr); Token parent_tok = parse_identifier(nullptr);
parent = def_type( tok_to_str(parent_tok) ); parent = def_type( parent_tok.Text );
// <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Parent/Interface Name> // <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Parent/Interface Name>
while ( check(Tok_Comma) ) while ( check(Tok_Comma) )
@ -750,7 +750,7 @@ Code parse_class_struct( TokType which, bool inplace_def )
} }
Token interface_tok = parse_identifier(nullptr); Token interface_tok = parse_identifier(nullptr);
array_append( interfaces, def_type( tok_to_str(interface_tok) ) ); array_append( interfaces, def_type( interface_tok.Text ) );
// <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Name>, ... // <ModuleFlags> <class/struct> <Attributes> <Name> : <Access Specifier> <Name>, ...
} }
} }
@ -773,10 +773,10 @@ Code parse_class_struct( TokType which, bool inplace_def )
} }
if ( which == Tok_Decl_Class ) if ( which == Tok_Decl_Class )
result = cast(Code, def_class( tok_to_str(name), def_assign( body, parent, access, attributes, interfaces, scast(s32, array_num(interfaces)), mflags ) )); result = cast(Code, def_class( name.Text, def_assign( body, parent, access, attributes, interfaces, scast(s32, array_num(interfaces)), mflags ) ));
else else
result = cast(Code, def_struct( tok_to_str(name), def_assign( body, (CodeTypename)parent, access, attributes, interfaces, scast(s32, array_num(interfaces)), mflags ) )); result = cast(Code, def_struct( name.Text, def_assign( body, (CodeTypename)parent, access, attributes, interfaces, scast(s32, array_num(interfaces)), mflags ) ));
if ( inline_cmt ) if ( inline_cmt )
result->InlineCmt = cast(Code, inline_cmt); result->InlineCmt = cast(Code, inline_cmt);
@ -994,7 +994,7 @@ CodeBody parse_class_struct_body( TokType which, Token name )
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
b32 ignore_spec = false; b32 ignore_spec = false;
@ -1102,7 +1102,7 @@ CodeBody parse_class_struct_body( TokType which, Token name )
untyped_tok.Text.Len = ( (sptr)currtok.Text.Ptr + currtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr; untyped_tok.Text.Len = ( (sptr)currtok.Text.Ptr + currtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr;
eat( currtok.Type ); eat( currtok.Type );
} }
member = untyped_str( tok_to_str(untyped_tok) ); member = untyped_str( untyped_tok.Text );
// Something unknown // Something unknown
break; break;
} }
@ -1130,7 +1130,7 @@ CodeComment parse_comment()
CodeComment CodeComment
result = (CodeComment) make_code(); result = (CodeComment) make_code();
result->Type = CT_Comment; result->Type = CT_Comment;
result->Content = cache_str( tok_to_str(currtok_noskip) ); result->Content = cache_str( currtok_noskip.Text );
// result->Token = currtok_noskip; // result->Token = currtok_noskip;
eat( Tok_Comment ); eat( Tok_Comment );
@ -1171,13 +1171,13 @@ Code parse_complicated_definition( TokType which )
} }
Token tok = tokens.Arr[ idx - 1 ]; Token tok = tokens.Arr[ idx - 1 ];
if ( tok_is_specifier(tok) && spec_is_trailing( str_to_specifier( tok_to_str(tok))) ) if ( tok_is_specifier(tok) && spec_is_trailing( str_to_specifier( tok.Text)) )
{ {
// <which> <type_identifier>(...) <specifier> ...; // <which> <type_identifier>(...) <specifier> ...;
s32 spec_idx = idx - 1; s32 spec_idx = idx - 1;
Token spec = tokens.Arr[spec_idx]; Token spec = tokens.Arr[spec_idx];
while ( tok_is_specifier(spec) && spec_is_trailing( str_to_specifier( tok_to_str(spec))) ) while ( tok_is_specifier(spec) && spec_is_trailing( str_to_specifier( spec.Text)) )
{ {
-- spec_idx; -- spec_idx;
spec = tokens.Arr[spec_idx]; spec = tokens.Arr[spec_idx];
@ -1207,9 +1207,19 @@ Code parse_complicated_definition( TokType which )
if ( tok.Type == Tok_BraceCurly_Close ) if ( tok.Type == Tok_BraceCurly_Close )
{ {
// It's an inplace definition // It's an inplace definition
// <which> <type_identifier> { ... } <identifier>; // <which> <type_identifier ?> { ... } <identifier>;
ok_to_parse = true; ok_to_parse = true;
is_inplace = true; is_inplace = true;
CodeTypename type = cast(CodeTypename, parse_forward_or_definition(which, is_inplace));
// Should be a name right after the type.
Token name = parse_identifier(nullptr);
_ctx->parser.Scope->Name = name.Text;
CodeVar result = parse_variable_after_name(ModuleFlag_None, NullCode, NullCode, type, name.Text);
parser_pop(& _ctx->parser);
return (Code) result;
} }
else if ( tok.Type == Tok_Identifier && tokens.Arr[ idx - 3 ].Type == which ) else if ( tok.Type == Tok_Identifier && tokens.Arr[ idx - 3 ].Type == which )
{ {
@ -1327,7 +1337,7 @@ Code parse_assignment_expression()
} }
expr_tok.Text.Len = ( ( sptr )currtok.Text.Ptr + currtok.Text.Len ) - ( sptr )expr_tok.Text.Ptr - 1; expr_tok.Text.Len = ( ( sptr )currtok.Text.Ptr + currtok.Text.Len ) - ( sptr )expr_tok.Text.Ptr - 1;
expr = untyped_str( tok_to_str(expr_tok) ); expr = untyped_str( expr_tok.Text );
// = <Expression> // = <Expression>
return expr; return expr;
} }
@ -1383,12 +1393,12 @@ CodeFn parse_function_after_name(
{ {
if ( specifiers == nullptr ) if ( specifiers == nullptr )
{ {
specifiers = def_specifier( str_to_specifier( tok_to_str(currtok)) ); specifiers = def_specifier( str_to_specifier( currtok.Text) );
eat( currtok.Type ); eat( currtok.Type );
continue; continue;
} }
specifiers_append(specifiers, str_to_specifier( tok_to_str(currtok)) ); specifiers_append(specifiers, str_to_specifier( currtok.Text) );
eat( currtok.Type ); eat( currtok.Type );
} }
// <Attributes> <Specifiers> <ReturnType> <Name> ( <Parameters> ) <Specifiers> // <Attributes> <Specifiers> <ReturnType> <Name> ( <Parameters> ) <Specifiers>
@ -1431,7 +1441,7 @@ CodeFn parse_function_after_name(
} }
StrBuilder StrBuilder
name_stripped = strbuilder_make_str( _ctx->Allocator_Temp, tok_to_str(name) ); name_stripped = strbuilder_make_str( _ctx->Allocator_Temp, name.Text );
strbuilder_strip_space(name_stripped); strbuilder_strip_space(name_stripped);
CodeFn CodeFn
@ -1718,7 +1728,7 @@ CodeBody parse_global_nspace( CodeType which )
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
bool ignore_spec = false; bool ignore_spec = false;
@ -2059,7 +2069,7 @@ CodeInclude parse_include()
} }
_ctx->parser.Scope->Name = currtok.Text; _ctx->parser.Scope->Name = currtok.Text;
include->Content = cache_str( tok_to_str(currtok) ); include->Content = cache_str( currtok.Text );
eat( Tok_String ); eat( Tok_String );
// #include <Path> or "Path" // #include <Path> or "Path"
@ -2384,12 +2394,12 @@ CodeOperator parse_operator_after_ret_type(
{ {
if ( specifiers == nullptr ) if ( specifiers == nullptr )
{ {
specifiers = def_specifier( str_to_specifier( tok_to_str(currtok)) ); specifiers = def_specifier( str_to_specifier( currtok.Text) );
eat( currtok.Type ); eat( currtok.Type );
continue; continue;
} }
specifiers_append(specifiers, str_to_specifier( tok_to_str(currtok)) ); specifiers_append(specifiers, str_to_specifier( currtok.Text) );
eat( currtok.Type ); eat( currtok.Type );
} }
// <ExportFlag> <Attributes> <Specifiers> <ReturnType> <Qualifier::...> operator <Op> ( <Parameters> ) <Specifiers> // <ExportFlag> <Attributes> <Specifiers> <ReturnType> <Qualifier::...> operator <Op> ( <Parameters> ) <Specifiers>
@ -2419,7 +2429,7 @@ CodeOperator parse_operator_after_ret_type(
} }
// OpValidateResult check_result = operator__validate( op, params, ret_type, specifiers ); // OpValidateResult check_result = operator__validate( op, params, ret_type, specifiers );
CodeOperator result = def_operator( op, tok_to_str(nspace), def_assign( params, ret_type, body, specifiers, attributes, mflags ) ); CodeOperator result = def_operator( op, nspace.Text, def_assign( params, ret_type, body, specifiers, attributes, mflags ) );
if ( inline_cmt ) if ( inline_cmt )
result->InlineCmt = inline_cmt; result->InlineCmt = inline_cmt;
@ -2537,7 +2547,7 @@ Code parse_operator_function_or_variable( bool expects_function, CodeAttributes
return InvalidCode; return InvalidCode;
} }
// Dealing with a variable // Dealing with a variable
result = cast(Code, parse_variable_after_name( ModuleFlag_None, attributes, specifiers, type, tok_to_str(name) )); result = cast(Code, parse_variable_after_name( ModuleFlag_None, attributes, specifiers, type, name.Text ));
// <Attributes> <Specifiers> <ValueType> <Name> ... // <Attributes> <Specifiers> <ValueType> <Name> ...
} }
} }
@ -2595,7 +2605,7 @@ CodePragma parse_pragma()
_ctx->parser.Scope->Name = currtok.Text; _ctx->parser.Scope->Name = currtok.Text;
pragma->Content = cache_str( tok_to_str(currtok) ); pragma->Content = cache_str( currtok.Text );
eat( Tok_Preprocess_Content ); eat( Tok_Preprocess_Content );
// #pragma <Content> // #pragma <Content>
@ -2729,7 +2739,7 @@ CodeParams parse_params( bool use_template_capture )
eat( currtok.Type ); eat( currtok.Type );
} }
value = untyped_str( strbuilder_to_str(parser_strip_formatting( tok_to_str(value_tok), parser_strip_formatting_dont_preserve_newlines )) ); value = untyped_str( strbuilder_to_str(parser_strip_formatting( value_tok.Text, parser_strip_formatting_dont_preserve_newlines )) );
// ( <Macro> <ValueType> <Name> = <Expression> // ( <Macro> <ValueType> <Name> = <Expression>
} }
} }
@ -2740,7 +2750,7 @@ CodeParams parse_params( bool use_template_capture )
result->Macro = macro; result->Macro = macro;
if ( name.Text.Len > 0 ) if ( name.Text.Len > 0 )
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
result->ValueType = type; result->ValueType = type;
@ -2843,7 +2853,7 @@ CodeParams parse_params( bool use_template_capture )
eat( currtok.Type ); eat( currtok.Type );
} }
value = untyped_str( strbuilder_to_str(parser_strip_formatting( tok_to_str(value_tok), parser_strip_formatting_dont_preserve_newlines )) ); value = untyped_str( strbuilder_to_str(parser_strip_formatting( value_tok.Text, parser_strip_formatting_dont_preserve_newlines )) );
// ( <Macro> <ValueType> <Name> = <Expression>, <Macro> <ValueType> <Name> <PostNameMacro> = <Expression> // ( <Macro> <ValueType> <Name> = <Expression>, <Macro> <ValueType> <Name> <PostNameMacro> = <Expression>
} }
// ( <Macro> <ValueType> <Name> = <Expression>, <Macro> <ValueType> <Name> <PostNameMacro> = <Expression>, .. // ( <Macro> <ValueType> <Name> = <Expression>, <Macro> <ValueType> <Name> <PostNameMacro> = <Expression>, ..
@ -2855,7 +2865,7 @@ CodeParams parse_params( bool use_template_capture )
param->Macro = macro; param->Macro = macro;
if ( name.Text.Len > 0 ) if ( name.Text.Len > 0 )
param->Name = cache_str( tok_to_str(name) ); param->Name = cache_str( name.Text );
param->PostNameMacro = post_name_macro; param->PostNameMacro = post_name_macro;
param->ValueType = cast(CodeTypename, type); param->ValueType = cast(CodeTypename, type);
@ -2912,7 +2922,7 @@ CodePreprocessCond parse_preprocess_cond()
} }
_ctx->parser.Scope->Name = currtok.Text; _ctx->parser.Scope->Name = currtok.Text;
cond->Content = cache_str( tok_to_str(currtok) ); cond->Content = cache_str( currtok.Text );
eat( Tok_Preprocess_Content ); eat( Tok_Preprocess_Content );
// #<Conditional> <Content> // #<Conditional> <Content>
@ -3179,7 +3189,7 @@ CodeVar parse_variable_after_name(
eat( Tok_BraceCurly_Close ); eat( Tok_BraceCurly_Close );
expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr; expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr;
expr = untyped_str( tok_to_str(expr_tok) ); expr = untyped_str( expr_tok.Text );
// <Attributes> <Specifiers> <ValueType> <Name> = { <Expression> } // <Attributes> <Specifiers> <ValueType> <Name> = { <Expression> }
} }
@ -3205,7 +3215,7 @@ CodeVar parse_variable_after_name(
} }
expr_token.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_token.Text.Ptr; expr_token.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_token.Text.Ptr;
expr = untyped_str( tok_to_str(expr_token) ); expr = untyped_str( expr_token.Text );
eat( Tok_Paren_Close ); eat( Tok_Paren_Close );
// <Attributes> <Specifiers> <ValueType> <Name> ( <Expression> ) // <Attributes> <Specifiers> <ValueType> <Name> ( <Expression> )
} }
@ -3228,7 +3238,7 @@ CodeVar parse_variable_after_name(
} }
expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr; expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr;
bitfield_expr = untyped_str( tok_to_str(expr_tok) ); bitfield_expr = untyped_str( expr_tok.Text );
// <Attributes> <Specifiers> <ValueType> <Name> : <Expression> // <Attributes> <Specifiers> <ValueType> <Name> : <Expression>
} }
@ -3309,7 +3319,7 @@ CodeVar parse_variable_declaration_list()
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
@ -3333,7 +3343,7 @@ CodeVar parse_variable_declaration_list()
{ {
log_failure( "Error, invalid specifier '%S' proceeding comma\n" log_failure( "Error, invalid specifier '%S' proceeding comma\n"
"(Parser will add and continue to specifiers, but will most likely fail to compile)\n%S" "(Parser will add and continue to specifiers, but will most likely fail to compile)\n%S"
, tok_to_str(currtok), strbuilder_to_str( parser_to_strbuilder(_ctx->parser)) ); , currtok.Text, strbuilder_to_str( parser_to_strbuilder(_ctx->parser)) );
continue; continue;
} }
break; break;
@ -3348,7 +3358,7 @@ CodeVar parse_variable_declaration_list()
} }
// , <Specifiers> // , <Specifiers>
Str name = tok_to_str(currtok); Str name = currtok.Text;
eat( Tok_Identifier ); eat( Tok_Identifier );
// , <Specifiers> <Name> // , <Specifiers> <Name>
@ -3417,7 +3427,7 @@ CodeConstructor parser_parse_constructor( CodeSpecifiers specifiers )
initializer_list_tok.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )initializer_list_tok.Text.Ptr; initializer_list_tok.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )initializer_list_tok.Text.Ptr;
// <Name> ( <Parameters> ) : <InitializerList> // <Name> ( <Parameters> ) : <InitializerList>
initializer_list = untyped_str( tok_to_str(initializer_list_tok) ); initializer_list = untyped_str( initializer_list_tok.Text );
// TODO(Ed): Constructors can have post-fix specifiers // TODO(Ed): Constructors can have post-fix specifiers
@ -3446,7 +3456,7 @@ CodeConstructor parser_parse_constructor( CodeSpecifiers specifiers )
CodeConstructor result = ( CodeConstructor )make_code(); CodeConstructor result = ( CodeConstructor )make_code();
result->Name = cache_str( tok_to_str(identifier)); result->Name = cache_str( identifier.Text );
result->Specs = specifiers; result->Specs = specifiers;
@ -3492,7 +3502,7 @@ CodeDefine parser_parse_define()
return InvalidCode; return InvalidCode;
} }
_ctx->parser.Scope->Name = currtok.Text; _ctx->parser.Scope->Name = currtok.Text;
define->Name = cache_str( tok_to_str(currtok) ); define->Name = cache_str( currtok.Text );
eat( Tok_Identifier ); eat( Tok_Identifier );
// #define <Name> // #define <Name>
@ -3550,7 +3560,7 @@ CodeDefine parser_parse_define()
return define; return define;
} }
define->Body = untyped_str( strbuilder_to_str( parser_strip_formatting( tok_to_str(currtok), parser_strip_formatting_dont_preserve_newlines )) ); define->Body = untyped_str( strbuilder_to_str( parser_strip_formatting( currtok.Text, parser_strip_formatting_dont_preserve_newlines )) );
eat( Tok_Preprocess_Content ); eat( Tok_Preprocess_Content );
// #define <Name> ( <params> ) <Content> // #define <Name> ( <params> ) <Content>
@ -3650,7 +3660,7 @@ CodeDestructor parser_parse_destructor( CodeSpecifiers specifiers )
if ( tok_is_valid(prefix_identifier) ) { if ( tok_is_valid(prefix_identifier) ) {
prefix_identifier.Text.Len += 1 + identifier.Text.Len; prefix_identifier.Text.Len += 1 + identifier.Text.Len;
result->Name = cache_str( tok_to_str(prefix_identifier) ); result->Name = cache_str( prefix_identifier.Text );
} }
if ( specifiers ) if ( specifiers )
@ -3684,9 +3694,6 @@ CodeEnum parser_parse_enum( bool inplace_def )
Code array_expr = { nullptr }; Code array_expr = { nullptr };
CodeTypename type = { nullptr }; CodeTypename type = { nullptr };
char entries_code[ kilobytes(128) ] = { 0 };
s32 entries_length = 0;
bool is_enum_class = false; bool is_enum_class = false;
eat( Tok_Decl_Enum ); eat( Tok_Decl_Enum );
@ -3729,7 +3736,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
else if ( currtok.Type == Tok_Preprocess_Macro_Expr ) else if ( currtok.Type == Tok_Preprocess_Macro_Expr )
{ {
// We'll support the enum_underlying macro // We'll support the enum_underlying macro
if ( str_contains( tok_to_str(currtok), enum_underlying_macro.Name) ) if ( str_contains( currtok.Text, enum_underlying_macro.Name) )
{ {
use_macro_underlying = true; use_macro_underlying = true;
underlying_macro = parse_simple_preprocess( Tok_Preprocess_Macro_Expr ); underlying_macro = parse_simple_preprocess( Tok_Preprocess_Macro_Expr );
@ -3764,7 +3771,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
switch ( currtok_noskip.Type ) switch ( currtok_noskip.Type )
{ {
case Tok_NewLine: case Tok_NewLine:
member = untyped_str( tok_to_str(currtok_noskip) ); member = untyped_str( currtok_noskip.Text );
eat( Tok_NewLine ); eat( Tok_NewLine );
break; break;
@ -3857,7 +3864,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
Token prev = * lex_previous(_ctx->parser.Tokens, lex_dont_skip_formatting); Token prev = * lex_previous(_ctx->parser.Tokens, lex_dont_skip_formatting);
entry.Text.Len = ( (sptr)prev.Text.Ptr + prev.Text.Len ) - (sptr)entry.Text.Ptr; entry.Text.Len = ( (sptr)prev.Text.Ptr + prev.Text.Len ) - (sptr)entry.Text.Ptr;
member = untyped_str( tok_to_str(entry) ); member = untyped_str( entry.Text );
} }
break; break;
} }
@ -3901,7 +3908,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
result->Type = is_enum_class ? CT_Enum_Class_Fwd : CT_Enum_Fwd; result->Type = is_enum_class ? CT_Enum_Class_Fwd : CT_Enum_Fwd;
} }
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
if ( attributes ) if ( attributes )
result->Attributes = attributes; result->Attributes = attributes;
@ -3953,7 +3960,7 @@ CodeExtern parser_parse_extern_link()
CodeExtern CodeExtern
result = (CodeExtern) make_code(); result = (CodeExtern) make_code();
result->Type = CT_Extern_Linkage; result->Type = CT_Extern_Linkage;
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
CodeBody entry = parser_parse_extern_link_body(); CodeBody entry = parser_parse_extern_link_body();
if ( cast(Code, entry) == Code_Invalid ) if ( cast(Code, entry) == Code_Invalid )
@ -3989,7 +3996,7 @@ CodeFriend parser_parse_friend()
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
@ -4107,7 +4114,7 @@ CodeFn parser_parse_function()
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
@ -4177,7 +4184,7 @@ CodeNS parser_parse_namespace()
CodeNS CodeNS
result = (CodeNS) make_code(); result = (CodeNS) make_code();
result->Type = CT_Namespace; result->Type = CT_Namespace;
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
result->Body = body; result->Body = body;
@ -4208,7 +4215,7 @@ CodeOperator parser_parse_operator()
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
@ -4322,7 +4329,7 @@ CodeOpCast parser_parse_operator_cast( CodeSpecifiers specifiers )
eat( Tok_BraceCurly_Close ); eat( Tok_BraceCurly_Close );
// <Specifiers> <Qualifier> :: ... operator <UnderlyingType>() <const> { <Body> } // <Specifiers> <Qualifier> :: ... operator <UnderlyingType>() <const> { <Body> }
body = untyped_str( tok_to_str(body_str) ); body = untyped_str( body_str.Text );
} }
else else
{ {
@ -4338,7 +4345,7 @@ CodeOpCast parser_parse_operator_cast( CodeSpecifiers specifiers )
CodeOpCast result = (CodeOpCast) make_code(); CodeOpCast result = (CodeOpCast) make_code();
if ( tok_is_valid(name) ) if ( tok_is_valid(name) )
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
if (body) { if (body) {
result->Type = CT_Operator_Cast; result->Type = CT_Operator_Cast;
@ -4441,7 +4448,7 @@ CodeTemplate parser_parse_template()
{ {
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
@ -4571,10 +4578,10 @@ CodeTypename parser_parse_type( bool from_template, bool* typedef_is_function )
// Prefix specifiers // Prefix specifiers
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
if ( spec != Spec_Const ) { if ( spec != Spec_Const ) {
log_failure( "Error, invalid specifier used in type definition: %S\n%SB", tok_to_str(currtok), parser_to_strbuilder(_ctx->parser) ); log_failure( "Error, invalid specifier used in type definition: %S\n%SB", currtok.Text, parser_to_strbuilder(_ctx->parser) );
parser_pop(& _ctx->parser); parser_pop(& _ctx->parser);
return InvalidCode; return InvalidCode;
} }
@ -4684,11 +4691,12 @@ else if ( currtok.Type == Tok_DeclType )
} }
} }
} }
// TODO(Ed): This needs updating
else if ( currtok.Type == Tok_Preprocess_Macro_Typename ) { else if ( currtok.Type == Tok_Preprocess_Macro_Typename ) {
// Typename is a macro // Typename is a macro
name = currtok; // name = currtok;
eat(Tok_Preprocess_Macro_Typename); // eat(Tok_Preprocess_Macro_Typename);
Code macro = parse_simple_preprocess(Tok_Preprocess_Macro_Typename);
name.Text = macro->Content;
} }
// The usual Identifier type signature that may have namespace qualifiers // The usual Identifier type signature that may have namespace qualifiers
@ -4709,15 +4717,18 @@ else if ( currtok.Type == Tok_DeclType )
// Suffix specifiers for typename. // Suffix specifiers for typename.
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
if ( spec != Spec_Const && spec != Spec_Ptr && spec != Spec_Ref && spec != Spec_RValue ) switch (spec ) {
{ GEN_PARSER_TYPENAME_ALLOWED_SUFFIX_SPECIFIER_CASES:
log_failure( "Error, invalid specifier used in type definition: %S\n%SB", tok_to_str(currtok), parser_to_strbuilder(_ctx->parser) ); break;
parser_pop(& _ctx->parser);
return InvalidCode; default: {
log_failure( "Error, invalid specifier used in type definition: %S\n%SB", currtok.Text, parser_to_strbuilder(_ctx->parser) );
parser_pop(& _ctx->parser);
return InvalidCode;
}
} }
specs_found[ NumSpecifiers ] = spec; specs_found[ NumSpecifiers ] = spec;
NumSpecifiers++; NumSpecifiers++;
eat( currtok.Type ); eat( currtok.Type );
@ -4734,7 +4745,7 @@ else if ( currtok.Type == Tok_DeclType )
// For function type signatures // For function type signatures
CodeTypename return_type = NullCode; CodeTypename return_type = NullCode;
CodeParams params = NullCode; CodeParams params = NullCode;
#ifdef GEN_USE_NEW_TYPENAME_PARSING #ifdef GEN_USE_NEW_TYPENAME_PARSING
CodeParams params_nested = NullCode; CodeParams params_nested = NullCode;
@ -4795,7 +4806,7 @@ else if ( currtok.Type == Tok_DeclType )
// StrBuilder // StrBuilder
// name_stripped = StrBuilder::make( FallbackAllocator, name ); // name_stripped = StrBuilder::make( FallbackAllocator, name );
// name_stripped.strip_space(); // name_stripped.strip_space();
return_type->Name = cache_str( tok_to_str(name) ); return_type->Name = cache_str( name.Text );
#ifdef GEN_USE_NEW_TYPENAME_PARSING #ifdef GEN_USE_NEW_TYPENAME_PARSING
if ( specifiers ) if ( specifiers )
@ -4908,14 +4919,14 @@ else if ( currtok.Type == Tok_DeclType )
// Look for suffix specifiers for the function // Look for suffix specifiers for the function
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
if ( spec != Spec_Const if ( spec != Spec_Const
// TODO : Add support for NoExcept, l-value, volatile, l-value, etc // TODO : Add support for NoExcept, l-value, volatile, l-value, etc
// && spec != Spec_NoExcept // && spec != Spec_NoExcept
&& spec != Spec_RValue ) && spec != Spec_RValue )
{ {
log_failure( "Error, invalid specifier used in type definition: %S\n%S", tok_to_str(currtok), strbuilder_to_str( parser_to_strbuilder(_ctx->parser)) ); log_failure( "Error, invalid specifier used in type definition: %S\n%S", currtok.Text, strbuilder_to_str( parser_to_strbuilder(_ctx->parser)) );
parser_pop(& _ctx->parser); parser_pop(& _ctx->parser);
return InvalidCode; return InvalidCode;
} }
@ -4949,7 +4960,7 @@ else if ( currtok.Type == Tok_DeclType )
// result->Token = _ctx->parser.Scope->Start; // result->Token = _ctx->parser.Scope->Start;
// Need to wait until we're using the new parsing method to do this. // Need to wait until we're using the new parsing method to do this.
StrBuilder name_stripped = parser_strip_formatting( tok_to_str(name), parser_strip_formatting_dont_preserve_newlines ); StrBuilder name_stripped = parser_strip_formatting( name.Text, parser_strip_formatting_dont_preserve_newlines );
// name_stripped.strip_space(); // name_stripped.strip_space();
@ -5208,7 +5219,7 @@ CodeTypedef parser_parse_typedef()
} }
else else
{ {
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
result->IsFunction = false; result->IsFunction = false;
} }
@ -5252,7 +5263,7 @@ CodeUnion parser_parse_union( bool inplace_def )
Str name = { nullptr, 0 }; Str name = { nullptr, 0 };
if ( check( Tok_Identifier ) ) if ( check( Tok_Identifier ) )
{ {
name = tok_to_str(currtok); name = currtok.Text;
_ctx->parser.Scope->Name = currtok.Text; _ctx->parser.Scope->Name = currtok.Text;
eat( Tok_Identifier ); eat( Tok_Identifier );
} }
@ -5443,7 +5454,7 @@ CodeUsing parser_parse_using()
CodeUsing CodeUsing
result = (CodeUsing) make_code(); result = (CodeUsing) make_code();
result->Name = cache_str( tok_to_str(name) ); result->Name = cache_str( name.Text );
result->ModuleFlags = mflags; result->ModuleFlags = mflags;
if ( is_namespace) if ( is_namespace)
@ -5494,7 +5505,7 @@ CodeVar parser_parse_variable()
while ( left && tok_is_specifier(currtok) ) while ( left && tok_is_specifier(currtok) )
{ {
Specifier spec = str_to_specifier( tok_to_str(currtok) ); Specifier spec = str_to_specifier( currtok.Text );
switch ( spec ) switch ( spec )
{ {
GEN_PARSER_VARIABLE_ALLOWED_SPECIFIER_CASES: GEN_PARSER_VARIABLE_ALLOWED_SPECIFIER_CASES:
@ -5520,13 +5531,6 @@ CodeVar parser_parse_variable()
} }
// <ModuleFlags> <Attributes> <Specifiers> // <ModuleFlags> <Attributes> <Specifiers>
// Note(Ed): We're enforcing that using this codepath requires non-macro jank.
// Code macro_stmt = parse_macro_as_definiton(attributes, specifiers);
// if (macro_stmt) {
// parser_pop(& _ctx->parser);
// return macro_stmt;
// }
CodeTypename type = parser_parse_type(parser_not_from_template, nullptr); CodeTypename type = parser_parse_type(parser_not_from_template, nullptr);
// <ModuleFlags> <Attributes> <Specifiers> <ValueType> // <ModuleFlags> <Attributes> <Specifiers> <ValueType>

View File

@ -97,9 +97,16 @@ case Spec_Global: \
case Spec_Inline: \ case Spec_Inline: \
case Spec_Local_Persist: \ case Spec_Local_Persist: \
case Spec_Mutable: \ case Spec_Mutable: \
case Spec_Restrict: \
case Spec_Static: \ case Spec_Static: \
case Spec_Thread_Local: \ case Spec_Thread_Local: \
case Spec_Volatile case Spec_Volatile
#define GEN_PARSER_TYPENAME_ALLOWED_SUFFIX_SPECIFIER_CASES \
case Spec_Const: \
case Spec_Ptr: \
case Spec_Restrict: \
case Spec_Ref: \
case Spec_RValue

View File

@ -43,11 +43,6 @@ AccessSpec tok_to_access_specifier(Token tok) {
return scast(AccessSpec, tok.Type); return scast(AccessSpec, tok.Type);
} }
forceinline
Str tok_to_str(Token tok) {
return tok.Text;
}
forceinline forceinline
bool tok_is_valid( Token tok ) { bool tok_is_valid( Token tok ) {
return tok.Text.Ptr && tok.Text.Len && tok.Type != Tok_Invalid; return tok.Text.Ptr && tok.Text.Len && tok.Type != Tok_Invalid;

View File

@ -14,6 +14,7 @@ NeverInline, neverinline
Ptr, * Ptr, *
Ref, & Ref, &
Register, register Register, register
Restrict, restrict
RValue, && RValue, &&
Static, static Static, static
Thread_Local, thread_local Thread_Local, thread_local


View File

@ -72,6 +72,7 @@ Spec_LocalPersist, "local_persist"
Spec_Mutable, "mutable" Spec_Mutable, "mutable"
Spec_NeverInline, "neverinline" Spec_NeverInline, "neverinline"
Spec_Override, "override" Spec_Override, "override"
Spec_Restrict, "restrict"
Spec_Static, "static" Spec_Static, "static"
Spec_ThreadLocal, "thread_local" Spec_ThreadLocal, "thread_local"
Spec_Volatile, "volatile" Spec_Volatile, "volatile"


View File

@ -582,12 +582,10 @@ The function can parse all of them, however the AST node compresses them all int
3. Parse additional template arguments 3. Parse additional template arguments
4. Return completed identifier token 4. Return completed identifier token
Technical notes: Notes:
* Current implementation treats identifier as single token span * Current implementation treats identifier as single token span
* TODO: Refactor to AST-based identifier representation for: * TODO: Refactor to AST-based identifier representation for:
* Distinct qualifier/symbol tracking
* Improved semantic analysis capabilities
* Better support for nested symbol resolution * Better support for nested symbol resolution
## `parse_include` ## `parse_include`
@ -643,14 +641,56 @@ By the point this function is called the following are known : export module fla
When this function is called, attributes and specifiers may have been resolved, however what comes next can still be either an operator, function, or variable. When this function is called, attributes and specifiers may have been resolved, however what comes next can still be either an operator, function, or variable.
1. Check for preprocessor macro, if there is one : `parse_simple_preprocess` 1. Initial Type Resolution
2. `parse_type` (Does the bulk of the work) 1. Push parsing scope
3. Begin lookahead to see if we get qualifiers or we eventually find the operator declaration 2. Handle macro definitions via `parse_macro_as_definition`
4. If we find an operator keyword : `parse_operator_after_ret_type` 3. Parse base type, validate result
5. otherwise : 4. Exit on invalid type
1. `parse_identifier` 2. Declaration Classification
2. If we see an opening parenthesis (capture start), it's a function : `parse_function_after_name` 2. Track static symbol access
3. It's a variable : `parse_variable_after_name` 3. Branch handling:
3. Branch handling:
* Operator overload: Forward to `parse_operator_after_ret_type`
* Function/variable: Parse identifier and analyze context
3. Function/Variable Disambiguation
1. Parse identifier
2. Analyze token patterns:
* Detect parameter capture via parenthesis
* Check for constructor initialization pattern
* Handle variadic argument cases
3. Macro Expression Analysis:
* Validate functional macros
* Track parenthesis balance
* Detect comma patterns
4. Declaration Parsing
1. Function path
* Verify function expectation (`consteval`)
* Delegate to `parse_function_after_name`
2. Variable path
* Validate against function expectation
* Forward to `parse_variable_after_name`
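A hedged sketch of the three outcomes this classification distinguishes once the return type is known (all names below are invented for the example):

```cpp
struct Vec2
{
    float x, y;
    Vec2( float x_, float y_ ) : x( x_ ), y( y_ ) {}
};

// Operator path: the 'operator' keyword follows the return type.
Vec2 operator+( Vec2 a, Vec2 b ) { return Vec2( a.x + b.x, a.y + b.y ); }

// Function path: an identifier followed by a parameter capture '('.
Vec2 lerp( Vec2 a, Vec2 b, float t );

// Variable path: an identifier followed by constructor-style initialization.
Vec2 g_origin( 0.0f, 0.0f );
```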
## `parse_macro_as_definition`
1. Validation
1. Check token type (Tok_Preprocess_Macro_Stmt)
2. Retrieve macro from lookup
3. Verify `MF_Allow_As_Definition` flag
2. Macro Processing
1. Parse via `parse_simple_preprocess`
2. Maintain original token categorization
3. Definition Construction
1. Format components:
* Attributes (if present)
* Specifiers (if present)
* Macro content
2. Build unified string representation
3. Convert to untyped code node
Notes:
* Early exits return NullCode for non-qualifying macros
* TODO: Pending AST_Macro implementation for proper attribute/specifier support
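A rough illustration of a macro that could qualify, assuming it were registered with the definition-allowing flag (the macro below is invented, not one of the library's samples):

```cpp
// A statement-style macro expanding to member definitions. A parser that allows
// macros "as definitions" accepts the bare invocation as a class member instead
// of rejecting it as an incomplete declaration.
#define DECLARE_COUNTER( Name )                     \
    static int Name;                                \
    int        get_##Name() const { return Name; }

struct Widget
{
    DECLARE_COUNTER( RefCount );   // captured as a single definition-like entry
};
```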
## `parse_pragma` ## `parse_pragma`
@ -659,21 +699,38 @@ When this function is called, attribute and specifiers may have been resolved, h
## `parse_params` ## `parse_params`
1. Consume either a `(` or `<` based on `use_template_capture` arg 1. Parameter List Initialization
2. If we immediately find a closing token, consume it and finish. 1. Delimiter handling based on context
3. If we encounter a variadic argument, consume it and return a `param_varadic` ast constant * Parentheses: `(...)` for standard parameters
4. `parse_type` * Angle brackets: `<...>` for template parameters
5. If we have a macro, parse it (Unreal has macros as tags to parameters and/or as entire arguments). 3. Initial parameter component initialization
6. So long as next token isn't a comma 3. Initial parameter component initialization
a. If we have an identifier * Macro reference
1. Consume it * Type information
2. Check for assignment: * Parameter value
a. Consume assign operator * Identifier token
b. Parse the expression 2. Primary Parameter Processing
7. While we continue to encounter commas 1. Handle varadic arguments
a. Consume them 2. Process preprocessor macros (`UPARAM` style)
b. Repeat steps 3 to 6.2.b 3. Parse parameter sequence
8. Consume the closing token * Type information
* Optional identifier
* Post-name macro expressions
* Default value expressions
4. Value expression capture with nested structure tracking
* Template depth counting
* Parentheses balance
* Text span calculation
3. Multi-Parameter Handling
1. Parse comma-separated entries
2. Maintain parameter structure
* Macro context
* Type information
* Identifier caching
* Post-name macro persistence
* Value assignments
3. Parameter list construction via `params_append`
4. Consume params capture termination token & return result.
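A hedged sketch of the parameter forms described above; `UPARAM` is a stand-in for an Unreal-style parameter tag macro and is defined locally so the snippet stands on its own:

```cpp
#define UPARAM( ... )   // placeholder for a registered parameter-tag macro

void standard( int count, float scale = 1.0f );              // type, name, default value expression
void tagged( UPARAM( ref ) int& value, const char* label );  // macro-tagged parameter
void variadic( const char* fmt, ... );                        // variadic argument case

template <typename T, int N = 8>                              // template capture uses '<' and '>'
struct FixedArray;
```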
## `parse_preprocess_cond` ## `parse_preprocess_cond`
@ -682,18 +739,40 @@ When this function is called, attribute and specifiers may have been resolved, h
## `parse_simple_preprocess` ## `parse_simple_preprocess`
There is still decent room for improvement in this setup. Right now the entire macro's relevant tokens are shoved into an untyped AST. It would be better to store it in an `AST_Macro` node instead down the line. 1. Basic Setup
1. Push scope
2. Capture initial macro token
3. Validate macro registration
* Lookup in macro registry
* Skip validation for unsupported macros
2. Functional Macro Processing
1. Handle macro invocation
* Parse opening parenthesis
* Track nested parenthesis level
* Capture parameter content
* Update macro span length
3. Macro Body Handling
1. Process associated block if macro expects body
* Parse curly brace delimited content
* Track nesting level
* Capture body content
2. Handle statement termination
* Context-specific semicolon handling
* Process inline comments
* Update macro span
4. Context-Specific Termination
1. Special case handling
* Enum context bypass
* Typedef context validation
* Global/class scope handling
2. Statement termination rules
* Process semicolons based on context
* Update token span accordingly
1. Consume the macro token Notes:
2. Check for an opening curly brace
1. Consume opening curly brace * Pending AST_Macro implementation for improved structure
2. Until the closing curly is encountered consume all tokens. * Current implementation uses simple token span capture
3. If the parent context is a typedef
1. Check for end statement
1. Consume it
2. Consume potential inline comment
3. Otherwise do steps 3 to 3.1.2
4. Shove it all in an untyped string
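A small illustration of the spans this routine captures: a functional macro invocation optionally followed by a braced body (`TEST_CASE` is invented for the example):

```cpp
#define TEST_CASE( name ) void test_##name()

// The invocation, its parenthesized arguments, and the braced body that follows
// are all swallowed into one untyped span by the current implementation.
TEST_CASE( vector_push_back )
{
    // test body ...
}
```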
## `parse_static_assert` ## `parse_static_assert`
@ -713,127 +792,244 @@ This will get changed heavily once we have better support for typename expressio
## `parse_variable_after_name` ## `parse_variable_after_name`
This is needed as a variable definition is not easily resolvable early on; it takes a long evaluation period before it's known that the declaration or definition is a variable. As such this function handles resolving a variable. This is needed as a variable definition is not easily resolvable early on; it takes a long evaluation period before it's known that the declaration or definition is a variable. As such this function handles resolving a variable.
By the point this function is called the following are known : export module flag, attributes, specifiers, value type, name By the point this function is called the following are known : export module flag, attributes, specifiers, value type, name
1. If it's an assignment, parse the assignment expression (currently to an untyped string) 1. Initialization Processing
2. If it's an opening curly brace, parse the expression within (currently to an untyped string). 1. Array dimension parsing
1. Consume the closing curly brace 2. Expression capture
3. If it's a `:`, we're dealing with bitfield definition: * Assignment expressions
1. Consume the assign classifier * Constructor initializations
2. Consume the expression (currently to an untyped string) * Bitfield specifications
4. If a comma is encountered : `parse_variable_declaration_list` 2. Expression Pattern Handling
5. Consume statement end 1. Direct assignment (`=`)
6. Check for inline comment * Parse assignment expression
2. Brace initialization (`{}`)
* Track nested braces
* Capture initialization list
3. Constructor initialization (`()`)
* Track parenthesis nesting
* Update initialization flag
4. Bitfield specification (`:`)
* Validate non-empty expression
* Capture bitfield size
3. Multi-Variable Processing
1. Handle comma-separated declarations
2. Statement termination
* Process semicolon
* Capture inline comments
3. Link variable chain via NextVar
4. AST Node Construction
1. Core properties
* Type (`CT_Variable`)
* Name caching
* Module flags
* Value type
2. Optional components
* Array expression
* Bitfield size
* Attributes/Specifiers
* Initialization value
* Constructor flag
* Parent/Next linkage
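The expression patterns listed above, shown as plain C++ (a hedged sketch, not input taken from the test suite):

```cpp
struct Flags
{
    unsigned ready   : 1;   // bitfield specification after ':'
    unsigned visible : 3;
};

int   assigned    = 1 + 2;                     // direct assignment expression
int   braced      { 42 };                      // brace initialization
float constructed ( 3.14f );                   // constructor-style initialization
int   matrix[2][2] = { { 1, 0 }, { 0, 1 } };   // array dimensions before the initializer
```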
## `parse_variable_declaration_list` ## `parse_variable_declaration_list`
1. Consume the comma 1. Chain Initialization
2. Parse specifiers 1. Initialize null variable chain head and tail
3. `parse_variable_after_name` 2. Process while comma token present
2. Per-Variable Processing
1. Specifier Collection
* Validate specifier ordering (const after pointer)
* Handle core specifiers: `ptr, ref, rvalue`
* Maintain specifier chain integrity
* Log invalid specifier usage but continue parsing
2. Variable Declaration
* Extract identifier name
* Parse remainder via `parse_variable_after_name`
* Note: Function pointers unsupported
3. Chain Management
1. First Variable
* Set as chain head and tail
2. Subsequent Variables
* Link to previous via NextVar
* Establish parent reference
* Update tail pointer
## `parse_class` Limitations:
* No function pointer support
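A one-line illustration of the comma-separated chains handled here, under the noted limitation that function pointers are unsupported:

```cpp
// Each entry after a comma may restate pointer/reference specifiers,
// but every entry shares the base type of the first declaration.
int value = 0, * ptr = &value, * const fixed = &value, & ref = value;
```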
## `parser_parse_class`
1. `parse_class_struct` 1. `parse_class_struct`
## `parse_constructor` ## `parser_parse_constructor`
This currently doesn't support postfix specifiers (planning to in the future) 1. Core Parse Sequence
1. Identifier extraction and parameter list capture
2. Handle construction variants:
* Colon-prefixed member initializer lists
* Direct body implementation
* Default/delete assignment forms
* Forward declarations
2. Initializer List Processing
1. Track nested parentheses balance
2. Capture full initializer span
3. Convert to untyped string representation
3. Implementation Variants
1. Body implementation
* Parse full function body
* Set `CT_Constructor` type
2. Forward declaration
* Process terminator and comments
* Set `CT_Constructor_Fwd` type
3. Special forms
* Handle assignment operator cases
* Capture inline comments for declarations
4. AST Construction
1. Core node attributes
* Cached identifier name
* Parameter list linkage
* Specifier chain
2. Optional components
* Initializer list
* Implementation body
* Inline comments
1. `parse_identifier` ## `parser_parse_define`
2. `parse_parameters`
3. If currtok is a `:`
1. Consume `:`
2. Parse the initializer list
3. `parse_function_body`
4. If currtok is an opening curly brace
1. `parse_function_body`
5. Otherwise:
1. Consume statement end
2. Check for inline comment
## `parse_destructor` 1. Token Stream Preparation
1. Handle optional preprocessor hash
2. Consume define directive
3. Validate identifier presence
2. Define Node Initialization
1. Construct CodeDefine with `CT_Preprocess_Define` type
2. Cache identifier name
3. Update scope context
3. Parameter Processing (Functional Macros)
1. Initial parameter detection
* Verify macro functionality
* Initialize parameter list node (`CT_Parameters_Define`)
2. Parameter chain construction
4. Content Handling
1. Content validation
* Verify presence
* Handle empty content case with newline
2. Content processing
* Strip formatting
* Preserve line termination
* Create untyped node
1. Check for and consume virtual specifier ## `parser_parse_destructor`
2. Check for the `~` operator
3. `parse_identifier`
4. Consume opening and closing parenthesis
5. Check for assignment operator:
1. Consume assignment op
2. Consume pure specifier `0`
6. If not pure virtual & currtok is opening curly brace:
1. `parse_function_body`
7. Otherwise:
1. Consume end statement
2. If currtok is comment : `parse_comment`
## `parse_enum` 1. Context Validation
1. Verify parser scope hierarchy
2. Check global namespace context
3. Process `virtual` specifier if present
2. Identifier Resolution
1. Parse prefix identifier in global scope
2. Validate destructor operator (`~`)
3. Capture destructor name
4. Enforce empty parameter list
3. Specifier Processing
1. Handle pure virtual case (`= 0`)
* Append `Spec_Pure` to specifiers
* Set `pure_virtual` flag
2. Process default specifier (= default)
* Parse as assignment expression
3. Validate specifier syntax
4. Implementation Processing
1. Function body (non-pure case)
* Parse complete body
* Set `CT_Destructor` type
2. Forward declaration
* Handle statement termination
* Process inline comments
* Set `CT_Destructor_Fwd` type
5. AST Construction
1. Build destructor node
2. Handle qualified names
* Concatenate prefix and identifier
3. Attach components
* Specifiers
* Implementation body
* Inline comments
1. Consume enum token ## `parser_parse_enum`
2. Check for and consume class token
3. `parse_attributes`
4. If there is an identifier consume it
5. Check for a `:`
1. Consume `:`
2. `parse_type`
6. If there is a body parse it (Consume `{`):
1. Newline : ast constant
2. Comment : `parse_comment`
3. Preprocess_Define : `parse_define`
4. Preprocess_Conditional (if, ifdef, ifndef, elif ) : `parse_preprocess_cond`
5. Preprocess_Else : ast constant
6. Preprocess_Endif : ast constant
7. Preprocess_Macro : `parse_simple_preprocess`
8. Preprocess_Pragma : `parse_pragma`
9. Preprocess_Unsupported : `parse_simple_preprocess`
10. An actual enum entry
1. Consume identifier
2. If there is an assignment operator:
1. Consume operator
2. Consume the expression (assigned to untyped string for now)
3. If a macro is encountered consume it (Unreal UMETA macro support)
3. If there is a comma, consume it
## `parse_export_body` 1. Declaration Components
1. Basic structure processing
* Enum type detection (`enum/enum class`)
* Attributes parsing
* Identifier capture
2. Underlying type resolution
* Standard type parsing
* Macro-based underlying type handling
* Classifier token validation
2. Body Processing
1. Entry parsing loop
* Preprocessor directives (`#define, #if, #pragma`)
* Enum member declarations
* Comment preservation
* Formatting tokens
2. Entry value handling
* Assignment expressions
* `UMETA` macro support
* Entry termination (commas)
3. Token span calculation for entries
3. AST Construction
1. Node type determination
* `CT_Enum/CT_Enum_Class` for definitions
* `CT_Enum_Fwd/CT_Enum_Class_Fwd` for declarations
2. Component attachment
* Name caching
* Body linkage
* Underlying type/macro
* Attributes
* Inline comments
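An illustrative enum exercising the pieces above: class form, explicit underlying type, value assignments, and an Unreal-style `UMETA` entry tag (defined locally as a placeholder):

```cpp
#define UMETA( ... )   // placeholder for Unreal's entry-metadata macro

enum class EStatus : unsigned char
{
    Idle    = 0,
    Running = 1 << 0,
    Paused  = 1 << 1 UMETA( DisplayName = "On Hold" ),   // macro tag consumed after the value
    Finished,
};
```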
## `parser_parse_export_body`
1. `parse_global_nspace` 1. `parse_global_nspace`
## `parse_extern_link_body` ## `parser_parse_extern_link_body`
1. `parse_global_nspace` 1. `parse_global_nspace`
## `parse_extern_link` ## `parser_parse_extern_link`
1. Consume Decl_Extern_Linkage 1. Consume `Tok_Decl_Extern_Linkage`
2. Consume the linkage identifier 2. Consume the linkage identifier
3. `parse_extern_link_body` 3. `parse_extern_link_body`
## `parse_friend` ## `parser_parse_friend`
1. Consume `friend` 1. Consume `friend`
2. `parse_type` 2. Parse specifiers
3. If the currtok is an identifier it's a function declaration or definition 3. `parse_type`
4. If the currtok is an identifier it's a function declaration or definition
1. `parse_function_after_name` 1. `parse_function_after_name`
4. Consume end statement so long as it's not a function definition 5. Otherwise it's an operator: `parse_operator_after_ret_type`
5. Check for inline comment, `parse_comment` if exists 6. Consume end statement so long as it's not a function definition
7. Check for inline comment, `parse_comment` if exists
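A short sketch of the friend forms those steps walk through (class and function names are invented):

```cpp
class Registry
{
    friend class Auditor;                                        // type-only friend, ends at ';'
    friend void audit( const Registry& );                        // function declaration path
    friend bool operator==( const Registry&, const Registry& )   // operator path, full definition
    {
        return true;
    }
};
```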
## `parse_function` ## `parser_parse_function`
1. Check and parse for `export` 1. Check and parse for `export`
2. `parse_attributes` 2. `parse_attributes`
3. Parse specifiers 3. Parse specifiers
4. `parse_type` 4. `parse_type` for return type
5. `parse_identifier` 5. `parse_identifier`
6. `parse_function_after_name` 6. `parse_function_after_name`
## `parse_namespace` ## `parser_parse_namespace`
1. Consume namespace declaration 1. Consume namespace declaration
2. Parse identifier 2. Parse identifier
3. `parse_global_namespace` 3. `parse_global_namespace`
## `parse_operator` ## `parser_parse_operator`
1. Check for and parse export declaration 1. Check for and parse export declaration
2. `parse_attributes` 2. `parse_attributes`
@ -841,7 +1037,7 @@ This currently doesn't support postfix specifiers (planning to in the future)
4. `parse_type` 4. `parse_type`
5. `parse_operator_after_ret_type` 5. `parse_operator_after_ret_type`
## `parse_operator_cast` ## `parser_parse_operator_cast`
1. Look for and parse a qualifier namespace for the cast (in case this is defined outside the class's scope) 1. Look for and parse a qualifier namespace for the cast (in case this is defined outside the class's scope)
2. Consume operator declaration 2. Consume operator declaration
@ -856,33 +1052,43 @@ This currently doesn't support postfix specifiers (planning to in the future)
1. Consume end statement 1. Consume end statement
2. Check for and consume comment : `parse_comment` 2. Check for and consume comment : `parse_comment`
## `parser_parse_struct`
## `parse_struct`
1. `parse_class_struct` 1. `parse_class_struct`
## `parse_template` ## `parser_parse_template`
Note: This currently doesn't support templated operator casts (going to need to add support for it) 1. Initial State Configuration
1. Module flag handling (`export` keyword)
2. Template parameter parsing via `parse_params`
* Uses specialized template capture mode
* Validates parameter list integrity
2. Declaration Type Resolution
1. Primary type dispatch
* `Class/Struct/Union` declarations
* Using declarations
2. Function/Variable handling
* Attribute collection
* Specifier validation (16 max)
* Function expectation detection
3. Special Case Processing
1. Global namespace constructors/destructors
* Context validation
* Delegation to `parse_global_nspace_constructor_destructor`
2. Operator cast implementations
* Token lookahead for operator detection
* Static symbol access validation
* Cast parsing delegation
4. AST Construction
1. Template node composition
* `CT_Template` type assignment
* Parameter linkage
* Declaration binding
* Module flag preservation
1. Check for and parse export declaration ## `parser_parse_type`
2. Consume template declaration
3. `parse_params`
4. Parse for any of the following:
1. Decl_Class : `parse_class`
2. Decl_Struct : `parse_struct`
3. Decl_Union : `parse_union`
4. Decl_Using : `parse_using`
5. The following compound into a resolved definition or declaration:
1. `parse_attributes`
2. Parse specifiers
3. Attempt to parse as constructor or destructor: `parse_global_nspace_constructor_destructor`
4. Otherwise: `parse_operator_function_or_variable`
## `parse_type` This implementation will be updated in the future to properly handle functional typename signatures.
This function's implementation is awful and not done correctly. It will most likely be overhauled in the future as I plan to segment the AST_Type into several AST variants along with sub-types to help produce robust type expressions.
Hopefully I won't need to make authentic type expressions as I was hoping to avoid that...
### Current Algorithm ### Current Algorithm
@ -895,7 +1101,8 @@ Anything that is in the qualifier capture of the function typename is treated as
1. If it's an in-place definition of a class, enum, struct, or union: 1. If it's an in-place definition of a class, enum, struct, or union:
2. If it's a decltype (Not supported yet but draft impl there) 2. If it's a decltype (Not supported yet but draft impl there)
3. If it's a compound native type expression (unsigned, char, short, long, int, float, double, etc ) 3. If it's a compound native type expression (unsigned, char, short, long, int, float, double, etc )
4. Ends up being a regular type alias of an identifier 4. If it's a typename macro
5. A regular type alias of an identifier
5. Parse specifiers (postfix) 5. Parse specifiers (postfix)
6. We need to now look ahead to see if we're dealing with a function typename 6. We need to now look ahead to see if we're dealing with a function typename
7. If we're dealing with a function typename: 7. If we're dealing with a function typename:
@ -923,7 +1130,8 @@ Anything that is in the qualifier capture of the function typename is treated as
1. If it's an in-place definition of a class, enum, struct, or union: 1. If it's an in-place definition of a class, enum, struct, or union:
2. If it's a decltype (Not supported yet but draft impl there) 2. If it's a decltype (Not supported yet but draft impl there)
3. If it's a compound native type expression (unsigned, char, short, long, int, float, double, etc ) 3. If it's a compound native type expression (unsigned, char, short, long, int, float, double, etc )
4. Ends up being a regular type alias of an identifier 4. If it's a typename macro
5. A regular type alias of an identifier
4. Parse specifiers (postfix) 4. Parse specifiers (postfix)
1. If any specifiers are found populate specifiers code with them. 1. If any specifiers are found populate specifiers code with them.
5. We need to now look ahead to see if we're dealing with a function typename 5. We need to now look ahead to see if we're dealing with a function typename
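Some typename shapes the algorithm above has to recognize; `MACRO_TYPE` stands in for a registered typename macro and is defined locally so the snippet compiles on its own:

```cpp
#define MACRO_TYPE( T ) T   // placeholder for a typename-producing macro

unsigned long long   counter;                        // compound native type expression
const char* const    names[ 4 ] = {};                // prefix const, pointer, suffix const, array
MACRO_TYPE( float )  scale = 1.0f;                   // typename produced by a macro
int ( *callback )( const char*, int ) = nullptr;     // function typename (pointer to function)
```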
@ -984,7 +1192,7 @@ Anything that is in the qualifier capture of the function typename is treated as
6. Decl_Union 6. Decl_Union
7. Preprocess_Define 7. Preprocess_Define
8. Preprocess_Conditional (if, ifdef, ifndef, elif, else, endif) 8. Preprocess_Conditional (if, ifdef, ifndef, elif, else, endif)
9. Preprocess_Macro 9. Preprocess_Macro (`MT_Statement` or `MT_Typename`)
10. Preprocess_Pragma 10. Preprocess_Pragma
11. Unsupported preprocess directive 11. Unsupported preprocess directive
12. Variable 12. Variable
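For the `Preprocess_Macro` entries above to be classified as `MT_Statement` or `MT_Typename`, the macro has to be registered with the context ahead of parsing. A hedged sketch against the C++ interface, mirroring the `gen_register_macros` usage shown later in this diff; `ARRAY_OF` is a placeholder name, not from the changeset.

```cpp
// Register how each macro behaves so the parser can classify it while
// walking a body. FOR_USERS mirrors the C usage shown later in this diff;
// ARRAY_OF is a hypothetical functional typename macro.
register_macro({ txt("FOR_USERS"), MT_Statement, MF_Functional });
register_macro({ txt("ARRAY_OF"),  MT_Typename,  MF_Functional });
```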

View File

@ -1129,6 +1129,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
CodeBody interface = def_body(CT_Global_Body); CodeBody interface = def_body(CT_Global_Body);
for ( Code entry = parsed_interface.begin(); entry != parsed_interface.end(); ++ entry ) switch( entry->Type ) for ( Code entry = parsed_interface.begin(); entry != parsed_interface.end(); ++ entry ) switch( entry->Type )
{ {
case CT_Preprocess_If:
case CT_Preprocess_IfDef: case CT_Preprocess_IfDef:
{ {
b32 found = ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_interface, interface ); b32 found = ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_interface, interface );
@ -1217,11 +1218,18 @@ R"(#define <interface_name>( code ) _Generic( (code), \
CodeBody inlines = def_body(CT_Global_Body); CodeBody inlines = def_body(CT_Global_Body);
for ( Code entry = parsed_inlines.begin(); entry != parsed_inlines.end(); ++ entry ) switch( entry->Type ) for ( Code entry = parsed_inlines.begin(); entry != parsed_inlines.end(); ++ entry ) switch( entry->Type )
{ {
case CT_Preprocess_If:
case CT_Preprocess_IfDef: case CT_Preprocess_IfDef:
{ {
b32 found = ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_inlines, inlines ); b32 found = ignore_preprocess_cond_block(txt("GEN_INTELLISENSE_DIRECTIVES"), entry, parsed_inlines, inlines );
if (found) break; if (found) break;
found = ignore_preprocess_cond_block(txt("GEN_COMPILER_CPP"), entry, parsed_interface, interface);
if (found) break;
found = ignore_preprocess_cond_block(txt("0"), entry, parsed_interface, interface);
if (found) break;
inlines.append(entry); inlines.append(entry);
} }
break; break;

View File

@ -420,6 +420,10 @@ word make_code, gen_make_code
namespace set_allocator_, gen_set_allocator_ namespace set_allocator_, gen_set_allocator_
word register_macro, gen_register_macro
word register_macros, gen_register_macros
word register_macros_arr, gen_register_macros_arr
namespace Opts_, gen_Opts_ namespace Opts_, gen_Opts_
namespace def_, gen_def_ namespace def_, gen_def_

View File

@ -28,10 +28,9 @@ b32 ignore_preprocess_cond_block( Str cond_sig, Code& entry_iter, CodeBody& pars
//log_fmt("Preprocess cond found: %S\n", cond->Content); //log_fmt("Preprocess cond found: %S\n", cond->Content);
found = true; found = true;
s32 depth = 1; s32 depth = 0;
++ entry_iter; for(b32 continue_for = true; continue_for && entry_iter != parsed_body.end(); ) switch (entry_iter->Type)
for(b32 continue_for = true; continue_for && entry_iter != parsed_body.end(); ) switch {
(entry_iter->Type) {
case CT_Preprocess_If: case CT_Preprocess_If:
case CT_Preprocess_IfDef: case CT_Preprocess_IfDef:
case CT_Preprocess_IfNotDef: case CT_Preprocess_IfNotDef:
@ -41,20 +40,18 @@ b32 ignore_preprocess_cond_block( Str cond_sig, Code& entry_iter, CodeBody& pars
case CT_Preprocess_Else: case CT_Preprocess_Else:
++ entry_iter; ++ entry_iter;
for(; continue_for && entry_iter != parsed_body.end(); ++ entry_iter) if (depth == 1) for(; entry_iter != parsed_body.end(); ++ entry_iter)
{ {
if (entry_iter->Type == CT_Preprocess_EndIf) if ( entry_iter->Type == CT_Preprocess_EndIf)
{
continue_for = false;
break; break;
}
body.append(entry_iter); body.append(entry_iter);
} }
break; break;
case CT_Preprocess_EndIf: case CT_Preprocess_EndIf:
{ {
depth --; -- depth;
if (depth == 0) { if (depth == 0) {
continue_for = false; continue_for = false;
break; break;

View File

@ -106,3 +106,9 @@ case Spec_Mutable: \
case Spec_Static: \ case Spec_Static: \
case Spec_Thread_Local: \ case Spec_Thread_Local: \
case Spec_Volatile case Spec_Volatile
#define GEN_PARSER_TYPENAME_ALLOWED_SUFFIX_SPECIFIER_CASES \
case Spec_Const: \
case Spec_Ptr: \
case Spec_Ref: \
case Spec_RValue
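A brief sketch of how a case-list macro like the one above is intended to be consumed inside the parser; the surrounding switch and the `spec`, `specs_found`, and `num_specs` names are illustrative, not from this changeset.

```cpp
// The macro expands to a run of `case` labels, so the call site supplies the
// trailing colon and the shared body.
switch ( spec )
{
	GEN_PARSER_TYPENAME_ALLOWED_SUFFIX_SPECIFIER_CASES:
		// Allowed to trail a typename; keep it attached to the type expression.
		specs_found[ num_specs ++ ] = spec;
	break;

	default:
		// Not a valid typename suffix; leave it for the caller.
	break;
}
```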

View File

@ -368,7 +368,49 @@ if ( $test -and $false )
Pop-Location Pop-Location
} }
if ($test -and $true) if ( $test -and $true )
{
$path_test_c = join-path $path_test c_library
$path_build = join-path $path_test_c build
$path_gen = join-path $path_test_c gen
if ( -not(Test-Path($path_build) )) {
New-Item -ItemType Directory -Path $path_build
}
if ( -not(Test-Path($path_gen) )) {
New-Item -ItemType Directory -Path $path_gen
}
$path_singleheader_include = join-path $path_c_library gen
$includes = @( $path_singleheader_include )
$unit = join-path $path_test_c "test_cuik.c"
$executable = join-path $path_build "test_cuik.exe"
$compiler_args = @()
$compiler_args += ( $flag_define + 'GEN_TIME' )
$compiler_args += $flag_all_c
$compiler_args += $flag_updated_cpp_macro
$compiler_args += $flag_c11
$linker_args = @(
$flag_link_win_subsystem_console
)
$result = build-simple $path_build $includes $compiler_args $linker_args $unit $executable
Push-Location $path_test_c
if ( Test-Path( $executable ) ) {
write-host "`nRunning c_library test"
$time_taken = Measure-Command { & $executable
| ForEach-Object {
write-host `t $_ -ForegroundColor Green
}
}
write-host "`nc_library generator completed in $($time_taken.TotalMilliseconds) ms"
}
Pop-Location
}
if ($test -and $false)
{ {
$path_test_cpp = join-path $path_test cpp_library $path_test_cpp = join-path $path_test cpp_library
$path_build = join-path $path_test_cpp build $path_build = join-path $path_test_cpp build

View File

@ -0,0 +1,21 @@
MIT License
Copyright (c) 2024 Yasser Arguelles Snape
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

View File

@ -0,0 +1,454 @@
#pragma once
#include "../tb_internal.h"
#include <arena_array.h>
#include <limits.h>
enum {
INT_WIDEN_LIMIT = 3,
FAST_IDOM_LIMIT = 20
};
#if TB_PACKED_USERS
#define USERN(u) ((TB_Node*) ((u)->_n)) // node
#define USERI(u) ((int) ((u)->_slot)) // index
#else
#define USERN(u) ((u)->_n) // node
#define USERI(u) ((u)->_slot) // index
#endif
#define FOR_USERS(u, n) for (TB_User *u = (n)->users, *_end_ = &u[(n)->user_count]; u != _end_; u++)
////////////////////////////////
// Constant prop
////////////////////////////////
typedef struct {
int64_t min, max;
// for known bit analysis
uint64_t known_zeros, known_ones;
// we really don't wanna widen 18 quintillion times, it's never worth it
uint64_t widen;
} LatticeInt;
// Represents the fancier type system within the optimizer, it's
// all backed by my shitty understanding of lattice theory
struct Lattice {
enum {
LATTICE_BOT, // bot ^ x = bot
LATTICE_TOP, // top ^ x = x
LATTICE_INT,
LATTICE_TUPLE,
// float (each float type has it's own separate set of these btw):
//
// top
// / \
// / \
// / \
// / \
// /|\ /|\
// / | \ / | \
// N N N 0.0 1.5 ... # fltcon
// \ | / \ | /
// \|/ \|/
// nan ~nan
// \ /
// \ /
// \ /
// \ /
// flt
//
// N means NaN it's just too long to write in the diagram
LATTICE_FLT32, LATTICE_FLT64, // bottom types for floats
LATTICE_NAN32, LATTICE_NAN64,
LATTICE_XNAN32, LATTICE_XNAN64,
LATTICE_FLTCON32, LATTICE_FLTCON64, // _f32 and _f64
// pointers:
// anyptr
// / \
// / \
// / /|\
// | / | \
// null a b ... # ptrcon
// | \ | /
// \ ~null
// \ /
// allptr
LATTICE_ALLPTR,
LATTICE_ANYPTR,
LATTICE_NULL,
LATTICE_XNULL,
LATTICE_PTRCON,
// memory types
LATTICE_MEMORY,
// control tokens:
// top
// |
// dead
// |
// live
// |
// bot
LATTICE_LIVE,
LATTICE_DEAD,
} tag;
union {
size_t _elem_count; // LATTICE_TUPLE
LatticeInt _int; // LATTICE_INT
TB_Symbol* _ptr; // LATTICE_PTRCON
float _f32; // LATTICE_FLTCON32
double _f64; // LATTICE_FLTCON64
};
union {
Lattice* elems[0];
};
};
////////////////////////////////
// Cool properties
////////////////////////////////
uint32_t cfg_flags(TB_Node* n);
bool cfg_is_region(TB_Node* n);
bool cfg_is_natural_loop(TB_Node* n);
bool cfg_is_branch(TB_Node* n);
bool cfg_is_fork(TB_Node* n);
bool cfg_is_terminator(TB_Node* n);
bool cfg_is_endpoint(TB_Node* n);
bool tb_node_is_safepoint(TB_Node* n);
bool tb_node_has_mem_out(TB_Node* n);
TB_Node* tb_node_mem_in(TB_Node* n);
////////////////////////////////
// CFG
////////////////////////////////
typedef struct {
TB_Node *phi, *n;
int dst, src;
} PhiVal;
////////////////////////////////
// Core optimizer
////////////////////////////////
typedef struct {
TB_Module* mod;
NL_HashSet visited;
size_t ws_cap;
size_t ws_cnt;
TB_Function** ws;
} IPOSolver;
static bool cant_signed_overflow(TB_Node* n) {
return TB_NODE_GET_EXTRA_T(n, TB_NodeBinopInt)->ab & TB_ARITHMATIC_NSW;
}
static bool is_proj(TB_Node* n) {
return n->type == TB_PROJ || n->type == TB_MACH_PROJ || n->type == TB_BRANCH_PROJ;
}
static uint64_t tb__mask(uint64_t bits) {
return ~UINT64_C(0) >> (64 - bits);
}
static bool cfg_is_cproj(TB_Node* n) {
return is_proj(n) && n->dt.type == TB_TAG_CONTROL;
}
static bool cfg_is_mproj(TB_Node* n) {
return n->type == TB_PROJ && n->dt.type == TB_TAG_MEMORY;
}
// includes tuples which have control flow
static bool cfg_is_control(TB_Node* n) {
if (n->dt.type == TB_TAG_CONTROL) { return true; }
if (n->dt.type == TB_TAG_TUPLE) {
FOR_USERS(u, n) {
if (cfg_is_cproj(USERN(u))) { return true; }
}
}
return false;
}
static bool cfg_is_bb_entry(TB_Node* n) {
if (cfg_is_region(n)) {
return true;
} else if (cfg_is_cproj(n) && (n->inputs[0]->type == TB_ROOT || cfg_is_fork(n->inputs[0]))) {
// Start's control proj or a branch target
return true;
} else {
return false;
}
}
// returns a BranchProj's falsey proj, if it's an if-like TB_BRANCH
static TB_NodeBranchProj* cfg_if_branch(TB_Node* n) {
size_t succ_count = 0;
if (n->type == TB_BRANCH || n->type == TB_AFFINE_LATCH) {
TB_NodeBranch* br = TB_NODE_GET_EXTRA(n);
succ_count = br->succ_count;
} else if (cfg_is_branch(n)) {
FOR_USERS(u, n) {
if (USERN(u)->type == TB_BRANCH_PROJ) { succ_count++; }
}
} else {
tb_todo();
}
if (succ_count != 2) { return NULL; }
FOR_USERS(u, n) {
if (USERN(u)->type == TB_BRANCH_PROJ) {
TB_NodeBranchProj* proj = TB_NODE_GET_EXTRA(USERN(u));
if (proj->index == 1) { return proj; }
}
}
// shouldn't be reached wtf?
return NULL;
}
static bool is_mem_out_op(TB_Node* n) {
return n->dt.type == TB_TAG_MEMORY || (n->type >= TB_STORE && n->type <= TB_ATOMIC_CAS) || (n->type >= TB_CALL && n->type <= TB_TAILCALL) || n->type == TB_SPLITMEM || n->type == TB_MERGEMEM || n->type == TB_DEBUG_LOCATION;
}
static bool is_mem_end_op(TB_Node* n) {
return n->type == TB_RETURN || n->type == TB_TRAP || n->type == TB_UNREACHABLE;
}
static bool is_mem_in_op(TB_Node* n) {
return is_mem_out_op(n) || n->type == TB_SAFEPOINT || n->type == TB_LOAD;
}
static bool is_mem_only_in_op(TB_Node* n) {
return n->type == TB_SAFEPOINT || n->type == TB_LOAD;
}
static bool single_use(TB_Node* n) {
return n->user_count == 1;
}
static TB_User* get_single_use(TB_Node* n) {
return n->user_count == 1 ? &n->users[0] : NULL;
}
static bool tb_node_is_pinned(TB_Node* n) {
if ((n->type >= TB_ROOT && n->type <= TB_SAFEPOINT) || is_proj(n) || cfg_is_control(n)) {
return true;
}
return cfg_flags(n) & NODE_PINNED;
}
////////////////////////////////
// CFG analysis
////////////////////////////////
// if we see a branch projection, it may either be a BB itself
// or if it enters a REGION directly, then that region is the BB.
static TB_Node* cfg_next_bb_after_cproj(TB_Node* proj) {
return proj;
}
static TB_User* proj_with_index(TB_Node* n, int i) {
FOR_USERS(u, n) if (is_proj(USERN(u))) {
TB_NodeProj* p = TB_NODE_GET_EXTRA(USERN(u));
if (p->index == i) { return u; }
}
return NULL;
}
static TB_User* cfg_next_user(TB_Node* n) {
FOR_USERS(u, n) {
if (cfg_is_control(USERN(u))) { return u; }
}
return NULL;
}
static bool cfg_has_phis(TB_Node* n) {
if (!cfg_is_region(n)) { return false; }
FOR_USERS(u, n) {
if (USERN(u)->type == TB_PHI) { return true; }
}
return false;
}
static bool cfg_is_unreachable(TB_Node* n) {
FOR_USERS(u, n) {
if (USERN(u)->type == TB_UNREACHABLE) { return true; }
}
return false;
}
static TB_Node* cfg_next_control(TB_Node* n) {
FOR_USERS(u, n) {
if (cfg_is_control(USERN(u))) { return USERN(u); }
}
return NULL;
}
static TB_Node* cfg_get_pred(TB_CFG* cfg, TB_Node* n, int i) {
n = n->inputs[i];
for (;;) {
ptrdiff_t search = nl_map_get(cfg->node_to_block, n);
if (search >= 0 || n->type == TB_DEAD || cfg_is_region(n)) {
return n;
}
n = n->inputs[0];
}
}
static TB_BasicBlock* cfg_get_pred_bb(TB_CFG* cfg, TB_Node* n, int i) {
n = n->inputs[i];
for (;;) {
ptrdiff_t search = nl_map_get(cfg->node_to_block, n);
if (search >= 0) {
return cfg->node_to_block[search].v;
} else if (n->type == TB_DEAD || cfg_is_region(n)) {
return NULL;
}
n = n->inputs[0];
}
}
// shorthand because we use it a lot
static TB_Node* idom(TB_CFG* cfg, TB_Node* n) {
TB_ASSERT(cfg->node_to_block == NULL);
ptrdiff_t search = nl_map_get(cfg->node_to_block, n);
if (search < 0) {
return NULL;
}
TB_BasicBlock* dom = cfg->node_to_block[search].v->dom;
return dom ? dom->start : NULL;
}
static int dom_depth(TB_CFG* cfg, TB_Node* n) {
return nl_map_get_checked(cfg->node_to_block, n)->dom_depth;
}
static bool slow_dommy2(TB_BasicBlock* expected_dom, TB_BasicBlock* bb) {
while (bb->dom_depth > expected_dom->dom_depth) {
bb = bb->dom;
}
return bb == expected_dom;
}
static bool slow_dommy(TB_CFG* cfg, TB_Node* expected_dom, TB_Node* bb) {
TB_BasicBlock* a = nl_map_get_checked(cfg->node_to_block, expected_dom);
TB_BasicBlock* b = nl_map_get_checked(cfg->node_to_block, bb);
return slow_dommy2(a, b);
}
////////////////////////////////
// Unordered SoN successor iterator
////////////////////////////////
#define FOR_SUCC(it, n) for (SuccIter it = succ_iter(n); succ_iter_next(&it);)
typedef struct {
TB_Node* n;
TB_Node* succ;
int index; // -1 if we're not walking CProjs
} SuccIter;
static SuccIter succ_iter(TB_Node* n) {
if (n->dt.type == TB_TAG_TUPLE) {
return (SuccIter){ n, NULL, 0 };
} else if (!cfg_is_endpoint(n)) {
return (SuccIter){ n, NULL, -1 };
} else {
return (SuccIter){ n, NULL, n->user_count };
}
}
static bool succ_iter_next(SuccIter* restrict it) {
TB_Node* n = it->n;
// not branching? ok pick single next control
if (it->index == -1) {
it->index = n->user_count; // terminate
it->succ = cfg_next_control(n);
return true;
}
// if we're in this loop, we know we're scanning for CProjs
while (it->index < n->user_count) {
TB_Node* un = USERN(&n->users[it->index++]);
if (cfg_is_cproj(un)) {
it->succ = un;
return true;
}
}
return false;
}
// lovely properties
bool cfg_is_region(TB_Node* n);
bool cfg_is_natural_loop(TB_Node* n);
bool cfg_is_terminator(TB_Node* n);
bool cfg_is_endpoint(TB_Node* n);
// internal debugging mostly
void tb_print_dumb_node(Lattice** types, TB_Node* n);
// computes basic blocks but also dominators and loop nests if necessary.
TB_CFG tb_compute_cfg(TB_Function* f, TB_Worklist* ws, TB_Arena* tmp_arena, bool dominators);
void tb_free_cfg(TB_CFG* cfg);
// TB_Worklist API
void worklist_alloc(TB_Worklist* restrict ws, size_t initial_cap);
void worklist_free(TB_Worklist* restrict ws);
void worklist_clear(TB_Worklist* restrict ws);
void worklist_clear_visited(TB_Worklist* restrict ws);
bool worklist_test(TB_Worklist* restrict ws, TB_Node* n);
bool worklist_test_n_set(TB_Worklist* restrict ws, TB_Node* n);
void worklist_push(TB_Worklist* restrict ws, TB_Node* restrict n);
int worklist_count(TB_Worklist* ws);
TB_Node* worklist_pop(TB_Worklist* ws);
void subsume_node(TB_Function* f, TB_Node* n, TB_Node* new_n);
void subsume_node2(TB_Function* f, TB_Node* n, TB_Node* new_n);
void subsume_node_without_phis(TB_Function* f, TB_Node* n, TB_Node* new_n);
void tb__gvn_remove(TB_Function* f, TB_Node* n);
// Scheduler's cost model crap (talk about these in codegen_impl.h)
typedef int (*TB_GetLatency)(TB_Function* f, TB_Node* n, TB_Node* end);
typedef uint64_t (*TB_GetUnitMask)(TB_Function* f, TB_Node* n);
// Local scheduler
void tb_list_scheduler(TB_Function* f, TB_CFG* cfg, TB_Worklist* ws, DynArray(PhiVal*) phi_vals, TB_BasicBlock* bb, TB_GetLatency get_lat, TB_GetUnitMask get_unit_mask, int unit_count);
void tb_greedy_scheduler(TB_Function* f, TB_CFG* cfg, TB_Worklist* ws, DynArray(PhiVal*) phi_vals, TB_BasicBlock* bb);
void tb_dataflow(TB_Function* f, TB_Arena* arena, TB_CFG cfg);
// Global scheduler
void tb_clear_anti_deps(TB_Function* f, TB_Worklist* ws);
void tb_renumber_nodes(TB_Function* f, TB_Worklist* ws);
void tb_compact_nodes(TB_Function* f, TB_Worklist* ws);
void tb_global_schedule(TB_Function* f, TB_Worklist* ws, TB_CFG cfg, bool early_only, bool dataflow, TB_GetLatency get_lat);
void tb_compute_synthetic_loop_freq(TB_Function* f, TB_CFG* cfg);
// BB placement
int bb_placement_rpo(TB_Arena* arena, TB_CFG* cfg, int* dst_order);
int bb_placement_trace(TB_Arena* arena, TB_CFG* cfg, int* dst_order);
// makes arch-friendly IR
void tb_opt_legalize(TB_Function* f, TB_Arch arch);
int tb_opt_peeps(TB_Function* f);
int tb_opt_locals(TB_Function* f);
// Integrated IR debugger
void tb_integrated_dbg(TB_Function* f, TB_Node* n);
Lattice* latuni_get(TB_Function* f, TB_Node* n);
void tb__print_regmask(RegMask* mask);

View File

@ -0,0 +1,48 @@
#define GEN_IMPLEMENTATION
#define GEN_DEFINE_LIBRARY_CODE_CONSTANTS
#include "gen_singleheader.h"
#define gen_iterator( Type, container, iter ) \
gen_begin_ ## Type(container); \
iter != gen_end_ ## Type(container); \
code = gen_next_ ## Type(container, iter)
int main()
{
gen_Context ctx = {0};
gen_init(& ctx);
gen_register_macros( args(
((gen_Macro){ txt("USERN"), MT_Expression, MF_Functional }),
((gen_Macro){ txt("USERI"), MT_Expression, MF_Functional }),
((gen_Macro){ txt("USERI"), MT_Expression, MF_Functional }),
((gen_Macro){ txt("FOR_USERS"), MT_Statement, MF_Functional }),
((gen_Macro){ txt("FOR_SUCC"), MT_Statement, MF_Functional })
));
gen_CodeBody h_passes = gen_parse_file("Cuik/tb/opt/passes.h");
for (gen_Code code = gen_iterator(CodeBody, h_passes, code)) switch (code->Type) {
case CT_Function_Fwd:
case CT_Function:
gen_log_fmt("%S:\t%S RETURN_TYPE: %S PARAMS:%S\n"
, gen_codetype_to_str(code->Type)
, code->Name
, gen_strbuilder_to_str( gen_typename_to_strbuilder((gen_CodeTypename) code->ReturnType))
, gen_strbuilder_to_str( gen_params_to_strbuilder( (gen_CodeParams) code->Params))
);
break;
case CT_Variable:
gen_log_fmt("%S:\t%S Type:%S\n", gen_codetype_to_str(code->Type), code->Name, code->ValueType);
break;
case CT_Struct_Fwd:
case CT_Struct:
case CT_Typedef:
gen_log_fmt("%S: %S\n", gen_codetype_to_str(code->Type), code->Name);
break;
}
gen_deinit(& ctx);
return 0;
}