Compare commits

..

4 Commits

9 changed files with 324 additions and 325 deletions

View File

@ -26,8 +26,7 @@ enum TokFlags : u32
struct Token
{
char const* Text;
sptr Length;
Str Text;
TokType Type;
s32 Line;
s32 Column;
@ -36,74 +35,71 @@ struct Token
constexpr Token NullToken { nullptr, 0, Tok_Invalid, false, 0, TF_Null };
AccessSpec tok_to_access_specifier(Token tok)
{
forceinline
AccessSpec tok_to_access_specifier(Token tok) {
return scast(AccessSpec, tok.Type);
}
Str tok_to_str(Token tok)
{
Str str = { tok.Text, tok.Length };
return str;
forceinline
Str tok_to_str(Token tok) {
return tok.Text;
}
bool tok_is_valid( Token tok )
{
return tok.Text && tok.Length && tok.Type != Tok_Invalid;
forceinline
bool tok_is_valid( Token tok ) {
return tok.Text.Ptr && tok.Text.Len && tok.Type != Tok_Invalid;
}
bool tok_is_access_operator(Token tok)
{
forceinline
bool tok_is_access_operator(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_AccessOperator );
}
bool tok_is_access_specifier(Token tok)
{
forceinline
bool tok_is_access_specifier(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_AccessSpecifier );
}
bool tok_is_attribute(Token tok)
{
forceinline
bool tok_is_attribute(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_Attribute );
}
bool tok_is_operator(Token tok)
{
forceinline
bool tok_is_operator(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_Operator );
}
bool tok_is_preprocessor(Token tok)
{
forceinline
bool tok_is_preprocessor(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess );
}
bool tok_is_preprocess_cond(Token tok)
{
forceinline
bool tok_is_preprocess_cond(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_Preprocess_Cond );
}
bool tok_is_specifier(Token tok)
{
forceinline
bool tok_is_specifier(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_Specifier );
}
bool tok_is_end_definition(Token tok)
{
forceinline
bool tok_is_end_definition(Token tok) {
return bitfield_is_equal( u32, tok.Flags, TF_EndDefinition );
}
StrBuilder tok_to_strbuilder(Token tok)
{
StrBuilder result = strbuilder_make_reserve( GlobalAllocator, kilobytes(4) );
Str type_str = toktype_to_str( tok.Type );
strbuilder_append_fmt( & result, "Line: %d Column: %d, Type: %.*s Content: %.*s"
, tok.Line, tok.Column
, type_str.Len, type_str.Ptr
, tok.Length, tok.Text
, tok.Text.Len, tok.Text.Ptr
);
return result;
}
@ -122,14 +118,12 @@ Token* lex_current(TokArray* self, bool skip_formatting )
while ( self->Arr[self->Idx].Type == Tok_NewLine || self->Arr[self->Idx].Type == Tok_Comment )
self->Idx++;
}
return & self->Arr[self->Idx];
}
Token* lex_peek(TokArray self, bool skip_formatting)
{
s32 idx = self.Idx;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
@ -137,14 +131,12 @@ Token* lex_peek(TokArray self, bool skip_formatting)
return & self.Arr[idx];
}
return & self.Arr[idx];
}
Token* lex_previous(TokArray self, bool skip_formatting)
{
s32 idx = self.Idx;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
@ -152,14 +144,12 @@ Token* lex_previous(TokArray self, bool skip_formatting)
return & self.Arr[idx];
}
return & self.Arr[idx - 1];
}
Token* lex_next(TokArray self, bool skip_formatting)
{
s32 idx = self.Idx;
if ( skip_formatting )
{
while ( self.Arr[idx].Type == Tok_NewLine )
@ -167,7 +157,6 @@ Token* lex_next(TokArray self, bool skip_formatting)
return & self.Arr[idx + 1];
}
return & self.Arr[idx + 1];
}
@ -234,17 +223,17 @@ forceinline
s32 lex_preprocessor_directive( LexContext* ctx )
{
char const* hash = ctx->scanner;
Token hash_tok = { hash, 1, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
Token hash_tok = { { hash, 1 }, Tok_Preprocess_Hash, ctx->line, ctx->column, TF_Preprocess };
array_append( Lexer_Tokens, hash_tok );
move_forward();
skip_whitespace();
ctx->token.Text = ctx->scanner;
ctx->token.Text.Ptr = ctx->scanner;
while (ctx->left && ! char_is_space((* ctx->scanner)) )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
ctx->token.Type = str_to_toktype( tok_to_str(ctx->token) );
@ -268,18 +257,18 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( * ctx->scanner == '\\' && ! within_string && ! within_char )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
if ( (* ctx->scanner) == '\r' )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
if ( (* ctx->scanner) == '\n' )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
continue;
}
else
@ -295,22 +284,22 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( (* ctx->scanner) == '\r' )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
if ( (* ctx->scanner) == '\n' )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
break;
}
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
ctx->token.Length = ctx->token.Length + ctx->token.Text - hash;
ctx->token.Text = hash;
ctx->token.Text.Len = ctx->token.Text.Len + ctx->token.Text.Ptr - hash;
ctx->token.Text.Ptr = hash;
array_append( Lexer_Tokens, ctx->token );
return Lex_Continue; // Skip found token, its all handled here.
}
@ -333,31 +322,31 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( ctx->token.Type == Tok_Preprocess_Define )
{
Token name = { ctx->scanner, 0, Tok_Identifier, ctx->line, ctx->column, TF_Preprocess };
Token name = { { ctx->scanner, 0 }, Tok_Identifier, ctx->line, ctx->column, TF_Preprocess };
name.Text = ctx->scanner;
name.Length = 1;
name.Text.Ptr = ctx->scanner;
name.Text.Len = 1;
move_forward();
while ( ctx->left && ( char_is_alphanumeric((* ctx->scanner)) || (* ctx->scanner) == '_' ) )
{
move_forward();
name.Length++;
name.Text.Len++;
}
if ( ctx->left && (* ctx->scanner) == '(' )
{
move_forward();
name.Length++;
name.Text.Len++;
}
array_append( Lexer_Tokens, name );
u64 key = crc32( name.Text, name.Length );
u64 key = crc32( name.Text.Ptr, name.Text.Len );
hashtable_set(ctx->defines, key, tok_to_str(name) );
}
Token preprocess_content = { ctx->scanner, 0, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
Token preprocess_content = { { ctx->scanner, 0 }, Tok_Preprocess_Content, ctx->line, ctx->column, TF_Preprocess };
if ( ctx->token.Type == Tok_Preprocess_Include )
{
@ -365,7 +354,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( (* ctx->scanner) != '"' && (* ctx->scanner) != '<' )
{
StrBuilder directive_str = strbuilder_fmt_buf( GlobalAllocator, "%.*s", min( 80, ctx->left + preprocess_content.Length ), ctx->token.Text );
StrBuilder directive_str = strbuilder_fmt_buf( GlobalAllocator, "%.*s", min( 80, ctx->left + preprocess_content.Text.Len ), ctx->token.Text.Ptr );
log_failure( "gen::Parser::lex: Expected '\"' or '<' after #include, not '%c' (%d, %d)\n%s"
, (* ctx->scanner)
@ -376,16 +365,16 @@ s32 lex_preprocessor_directive( LexContext* ctx )
return Lex_ReturnNull;
}
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
while ( ctx->left && (* ctx->scanner) != '"' && (* ctx->scanner) != '>' )
{
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
}
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
if ( (* ctx->scanner) == '\r' && ctx->scanner[1] == '\n' )
{
@ -416,24 +405,24 @@ s32 lex_preprocessor_directive( LexContext* ctx )
if ( (* ctx->scanner) == '\\' && ! within_string && ! within_char )
{
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
if ( (* ctx->scanner) == '\r' )
{
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
}
if ( (* ctx->scanner) == '\n' )
{
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
continue;
}
else
{
StrBuilder directive_str = strbuilder_make_length( GlobalAllocator, ctx->token.Text, ctx->token.Length );
StrBuilder content_str = strbuilder_fmt_buf( GlobalAllocator, "%.*s", min( 400, ctx->left + preprocess_content.Length ), preprocess_content.Text );
StrBuilder directive_str = strbuilder_make_length( GlobalAllocator, ctx->token.Text.Ptr, ctx->token.Text.Len );
StrBuilder content_str = strbuilder_fmt_buf( GlobalAllocator, "%.*s", min( 400, ctx->left + preprocess_content.Text.Len ), preprocess_content.Text.Ptr );
log_failure( "gen::Parser::lex: Invalid escape sequence '\\%c' (%d, %d)"
" in preprocessor directive '%s' (%d, %d)\n%s"
@ -457,7 +446,7 @@ s32 lex_preprocessor_directive( LexContext* ctx )
}
move_forward();
preprocess_content.Length++;
preprocess_content.Text.Len++;
}
array_append( Lexer_Tokens, preprocess_content );
@ -520,9 +509,9 @@ void lex_found_token( LexContext* ctx )
u64 key = 0;
if ( (* ctx->scanner) == '(')
key = crc32( ctx->token.Text, ctx->token.Length + 1 );
key = crc32( ctx->token.Text.Ptr, ctx->token.Text.Len + 1 );
else
key = crc32( ctx->token.Text, ctx->token.Length );
key = crc32( ctx->token.Text.Ptr, ctx->token.Text.Len );
Str* define = hashtable_get(ctx->defines, key );
if ( define )
@ -533,7 +522,7 @@ void lex_found_token( LexContext* ctx )
if ( ctx->left && (* ctx->scanner) == '(' )
{
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
s32 level = 0;
while ( ctx->left && ((* ctx->scanner) != ')' || level > 0) )
@ -545,22 +534,22 @@ void lex_found_token( LexContext* ctx )
level--;
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
move_forward();
ctx->token.Length++;
ctx->token.Text.Len++;
}
//if ( (* ctx->scanner) == '\r' && ctx->scanner[1] == '\n' )
//{
// move_forward();
// ctx->token.Length++;
// ctx->token..Text.Length++;
//}
//else if ( (* ctx->scanner) == '\n' )
//{
// move_forward();
// ctx->token.Length++;
// ctx->token..Text.Length++;
//}
}
else
@ -625,7 +614,7 @@ TokArray lex( Str content )
#endif
{
Token thanks_c = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
c.token = thanks_c;
}
@ -636,7 +625,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '\r')
{
move_forward();
c.token.Length = 1;
c.token.Text.Len = 1;
}
if ( (* ctx->scanner) == '\n' )
@ -644,14 +633,14 @@ TokArray lex( Str content )
move_forward();
c.token.Type = Tok_NewLine;
c.token.Length++;
c.token.Text.Len++;
array_append( Lexer_Tokens, c.token );
continue;
}
}
c.token.Length = 0;
c.token.Text.Len = 0;
skip_whitespace();
if ( c.left <= 0 )
@ -670,19 +659,19 @@ TokArray lex( Str content )
//if ( last_type == Tok_Preprocess_Pragma )
{
{
Token thanks_c = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
c.token = thanks_c;
}
if ( (* ctx->scanner) == '\r')
{
move_forward();
c.token.Length = 1;
c.token.Text.Len = 1;
}
if ( (* ctx->scanner) == '\n' )
{
c.token.Type = Tok_NewLine;
c.token.Length++;
c.token.Text.Len++;
move_forward();
array_append( Lexer_Tokens, c.token );
@ -700,8 +689,8 @@ TokArray lex( Str content )
}
case '.':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Access_MemberSymbol;
c.token.Flags = TF_AccessOperator;
@ -714,7 +703,7 @@ TokArray lex( Str content )
move_forward();
if( (* ctx->scanner) == '.' )
{
c.token.Length = 3;
c.token.Text.Len = 3;
c.token.Type = Tok_Varadic_Argument;
c.token.Flags = TF_Null;
move_forward();
@ -731,8 +720,8 @@ TokArray lex( Str content )
}
case '&' :
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Ampersand;
c.token.Flags |= TF_Operator;
c.token.Flags |= TF_Specifier;
@ -742,7 +731,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '&' ) // &&
{
c.token.Length = 2;
c.token.Text.Len = 2;
c.token.Type = Tok_Ampersand_DBL;
if (c.left)
@ -753,8 +742,8 @@ TokArray lex( Str content )
}
case ':':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Assign_Classifer;
// Can be either a classifier (ParentType, Bitfield width), or ternary else
// token.Type = Tok_Colon;
@ -766,14 +755,14 @@ TokArray lex( Str content )
{
move_forward();
c.token.Type = Tok_Access_StaticSymbol;
c.token.Length++;
c.token.Text.Len++;
}
goto FoundToken;
}
case '{':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_BraceCurly_Open;
if (c.left)
@ -782,8 +771,8 @@ TokArray lex( Str content )
}
case '}':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_BraceCurly_Close;
c.token.Flags = TF_EndDefinition;
@ -795,8 +784,8 @@ TokArray lex( Str content )
}
case '[':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_BraceSquare_Open;
if ( c.left )
{
@ -804,7 +793,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == ']' )
{
c.token.Length = 2;
c.token.Text.Len = 2;
c.token.Type = Tok_Operator;
move_forward();
}
@ -813,8 +802,8 @@ TokArray lex( Str content )
}
case ']':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_BraceSquare_Close;
if (c.left)
@ -823,8 +812,8 @@ TokArray lex( Str content )
}
case '(':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Capture_Start;
if (c.left)
@ -833,8 +822,8 @@ TokArray lex( Str content )
}
case ')':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Capture_End;
if (c.left)
@ -843,8 +832,8 @@ TokArray lex( Str content )
}
case '\'':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Char;
c.token.Flags = TF_Literal;
@ -853,32 +842,32 @@ TokArray lex( Str content )
if ( c.left && (* ctx->scanner) == '\\' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
if ( (* ctx->scanner) == '\'' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
}
while ( c.left && (* ctx->scanner) != '\'' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
if ( c.left )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
goto FoundToken;
}
case ',':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Comma;
c.token.Flags = TF_Operator;
@ -888,8 +877,8 @@ TokArray lex( Str content )
}
case '*':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Star;
c.token.Flags |= TF_Specifier;
c.token.Flags |= TF_Operator;
@ -899,7 +888,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '=' )
{
c.token.Length++;
c.token.Text.Len++;
c.token.Flags |= TF_Assign;
// c.token.Type = Tok_Assign_Multiply;
@ -911,8 +900,8 @@ TokArray lex( Str content )
}
case ';':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Statement_End;
c.token.Flags = TF_EndDefinition;
@ -924,8 +913,8 @@ TokArray lex( Str content )
}
case '"':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_String;
c.token.Flags |= TF_Literal;
@ -941,25 +930,25 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '\\' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
if ( c.left )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
continue;
}
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
goto FoundToken;
}
case '?':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Operator;
// c.token.Type = Tok_Ternary;
c.token.Flags = TF_Operator;
@ -971,8 +960,8 @@ TokArray lex( Str content )
}
case '=':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Operator;
// c.token.Type = Tok_Assign;
c.token.Flags = TF_Operator;
@ -983,7 +972,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '=' )
{
c.token.Length++;
c.token.Text.Len++;
c.token.Flags = TF_Operator;
if (c.left)
@ -1027,8 +1016,8 @@ TokArray lex( Str content )
}
case '|':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Operator;
c.token.Flags = TF_Operator;
// token.Type = Tok_L_Or;
@ -1038,7 +1027,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '=' )
{
c.token.Length++;
c.token.Text.Len++;
c.token.Flags |= TF_Assign;
// token.Flags |= TokFlags::Assignment;
// token.Type = Tok_Assign_L_Or;
@ -1046,9 +1035,9 @@ TokArray lex( Str content )
if (c.left)
move_forward();
}
else while ( c.left && (* ctx->scanner) == *(c.scanner - 1) && c.token.Length < 3 )
else while ( c.left && (* ctx->scanner) == *(c.scanner - 1) && c.token.Text.Len < 3 )
{
c.token.Length++;
c.token.Text.Len++;
if (c.left)
move_forward();
@ -1059,8 +1048,8 @@ TokArray lex( Str content )
// Dash is unfortunately a bit more complicated...
case '-':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Operator;
// token.Type = Tok_Subtract;
c.token.Flags = TF_Operator;
@ -1070,7 +1059,7 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '>' )
{
c.token.Length++;
c.token.Text.Len++;
// token.Type = Tok_Access_PointerToMemberSymbol;
c.token.Flags |= TF_AccessOperator;
move_forward();
@ -1078,22 +1067,22 @@ TokArray lex( Str content )
if ( (* ctx->scanner) == '*' )
{
// token.Type = Tok_Access_PointerToMemberOfPointerSymbol;
c.token.Length++;
c.token.Text.Len++;
move_forward();
}
}
else if ( (* ctx->scanner) == '=' )
{
c.token.Length++;
c.token.Text.Len++;
// token.Type = Tok_Assign_Subtract;
c.token.Flags |= TF_Assign;
if (c.left)
move_forward();
}
else while ( c.left && (* ctx->scanner) == *(c.scanner - 1) && c.token.Length < 3 )
else while ( c.left && (* ctx->scanner) == *(c.scanner - 1) && c.token.Text.Len < 3 )
{
c.token.Length++;
c.token.Text.Len++;
if (c.left)
move_forward();
@ -1103,8 +1092,8 @@ TokArray lex( Str content )
}
case '/':
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Operator;
// token.Type = Tok_Divide;
c.token.Flags = TF_Operator;
@ -1116,31 +1105,31 @@ TokArray lex( Str content )
{
// token.Type = TokType::Assign_Divide;
move_forward();
c.token.Length++;
c.token.Text.Len++;
c.token.Flags = TF_Assign;
}
else if ( (* ctx->scanner) == '/' )
{
c.token.Type = Tok_Comment;
c.token.Length = 2;
c.token.Text.Len = 2;
c.token.Flags = TF_Null;
move_forward();
while ( c.left && (* ctx->scanner) != '\n' && (* ctx->scanner) != '\r' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
if ( (* ctx->scanner) == '\r' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
if ( (* ctx->scanner) == '\n' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
array_append( Lexer_Tokens, c.token );
continue;
@ -1148,7 +1137,7 @@ TokArray lex( Str content )
else if ( (* ctx->scanner) == '*' )
{
c.token.Type = Tok_Comment;
c.token.Length = 2;
c.token.Text.Len = 2;
c.token.Flags = TF_Null;
move_forward();
@ -1158,25 +1147,25 @@ TokArray lex( Str content )
while ( c.left && ! at_end )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
star = (* ctx->scanner) == '*';
slash = c.scanner[1] == '/';
at_end = star && slash;
}
c.token.Length += 2;
c.token.Text.Len += 2;
move_forward();
move_forward();
if ( (* ctx->scanner) == '\r' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
if ( (* ctx->scanner) == '\n' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
array_append( Lexer_Tokens, c.token );
// end_line();
@ -1189,14 +1178,14 @@ TokArray lex( Str content )
if ( char_is_alpha( (* ctx->scanner) ) || (* ctx->scanner) == '_' )
{
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
move_forward();
while ( c.left && ( char_is_alphanumeric((* ctx->scanner)) || (* ctx->scanner) == '_' ) )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
goto FoundToken;
@ -1205,8 +1194,8 @@ TokArray lex( Str content )
{
// This is a very brute force lex, no checks are done for validity of literal.
c.token.Text = c.scanner;
c.token.Length = 1;
Str text = { c.scanner, 1 };
c.token.Text = text;
c.token.Type = Tok_Number;
c.token.Flags = TF_Literal;
move_forward();
@ -1218,12 +1207,12 @@ TokArray lex( Str content )
)
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
while ( c.left && char_is_hex_digit((* ctx->scanner)) )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
goto FoundToken;
@ -1232,18 +1221,18 @@ TokArray lex( Str content )
while ( c.left && char_is_digit((* ctx->scanner)) )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
if ( c.left && (* ctx->scanner) == '.' )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
while ( c.left && char_is_digit((* ctx->scanner)) )
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
// Handle number literal suffixes in a botched way
@ -1256,13 +1245,13 @@ TokArray lex( Str content )
{
char prev = (* ctx->scanner);
move_forward();
c.token.Length++;
c.token.Text.Len++;
// Handle 'll'/'LL' as a special case when we just processed an 'l'/'L'
if (c.left && (prev == 'l' || prev == 'L') && ((* ctx->scanner) == 'l' || (* ctx->scanner) == 'L'))
{
move_forward();
c.token.Length++;
c.token.Text.Len++;
}
}
}
@ -1278,7 +1267,7 @@ TokArray lex( Str content )
log_fmt( "Token %d Type: %s : %.*s\n"
, idx
, toktype_to_str( Lexer_Tokens[ idx ].Type ).Ptr
, Lexer_Tokens[ idx ].Length, Lexer_Tokens[ idx ].Text
, Lexer_Tokens[ idx ].Text.Len, Lexer_Tokens[ idx ].Text.Ptr
);
}
@ -1298,18 +1287,18 @@ TokArray lex( Str content )
TokType last_type = array_back(Lexer_Tokens)->Type;
if ( last_type == Tok_Preprocess_Macro )
{
Token thanks_c = { c.scanner, 0, Tok_Invalid, c.line, c.column, TF_Null };
Token thanks_c = { { c.scanner, 0 }, Tok_Invalid, c.line, c.column, TF_Null };
c.token = thanks_c;
if ( (* ctx->scanner) == '\r')
{
move_forward();
c.token.Length = 1;
c.token.Text.Len = 1;
}
if ( (* ctx->scanner) == '\n' )
{
c.token.Type = Tok_NewLine;
c.token.Length++;
c.token.Text.Len++;
move_forward();
array_append( Lexer_Tokens, c.token );

View File

@ -52,23 +52,23 @@ StrBuilder parser_to_strbuilder(ParseContext ctx)
Token scope_start = ctx.Scope->Start;
Token last_valid = ctx.Tokens.Idx >= array_num(ctx.Tokens.Arr) ? ctx.Tokens.Arr[array_num(ctx.Tokens.Arr) -1] : (* lex_current(& ctx.Tokens, true));
sptr length = scope_start.Length;
char const* current = scope_start.Text + length;
while ( current <= array_back( ctx.Tokens.Arr)->Text && *current != '\n' && length < 74 )
sptr length = scope_start.Text.Len;
char const* current = scope_start.Text.Ptr + length;
while ( current <= array_back( ctx.Tokens.Arr)->Text.Ptr && (* current) != '\n' && length < 74 )
{
current++;
length++;
}
Str scope_str = { scope_start.Text, length };
Str scope_str = { scope_start.Text.Ptr, length };
StrBuilder line = strbuilder_make_str( GlobalAllocator, scope_str );
strbuilder_append_fmt( & result, "\tScope : %s\n", line );
strbuilder_free(& line);
sptr dist = (sptr)last_valid.Text - (sptr)scope_start.Text + 2;
sptr dist = (sptr)last_valid.Text.Ptr - (sptr)scope_start.Text.Ptr + 2;
sptr length_from_err = dist;
Str err_str = { last_valid.Text, length_from_err };
Str err_str = { last_valid.Text.Ptr, length_from_err };
StrBuilder line_from_err = strbuilder_make_str( GlobalAllocator, err_str );
if ( length_from_err < 100 )
@ -82,7 +82,7 @@ StrBuilder parser_to_strbuilder(ParseContext ctx)
{
if ( tok_is_valid(curr_scope->Name) )
{
strbuilder_append_fmt(& result, "\t%d: %s, AST Name: %.*s\n", level, curr_scope->ProcName.Ptr, curr_scope->Name.Length, curr_scope->Name.Text );
strbuilder_append_fmt(& result, "\t%d: %s, AST Name: %.*s\n", level, curr_scope->ProcName.Ptr, curr_scope->Name.Text.Len, curr_scope->Name.Text.Ptr );
}
else
{
@ -119,7 +119,7 @@ bool lex__eat(TokArray* self, TokType type )
Token tok = * lex_current( self, lex_skip_formatting );
log_failure( "Parse Error, TokArray::eat, Expected: ' %s ' not ' %.*s ' (%d, %d)`\n%s"
, toktype_to_str(type).Ptr
, at_idx.Length, at_idx.Text
, at_idx.Text.Len, at_idx.Text.Ptr
, tok.Line
, tok.Column
, parser_to_strbuilder(Context)
@ -513,7 +513,7 @@ Code parse_array_decl()
{
push_scope();
if ( check( Tok_Operator ) && currtok.Text[0] == '[' && currtok.Text[1] == ']' )
if ( check( Tok_Operator ) && currtok.Text.Ptr[0] == '[' && currtok.Text.Ptr[1] == ']' )
{
Code array_expr = untyped_str( txt(" ") );
eat( Tok_Operator );
@ -549,7 +549,7 @@ Code parse_array_decl()
eat( currtok.Type );
}
untyped_tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)untyped_tok.Text;
untyped_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr;
Code array_expr = untyped_str( tok_to_str(untyped_tok) );
// [ <Content>
@ -614,7 +614,7 @@ CodeAttributes parse_attributes()
eat( Tok_Attribute_Close );
// [[ <Content> ]]
len = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )start.Text;
len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )start.Text.Ptr;
}
else if ( check( Tok_Decl_GNU_Attribute ) )
{
@ -633,7 +633,7 @@ CodeAttributes parse_attributes()
eat( Tok_Capture_End );
// __attribute__(( <Content> ))
len = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )start.Text;
len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )start.Text.Ptr;
}
else if ( check( Tok_Decl_MSVC_Attribute ) )
{
@ -650,7 +650,7 @@ CodeAttributes parse_attributes()
eat( Tok_Capture_End );
// __declspec( <Content> )
len = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )start.Text;
len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )start.Text.Ptr;
}
else if ( tok_is_attribute(currtok) )
{
@ -674,14 +674,14 @@ CodeAttributes parse_attributes()
eat(Tok_Capture_End);
}
len = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )start.Text;
len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )start.Text.Ptr;
// <Attribute> ( ... )
}
}
if ( len > 0 )
{
Str attribute_txt = { start.Text, len };
Str attribute_txt = { start.Text.Ptr, len };
parser_pop(& Context);
StrBuilder name_stripped = parser_strip_formatting( attribute_txt, parser_strip_formatting_dont_preserve_newlines );
@ -1081,7 +1081,7 @@ CodeBody parse_class_struct_body( TokType which, Token name )
attributes = more_attributes;
}
if ( currtok.Type == Tok_Operator && currtok.Text[0] == '~' )
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[0] == '~' )
{
member = cast(Code, parser_parse_destructor( specifiers ));
// <Attribute> <Specifiers> ~<Name>()
@ -1107,9 +1107,9 @@ CodeBody parse_class_struct_body( TokType which, Token name )
case Tok_Type_int:
case Tok_Type_double:
{
if ( nexttok.Type == Tok_Capture_Start && name.Length && currtok.Type == Tok_Identifier )
if ( nexttok.Type == Tok_Capture_Start && name.Text.Len && currtok.Type == Tok_Identifier )
{
if ( c_str_compare_len( name.Text, currtok.Text, name.Length ) == 0 )
if ( c_str_compare_len( name.Text.Ptr, currtok.Text.Ptr, name.Text.Len ) == 0 )
{
member = cast(Code, parser_parse_constructor( specifiers ));
// <Attributes> <Specifiers> <Name>()
@ -1151,7 +1151,7 @@ CodeBody parse_class_struct_body( TokType which, Token name )
while ( left && currtok.Type != Tok_BraceCurly_Close )
{
untyped_tok.Length = ( (sptr)currtok.Text + currtok.Length ) - (sptr)untyped_tok.Text;
untyped_tok.Text.Len = ( (sptr)currtok.Text.Ptr + currtok.Text.Len ) - (sptr)untyped_tok.Text.Ptr;
eat( currtok.Type );
}
@ -1379,7 +1379,7 @@ CodeDefine parse_define()
return InvalidCode;
}
if ( currtok.Length == 0 )
if ( currtok.Text.Len == 0 )
{
define->Content = get_cached_string( tok_to_str(currtok) );
eat( Tok_Preprocess_Content );
@ -1429,7 +1429,7 @@ Code parse_assignment_expression()
eat( currtok.Type );
}
expr_tok.Length = ( ( sptr )currtok.Text + currtok.Length ) - ( sptr )expr_tok.Text - 1;
expr_tok.Text.Len = ( ( sptr )currtok.Text.Ptr + currtok.Text.Len ) - ( sptr )expr_tok.Text.Ptr - 1;
expr = untyped_str( tok_to_str(expr_tok) );
// = <Expression>
return expr;
@ -1508,7 +1508,7 @@ CodeFn parse_function_after_name(
}
// <Attributes> <Specifiers> <ReturnType> <Name> ( <Paraemters> ) <Specifiers> { <Body> }
}
else if ( check(Tok_Operator) && currtok.Text[0] == '=' )
else if ( check(Tok_Operator) && currtok.Text.Ptr[0] == '=' )
{
eat(Tok_Operator);
specifiers_append(specifiers, Spec_Pure );
@ -1612,11 +1612,11 @@ Code parse_function_body()
Token past = prevtok;
s32 len = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)start.Text;
s32 len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)start.Text.Ptr;
if ( len > 0 )
{
Str str = { start.Text, len };
Str str = { start.Text.Ptr, len };
body_append( result, cast(Code, def_execution( str )) );
}
@ -2000,19 +2000,19 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )
Token nav = tokens.Arr[ idx ];
for ( ; idx < array_num(tokens.Arr); idx++, nav = tokens.Arr[ idx ] )
{
if ( nav.Text[0] == '<' )
if ( nav.Text.Ptr[0] == '<' )
{
// Skip templated expressions as they may have expressions with the () operators
s32 capture_level = 0;
s32 template_level = 0;
for ( ; idx < array_num(tokens.Arr); idx++, nav = tokens.Arr[idx] )
{
if (nav.Text[ 0 ] == '<')
if (nav.Text.Ptr[ 0 ] == '<')
++ template_level;
if (nav.Text[ 0 ] == '>')
if (nav.Text.Ptr[ 0 ] == '>')
-- template_level;
if (nav.Type == Tok_Operator && nav.Text[1] == '>')
if (nav.Type == Tok_Operator && nav.Text.Ptr[1] == '>')
-- template_level;
if ( nav.Type == Tok_Capture_Start)
@ -2047,7 +2047,7 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )
// <Attributes> <Specifiers> ... <Identifier>
bool possible_destructor = false;
if ( tok_left.Type == Tok_Operator && tok_left.Text[0] == '~')
if ( tok_left.Type == Tok_Operator && tok_left.Text.Ptr[0] == '~')
{
possible_destructor = true;
-- idx;
@ -2066,12 +2066,12 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )
s32 template_level = 0;
while ( idx != tokens.Idx )
{
if (tok_left.Text[ 0 ] == '<')
if (tok_left.Text.Ptr[ 0 ] == '<')
++ template_level;
if (tok_left.Text[ 0 ] == '>')
if (tok_left.Text.Ptr[ 0 ] == '>')
-- template_level;
if (tok_left.Type == Tok_Operator && tok_left.Text[1] == '>')
if (tok_left.Type == Tok_Operator && tok_left.Text.Ptr[1] == '>')
-- template_level;
if ( template_level != 0 && tok_left.Type == Tok_Capture_Start)
@ -2087,7 +2087,7 @@ Code parse_global_nspace_constructor_destructor( CodeSpecifiers specifiers )
tok_left = tokens.Arr[idx];
}
bool is_same = c_str_compare_len( tok_right.Text, tok_left.Text, tok_right.Length ) == 0;
bool is_same = c_str_compare_len( tok_right.Text.Ptr, tok_left.Text.Ptr, tok_right.Text.Len ) == 0;
if (tok_left.Type == Tok_Identifier && is_same)
{
// We have found the pattern we desired
@ -2134,12 +2134,12 @@ Token parse_identifier( bool* possible_member_function )
return invalid;
}
if ( currtok.Type == Tok_Operator && currtok.Text[0] == '~' )
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[0] == '~' )
{
bool is_destructor = str_are_equal( Context.Scope->Prev->ProcName, txt("parser_parse_destructor"));
if (is_destructor)
{
name.Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )name.Text;
name.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )name.Text.Ptr;
parser_pop(& Context);
return name;
}
@ -2149,7 +2149,7 @@ Token parse_identifier( bool* possible_member_function )
return invalid;
}
if ( currtok.Type == Tok_Operator && currtok.Text[0] == '*' && currtok.Length == 1 )
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[0] == '*' && currtok.Text.Len == 1 )
{
if ( possible_member_function )
*possible_member_function = true;
@ -2169,7 +2169,7 @@ Token parse_identifier( bool* possible_member_function )
return invalid;
}
name.Length = ( (sptr)currtok.Text + currtok.Length ) - (sptr)name.Text;
name.Text.Len = ( (sptr)currtok.Text.Ptr + currtok.Text.Len ) - (sptr)name.Text.Ptr;
eat( Tok_Identifier );
// <Qualifier Name> <Template Args> :: <Name>
@ -2231,7 +2231,7 @@ CodeOperator parse_operator_after_ret_type(
eat( Tok_Access_StaticSymbol );
}
nspace.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)nspace.Text;
nspace.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)nspace.Text.Ptr;
}
// <ExportFlag> <Attributes> <Specifiers> <ReturnType> <Qualifier::...>
@ -2253,14 +2253,14 @@ CodeOperator parse_operator_after_ret_type(
bool was_new_or_delete = false;
Operator op = Op_Invalid;
switch ( currtok.Text[0] )
switch ( currtok.Text.Ptr[0] )
{
case '+':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_Add;
else if ( currtok.Text[1] == '+' )
else if ( currtok.Text.Ptr[1] == '+' )
op = Op_Increment;
else
@ -2269,9 +2269,9 @@ CodeOperator parse_operator_after_ret_type(
break;
case '-':
{
if ( currtok.Text[1] == '>' )
if ( currtok.Text.Ptr[1] == '>' )
{
if ( currtok.Text[2] == '*' )
if ( currtok.Text.Ptr[2] == '*' )
op = Op_MemberOfPointer;
else
@ -2280,7 +2280,7 @@ CodeOperator parse_operator_after_ret_type(
break;
}
else if ( currtok.Text[1] == '=' )
else if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_Subtract;
else
@ -2289,7 +2289,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '*':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_Multiply;
else
@ -2311,7 +2311,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '/':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_Divide;
else
@ -2320,7 +2320,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '%':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_Modulo;
else
@ -2329,10 +2329,10 @@ CodeOperator parse_operator_after_ret_type(
break;
case '&':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_BAnd;
else if ( currtok.Text[1] == '&' )
else if ( currtok.Text.Ptr[1] == '&' )
op = Op_LAnd;
else
@ -2346,10 +2346,10 @@ CodeOperator parse_operator_after_ret_type(
break;
case '|':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_BOr;
else if ( currtok.Text[1] == '|' )
else if ( currtok.Text.Ptr[1] == '|' )
op = Op_LOr;
else
@ -2358,7 +2358,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '^':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_Assign_BXOr;
else
@ -2372,7 +2372,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '!':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_LNot;
else
@ -2381,7 +2381,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '=':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_LEqual;
else
@ -2390,12 +2390,12 @@ CodeOperator parse_operator_after_ret_type(
break;
case '<':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_LesserEqual;
else if ( currtok.Text[1] == '<' )
else if ( currtok.Text.Ptr[1] == '<' )
{
if ( currtok.Text[2] == '=' )
if ( currtok.Text.Ptr[2] == '=' )
op = Op_Assign_LShift;
else
@ -2407,12 +2407,12 @@ CodeOperator parse_operator_after_ret_type(
break;
case '>':
{
if ( currtok.Text[1] == '=' )
if ( currtok.Text.Ptr[1] == '=' )
op = Op_GreaterEqual;
else if ( currtok.Text[1] == '>' )
else if ( currtok.Text.Ptr[1] == '>' )
{
if ( currtok.Text[2] == '=' )
if ( currtok.Text.Ptr[2] == '=' )
op = Op_Assign_RShift;
else
@ -2424,7 +2424,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '(':
{
if ( currtok.Text[1] == ')' )
if ( currtok.Text.Ptr[1] == ')' )
op = Op_FunctionCall;
else
@ -2433,7 +2433,7 @@ CodeOperator parse_operator_after_ret_type(
break;
case '[':
{
if ( currtok.Text[1] == ']' )
if ( currtok.Text.Ptr[1] == ']' )
op = Op_Subscript;
else
@ -2444,7 +2444,7 @@ CodeOperator parse_operator_after_ret_type(
{
Str c_str_new = operator_to_str(Op_New);
Str c_str_delete = operator_to_str(Op_Delete);
if ( c_str_compare_len( currtok.Text, c_str_new.Ptr, max(c_str_new.Len - 1, currtok.Length)) == 0)
if ( c_str_compare_len( currtok.Text.Ptr, c_str_new.Ptr, max(c_str_new.Len - 1, currtok.Text.Len)) == 0)
{
op = Op_New;
eat( Tok_Identifier );
@ -2456,7 +2456,7 @@ CodeOperator parse_operator_after_ret_type(
idx++;
}
Token next = Context.Tokens.Arr[idx];
if ( currtok.Type == Tok_Operator && c_str_compare_len(currtok.Text, "[]", 2) == 0)
if ( currtok.Type == Tok_Operator && c_str_compare_len(currtok.Text.Ptr, "[]", 2) == 0)
{
eat(Tok_Operator);
op = Op_NewArray;
@ -2468,7 +2468,7 @@ CodeOperator parse_operator_after_ret_type(
op = Op_NewArray;
}
}
else if ( c_str_compare_len( currtok.Text, c_str_delete.Ptr, max(c_str_delete.Len - 1, currtok.Length )) == 0)
else if ( c_str_compare_len( currtok.Text.Ptr, c_str_delete.Ptr, max(c_str_delete.Len - 1, currtok.Text.Len )) == 0)
{
op = Op_Delete;
eat(Tok_Identifier);
@ -2480,7 +2480,7 @@ CodeOperator parse_operator_after_ret_type(
idx++;
}
Token next = Context.Tokens.Arr[idx];
if ( currtok.Type == Tok_Operator && c_str_compare_len(currtok.Text, "[]", 2) == 0)
if ( currtok.Type == Tok_Operator && c_str_compare_len(currtok.Text.Ptr, "[]", 2) == 0)
{
eat(Tok_Operator);
op = Op_DeleteArray;
@ -2704,7 +2704,7 @@ CodeParams parse_params( bool use_template_capture )
else
{
if ( check( Tok_Operator ) && currtok.Text[ 0 ] == '<' )
if ( check( Tok_Operator ) && currtok.Text.Ptr[ 0 ] == '<' )
eat( Tok_Operator );
// <
}
@ -2716,7 +2716,7 @@ CodeParams parse_params( bool use_template_capture )
parser_pop(& Context);
return NullCode;
}
else if ( check( Tok_Operator ) && currtok.Text[ 0 ] == '>' )
else if ( check( Tok_Operator ) && currtok.Text.Ptr[ 0 ] == '>' )
{
eat( Tok_Operator );
// >
@ -2742,7 +2742,7 @@ CodeParams parse_params( bool use_template_capture )
}
#define CheckEndParams() \
(use_template_capture ? (currtok.Text[ 0 ] != '>') : (currtok.Type != Tok_Capture_End))
(use_template_capture ? (currtok.Text.Ptr[ 0 ] != '>') : (currtok.Type != Tok_Capture_End))
// Ex: Unreal has this type of macro: vvvvvvvvv
// COREUOBJECT_API void CallFunction( FFrame& Stack, RESULT_DECL, UFunction* Function );
@ -2782,7 +2782,7 @@ CodeParams parse_params( bool use_template_capture )
// In template captures you can have a typename have direct assignment without a name
// typename = typename ...
// Which would result in a static value type from a struct expansion (traditionally)
if ( ( name.Text || use_template_capture ) && bitfield_is_equal( u32, currtok.Flags, TF_Assign ) )
if ( ( name.Text.Ptr || use_template_capture ) && bitfield_is_equal( u32, currtok.Flags, TF_Assign ) )
{
eat( Tok_Operator );
// ( <Macro> <ValueType> <Name> =
@ -2800,12 +2800,12 @@ CodeParams parse_params( bool use_template_capture )
s32 template_level = 0;
while ( (left && ( currtok.Type != Tok_Comma ) && template_level >= 0 && CheckEndParams()) || (capture_level > 0 || template_level > 0) )
{
if (currtok.Text[ 0 ] == '<')
if (currtok.Text.Ptr[ 0 ] == '<')
++ template_level;
if (currtok.Text[ 0 ] == '>')
if (currtok.Text.Ptr[ 0 ] == '>')
-- template_level;
if (currtok.Type == Tok_Operator && currtok.Text[1] == '>')
if (currtok.Type == Tok_Operator && currtok.Text.Ptr[1] == '>')
-- template_level;
if ( currtok.Type == Tok_Capture_Start)
@ -2814,7 +2814,7 @@ CodeParams parse_params( bool use_template_capture )
if ( currtok.Type == Tok_Capture_End)
-- capture_level;
value_tok.Length = ( ( sptr )currtok.Text + currtok.Length ) - ( sptr )value_tok.Text;
value_tok.Text.Len = ( ( sptr )currtok.Text.Ptr + currtok.Text.Len ) - ( sptr )value_tok.Text.Ptr;
eat( currtok.Type );
}
@ -2828,7 +2828,7 @@ CodeParams parse_params( bool use_template_capture )
result->Macro = macro;
if ( name.Length > 0 )
if ( name.Text.Len > 0 )
result->Name = get_cached_string( tok_to_str(name) );
result->ValueType = type;
@ -2895,7 +2895,7 @@ CodeParams parse_params( bool use_template_capture )
// In template captures you can have a typename have direct assignment without a name
// typename = typename ...
// Which would result in a static value type from a struct expansion (traditionally)
if ( ( name.Text || use_template_capture ) && bitfield_is_equal( u32, currtok.Flags, TF_Assign ) )
if ( ( name.Text.Ptr || use_template_capture ) && bitfield_is_equal( u32, currtok.Flags, TF_Assign ) )
{
eat( Tok_Operator );
// ( <Macro> <ValueType> <Name> = <Expression>, <Macro> <ValueType> <Name> =
@ -2916,12 +2916,12 @@ CodeParams parse_params( bool use_template_capture )
&& template_level >= 0
&& CheckEndParams()) || (capture_level > 0 || template_level > 0) )
{
if (currtok.Text[ 0 ] == '<')
if (currtok.Text.Ptr[ 0 ] == '<')
++ template_level;
if (currtok.Text[ 0 ] == '>')
if (currtok.Text.Ptr[ 0 ] == '>')
-- template_level;
if (currtok.Type == Tok_Operator && currtok.Text[1] == '>')
if (currtok.Type == Tok_Operator && currtok.Text.Ptr[1] == '>')
-- template_level;
if ( currtok.Type == Tok_Capture_Start)
@ -2930,7 +2930,7 @@ CodeParams parse_params( bool use_template_capture )
if ( currtok.Type == Tok_Capture_End)
-- capture_level;
value_tok.Length = ( ( sptr )currtok.Text + currtok.Length ) - ( sptr )value_tok.Text;
value_tok.Text.Len = ( ( sptr )currtok.Text.Ptr + currtok.Text.Len ) - ( sptr )value_tok.Text.Ptr;
eat( currtok.Type );
}
@ -2945,7 +2945,7 @@ CodeParams parse_params( bool use_template_capture )
param->Macro = macro;
if ( name.Length > 0 )
if ( name.Text.Len > 0 )
param->Name = get_cached_string( tok_to_str(name) );
param->PostNameMacro = post_name_macro;
@ -2963,7 +2963,7 @@ CodeParams parse_params( bool use_template_capture )
else
{
if ( ! check( Tok_Operator ) || currtok.Text[ 0 ] != '>' )
if ( ! check( Tok_Operator ) || currtok.Text.Ptr[ 0 ] != '>' )
{
log_failure( "Expected '<' after 'template' keyword\n%s", parser_to_strbuilder(Context) );
parser_pop(& Context);
@ -3060,7 +3060,7 @@ Code parse_simple_preprocess( TokType which, bool dont_consume_braces )
}
}
tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)tok.Text;
tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)tok.Text.Ptr;
}
else
{
@ -3082,7 +3082,7 @@ Code parse_simple_preprocess( TokType which, bool dont_consume_braces )
eat( Tok_Statement_End );
// <Macro>;
tok.Length += prevtok.Length;
tok.Text.Len += prevtok.Text.Len;
// TODO(Ed): Reveiw the context for this? (ESPECIALLY THIS)
if ( currtok_noskip.Type == Tok_Comment && currtok_noskip.Line == stmt_end.Line )
@ -3090,7 +3090,7 @@ Code parse_simple_preprocess( TokType which, bool dont_consume_braces )
eat( Tok_Comment );
// <Macro>; <InlineCmt>
tok.Length += prevtok.Length;
tok.Text.Len += prevtok.Text.Len;
}
}
}
@ -3104,7 +3104,7 @@ Code parse_simple_preprocess( TokType which, bool dont_consume_braces )
Token stmt_end = currtok;
eat( Tok_Statement_End );
// <Macro>;
tok.Length += prevtok.Length;
tok.Text.Len += prevtok.Text.Len;
}
}
@ -3113,7 +3113,7 @@ Code parse_simple_preprocess( TokType which, bool dont_consume_braces )
Leave_Scope_Early:
char const* content = c_str_fmt_buf( "%.*s ", tok.Length, tok.Text );
char const* content = c_str_fmt_buf( "%.*s ", tok.Text.Len, tok.Text.Ptr );
Code result = untyped_str( to_str_from_c_str(content) );
Context.Scope->Name = tok;
@ -3154,10 +3154,10 @@ Code parse_static_assert()
eat( Tok_Statement_End );
// static_assert( <Content> );
content.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)content.Text;
content.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)content.Text.Ptr;
char const* str = c_str_fmt_buf( "%.*s\n", content.Length, content.Text );
Str content_str = { str, content.Length + 1 };
char const* str = c_str_fmt_buf( "%.*s\n", content.Text.Len, content.Text.Ptr );
Str content_str = { str, content.Text.Len + 1 };
assert->Content = get_cached_string( content_str );
assert->Name = assert->Content;
@ -3174,20 +3174,20 @@ Code parse_static_assert()
internal inline
void parse_template_args( Token* token )
{
if ( currtok.Type == Tok_Operator && currtok.Text[ 0 ] == '<' && currtok.Length == 1 )
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[ 0 ] == '<' && currtok.Text.Len == 1 )
{
eat( Tok_Operator );
// <
s32 level = 0;
while ( left && level >= 0 && ( currtok.Text[ 0 ] != '>' || level > 0 ) )
while ( left && level >= 0 && ( currtok.Text.Ptr[ 0 ] != '>' || level > 0 ) )
{
if ( currtok.Text[ 0 ] == '<' )
if ( currtok.Text.Ptr[ 0 ] == '<' )
level++;
if ( currtok.Text[ 0 ] == '>' )
if ( currtok.Text.Ptr[ 0 ] == '>' )
level--;
if ( currtok.Type == Tok_Operator && currtok.Text[1] == '>')
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[1] == '>')
level--;
eat( currtok.Type );
@ -3200,7 +3200,7 @@ void parse_template_args( Token* token )
// < <Content> >
// Extend length of name to last token
token->Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )token->Text;
token->Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )token->Text.Ptr;
}
}
@ -3248,7 +3248,7 @@ CodeVar parse_variable_after_name(
}
eat( Tok_BraceCurly_Close );
expr_tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)expr_tok.Text;
expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr;
expr = untyped_str( tok_to_str(expr_tok) );
// <Attributes> <Specifiers> <ValueType> <Name> = { <Expression> }
}
@ -3274,7 +3274,7 @@ CodeVar parse_variable_after_name(
eat( currtok.Type );
}
expr_token.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)expr_token.Text;
expr_token.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_token.Text.Ptr;
expr = untyped_str( tok_to_str(expr_token) );
eat( Tok_Capture_End );
// <Attributes> <Specifiers> <ValueType> <Name> ( <Expression> )
@ -3299,7 +3299,7 @@ CodeVar parse_variable_after_name(
eat( currtok.Type );
}
expr_tok.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)expr_tok.Text;
expr_tok.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)expr_tok.Text.Ptr;
bitfield_expr = untyped_str( tok_to_str(expr_tok) );
// <Attributes> <Specifiers> <ValueType> <Name> : <Expression>
}
@ -3503,7 +3503,7 @@ CodeConstructor parser_parse_constructor( CodeSpecifiers specifiers )
eat( currtok.Type );
}
initializer_list_tok.Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )initializer_list_tok.Text;
initializer_list_tok.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )initializer_list_tok.Text.Ptr;
// <Name> ( <Parameters> ) : <InitializerList>
initializer_list = untyped_str( tok_to_str(initializer_list_tok) );
@ -3518,7 +3518,7 @@ CodeConstructor parser_parse_constructor( CodeSpecifiers specifiers )
body = cast(CodeBody, parse_function_body());
// <Name> ( <Parameters> ) { <Body> }
}
else if ( check( Tok_Operator) && currtok.Text[ 0 ] == '=' )
else if ( check( Tok_Operator) && currtok.Text.Ptr[ 0 ] == '=' )
{
body = cast(CodeBody, parse_assignment_expression());
}
@ -3582,7 +3582,7 @@ CodeDestructor parser_parse_destructor( CodeSpecifiers specifiers )
if (is_in_global_nspace)
prefix_identifier = parse_identifier(nullptr);
if ( left && currtok.Text[ 0 ] == '~' )
if ( left && currtok.Text.Ptr[ 0 ] == '~' )
eat( Tok_Operator );
else
{
@ -3603,13 +3603,13 @@ CodeDestructor parser_parse_destructor( CodeSpecifiers specifiers )
bool pure_virtual = false;
if ( check( Tok_Operator ) && currtok.Text[ 0 ] == '=' )
if ( check( Tok_Operator ) && currtok.Text.Ptr[ 0 ] == '=' )
{
// <Virtual Specifier> ~<Name>() =
bool skip_formatting = true;
Token upcoming = nexttok;
if ( left && upcoming.Text[ 0 ] == '0' )
if ( left && upcoming.Text.Ptr[ 0 ] == '0' )
{
eat( Tok_Operator );
eat( Tok_Number );
@ -3617,7 +3617,7 @@ CodeDestructor parser_parse_destructor( CodeSpecifiers specifiers )
specifiers_append(specifiers, Spec_Pure );
}
else if ( left && c_str_compare_len( upcoming.Text, "default", sizeof("default") - 1 ) == 0)
else if ( left && c_str_compare_len( upcoming.Text.Ptr, "default", sizeof("default") - 1 ) == 0)
{
body = cast(CodeBody, parse_assignment_expression());
// <Virtual Specifier> ~<
@ -3650,7 +3650,7 @@ CodeDestructor parser_parse_destructor( CodeSpecifiers specifiers )
if ( tok_is_valid(prefix_identifier) )
{
prefix_identifier.Length += 1 + identifier.Length;
prefix_identifier.Text.Len += 1 + identifier.Text.Len;
result->Name = get_cached_string( tok_to_str(prefix_identifier) );
}
@ -3818,7 +3818,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
eat( Tok_Identifier);
// <Name>
if ( currtok.Type == Tok_Operator && currtok.Text[0] == '=' )
if ( currtok.Type == Tok_Operator && currtok.Text.Ptr[0] == '=' )
{
eat( Tok_Operator );
// <Name> =
@ -3854,7 +3854,7 @@ CodeEnum parser_parse_enum( bool inplace_def )
// }
Token prev = * lex_previous(Context.Tokens, lex_dont_skip_formatting);
entry.Length = ( (sptr)prev.Text + prev.Length ) - (sptr)entry.Text;
entry.Text.Len = ( (sptr)prev.Text.Ptr + prev.Text.Len ) - (sptr)entry.Text.Ptr;
member = untyped_str( tok_to_str(entry) );
break;
@ -3946,8 +3946,8 @@ CodeExtern parser_parse_extern_link()
eat( Tok_String );
// extern "<Name>"
name.Text += 1;
name.Length -= 1;
name.Text.Ptr += 1;
name.Text.Len -= 1;
CodeExtern
result = (CodeExtern) make_code();
@ -4289,7 +4289,7 @@ CodeOpCast parser_parse_operator_cast( CodeSpecifiers specifiers )
}
// <Specifiers> <Qualifier> :: ...
name.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)name.Text;
name.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)name.Text.Ptr;
}
eat( Tok_Decl_Operator );
@ -4339,7 +4339,7 @@ CodeOpCast parser_parse_operator_cast( CodeSpecifiers specifiers )
eat( currtok.Type );
}
body_str.Length = ( (sptr)prevtok.Text + prevtok.Length ) - (sptr)body_str.Text;
body_str.Text.Len = ( (sptr)prevtok.Text.Ptr + prevtok.Text.Len ) - (sptr)body_str.Text.Ptr;
eat( Tok_BraceCurly_Close );
// <Specifiers> <Qualifier> :: ... operator <UnderlyingType>() <const> { <Body> }
@ -4704,7 +4704,7 @@ else if ( currtok.Type == Tok_DeclType )
eat( currtok.Type );
}
name.Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )name.Text;
name.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )name.Text.Ptr;
// <Attributes> <Specifiers> <Compound type expression>
}
else if ( currtok.Type == Tok_Type_Typename )
@ -4937,7 +4937,7 @@ else if ( currtok.Type == Tok_DeclType )
eat( Tok_Capture_End );
// <Attributes> <ReturnType> ( <Expression> )
name.Length = ( ( sptr )prevtok.Text + prevtok.Length ) - ( sptr )name.Text;
name.Text.Len = ( ( sptr )prevtok.Text.Ptr + prevtok.Text.Len ) - ( sptr )name.Text.Ptr;
#endif
}
@ -5082,8 +5082,7 @@ CodeTypedef parser_parse_typedef()
if ( currtok.Type == Tok_Identifier )
{
Str name_str = { name.Text, name.Length };
type = untyped_str(name_str);
type = untyped_str(name.Text);
name = currtok;
eat(Tok_Identifier);
}

View File

@ -3,7 +3,7 @@
# include "debug.cpp"
#endif
#pragma region StrBuilder Ops
#pragma region String Ops
internal
ssize _scan_zpl_i64( const char* text, s32 base, s64* value )
@ -211,4 +211,4 @@ f64 c_str_to_f64( const char* str, char** end_ptr )
return result;
}
#pragma endregion StrBuilder Ops
#pragma endregion String Ops

View File

@ -3,7 +3,7 @@
# include "memory.hpp"
#endif
#pragma region StrBuilder Ops
#pragma region String Ops
const char* char_first_occurence( const char* str, char c );
@ -284,4 +284,4 @@ void c_str_to_upper( char* str )
}
}
#pragma endregion StrBuilder Ops
#pragma endregion String Ops

View File

@ -1481,6 +1481,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
header.print_fmt( roll_own_dependencies_guard_start );
header.print( r_header_platform );
header.print_fmt( "\nGEN_NS_BEGIN\n" );
header.print_fmt( "GEN_API_C_BEGIN\n" );
header.print( r_header_macros );
header.print( header_generic_macros );
@ -1497,7 +1498,8 @@ R"(#define <interface_name>( code ) _Generic( (code), \
header.print( r_header_timing );
header.print(rf_header_parsing );
header.print_fmt( "\nGEN_NS_END\n" );
header.print_fmt( "\nGEN_API_C_END\n" );
header.print_fmt( "GEN_NS_END\n" );
header.print_fmt( roll_own_dependencies_guard_end );
#pragma endregion Print Dependencies
@ -1548,7 +1550,7 @@ R"(#define <interface_name>( code ) _Generic( (code), \
#pragma region Print Dependencies
header.print_fmt( roll_own_dependencies_guard_start );
header.print_fmt( "GEN_NS_BEGIN\n");
header.print_fmt( "\nGEN_NS_BEGIN\n");
header.print_fmt( "GEN_API_C_BEGIN\n" );
header.print( r_src_dep_start );
@ -1562,12 +1564,14 @@ R"(#define <interface_name>( code ) _Generic( (code), \
header.print( r_src_timing );
header.print( rf_src_parsing );
header.print_fmt( "\nGEN_API_C_END\n" );
header.print_fmt( "GEN_NS_END\n");
header.print_fmt( roll_own_dependencies_guard_end );
#pragma endregion Print Dependencies
#pragma region Print Components
header.print_fmt( "\nGEN_NS_BEGIN\n");
header.print_fmt( "GEN_API_C_BEGIN\n" );
header.print( fmt_newline);
header.print( rf_array_arena );
@ -1596,12 +1600,13 @@ R"(#define <interface_name>( code ) _Generic( (code), \
header.print( r_src_parsing );
header.print_fmt( "\n#pragma endregion Parsing\n" );
header.print( r_src_untyped );
header.print_fmt( "\n#pragma endregion Interface\n\n");
header.print_fmt( "\n#pragma endregion Interface\n");
header.print( rf_src_builder );
header.print( rf_src_scanner );
header.print_fmt( "GEN_API_C_END\n" );
header.print_fmt( "\nGEN_API_C_END\n" );
header.print_fmt( "GEN_NS_END\n");
#pragma endregion Print Components
header.print_fmt( implementation_guard_end );

View File

@ -173,7 +173,7 @@ word PrintF_Buffer, gen_PrintF_Buffer
word Msg_Invalid_Value, gen_Msg_Invalid_Value
word log_fmt, gen_log_fmt
// StrBuilder Ops
// String Ops
namespace char_, gen_char_
@ -468,6 +468,10 @@ word Allocator_TypeTable, gen_Allocator_TypeTable
word Builder, gen_Builder
namespace builder_, gen_builder_
// Scanner
word scan_file, gen_scan_file
// Implementation (prviate)
word _format_info, gen__format_info

View File

@ -171,7 +171,7 @@ int gen_main()
builder_print_fmt( header, "#pragma endregion Inlines\n" );
builder_print( header, header_end );
builder_print_fmt( header, "GEN_NS_END\n\n" );
builder_print_fmt( header, "\nGEN_NS_END\n\n" );
builder_print( header, pop_ignores );
builder_write(header);
}
@ -238,7 +238,7 @@ int gen_main()
builder_print( & header, def_include( txt("gen.hpp") ));
builder_print_fmt( & header, "\nGEN_NS_BEGIN\n" );
builder_print( & header, builder );
builder_print_fmt( & header, "GEN_NS_END\n" );
builder_print_fmt( & header, "\nGEN_NS_END\n" );
builder_write( & header);
}
@ -278,7 +278,7 @@ int gen_main()
builder_print( & src, def_include( txt("gen.scanner.hpp") ) );
builder_print_fmt( & src, "\nGEN_NS_BEGIN\n" );
builder_print( & src, scanner );
builder_print_fmt( & src, "GEN_NS_END\n" );
builder_print_fmt( & src, "\nGEN_NS_END\n" );
builder_write( & src);
}

View File

@ -105,6 +105,7 @@ int gen_main()
header.print( scan_file( path_base "dependencies/parsing.hpp" ) );
}
header.print(fmt_newline);
header.print_fmt( "GEN_NS_END\n" );
header.print_fmt( roll_own_dependencies_guard_end );
header.print( fmt_newline );
@ -155,7 +156,11 @@ int gen_main()
if ( generate_builder ) {
header.print( scan_file( path_base "auxillary/builder.hpp" ) );
}
if ( generate_scanner ) {
header.print( scan_file( path_base "auxillary/scanner.hpp" ) );
}
header.print(fmt_newline);
header.print_fmt( "GEN_NS_END\n" );
}
@ -177,7 +182,8 @@ int gen_main()
header.print_fmt( roll_own_dependencies_guard_start );
header.print( impl_start );
header.print_fmt( "GEN_NS_BEGIN\n\n");
header.print( fmt_newline );
header.print_fmt( "GEN_NS_BEGIN\n");
header.print( debug );
header.print( string_ops );
@ -230,21 +236,17 @@ int gen_main()
header.print( parsing_interface );
header.print_fmt( "\n#pragma endregion Parsing\n" );
header.print( untyped );
header.print_fmt( "\n#pragma endregion Interface\n\n");
header.print_fmt( "\n#pragma endregion Interface\n");
if ( generate_builder ) {
header.print( scan_file( path_base "auxillary/builder.cpp" ) );
}
// Scanner header depends on implementation
if ( generate_scanner ) {
header.print( scan_file( path_base "auxillary/scanner.hpp" ) );
}
if ( generate_scanner ) {
header.print( scan_file( path_base "auxillary/scanner.cpp" ) );
}
header.print( fmt_newline);
header.print_fmt( "GEN_NS_END\n");
header.print_fmt( "%s\n", (char const*) implementation_guard_end );

View File

@ -223,7 +223,7 @@ int gen_main()
header.print_fmt( "#pragma endregion Inlines\n" );
header.print( header_end );
header.print_fmt( "GEN_NS_END\n\n" );
header.print_fmt( "\nGEN_NS_END\n\n" );
header.print( pop_ignores );
header.write();
}
@ -294,7 +294,7 @@ int gen_main()
header.print( def_include( txt("gen.hpp") ));
header.print_fmt( "\nGEN_NS_BEGIN\n" );
header.print( builder );
header.print_fmt( "GEN_NS_END\n" );
header.print_fmt( "\nGEN_NS_END\n" );
header.print( fmt_newline );
header.print( pop_ignores );
header.write();
@ -333,7 +333,7 @@ int gen_main()
header.print_fmt( "\nGEN_NS_BEGIN\n" );
header.print( parsing );
header.print( scanner );
header.print_fmt( "GEN_NS_END\n" );
header.print_fmt( "\nGEN_NS_END\n" );
header.print( fmt_newline );
header.print( pop_ignores );
header.write();
@ -353,7 +353,7 @@ int gen_main()
src.print_fmt( "\nGEN_NS_BEGIN\n" );
src.print( parsing );
// src.print( scanner );
src.print_fmt( "GEN_NS_END\n" );
src.print_fmt( "\nGEN_NS_END\n" );
src.print( fmt_newline );
src.print( pop_ignores );
src.write();