Mirror of https://github.com/Ed94/LangStudies.git (synced 2025-01-21 20:13:45 -08:00)

EOI: Lecture 7 complete

This commit is contained in:
parent  de420a8111
commit  e120749a7e
@@ -1,5 +1,5 @@
[*.gd]
indent_style = space
indent_style = tab
indent_size = 4

[*.md]
@@ -42,23 +42,33 @@ margin_bottom = -2.0

[node name="Eva_Interpret_Btn" type="Button" parent="VBox"]
margin_right = 203.0
margin_bottom = 32.0
margin_bottom = 30.0
rect_pivot_offset = Vector2( -123, -302 )
size_flags_vertical = 3
size_flags_stretch_ratio = 0.08
theme = ExtResource( 1 )
text = "Eva: Interpret"

[node name="ClearOutput_Btn" type="Button" parent="VBox"]
margin_top = 34.0
margin_right = 203.0
margin_bottom = 64.0
rect_pivot_offset = Vector2( -123, -302 )
size_flags_vertical = 3
size_flags_stretch_ratio = 0.08
theme = ExtResource( 1 )
text = "Clear Output"

[node name="Separator" type="HSeparator" parent="VBox"]
modulate = Color( 0.145098, 0.145098, 0.164706, 0 )
margin_top = 36.0
margin_top = 68.0
margin_right = 203.0
margin_bottom = 441.0
margin_bottom = 443.0
size_flags_vertical = 15
theme = ExtResource( 5 )

[node name="Back_Btn" type="Button" parent="VBox"]
margin_top = 445.0
margin_top = 447.0
margin_right = 203.0
margin_bottom = 478.0
rect_pivot_offset = Vector2( -123, -302 )
@@ -85,9 +95,11 @@ anchor_right = 0.625
anchor_bottom = 1.0
margin_left = 0.199997
theme = ExtResource( 5 )
readonly = true

[node name="Debug_TEdit" type="TextEdit" parent="."]
anchor_left = 0.625
anchor_right = 1.0
anchor_bottom = 1.0
theme = ExtResource( 5 )
readonly = true
@@ -1,35 +1,41 @@
extends Node

var eva = preload("Eva.gd").new()


# Eva -------------------------------------------------------
const SLexer = preload("Lexer.gd")
var Lexer : SLexer

const SParser = preload("Parser.gd")
var Parser : SParser

const SEva = preload("Eva.gd")
var Eva : SEva


# UX --------------------------------------------------------
onready var Editor   = get_node("Editor_TEdit")
onready var Output   = get_node("Output_TEdit")
onready var Debug    = get_node("Debug_TEdit")
onready var Eva_Btn  = get_node("VBox/Eva_Interpret_Btn")
onready var Back_Btn = get_node("VBox/Back_Btn")
onready var Editor    = get_node("Editor_TEdit")
onready var Output    = get_node("Output_TEdit")
onready var Debug     = get_node("Debug_TEdit")
onready var Eva_Btn   = get_node("VBox/Eva_Interpret_Btn")
onready var Clear_Btn = get_node("VBox/ClearOutput_Btn")
onready var Back_Btn  = get_node("VBox/Back_Btn")


func evaBtn_pressed():
    eva.init(Editor.text, Output)
    Lexer  = SLexer.new(Editor.text, Output)
    Parser = SParser.new(Lexer, Output)
    Eva    = SEva.new(null, Output)

    var ast = eva.parse()
    var ast    = Parser.parse()
    var result = Eva.eval(ast)

    Output.text = eva.eval(ast)
    Debug.text = JSON.print(eva.Records, "\t")
    if result != null:
        Output.text += "\nResult: " + result

    Debug.text = JSON.print( Eva.get_EnvSnapshot(), "\t" )

func clearBtn_pressed():
    Output.text = ""

func backBtn_pressed():
    queue_free()
@@ -37,4 +43,5 @@ func backBtn_pressed():

func _ready():
    Eva_Btn.connect("pressed", self, "evaBtn_pressed")
    Clear_Btn.connect("pressed", self, "clearBtn_pressed")
    Back_Btn.connect("pressed", self, "backBtn_pressed")
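A minimal usage sketch of the reworked handler flow above (illustrative only; it assumes the same SLexer/SParser/SEva constants and node paths shown in this script, and is not part of the commit):

    # Hypothetical helper: run one source string through the new pipeline.
    func run_source(source : String, output : TextEdit) -> void:
        var lexer  = SLexer.new(source, output)    # Lexer tokenizes in its constructor
        var parser = SParser.new(lexer, output)    # Parser pulls its first token in its constructor
        var eva    = SEva.new(null, output)        # null parent = top-level (global) environment

        var ast    = parser.parse()                # root node of type NType.program
        var result = eva.eval(ast)

        if result != null:
            output.text += "\nResult: " + result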
@@ -1,444 +1,108 @@
extends Object

var SRegEx = preload("res://RegM/Scripts/SRegex.gd").new()

# ---------------------------------------------------------- Lexer
const TType : Dictionary = \
{
    fmt_S  = "Formatting",
    cmt_SL = "Comment Single-Line",
    cmt_ML = "Comment Multi-Line",

    def_Start = "Expression Start",
    def_End   = "Expression End",
    def_Var   = "Variable",

    literal_Number = "Literal: Number",
    literal_String = "Literal: String",

    op_Assgin  = "Assignment",
    op_Numeric = "op_Numeric",

    identifier = "Identifier"
}

const Spec : Dictionary = \
{
    TType.cmt_SL : "start // inline.repeat(0-)",
    TType.cmt_ML : "start /* set(whitespace !whitespace).repeat(0-).lazy */",

    TType.fmt_S : "start whitespace.repeat(1-).lazy",

    TType.def_Start : "start \\(",
    TType.def_End   : "start \\)",
    TType.def_Var   : "start \"var\"",

    TType.literal_Number : \
    """start
    set(+ \\-).repeat(0-1)
    ( set(0-9).repeat(1-) \\. ).repeat(0-1)
    set(0-9).repeat(1-)
    """,
    TType.literal_String : "start \\\" !set( \\\" ).repeat(0-) \\\" ",

    TType.op_Assgin  : "start \"set\"",
    TType.op_Numeric : "start set(+ \\- * /)",

    TType.identifier :
    """start
    (
        set(A-z).repeat(1-)
        set(\\- _).repeat(0-1)
    )
    .repeat(0-1)
    """
}

class Token:
    var Type  : String
    var Value : String

    func is_Literal():
        return Type == TType.literal_Number || Type == TType.literal_String;


var SourceText : String
var Cursor     : int
var SpecRegex  : Dictionary
var Tokens     : Array
var TokenIndex : int = 0


func compile_regex():
    for type in TType.values() :
        var regex  = RegEx.new()
        var result = SRegEx.compile(Spec[type])

        regex.compile( result )

        SpecRegex[type] = regex

func init(programSrcText, errorOutput):
    ErrorOutput = errorOutput

    SourceText = programSrcText
    Cursor     = 0
    TokenIndex = 0

    if SpecRegex.size() == 0 :
        compile_regex()

    tokenize()

func next_Token():
    var nextToken = null

    if Tokens.size() > TokenIndex :
        nextToken   = Tokens[TokenIndex]
        TokenIndex += 1

    return nextToken

func reached_EndOfText():
    return Cursor >= SourceText.length()

func tokenize():
    Tokens.clear()

    while reached_EndOfText() == false :
        var srcLeft = SourceText.substr(Cursor)
        var token   = Token.new()

        var error = true
        for type in TType.values() :
            var result = SpecRegex[type].search( srcLeft )
            if result == null || result.get_start() != 0 :
                continue

            # Skip Comments
            if type == TType.cmt_SL || type == TType.cmt_ML :
                Cursor += result.get_string().length()
                error   = false
                break

            # Skip Whitespace
            if type == TType.fmt_S :
                var addVal = result.get_string().length()

                Cursor += addVal
                error   = false
                break

            token.Type  = type
            token.Value = result.get_string()
            Cursor     += ( result.get_string().length() )

            Tokens.append( token )

            error = false
            break;

        if error :
            var assertStrTmplt = "Lexer - tokenize: Source text not understood by tokenizer at Cursor pos: {value} -: {txt}"
            var assertStr      = assertStrTmplt.format({"value" : Cursor, "txt" : srcLeft})
            throw(assertStr)
            return
# ---------------------------------------------------------- Lexer END


# ---------------------------------------------------------- Parser
# ---------------------------------------------------------- AST Node

const NType = \
{
    literal_Number = "Literal: Number",
    literal_String = "Literal: String",

    op_Assign = "Assignment",

    op_Add  = "+",
    op_Sub  = "-",
    op_Mult = "*",
    op_Div  = "/",

    identifier = "Identifier",
    variable   = "Variable"
}

class ASTNode:
    var Data : Array

    func add_Expr( expr ):
        Data.append(expr)

    func add_TokenValue( token ):
        Data.append( token.Value )

    func set_Type( nType ):
        Data.append(nType)

    func arg( id ):
        return Data[id]

    func num_args():
        return Data.size() - 1

    func type():
        return Data[0]

    func is_Number():
        return type() == NType.literal_Number

    func is_String():
        return type() == NType.literal_String

    func string():
        return arg(1).substr(1, arg(1).length() -2)

    # Serialization ----------------------------------------------------
    func array_Serialize(array, fn_objSerializer) :
        var result = []

        for entry in array :
            if typeof(entry) == TYPE_ARRAY :
                result.append( array_Serialize( entry, fn_objSerializer ))

            elif typeof(entry) == TYPE_OBJECT :
                fn_objSerializer.set_instance(entry)
                result.append( fn_objSerializer.call_func() )

            else :
                result.append( entry )

        return result

    func to_SExpression():
        # var expression = []

        # if typeof(Value) == TYPE_ARRAY :
        var \
        to_SExpression_Fn = FuncRef.new()
        to_SExpression_Fn.set_function("to_SExpression")

        return array_Serialize( self.Data, to_SExpression_Fn )

        # if typeof(Value) == TYPE_OBJECT :
        #     var result = [ Type, Value.to_SExpression() ]
        #     return result

        # expression.append(Value)
        # return expression
    # Serialization END -------------------------------------------------

# ---------------------------------------------------------- AST Node END

var TokenType : Token
var NextToken : Token

# Gets the next token only if the current token is the specified intended token (tokenType)
func eat(tokenType):
    var currToken = NextToken

    check(currToken != null, "Parser - eat: NextToken was null")

    var assertStrTmplt = "Parser - eat: Unexpected token: {value}, expected: {type}"
    var assertStr      = assertStrTmplt.format({"value" : currToken.Value, "type" : tokenType})

    check(currToken.Type == tokenType, assertStr)

    NextToken = next_Token()

    return currToken

func parse():
    NextToken = next_Token()

    if NextToken.Type == TType.def_Start:
        return parse_Expression()

    if NextToken.Type == TType.identifier:
        return parse_Identifier()

    if NextToken.is_Literal():
        return parse_Literal()

func parse_Expression():
    eat(TType.def_Start)
    var node : ASTNode

    if NextToken.Type == TType.def_Var:
        node = parse_Variable()

    if NextToken.Type == TType.op_Assgin:
        node = parse_op_Assign()

    elif NextToken.Type == TType.op_Numeric:
        node = parse_op_Numeric()

        var arg = 1
        while NextToken.Type != TType.def_End:
            if NextToken.Type == TType.def_Start:
                node.add_Expr( parse_Expression() )
            else :
                node.add_Expr( parse_Literal() )

    elif NextToken.is_Literal():
        node = parse_Literal()

    eat(TType.def_End)

    return node

func parse_Variable():
    var \
    node = ASTNode.new()
    node.set_Type(NType.variable)
    eat(TType.def_Var)

    check( NextToken.Type == TType.identifier,
        String("Parser - parse_Variable: NextToken should have been identifier. TokenData - Type: {type} Value: {value}") \
            .format({"type" : NextToken.Type, "value" : NextToken.Value })
    )

    node.add_TokenValue( NextToken )
    eat(TType.identifier)

    if NextToken.Type == TType.def_Start :
        node.add_Expr( parse_Expression() )

    else :
        node.add_Expr( parse_Literal() )

    return node

func parse_Identifier():
    var \
    node = ASTNode.new()
    node.set_Type(NType.identifier)
    node.add_TokenValue(NextToken)

    eat(TType.identifier)

    return node

func parse_op_Assign():
    var \
    node = ASTNode.new()
    node.set_type(NType.op_Assign)

    eat(TType.op_Assgin)

    check( NextToken.Type != TType.identifier,
        String("Parser - parse_op_Assign: NextToken should have been identifier, Type: {type} Value: {value}") \
            .format({"type" : NextToken.Type, "value" : NextToken.Value })
    )

    node.add_TokenValue( NextToken.Value )

    if NextToken.is_Literal() :
        node.add_Expr( parse_Literal() )

    elif NextToken.Type == TType.def_Start :
        node.add_Expr( parse_Expression() )

    return node

func parse_op_Numeric():
    var node = ASTNode.new()

    match NextToken.Value:
        NType.op_Add:
            node.set_Type(NType.op_Add)
        NType.op_Sub:
            node.set_Type(NType.op_Sub)
        NType.op_Mult:
            node.set_Type(NType.op_Mult)
        NType.op_Div:
            node.set_Type(NType.op_Div)

    eat(TType.op_Numeric)

    return node

func parse_Literal():
    var node = ASTNode.new()

    match NextToken.Type:
        TType.literal_Number:
            node.set_Type(NType.literal_Number)
            node.add_TokenValue(NextToken)

            eat(TType.literal_Number)

        TType.literal_String:
            node.set_Type(NType.literal_String)
            node.add_TokenValue(NextToken)

            eat(TType.literal_String)

    return node

# ---------------------------------------------------------- Parser END

# ---------------------------------------------------------- Environment

var Records : Dictionary

func env_DefineVar(symbol : String, value) :
    Records[symbol] = value

func env_Lookup(symbol : String) :
    check(Records.has(symbol), String("Symbol not found in environment records"))

    return Records[symbol]

# ---------------------------------------------------------- Environment END
# ---------------------------------------------------------- UTILITIES
var EvalOut

func check( condition : bool, message : String):
    assert(condition, message)
    if ! condition:
        EvalOut.text = "Eva - Error: " + message

func throw( message ):
    assert(false, message)
    EvalOut.text = "Eva - Error: " + message
# ---------------------------------------------------------- UTILITIES END

class_name Eva

# ---------------------------------------------------------- GLOBALS
var ErrorOutput
const Parser = preload("Parser.gd")
const NType  = Parser.NType

const EvaEnv = preload("EvaEnv.gd")
var Env : EvaEnv

var Parent
# ---------------------------------------------------------- GLOBALS END

# ---------------------------------------------------------- UTILITIES
func check( condition : bool, message : String):
    assert(condition, message)
    ErrorOutput.text = "Eva - Error: " + message

func throw( message ):
    assert(false, message)
    ErrorOutput.text = "Eva - Error: " + message
# ---------------------------------------------------------- UTILITIES END
func _init(parent, evalOut):
    EvalOut = evalOut
    Env     = EvaEnv.new(EvalOut)
    Parent  = parent

func eval( ast ):
    if ast.type() == NType.identifier :
        return env_Lookup( ast.arg(1) )
    if ast.type() == NType.program :
        var index = 1;
        while index < ast.num_args():
            eval( ast.arg(index) )
            index += 1

    if ast.type() == NType.variable :
        var result = eval( ast.arg(index) )
        if result != null:
            return String( result )
        else:
            return null

    elif ast.type() == NType.block :
        return eval_Block( ast )

    elif ast.type() == NType.identifier :
        var identifier = ast.arg(1)

        if Parent != null && !Env.has( identifier):
            return Parent.Env.lookup( identifier )

        return Env.lookup( identifier )

    elif ast.type() == NType.fn_Print :
        return eval_Print( ast )

    elif ast.type() == NType.op_Assign :
        var symbol = ast.arg(1)
        var value  = eval( ast.arg(2) )

        env_DefineVar(symbol, value)
        if Parent != null && !Env.has( symbol):
            return Parent.Env.set( symbol, value )

        return Env.set( symbol, value )

    elif ast.type() == NType.variable :
        var symbol = ast.arg(1)
        var value  = eval( ast.arg(2) )

        Env.define_Var(symbol, value)
        return value

    if ast.is_String() :
    elif ast.is_Number() :
        return float( ast.arg(1) )

    elif ast.is_String() :
        return ast.string()

        return String( eval_Numeric(ast) )
    return eval_Numeric( ast )

    var msgT = "eval - Unimplemented: {ast}"
    var msg  = msgT.format({"ast" : JSON.print(ast.to_SExpression(), "\t") })
    throw(msg)

func eval_Numeric( ast ):
    if ast.is_Number() :
        return float(ast.arg(1))
func eval_Block( ast ):
    var eva_Block = get_script().new( self, EvalOut )

    var result

    var index = 1;
    while index <= ast.num_args() :
        result = eva_Block.eval( ast.arg(index) )
        index += 1

    return result

func eval_Numeric( ast ):
    if ast.type() == NType.op_Add:
        var result = 0.0; var index = 1

        while index <= ast.num_args():
            result += eval_Numeric( ast.arg(index) )
            result += eval( ast.arg(index) )
            index += 1

        return result
@@ -447,7 +111,7 @@ func eval_Numeric( ast ):
        var result = 0.0; var index = 1

        while index <= ast.num_args():
            result -= eval_Numeric( ast.arg(index) )
            result -= eval( ast.arg(index) )
            index += 1

        return result
@@ -456,7 +120,7 @@ func eval_Numeric( ast ):
        var result = 1.0; var index = 1

        while index <= ast.num_args():
            result *= eval_Numeric( ast.arg(index) )
            result *= eval( ast.arg(index) )
            index += 1

        return result
@@ -465,8 +129,19 @@ func eval_Numeric( ast ):
        var result = 1.0; var index = 1

        while index <= ast.num_args():
            result /= eval_Numeric( ast.arg(index) )
            result /= eval( ast.arg(index) )
            result += 1

        return result

func eval_Print( ast ):
    EvalOut.text += "\n" + String( eval( ast.arg(1) ) )
    return null

func get_EnvSnapshot():
    var snapshot = Env.Records.duplicate(true)

    if Parent != null:
        snapshot[Parent] = Parent.Env.Records.duplicate(true)

    return snapshot
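Taken together, the new Lexer/Parser/Eva trio interprets small s-expression programs. An illustrative snippet (not taken from the commit) exercising the var, begin, print, and numeric forms defined by the Spec above:

    // Illustrative Eva source
    (var x 10)
    (begin
        (var x 20)       // defined in the block's child environment
        (print (+ x 5))  // expected to print 25
    )
    (print x)            // expected to print 10; the outer binding is untouched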
App/EoI/Scripts/EvaEnv.gd  (new file, 41 lines)
@@ -0,0 +1,41 @@
extends Object

# ---------------------------------------------------------- UTILITIES
var ErrorOut

func check( condition : bool, message : String):
    assert(condition, message)
    if ! condition:
        ErrorOut.text = "Eva - Error: " + message

func throw( message ):
    assert(false, message)
    ErrorOut.text = "Eva - Error: " + message
# ---------------------------------------------------------- UTILITIES END

class_name EvaEnv


var Records : Dictionary


func _init(errorOut):
    ErrorOut = errorOut

func define_Var(symbol : String, value) :
    Records[symbol] = value

func has(symbol : String) :
    return Records.has(symbol)

func set(symbol : String, value) :
    check(Records.has(symbol), String("Symbol not found in environment records"))

    Records[symbol] = value

    return Records[symbol]

func lookup(symbol : String) :
    check(Records.has(symbol), String("Symbol not found in environment records"))

    return Records[symbol]
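A small usage sketch (hypothetical driver code, not part of the commit) showing how Eva leans on this environment, including the parent-scope fallback used by eval:

    # Hypothetical illustration (Godot 3.x GDScript)
    extends Node

    func _ready():
        var errorOut  = TextEdit.new()
        var globalEnv = EvaEnv.new(errorOut)   # top-level scope
        var blockEnv  = EvaEnv.new(errorOut)   # child scope, as eval_Block would create

        globalEnv.define_Var("x", 10.0)

        # Eva's op_Assign resolves against the parent when the child lacks the symbol:
        if not blockEnv.has("x"):
            globalEnv.set("x", 42.0)

        print( globalEnv.lookup("x") )         # 42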
App/EoI/Scripts/Lexer.gd  (new file, 167 lines)
@@ -0,0 +1,167 @@
extends Object

# ---------------------------------------------------------- UTILITIES
var ErrorOut

func check( condition : bool, message : String):
    assert(condition, message)
    if ! condition:
        ErrorOut.text = "Eva - Error: " + message

func throw( message ):
    assert(false, message)
    ErrorOut.text = "Eva - Error: " + message
# ---------------------------------------------------------- UTILITIES END

class_name Lexer

var SRegEx = preload("res://RegM/Scripts/SRegex.gd").new()


const TType : Dictionary = \
{
    fmt_S  = "Formatting",
    cmt_SL = "Comment Single-Line",
    cmt_ML = "Comment Multi-Line",

    def_Block = "Expression Block Start",
    def_Start = "Expression Start",
    def_End   = "Expression End",
    def_Var   = "Variable",

    literal_Number = "Literal: Number",
    literal_String = "Literal: String",

    op_Assgin  = "Assignment",
    op_Numeric = "op_Numeric",

    fn_Print = "Print",

    identifier = "Identifier"
}

const Spec : Dictionary = \
{
    TType.cmt_SL : "start // inline.repeat(0-)",
    TType.cmt_ML : "start /* set(whitespace !whitespace).repeat(0-).lazy */",

    TType.fmt_S : "start whitespace.repeat(1-).lazy",

    TType.def_Block : "start \"begin\"",
    TType.def_Start : "start \\(",
    TType.def_End   : "start \\)",
    TType.def_Var   : "start \"var\"",

    TType.literal_Number : \
    """start
    set(+ \\-).repeat(0-1)
    ( set(0-9).repeat(1-) \\. ).repeat(0-1)
    set(0-9).repeat(1-)
    """,
    TType.literal_String : "start \\\" !set( \\\" ).repeat(0-) \\\" ",

    TType.op_Assgin  : "start \"set\"",
    TType.op_Numeric : "start set(+ \\- * /)",

    TType.fn_Print : "start \"print\"",

    TType.identifier :
    """start
    (
        set(A-z).repeat(1-)
        set(\\- _).repeat(0-1)
    )
    .repeat(0-1)
    """
}

class Token:
    var Type  : String
    var Value : String

    func is_Literal():
        return Type == TType.literal_Number || Type == TType.literal_String;


var SourceText : String
var Cursor     : int
var SpecRegex  : Dictionary
var Tokens     : Array
var TokenIndex : int = 0


func compile_regex():
    for type in TType.values() :
        var regex  = RegEx.new()
        var result = SRegEx.compile(Spec[type])

        regex.compile( result )

        SpecRegex[type] = regex

func next_Token():
    var nextToken = null

    if Tokens.size() > TokenIndex :
        nextToken   = Tokens[TokenIndex]
        TokenIndex += 1

    return nextToken

func reached_EndOfText():
    return Cursor >= SourceText.length()

func tokenize():
    Tokens.clear()

    while reached_EndOfText() == false :
        var srcLeft = SourceText.substr(Cursor)
        var token   = Token.new()

        var error = true
        for type in TType.values() :
            var result = SpecRegex[type].search( srcLeft )
            if result == null || result.get_start() != 0 :
                continue

            # Skip Comments
            if type == TType.cmt_SL || type == TType.cmt_ML :
                Cursor += result.get_string().length()
                error   = false
                break

            # Skip Whitespace
            if type == TType.fmt_S :
                var addVal = result.get_string().length()

                Cursor += addVal
                error   = false
                break

            token.Type  = type
            token.Value = result.get_string()
            Cursor     += ( result.get_string().length() )

            Tokens.append( token )

            error = false
            break;

        if error :
            var assertStrTmplt = "Lexer - tokenize: Source text not understood by tokenizer at Cursor pos: {value} -: {txt}"
            var assertStr      = assertStrTmplt.format({"value" : Cursor, "txt" : srcLeft})
            throw(assertStr)
            return


func _init(programSrcText, errorOut) :
    ErrorOut = errorOut

    SourceText = programSrcText
    Cursor     = 0
    TokenIndex = 0

    if SpecRegex.size() == 0 :
        compile_regex()

    tokenize()
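A quick tokenization sketch (hypothetical driver, not in the commit) showing the standalone Lexer in use:

    # Hypothetical: tokenize a one-line Eva program and dump the resulting tokens.
    extends Node

    func _ready():
        var errorOut = TextEdit.new()
        var lexer    = Lexer.new("(var answer 42) // the answer", errorOut)

        var token = lexer.next_Token()
        while token != null:
            print(token.Type, " : ", token.Value)   # e.g. "Expression Start : ("
            token = lexer.next_Token()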
App/EoI/Scripts/Parser.gd  (new file, 285 lines)
@@ -0,0 +1,285 @@
extends Object

# ---------------------------------------------------------- UTILITIES
var ErrorOut

func check( condition : bool, message : String):
    assert(condition, message)
    if ! condition:
        ErrorOut.text = "Eva - Error: " + message

func throw( message ):
    assert(false, message)
    ErrorOut.text = "Eva - Error: " + message
# ---------------------------------------------------------- UTILITIES END

class_name Parser

# ---------------------------------------------------------- AST Node

const NType = \
{
    program = "Program",

    block = "Scope Block",

    literal_Number = "Literal: Number",
    literal_String = "Literal: String",

    op_Assign = "Assignment",

    op_Add  = "+",
    op_Sub  = "-",
    op_Mult = "*",
    op_Div  = "/",

    fn_Print = "Print",

    identifier = "Identifier",
    variable   = "Variable"
}

class ASTNode:
    var Data : Array

    func add_Expr( expr ):
        Data.append(expr)

    func add_TokenValue( token ):
        Data.append( token.Value )

    func set_Type( nType ):
        Data.append(nType)

    func arg( id ):
        return Data[id]

    func num_args():
        return Data.size() - 1

    func type():
        return Data[0]

    func is_op_Numeric():
        match type():
            NType.op_Add:  return true
            NType.op_Sub:  return true
            NType.op_Mult: return true
            NType.op_Div:  return true
            _:             return false

    func is_Number():
        return type() == NType.literal_Number

    func is_String():
        return type() == NType.literal_String

    func string():
        return arg(1).substr(1, arg(1).length() -2)

    # Serialization ----------------------------------------------------
    func array_Serialize(array, fn_objSerializer) :
        var result = []

        for entry in array :
            if typeof(entry) == TYPE_ARRAY :
                result.append( array_Serialize( entry, fn_objSerializer ))

            elif typeof(entry) == TYPE_OBJECT :
                fn_objSerializer.set_instance(entry)
                result.append( fn_objSerializer.call_func() )

            else :
                result.append( entry )

        return result

    func to_SExpression():
        var \
        to_SExpression_Fn = FuncRef.new()
        to_SExpression_Fn.set_function("to_SExpression")

        return array_Serialize( self.Data, to_SExpression_Fn )
    # Serialization END -------------------------------------------------

# ---------------------------------------------------------- AST Node END

const SLexer = preload("Lexer.gd")
const TType  = SLexer.TType
var Lexer : SLexer

var NextToken : SLexer.Token

# Gets the next token only if the current token is the specified intended token (tokenType)
func eat(tokenType):
    var currToken = NextToken

    check(currToken != null, "Parser - eat: NextToken was null")

    var assertStrTmplt = "Parser - eat: Unexpected token: {value}, expected: {type}"
    var assertStr      = assertStrTmplt.format({"value" : currToken.Value, "type" : tokenType})

    check(currToken.Type == tokenType, assertStr)

    NextToken = Lexer.next_Token()

    return currToken

func parse():
    var \
    node = ASTNode.new()
    node.set_Type(NType.program)

    while NextToken != null :
        if NextToken.Type == TType.def_Start:
            node.add_Expr( parse_Expression() )

        elif NextToken.Type == TType.identifier:
            node.add_Expr( parse_Identifier() )

        elif NextToken.is_Literal():
            node.Add_Expr( parse_Literal() )

    return node

func parse_Expression():
    eat(TType.def_Start)
    var node : ASTNode

    match NextToken.Type :
        TType.def_Block:
            node = parse_Block()
        TType.def_Var:
            node = parse_Variable()
        TType.fn_Print:
            node = parse_fn_Print()
        TType.op_Assgin:
            node = parse_op_Assign()
        TType.op_Numeric:
            node = parse_op_Numeric()

    var arg = 1
    while NextToken.Type != TType.def_End:
        if NextToken.Type == TType.def_Start:
            node.add_Expr( parse_Expression() )
        elif NextToken.Type == TType.identifier:
            node.add_Expr( parse_Identifier() )
        else :
            node.add_Expr( parse_Literal() )

    eat(TType.def_End)

    return node

func parse_Block():
    var \
    node = ASTNode.new()
    node.set_Type(NType.block)
    eat(TType.def_Block)

    return node

func parse_Variable():
    var \
    node = ASTNode.new()
    node.set_Type(NType.variable)
    eat(TType.def_Var)

    check( NextToken.Type == TType.identifier,
        String("Parser - parse_Variable: NextToken should have been identifier. TokenData - Type: {type} Value: {value}") \
            .format({"type" : NextToken.Type, "value" : NextToken.Value })
    )

    node.add_TokenValue( NextToken )
    eat(TType.identifier)

    if NextToken.Type == TType.def_Start :
        node.add_Expr( parse_Expression() )

    else :
        node.add_Expr( parse_Literal() )

    return node

func parse_Identifier():
    var \
    node = ASTNode.new()
    node.set_Type(NType.identifier)
    node.add_TokenValue(NextToken)

    eat(TType.identifier)

    return node

func parse_fn_Print():
    var \
    node = ASTNode.new()
    node.set_Type(NType.fn_Print)

    eat(TType.fn_Print)

    return node

func parse_op_Assign():
    var \
    node = ASTNode.new()
    node.set_Type(NType.op_Assign)

    eat(TType.op_Assgin)

    check( NextToken.Type == TType.identifier,
        String("Parser - parse_op_Assign: NextToken should have been identifier, Type: {type} Value: {value}") \
            .format({"type" : NextToken.Type, "value" : NextToken.Value })
    )

    node.add_TokenValue( NextToken )
    eat(TType.identifier)

    if NextToken.is_Literal() :
        node.add_Expr( parse_Literal() )

    elif NextToken.Type == TType.def_Start :
        node.add_Expr( parse_Expression() )

    return node

func parse_op_Numeric():
    var node = ASTNode.new()

    match NextToken.Value:
        NType.op_Add:
            node.set_Type(NType.op_Add)
        NType.op_Sub:
            node.set_Type(NType.op_Sub)
        NType.op_Mult:
            node.set_Type(NType.op_Mult)
        NType.op_Div:
            node.set_Type(NType.op_Div)

    eat(TType.op_Numeric)

    return node

func parse_Literal():
    var node = ASTNode.new()

    match NextToken.Type:
        TType.literal_Number:
            node.set_Type(NType.literal_Number)
            node.add_TokenValue(NextToken)

            eat(TType.literal_Number)

        TType.literal_String:
            node.set_Type(NType.literal_String)
            node.add_TokenValue(NextToken)

            eat(TType.literal_String)

    return node

func _init(lexer, errorOut) :
    ErrorOut = errorOut
    Lexer    = lexer

    NextToken = Lexer.next_Token()
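A parse-and-inspect sketch (hypothetical driver, not in the commit) tying the new Lexer and Parser together:

    # Hypothetical: parse a small program and dump its S-expression form.
    extends Node

    func _ready():
        var errorOut = TextEdit.new()
        var lexer    = Lexer.new("(var x (+ 1 2))", errorOut)
        var parser   = Parser.new(lexer, errorOut)

        var ast = parser.parse()   # root node of type NType.program
        print( JSON.print(ast.to_SExpression(), "\t") )
        # Roughly: [ "Program", [ "Variable", "x", [ "+", [ "Literal: Number", "1" ], [ "Literal: Number", "2" ] ] ] ]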
@@ -3,7 +3,7 @@ extends Object
var SRegEx = preload("res://RegM/Scripts/SRegex.gd").new()


class_name Lexer
class_name RDP_Lexer


const TokenType : Dictionary = \
@@ -1,5 +1,7 @@
extends Object

class_name RDP_Parser

const NodeType = \
{
    program = "Program",
@@ -108,9 +110,9 @@ class PNode:



var SLexer : Script = preload("res://RDP/Scripts/Lexer.gd")
var SLexer : Script = preload("res://RDP/Scripts/RDP_Lexer.gd")
var TokenType = SLexer.TokenType
var NextToken : Lexer.Token
var NextToken : RDP_Lexer.Token
var Lexer

@@ -1,8 +1,8 @@
extends Panel


var Lexer  = preload("Lexer.gd").new()
var Parser = preload("Parser.gd").new()
var Lexer  = preload("RDP_Lexer.gd").new()
var Parser = preload("RDP_Parser.gd").new()


onready var Tokens_TOut = get_node("Tokens_TOut")
@@ -15,13 +15,37 @@ _global_script_classes=[ {
"path": "res://EoI/Scripts/Eva.gd"
}, {
"base": "Object",
"class": "EvaEnv",
"language": "GDScript",
"path": "res://EoI/Scripts/EvaEnv.gd"
}, {
"base": "Object",
"class": "Lexer",
"language": "GDScript",
"path": "res://RDP/Scripts/Lexer.gd"
"path": "res://EoI/Scripts/Lexer.gd"
}, {
"base": "Object",
"class": "Parser",
"language": "GDScript",
"path": "res://EoI/Scripts/Parser.gd"
}, {
"base": "Object",
"class": "RDP_Lexer",
"language": "GDScript",
"path": "res://RDP/Scripts/RDP_Lexer.gd"
}, {
"base": "Object",
"class": "RDP_Parser",
"language": "GDScript",
"path": "res://RDP/Scripts/RDP_Parser.gd"
} ]
_global_script_class_icons={
"Eva": "",
"Lexer": ""
"EvaEnv": "",
"Lexer": "",
"Parser": "",
"RDP_Lexer": "",
"RDP_Parser": ""
}

[application]