BAPFS - Lecture 1 & 2 complete

Edward R. Gonzalez 2022-07-06 06:03:11 -04:00
commit 0d6ea7fae5
18 changed files with 692 additions and 0 deletions

3
.editorconfig Normal file

@ -0,0 +1,3 @@
[*.zig]
indent_style = space
indent_size = 4

54
.gitignore vendored Normal file

@ -0,0 +1,54 @@
# Godot 4+ specific ignores
.godot/
# Godot-specific ignores
.import/
export.cfg
export_presets.cfg
# Dummy HTML5 export presets file for continuous integration
!.github/dist/export_presets.cfg
# Imported translations (automatically generated from CSV files)
*.translation
# Mono-specific ignores
.mono/
data_*/
mono_crash.*.json
# System/tool-specific ignores
.directory
.DS_Store
*~
# VSCode
.vscode/*
!.vscode/settings.json
!.vscode/tasks.json
!.vscode/launch.json
!.vscode/extensions.json
!.vscode/*.code-snippets
# Local History for Visual Studio Code
.history/
# Built Visual Studio Code Extensions
*.vsix
# Fork
Source/.sconsign.dblite
.vs
# Compiled Object files
*.slo
*.lo
*.o
*.obj

3
.gitmodules vendored Normal file

@ -0,0 +1,3 @@
[submodule "Engine/gd"]
path = Engine/gd
url = https://github.com/godotengine/godot

BIN
Editor/Branding/RDP_Class_cover_small.png Normal file

Binary file not shown. (PNG, 495 KiB)

35
Editor/Branding/RDP_Class_cover_small.png.import Normal file

@ -0,0 +1,35 @@
[remap]
importer="texture"
type="StreamTexture"
path="res://.import/RDP_Class_cover_small.png-51d9e4e36c8441da2486970409e2a06b.stex"
metadata={
"vram_texture": false
}
[deps]
source_file="res://Branding/RDP_Class_cover_small.png"
dest_files=[ "res://.import/RDP_Class_cover_small.png-51d9e4e36c8441da2486970409e2a06b.stex" ]
[params]
compress/mode=0
compress/lossy_quality=0.7
compress/hdr_mode=0
compress/bptc_ldr=0
compress/normal_map=0
flags/repeat=0
flags/filter=true
flags/mipmaps=false
flags/anisotropic=false
flags/srgb=2
process/fix_alpha_border=true
process/premult_alpha=false
process/HDR_as_SRGB=false
process/invert_color=false
process/normal_map_invert_y=false
stream=false
size_limit=0
detect_3d=true
svg/scale=1.0

61
Editor/Lecture.1.gd Normal file

@ -0,0 +1,61 @@
extends Node

# This closely follows the source provided in the lectures.
# Later on, after the lectures are complete or when I deem it
# necessary, there will be heavy refactors.

class SyntaxNode:
	var Type  : String
	var Value : int

	func Dictionary():
		var result = \
		{
			Type  = self.Type,
			Value = self.Value
		}
		return result

class LetterParser:
	var Str : String

	# NumericLiteral
	#     : NUMBER
	#     ;
	#
	func NumericLiteral():
		var \
		node       = SyntaxNode.new()
		node.Type  = "NumericLiteral"
		node.Value = int(self.Str)

		return node

	# Parses the text program description into an AST.
	func Parse(programDescription):
		self.Str = programDescription

		return NumericLiteral()

var ProgramDescription = "7"
var LParser            = LetterParser.new()

# Note: _ready is being used for the Program func of the lectures.
# Main entry point.
#
# Program
#     : NumericLiteral
#     ;
#
func _ready():
	var ast = LParser.Parse(ProgramDescription)
	print(to_json(ast.Dictionary()))

# Called every frame. 'delta' is the elapsed time since the previous frame.
#func _process(delta):
#	pass

6
Editor/Lecture.1.tscn Normal file

@ -0,0 +1,6 @@
[gd_scene load_steps=2 format=2]
[ext_resource path="res://Lecture.1.gd" type="Script" id=1]
[node name="Test" type="Node2D"]
script = ExtResource( 1 )

212
Editor/Lecture.2.gd Normal file

@ -0,0 +1,212 @@
extends Node

const JsonBeautifier = preload("res://ThirdParty/json_beautifier.gd")

# This closely follows the source provided in the lectures.
# Later on, after the lectures are complete or when I deem it
# necessary, there will be heavy refactors.

enum TokenTypes \
{
	Token_Number,
	Token_String
}

const StrTokenTypes = \
{
	Token_Number = "Number",
	Token_String = "String"
}

class Token:
	var Type  : String
	var Value : String

	func toDict():
		var result = \
		{
			Type  = self.Type,
			Value = self.Value
		}
		return result

class Tokenizer:
	var SrcTxt : String
	var Cursor : int

	# Sets up the tokenizer with the program source text.
	func init(programSrcText):
		SrcTxt = programSrcText
		Cursor = 0

	# Provides the next token in the source text.
	func next_Token():
		if self.reached_EndOfTxt() == true :
			return null

		var token = self.SrcTxt.substr(Cursor)

		# Numbers
		if token[self.Cursor].is_valid_integer() :
			var \
			numberTok       = Token.new()
			numberTok.Type  = "Number"
			numberTok.Value = ""

			while token.length() > self.Cursor && token[self.Cursor].is_valid_integer() :
				numberTok.Value += token[self.Cursor]
				self.Cursor     += 1

			return numberTok

		# Strings
		if token[self.Cursor] == '"' :
			var \
			stringTok       = Token.new()
			stringTok.Type  = "String"
			stringTok.Value = "\""

			self.Cursor += 1

			while token.length() > self.Cursor :
				stringTok.Value += token[self.Cursor]
				self.Cursor     += 1

			return stringTok

		return null

	func reached_EndOfTxt():
		return self.Cursor >= ( self.SrcTxt.length() - 1 )

var GTokenizer = Tokenizer.new()

class SyntaxNode:
	var Type : String
	var Value # Not specifying a type implicitly declares a Variant type.

	func toDict():
		var result = \
		{
			Type  = self.Type,
			Value = self.Value
		}
		return result

class ProgramNode:
	var Type : String
	var Body : Object

	func toDict():
		var result = \
		{
			Type = self.Type,
			Body = self.Body.toDict()
		}
		return result

class Parser:
	var TokenizerRef : Tokenizer
	var NextToken    : Token

	func eat(tokenType):
		var currToken = self.NextToken

		assert(currToken != null, "eat: NextToken was null")

		var assertStrTmplt = "eat: Unexpected token: {value}, expected: {type}"
		var assertStr      = assertStrTmplt.format({"value" : currToken.Value, "type" : tokenType})

		assert(currToken.Type == tokenType, assertStr)

		self.NextToken = self.TokenizerRef.next_Token()

		return currToken

	# Literal
	#     : NumericLiteral
	#     : StringLiteral
	#     ;
	func parse_Literal():
		match self.NextToken.Type :
			"Number":
				return parse_NumericLiteral()
			"String":
				return parse_StringLiteral()

		assert(false, "parse_Literal: Was not able to detect valid literal type from NextToken")

	# NumericLiteral
	#     : Number
	#     ;
	#
	func parse_NumericLiteral():
		var numberTok = self.eat("Number")

		var \
		node       = SyntaxNode.new()
		node.Type  = "NumericLiteral"
		node.Value = int( numberTok.Value )

		return node

	# StringLiteral
	#     : String
	#     ;
	#
	func parse_StringLiteral():
		var stringTok = self.eat("String")

		var \
		node       = SyntaxNode.new()
		node.Type  = "StringLiteral"
		node.Value = stringTok.Value.substr( 1, stringTok.Value.length() - 2 )

		return node

	# Program
	#     : Literal
	#     ;
	#
	func parse_Program():
		var \
		node      = ProgramNode.new()
		node.Type = "Program"
		node.Body = parse_Literal()

		return node

	# Parses the text program description into an AST.
	func parse(TokenizerRef):
		self.TokenizerRef = TokenizerRef
		NextToken         = TokenizerRef.next_Token()

		return parse_Program()

var GParser = Parser.new()

# Main entry point.
func _ready():
	# Numerical test
	var ProgramDescription = "47"

	GTokenizer.init(ProgramDescription)

	var ast = GParser.parse(GTokenizer)

	print(JsonBeautifier.beautify_json(to_json(ast.toDict())))

	# String test
	ProgramDescription = "\"hello\""

	GTokenizer.init(ProgramDescription)

	ast = GParser.parse(GTokenizer)

	print(JsonBeautifier.beautify_json(to_json(ast.toDict())))

# Called every frame. 'delta' is the elapsed time since the previous frame.
#func _process(delta):
#	pass
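The Tokenizer above matches characters by hand; the Lecture 1 notes point out that lexical analysis is usually driven by regular expressions. The following is a rough, standalone sketch of that table-driven approach using Godot 3's RegEx class. It is not part of this commit, and the RegexTokenizer name, the spec table, and the dictionary token shape are assumptions made for illustration only.

extends Node

# Illustrative sketch: a regex-driven tokenizer. Patterns are anchored to the
# start of the remaining text; the first spec that matches wins.
class RegexTokenizer:
	# Each entry: [pattern, token type]. Example specs only.
	var Specs = [
		["^\\d+",       "Number"],
		["^\"[^\"]*\"", "String"]
	]

	var SrcTxt : String
	var Cursor : int

	func init(programSrcText):
		SrcTxt = programSrcText
		Cursor = 0

	func next_Token():
		if Cursor >= SrcTxt.length():
			return null

		var rest = SrcTxt.substr(Cursor)

		for spec in Specs:
			var regex = RegEx.new()
			regex.compile(spec[0])

			var result = regex.search(rest)
			if result == null:
				continue

			Cursor += result.get_string().length()

			return { "Type": spec[1], "Value": result.get_string() }

		return null

func _ready():
	var tokenizer = RegexTokenizer.new()

	tokenizer.init("47")
	print(tokenizer.next_Token())       # Number token "47"

	tokenizer.init("\"hello\"")
	print(tokenizer.next_Token())       # String token, still including its quotes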

6
Editor/Lecture.2.tscn Normal file

@ -0,0 +1,6 @@
[gd_scene load_steps=2 format=2]
[ext_resource path="res://Lecture.2.gd" type="Script" id=1]
[node name="Test" type="Node2D"]
script = ExtResource( 1 )

133
Editor/ThirdParty/json_beautifier.gd vendored Normal file

@ -0,0 +1,133 @@
###############################################################################
# JSON Beautifier                                                             #
# Copyright (C) 2018-2020 Michael Alexsander                                  #
#-----------------------------------------------------------------------------#
# This Source Code Form is subject to the terms of the Mozilla Public         #
# License, v. 2.0. If a copy of the MPL was not distributed with this         #
# file, You can obtain one at http://mozilla.org/MPL/2.0/.                    #
###############################################################################

class_name JSONBeautifier


# Takes valid JSON (if invalid, it will return an error according to Godot's
# 'validate_json()' method) and a number of spaces for indentation (default is
# '0', in which case it will use tabs instead), returning properly formatted JSON.
static func beautify_json(json: String, spaces := 0) -> String:
	var error_message: String = validate_json(json)
	if not error_message.empty():
		return error_message

	var indentation := ""
	if spaces > 0:
		for i in spaces:
			indentation += " "
	else:
		indentation = "\t"

	var quotation_start := -1
	var char_position := 0
	for i in json:
		# Work around a Godot quirk, as it allows JSON strings to end with a
		# trailing comma.
		if i == "," and char_position + 1 == json.length():
			break

		# Avoid formatting inside strings.
		if i == "\"":
			if quotation_start == -1:
				quotation_start = char_position
			elif json[char_position - 1] != "\\":
				quotation_start = -1

			char_position += 1
			continue
		elif quotation_start != -1:
			char_position += 1
			continue

		match i:
			# Remove pre-existing formatting.
			" ", "\n", "\t":
				json[char_position] = ""
				char_position -= 1
			"{", "[", ",":
				if json[char_position + 1] != "}" and\
						json[char_position + 1] != "]":
					json = json.insert(char_position + 1, "\n")
					char_position += 1
			"}", "]":
				if json[char_position - 1] != "{" and\
						json[char_position - 1] != "[":
					json = json.insert(char_position, "\n")
					char_position += 1
			":":
				json = json.insert(char_position + 1, " ")
				char_position += 1

		char_position += 1

	for i in [["{", "}"], ["[", "]"]]:
		var bracket_start: int = json.find(i[0])
		while bracket_start != -1:
			var bracket_end: int = json.find("\n", bracket_start)
			var bracket_count := 0
			while bracket_end != - 1:
				if json[bracket_end - 1] == i[0]:
					bracket_count += 1
				elif json[bracket_end + 1] == i[1]:
					bracket_count -= 1

				# Move through the indentation to see if there is a match.
				while json[bracket_end + 1] == indentation[0]:
					bracket_end += 1

					if json[bracket_end + 1] == i[1]:
						bracket_count -= 1

				if bracket_count <= 0:
					break

				bracket_end = json.find("\n", bracket_end + 1)

			# Skip one newline so the end bracket doesn't get indented.
			bracket_end = json.rfind("\n", json.rfind("\n", bracket_end) - 1)

			while bracket_end > bracket_start:
				json = json.insert(bracket_end + 1, indentation)
				bracket_end = json.rfind("\n", bracket_end - 1)

			bracket_start = json.find(i[0], bracket_start + 1)

	return json


# Takes valid JSON (if invalid, it will return an error according to Godot's
# 'validate_json()' method), returning the JSON collapsed onto a single line.
static func uglify_json(json: String) -> String:
	var quotation_start := -1
	var char_position := 0
	for i in json:
		# Avoid formatting inside strings.
		if i == "\"":
			if quotation_start == -1:
				quotation_start = char_position
			elif json[char_position - 1] != "\\":
				quotation_start = -1

			char_position += 1
			continue
		elif quotation_start != -1:
			char_position += 1
			continue

		if i == " " or i == "\n" or i == "\t":
			json[char_position] = ""
			char_position -= 1

		char_position += 1

	return json
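For context on how this helper is meant to be called (Lecture.2.gd preloads it the same way), here is a minimal usage sketch for Godot 3.x; the `raw` dictionary is example data only.

extends Node

const JsonBeautifier = preload("res://ThirdParty/json_beautifier.gd")

func _ready():
	# Any valid JSON string works; this one mirrors the parser's AST output.
	var raw = to_json({ "Type": "NumericLiteral", "Value": 47 })

	# Pretty-print with 4-space indentation (spaces = 0, the default, uses tabs).
	print(JsonBeautifier.beautify_json(raw, 4))

	# Collapse formatted JSON back onto a single line.
	print(JsonBeautifier.uglify_json(JsonBeautifier.beautify_json(raw)))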

7
Editor/default_env.tres Normal file

@ -0,0 +1,7 @@
[gd_resource type="Environment" load_steps=2 format=2]
[sub_resource type="ProceduralSky" id=1]
[resource]
background_mode = 2
background_sky = SubResource( 1 )

BIN
Editor/icon.png Normal file

Binary file not shown. (PNG, 3.2 KiB)

35
Editor/icon.png.import Normal file

@ -0,0 +1,35 @@
[remap]
importer="texture"
type="StreamTexture"
path="res://.import/icon.png-487276ed1e3a0c39cad0279d744ee560.stex"
metadata={
"vram_texture": false
}
[deps]
source_file="res://icon.png"
dest_files=[ "res://.import/icon.png-487276ed1e3a0c39cad0279d744ee560.stex" ]
[params]
compress/mode=0
compress/lossy_quality=0.7
compress/hdr_mode=0
compress/bptc_ldr=0
compress/normal_map=0
flags/repeat=0
flags/filter=true
flags/mipmaps=false
flags/anisotropic=false
flags/srgb=2
process/fix_alpha_border=true
process/premult_alpha=false
process/HDR_as_SRGB=false
process/invert_color=false
process/normal_map_invert_y=false
stream=false
size_limit=0
detect_3d=true
svg/scale=1.0

38
Editor/project.godot Normal file

@ -0,0 +1,38 @@
; Engine configuration file.
; It's best edited using the editor UI and not directly,
; since the parameters that go here are not all obvious.
;
; Format:
; [section] ; section goes between []
; param=value ; assign values to parameters
config_version=4
_global_script_classes=[ {
"base": "Reference",
"class": "JSONBeautifier",
"language": "GDScript",
"path": "res://ThirdParty/json_beautifier.gd"
} ]
_global_script_class_icons={
"JSONBeautifier": ""
}
[application]
config/name="Parser"
run/main_scene="res://Lecture.2.tscn"
boot_splash/image="res://Branding/RDP_Class_cover_small.png"
config/icon="res://Branding/RDP_Class_cover_small.png"
[gui]
common/drop_mouse_on_gui_input_disabled=true
[physics]
common/enable_pause_aware_picking=true
[rendering]
environment/default_environment="res://default_env.tres"

1
Engine/gd Submodule

@ -0,0 +1 @@
Subproject commit 32ef964b0f7d4e18a919e904988727b3ed775901

92
Notes.Lecture.1.txt Normal file

@ -0,0 +1,92 @@
Following the first lecture of "Building a Parser from Scratch"
by Dmitry Soshnikov.

Lecture 1:

Phases:
	Data      - Text Content
	Processor - Tokenizer
	Data      - Tokens
	Processor - Parser
	Data      - AST

Examples of syntax :

	S-Expression :

		(class Point
			(begin
				(def constructor (self x y)
					(begin
						(set (prop self x) x)
						(set (prop self y) y)
					)
				)
				(def calc (self)
					(+ (prop self x)
						(prop self y)
					)
				)
			)
		)

		(var p (new Point 10 20))

		((prop p calc) p)

	User Syntax :

		class Point
		{
			def constructor( x, y )
			{
				this.x = x;
				this.y = y;
			}

			def calc() {
				return this.x + this.y;
			}
		}

		let
		p = new Point(10, 20);

		p.calc();

Tokenizer - Lexical Analysis   : Uses Regular Expressions (optimal).
Parser    - Syntactic Analysis : Uses Backus-Naur Form.

Backus-Naur Example :

	Program
		: StatementList
		;

	StatementList
		: BlockStatement
		| IfStatement
		| FunctionDeclaration
		...
		;

	FunctionDeclaration
		: def Identifier ( Arguments ) BlockStatement
		;

Hand-written parsers :
	Use recursive descent.

Automatically generated parsers :
	All kinds of stuff...
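To tie the notes together: in recursive descent, each Backus-Naur rule becomes one parsing function and each alternative becomes a branch, which is the same shape the Parser class in Editor/Lecture.2.gd takes. Below is a minimal, self-contained sketch (not part of this commit) that walks a hand-made token list instead of running a tokenizer; the token list, names, and dictionary node shape are assumptions for illustration.

extends Node

# Illustrative only: recursive descent over pre-made tokens, so the
# rule-to-function mapping is visible without a tokenizer.
var Tokens = [ {"Type": "Number", "Value": "7"}, {"Type": "Number", "Value": "42"} ]
var Pos    = 0

func peek():
	return Tokens[Pos] if Pos < Tokens.size() else null

func eat(tokenType):
	var tok = peek()
	assert(tok != null && tok.Type == tokenType)
	Pos += 1
	return tok

# Program
#     : StatementList
#     ;
func parse_Program():
	return { "Type": "Program", "Body": parse_StatementList() }

# StatementList (here just a run of NumericLiterals, handled with a loop)
func parse_StatementList():
	var statements = []
	while peek() != null:
		statements.append(parse_NumericLiteral())
	return statements

# NumericLiteral
#     : Number
#     ;
func parse_NumericLiteral():
	return { "Type": "NumericLiteral", "Value": int(eat("Number").Value) }

func _ready():
	print(to_json(parse_Program()))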

1
Readme.md Normal file

@ -0,0 +1 @@
# Language Studies

5
editor.bat Normal file

@ -0,0 +1,5 @@
cd Engine\gd\bin\
start godot.windows.tools.64.exe
exit