Got whitespace parser working + widget generation for basic case!

This commit is contained in:
Edward R. Gonzalez 2024-03-10 10:31:21 -04:00
parent 197dd82e78
commit c80254adbc
19 changed files with 443 additions and 279 deletions

View File

@ -6,5 +6,9 @@
},
"godot_tools.scene_file_config": "c:\\projects\\SectrPrototype\\code",
"autoHide.autoHidePanel": false,
"autoHide.autoHideSideBar": false
"autoHide.autoHideSideBar": false,
"files.associations": {
"*.rmd": "markdown",
"type_traits": "cpp"
}
}

View File

@ -50,12 +50,11 @@ startup :: proc( persistent_mem, frame_mem, transient_mem, files_buffer_mem : ^V
state := new( State, persistent_allocator() )
using state
// Setup General Slab
// Setup Persistent Slab
{
alignment := uint(mem.DEFAULT_ALIGNMENT)
policy : SlabPolicy
policy_ptr := & policy
policy_ptr := & default_slab_policy
push( policy_ptr, SlabSizeClass { 16 * Megabyte, 4 * Kilobyte, alignment })
push( policy_ptr, SlabSizeClass { 32 * Megabyte, 16 * Kilobyte, alignment })
push( policy_ptr, SlabSizeClass { 64 * Megabyte, 32 * Kilobyte, alignment })
@ -75,7 +74,7 @@ startup :: proc( persistent_mem, frame_mem, transient_mem, files_buffer_mem : ^V
push( policy_ptr, SlabSizeClass { 512 * Megabyte, 512 * Megabyte, alignment })
alloc_error : AllocatorError
general_slab, alloc_error = slab_init( policy_ptr, allocator = persistent_allocator() )
persistent_slab, alloc_error = slab_init( policy_ptr, allocator = persistent_allocator() )
verify( alloc_error == .None, "Failed to allocate the general slab allocator" )
}
@ -112,7 +111,7 @@ startup :: proc( persistent_mem, frame_mem, transient_mem, files_buffer_mem : ^V
rl.SetConfigFlags( {
rl.ConfigFlag.WINDOW_RESIZABLE,
rl.ConfigFlag.WINDOW_TOPMOST,
// rl.ConfigFlag.WINDOW_TOPMOST,
})
// Rough setup of window with rl stuff
@ -174,8 +173,9 @@ startup :: proc( persistent_mem, frame_mem, transient_mem, files_buffer_mem : ^V
// }
// Setup workspace UI state
ui_startup( & workspace.ui, cache_allocator = general_slab_allocator() )
ui_startup( & workspace.ui, cache_allocator = persistent_slab_allocator() )
}
}
startup_ms := duration_ms( time.tick_lap_time( & startup_tick))
@ -225,10 +225,10 @@ reload :: proc( persistent_mem, frame_mem, transient_mem, files_buffer_mem : ^VA
// Thankfully persistent dynamic allocations are rare, and thus we know exactly which ones they are.
font_provider_data := & get_state().font_provider_data
font_provider_data.font_cache.hashes.allocator = general_slab_allocator()
font_provider_data.font_cache.entries.allocator = general_slab_allocator()
font_provider_data.font_cache.hashes.allocator = persistent_slab_allocator()
font_provider_data.font_cache.entries.allocator = persistent_slab_allocator()
ui_reload( & get_state().project.workspace.ui, cache_allocator = general_slab_allocator() )
ui_reload( & get_state().project.workspace.ui, cache_allocator = persistent_slab_allocator() )
log("Module reloaded")
}
@ -243,9 +243,17 @@ tick :: proc( host_delta_time : f64, host_delta_ns : Duration ) -> b32
{
client_tick := time.tick_now()
state := get_state(); using state
// Setup Frame Slab
{
alloc_error : AllocatorError
frame_slab, alloc_error = slab_init( & default_slab_policy, allocator = frame_allocator() )
verify( alloc_error == .None, "Failed to allocate frame slab" )
}
context.allocator = frame_allocator()
context.temp_allocator = transient_allocator()
state := get_state(); using state
rl.PollInputEvents()

View File

@ -64,8 +64,16 @@ files_buffer_allocator :: proc() -> Allocator {
return varena_allocator( Memory_App.files_buffer )
}
general_slab_allocator :: proc() -> Allocator {
return slab_allocator( get_state().general_slab )
persistent_slab_allocator :: proc() -> Allocator {
return slab_allocator( get_state().persistent_slab )
}
frame_slab_allocator :: proc() -> Allocator {
return slab_allocator( get_state().frame_slab )
}
transient_slab_allocator :: proc() -> Allocator {
return slab_allocator( get_state().transient_slab )
}
// TODO(Ed) : Implement host memory mapping API
@ -132,7 +140,11 @@ AppConfig :: struct {
}
State :: struct {
general_slab : Slab,
default_slab_policy : SlabPolicy,
persistent_slab : Slab,
frame_slab : Slab,
transient_slab : Slab, // TODO(Ed): This needs to be recreated per transient wipe
string_cache : StringCache,
font_provider_data : FontProviderData,
@ -241,4 +253,6 @@ DebugData :: struct {
draggable_box_pos : Vec2,
draggable_box_size : Vec2,
box_original_size : Vec2,
lorem_parse : PWS_ParseResult,
}

View File

@ -65,7 +65,7 @@ font_provider_startup :: proc()
font_provider_data := & get_state().font_provider_data; using font_provider_data
font_cache_alloc_error : AllocatorError
font_cache, font_cache_alloc_error = zpl_hmap_init_reserve( FontDef, general_slab_allocator(), 2 )
font_cache, font_cache_alloc_error = zpl_hmap_init_reserve( FontDef, persistent_slab_allocator(), 2 )
verify( font_cache_alloc_error == AllocatorError.None, "Failed to allocate font_cache" )
log("font_cache created")

View File

@ -68,18 +68,18 @@ array_init_reserve :: proc
return
}
array_append :: proc( using self : ^Array( $ Type), value : Type ) -> AllocatorError
array_append :: proc( self : ^Array( $ Type), value : Type ) -> AllocatorError
{
if num == capacity
if self.header.num == self.header.capacity
{
grow_result := array_grow( self, capacity )
grow_result := array_grow( self, self.header.capacity )
if grow_result != AllocatorError.None {
return grow_result
}
}
data[ num ] = value
num += 1
self.header.data[ self.header.num ] = value
self.header.num += 1
return AllocatorError.None
}
@ -177,10 +177,6 @@ array_push_back :: proc( using self : Array( $ Type)) -> b32 {
return true
}
array_back :: proc( using self : Array( $ Type ) ) -> ( ^Type) {
return & data[ num - 1 ]
}
array_clear :: proc( using self : Array( $ Type ), zero_data : b32 ) {
if zero_data {
mem.set( raw_data( data ), 0, num )

View File

@ -51,6 +51,8 @@ DLL_NodeFL :: struct ( $ Type : typeid ) #raw_union {
using _ : struct {
first, last : ^Type,
},
// TODO(Ed): Review this
using _ : struct {
bottom, top: ^Type,
},
@ -62,19 +64,51 @@ type_is_node :: #force_inline proc "contextless" ( $ Type : typeid ) -> bool
return type_has_field( type_elem_type(Type), "prev" ) && type_has_field( type_elem_type(Type), "next" )
}
dll_push_back :: #force_inline proc "contextless" ( current_ptr : ^(^ ($ Type)), node : ^Type ) {
current := (current_ptr ^)
current.prev = current
current.next = node
(current_ptr ^) = node
// First/Last append
dll_fl_append :: proc ( list : ^( $TypeList), node : ^( $TypeNode) )
{
if list.first == nil {
list.first = node
list.last = node
}
else {
list.last = node
}
}
dll_pop_back :: #force_inline proc "contextless" ( current_ptr : ^(^ ($ Type)), node : ^Type ) {
dll_push_back :: proc "contextless" ( current_ptr : ^(^ ($ TypeCurr)), node : ^$TypeNode )
{
current := (current_ptr ^)
current.next = nil
(current_ptr ^) = node
if current == nil
{
(current_ptr ^) = node
node.prev = nil
}
else
{
node.prev = current
(current_ptr^) = node
current.next = node
}
node.next = nil
}
dll_pop_back :: #force_inline proc "contextless" ( current_ptr : ^(^ ($ Type)) )
{
to_remove := (current_ptr ^)
if to_remove == nil {
return
}
if to_remove.prev == nil {
(current_ptr ^) = nil
}
else {
(current_ptr ^) = to_remove.prev
(current_ptr ^).next = nil
}
}
dll_full_insert_raw :: proc "contextless" ( null : ^($ Type), parent, pos, node : ^Type )

View File

@ -215,7 +215,7 @@ stack_allocator_proc :: proc(
return nil, .None
}
dll_pop_back( & stack.last, stack.last )
dll_pop_back( & stack.last )
}
case .Free_All:
// TODO(Ed) : Review that we don't have any header issues with the reset.

View File

@ -52,7 +52,7 @@ str_cache_init :: proc( /*allocator : Allocator*/ ) -> ( cache : StringCache ) {
cache.slab, alloc_error = slab_init( & policy, allocator = persistent_allocator() )
verify(alloc_error == .None, "Failed to initialize the string cache" )
cache.table, alloc_error = zpl_hmap_init_reserve( StringCached, general_slab_allocator(), 64 * Kilobyte )
cache.table, alloc_error = zpl_hmap_init_reserve( StringCached, persistent_slab_allocator(), 64 * Kilobyte )
return
}
@ -85,3 +85,8 @@ str_intern :: proc(
return (result ^)
}
// runes_intern :: proc( content : []rune ) -> StringCached
// {
// cache := get_state().string_cache
// }

View File

@ -20,24 +20,14 @@ import "core:os"
import "core:slice"
import "core:sync"
VArena_GrowthPolicyEntry :: struct {
// The upper limit until this policy is no longer valid
// set to 0 if its desired to be always valid)
commit_limit : uint,
// How much to increment by if the next allocation
// If the upcoming allocation size is larger than this,
// then the allocation size is used instead.
increment : uint,
}
VArena_GrowthPolicyProc :: #type proc( commit_used, committed, reserved, requested_size : uint ) -> uint
VArena :: struct {
using vmem : VirtualMemoryRegion,
commit_used : uint,
growth_policy : VArena_GrowthPolicyProc,
mutex : sync.Mutex,
using vmem : VirtualMemoryRegion,
commit_used : uint,
growth_policy : VArena_GrowthPolicyProc,
allow_any_reize : b32,
mutex : sync.Mutex,
}
varena_default_growth_policy :: proc( commit_used, committed, reserved, requested_size : uint ) -> uint
@ -68,7 +58,7 @@ varena_allocator :: proc( arena : ^VArena ) -> ( allocator : Allocator ) {
// Default growth_policy is nil
varena_init :: proc( base_address : uintptr, to_reserve, to_commit : uint,
growth_policy : VArena_GrowthPolicyProc
growth_policy : VArena_GrowthPolicyProc, allow_any_reize : b32 = false
) -> ( arena : VArena, alloc_error : AllocatorError)
{
page_size := uint(virtual_get_page_size())
@ -93,6 +83,7 @@ varena_init :: proc( base_address : uintptr, to_reserve, to_commit : uint,
else {
arena.growth_policy = growth_policy
}
arena.allow_any_reize = allow_any_reize
return
}
@ -219,7 +210,22 @@ varena_allocator_proc :: proc(
old_memory_offset := uintptr(old_memory) + uintptr(old_size)
current_offset := uintptr(arena.reserve_start) + uintptr(arena.commit_used)
verify( old_memory_offset == current_offset, "Cannot resize existing allocation in vitual arena to a larger size unless it was the last allocated" )
verify( old_memory_offset == current_offset || arena.allow_any_reize, "Cannot resize existing allocation in vitual arena to a larger size unless it was the last allocated" )
if old_memory_offset == current_offset && arena.allow_any_reize
{
// Give it new memory and copy the old over. Old memory is unrecoverable until clear.
new_region : []byte
new_region, alloc_error = varena_alloc( arena, size, alignment, (mode != .Resize_Non_Zeroed), location )
if new_region == nil || alloc_error != .None {
data = byte_slice( old_memory, old_size )
return
}
copy_non_overlapping( raw_data(new_region), old_memory, int(old_size) )
data = new_region
return
}
new_region : []byte
new_region, alloc_error = varena_alloc( arena, size - old_size, alignment, (mode != .Resize_Non_Zeroed), location )

View File

@ -118,10 +118,10 @@ setup_memory :: proc() -> ClientMemory
persistent, alloc_error = varena_init( sectr.Memory_Base_Address_Persistent, sectr.Memory_Reserve_Persistent, sectr.Memory_Commit_Initial_Persistent, nil )
verify( alloc_error == .None, "Failed to allocate persistent virtual arena for the sectr module")
frame, alloc_error = varena_init( sectr.Memory_Base_Address_Frame, sectr.Memory_Reserve_Frame, sectr.Memory_Commit_Initial_Frame, nil )
frame, alloc_error = varena_init( sectr.Memory_Base_Address_Frame, sectr.Memory_Reserve_Frame, sectr.Memory_Commit_Initial_Frame, nil, allow_any_reize = true )
verify( alloc_error == .None, "Failed to allocate frame virtual arena for the sectr module")
transient, alloc_error = varena_init( sectr.Memory_Base_Address_Transient, sectr.Memory_Reserve_Transient, sectr.Memory_Commit_Initial_Transient, nil )
transient, alloc_error = varena_init( sectr.Memory_Base_Address_Transient, sectr.Memory_Reserve_Transient, sectr.Memory_Commit_Initial_Transient, nil, allow_any_reize = true )
verify( alloc_error == .None, "Failed to allocate transient virtual arena for the sectr module")
files_buffer, alloc_error = varena_init( sectr.Memory_Base_Address_Files_Buffer, sectr.Memory_Reserve_FilesBuffer, sectr.Memory_Commit_Initial_Filebuffer, nil )

View File

@ -1,6 +1,6 @@
/* Parser: Whitespace
This is a prototype parser meant to only parse whitespace from visible blocks of code.
It's meant to be the most minimal useful AST for bootstrapping an AST Editor.
It's meant to be the most minimal useful AST with coupling to traditional text file formatting.
All symbols related directly to the parser are prefixed with the PWS_ namespace.
@ -40,69 +40,47 @@ import "core:os"
Rune_Space :: ' '
Rune_Tab :: '\t'
Rune_Carriage_Return :: 'r'
Rune_New_Line :: '\n'
Rune_Carriage_Return :: '\r'
Rune_Line_Feed :: '\n'
// Rune_Tab_Vertical :: '\v'
PWS_TokenType :: enum u32 {
Invalid,
Visible,
Space,
Tab,
Spaces,
Tabs,
New_Line,
End_Of_File,
Count,
}
// TODO(Ed) : The runes and token arrays should be handled by a slab allocator
// This can grow in nondeterministic ways, persistent will get very polluted otherwise.
PWS_LexResult :: struct {
allocator : Allocator,
content : string,
runes : []rune,
tokens : Array(PWS_Token),
}
PWS_Token :: struct {
type : PWS_TokenType,
line, column : u32,
ptr : ^rune,
content : StringCached,
}
PWS_AST_Content :: union #no_nil {
^PWS_Token,
[] rune,
PWS_AST_Type :: enum u32 {
Invalid,
Visible,
Spaces,
Tabs,
Line,
Count,
}
PWS_AST_Spaces :: struct {
content : PWS_AST_Content,
PWS_AST :: struct {
using links : DLL_NodeFull(PWS_AST),
type : PWS_AST_Type,
using links : DLL_NodePN(PWS_AST),
}
PWS_AST_Tabs :: struct {
content : PWS_AST_Content,
using links : DLL_NodePN(PWS_AST),
}
PWS_AST_Visible :: struct {
content : PWS_AST_Content,
using links : DLL_NodePN(PWS_AST),
}
PWS_AST_Line :: struct {
using content : DLL_NodeFL(PWS_AST),
end_token : ^ PWS_Token,
using links : DLL_NodePN(PWS_AST),
}
PWS_AST :: union #no_nil {
PWS_AST_Visible,
PWS_AST_Spaces,
PWS_AST_Tabs,
PWS_AST_Line,
line, column : u32,
content : StringCached,
}
PWS_ParseError :: struct {
@ -118,53 +96,60 @@ PWS_LineArray_RserveSize :: Kilobyte
// This can grow in nondeterministic ways, persistent will get very polluted otherwise.
PWS_ParseResult :: struct {
content : string,
runes : []rune,
tokens : Array(PWS_Token),
nodes : Array(PWS_AST),
lines : Array( ^PWS_AST_Line),
nodes : Array(PWS_AST), // Nodes should be dumped in a pool.
lines : Array( ^PWS_AST),
errors : [PWS_ParseError_Max] PWS_ParseError,
}
// @(private="file")
// AST :: PWS_AST
PWS_LexerData :: struct {
using result : PWS_LexResult,
pws_parser_lex :: proc ( content : string, allocator : Allocator ) -> ( PWS_LexResult, AllocatorError )
content : string,
previous_rune : rune,
previous : PWS_TokenType,
line : u32,
column : u32,
start : int,
length : int,
current : PWS_Token,
}
pws_parser_lex :: proc ( text : string, allocator : Allocator ) -> ( PWS_LexResult, AllocatorError )
{
LexerData :: struct {
using result : PWS_LexResult,
head : [^] rune,
left : i32,
line : u32,
column : u32,
}
using lexer : LexerData
using lexer : PWS_LexerData
context.user_ptr = & lexer
content = text
rune_type :: proc() -> PWS_TokenType
if len(text) == 0 {
ensure( false, "Attempted to lex nothing")
return result, .None
}
rune_type :: proc( codepoint : rune ) -> PWS_TokenType
{
using self := context_ext( LexerData)
using self := context_ext( PWS_LexerData)
switch (head[0])
switch codepoint
{
case Rune_Space:
return PWS_TokenType.Space
return PWS_TokenType.Spaces
case Rune_Tab:
return PWS_TokenType.Tab
return PWS_TokenType.Tabs
case Rune_New_Line:
case Rune_Line_Feed:
return PWS_TokenType.New_Line
// Support for CRLF format
case Rune_Carriage_Return:
{
if left - 1 == 0 {
if previous_rune == 0 {
return PWS_TokenType.Invalid
}
if head[1] == Rune_New_Line {
return PWS_TokenType.New_Line
}
// Assume for now it's a new line
return PWS_TokenType.New_Line
}
}
@ -173,28 +158,8 @@ pws_parser_lex :: proc ( content : string, allocator : Allocator ) -> ( PWS_LexR
return PWS_TokenType.Visible
}
advance :: proc() -> PWS_TokenType {
using self := context_ext( LexerData)
head = head[1:]
left -= 1
column += 1
type := rune_type()
line += u32(type == PWS_TokenType.New_Line)
return type
}
alloc_error : AllocatorError
runes, alloc_error = to_runes( content, allocator )
if alloc_error != AllocatorError.None {
ensure(false, "Failed to allocate runes from content")
return result, alloc_error
}
left = cast(i32) len(runes)
head = & runes[0]
tokens, alloc_error = array_init_reserve( PWS_Token, allocator, u64(left / 2) )
tokens, alloc_error = array_init_reserve( PWS_Token, allocator, u64( len(text)) )
if alloc_error != AllocatorError.None {
ensure(false, "Failed to allocate token's array")
return result, alloc_error
@ -203,153 +168,193 @@ pws_parser_lex :: proc ( content : string, allocator : Allocator ) -> ( PWS_LexR
line = 0
column = 0
for ; left > 0;
make_token :: proc ( codepoint : rune, byte_offset : int ) -> AllocatorError
{
current : PWS_Token
current.type = rune_type()
self := context_ext( PWS_LexerData); using self
if previous_rune == Rune_Carriage_Return && codepoint != Rune_Line_Feed {
ensure(false, "Rouge Carriage Return")
}
start_ptr := uintptr( raw_data(content)) + uintptr(start)
token_slice := transmute(string) byte_slice( rawptr(start_ptr), length )
current.content = str_intern( token_slice )
start = byte_offset
length = 0
line += cast(u32) (current.type == .New_Line)
column = 0
return array_append( & tokens, current )
}
last_rune : rune
last_byte_offset : int
for codepoint, byte_offset in text
{
type := rune_type( codepoint )
if (current.type != type && previous != .Invalid) || current.type == .New_Line
{
alloc_error = make_token( previous_rune, byte_offset )
if alloc_error != AllocatorError.None {
ensure(false, "Failed to append token to token array")
return lexer, alloc_error
}
}
current.type = type
current.line = line
current.column = column
for ; advance() == current.type; {
}
alloc_error = array_append( & tokens, current )
if alloc_error != AllocatorError.None {
ensure(false, "Failed to append token to token array")
return lexer, alloc_error
}
column += 1
length += 1
previous = current.type
previous_rune = codepoint
last_byte_offset = byte_offset
}
make_token( previous_rune, last_byte_offset )
return result, alloc_error
}
pws_parser_parse :: proc( content : string, allocator : Allocator ) -> ( PWS_ParseResult, AllocatorError )
PWS_ParseData :: struct {
using result : PWS_ParseResult,
left : u32,
head : [^]PWS_Token,
line : PWS_AST,
prev_line : ^PWS_AST,
}
pws_parser_parse :: proc( text : string, allocator : Allocator ) -> ( PWS_ParseResult, AllocatorError )
{
ParseData :: struct {
using result : PWS_ParseResult,
left : u32,
head : [^]PWS_Token,
line : PWS_AST_Line,
}
using parser : ParseData
using parser : PWS_ParseData
context.user_ptr = & result
//region Helper procs
peek_next :: proc() -> ( ^PWS_Token)
{
using self := context_ext( ParseData)
if left - 1 == 0 {
return nil
}
return head[ 1: ]
if len(text) == 0 {
ensure( false, "Attempted to lex nothing")
return result, .None
}
check_next :: proc( expected : PWS_TokenType ) -> b32 {
using self := context_ext( ParseData)
lex, alloc_error := pws_parser_lex( text, allocator = allocator )
verify( alloc_error == nil, "Allocation faiure in lex")
next := peek_next()
return next != nil && next.type == expected
}
advance :: proc( expected : PWS_TokenType ) -> (^PWS_Token)
{
using self := context_ext( ParseData)
next := peek_next()
if next == nil {
return nil
}
if next.type != expected {
ensure( false, "Didn't get expected token type from next in lexed" )
return nil
}
head = next
return head
}
//endregion Helper procs
lex, alloc_error := pws_parser_lex( content, allocator )
if alloc_error != AllocatorError.None {
}
runes = lex.runes
tokens = lex.tokens
nodes, alloc_error = array_init_reserve( PWS_AST, allocator, PWS_NodeArray_ReserveSize )
if alloc_error != AllocatorError.None {
}
lines, alloc_error = array_init_reserve( ^PWS_AST_Line, allocator, PWS_LineArray_RserveSize )
if alloc_error != AllocatorError.None {
verify( alloc_error == nil, "Allocation failure creating nodes array")
lines, alloc_error = array_init_reserve( ^PWS_AST, allocator, PWS_LineArray_RserveSize )
verify( alloc_error == nil, "Allocation failure creating line array")
//region Helper procs
eat_line :: proc()
{
self := context_ext( PWS_ParseData); using self
tok := cast( ^PWS_Token) head
ast : PWS_AST
ast.type = .Line
ast.line = tok.line
ast.column = tok.column
ast.content = tok.content
alloc_error := array_append( & nodes, line )
verify( alloc_error == nil, "Allocation failure appending node")
node := & nodes.data[ nodes.num - 1 ]
// TODO(Ed): Review this with multiple line test
dll_push_back( & prev_line, node )
prev_line = node
// Debug build compile error
// alloc_error = array_append( & lines, prev_line )
// verify( alloc_error == nil, "Allocation failure appending node")
line = {}
}
//endregion
head = & tokens.data[0]
left = u32(tokens.num)
// Parse Line
for ; left > 0;
{
parse_content :: proc( $ Type : typeid, tok_type : PWS_TokenType ) -> Type
{
using self := context_ext( ParseData)
ast : Type
ast.content = cast( ^PWS_Token) head
advance( tok_type )
return ast
}
add_node :: proc( ast : PWS_AST ) //-> ( should_return : b32 )
{
using self := context_ext( ParseData)
// TODO(Ed) : Harden this
array_append( & nodes, ast )
if line.first == nil {
line.first = array_back( nodes )
}
else
{
line.last = array_back( nodes)
}
}
// TODO(Ed) : Harden this
type : PWS_AST_Type
#partial switch head[0].type
{
case PWS_TokenType.Visible:
{
ast := parse_content( PWS_AST_Visible, PWS_TokenType.Visible )
add_node( ast )
}
case PWS_TokenType.Space:
{
ast := parse_content( PWS_AST_Visible, PWS_TokenType.Space )
add_node( ast )
}
case PWS_TokenType.Tab:
{
ast := parse_content( PWS_AST_Tabs, PWS_TokenType.Tab )
add_node( ast )
}
case PWS_TokenType.New_Line:
{
line.end_token = head
case .Tabs:
type = .Tabs
ast : PWS_AST
ast = line
case .Spaces:
type = .Spaces
// TODO(Ed) : Harden This
array_append( & nodes, ast )
array_append( & lines, & array_back(nodes).(PWS_AST_Line) )
line = {}
case .Visible:
type = .Visible
case .New_Line:
{
eat_line()
alloc_error = array_append( & lines, prev_line )
verify( alloc_error == nil, "Allocation failure appending node")
}
case PWS_TokenType.End_Of_File:
}
if type != .Line
{
tok := cast( ^PWS_Token) head
ast : PWS_AST
ast.type = type
ast.line = tok.line
ast.column = tok.column
ast.content = tok.content
// Compiler Error (-Debug)
// prev_node = array_back( nodes )
prev_node : ^PWS_AST = nil
if nodes.num > 0 {
prev_node = & nodes.data[ nodes.num - 1 ]
}
alloc_error := array_append( & nodes, ast )
verify( alloc_error == nil, "Allocation failure appending node")
node := & nodes.data[ nodes.num - 1 ]
// dll_push_back( & prev_node, last_node )
{
if prev_node != nil
{
node.prev = prev_node
prev_node.next = node
}
}
// dll_fl_append( & line, last_node )
if line.first == nil {
line.first = node
line.last = node
}
else {
line.last = node
}
}
head = head[ 1:]
left -= 1
}
if line.first != nil {
eat_line()
alloc_error = array_append( & lines, prev_line )
verify( alloc_error == nil, "Allocation failure appending node")
}
return result, alloc_error

View File

@ -19,8 +19,6 @@ when ODIN_OS == OS_Type.Windows {
// 1 inch = 2.54 cm, 96 inch * 2.54 = 243.84 DPCM
}
//region Unit Conversion Impl
// cm_to_points :: proc( cm : f32 ) -> f32 {

View File

@ -50,11 +50,11 @@ render :: proc()
// Debug Text
{
debug_text( "Screen Width : %v", rl.GetScreenWidth () )
debug_text( "Screen Height: %v", rl.GetScreenHeight() )
// debug_text( "Screen Width : %v", rl.GetScreenWidth () )
// debug_text( "Screen Height: %v", rl.GetScreenHeight() )
debug_text( "frametime_target_ms : %f ms", frametime_target_ms )
debug_text( "frametime : %f ms", frametime_delta_ms )
debug_text( "frametime_last_elapsed_ms : %f ms", frametime_elapsed_ms )
// debug_text( "frametime_last_elapsed_ms : %f ms", frametime_elapsed_ms )
if replay.mode == ReplayMode.Record {
debug_text( "Recording Input")
}
@ -83,7 +83,7 @@ render :: proc()
if active_box != nil{
debug_text("Active Box: %v", active_box.label.str )
}
debug_text("Active Resizing: %v", ui.active_start_signal.resizing)
// debug_text("Active Resizing: %v", ui.active_start_signal.resizing)
debug.draw_debug_text_y = 50
}
@ -100,7 +100,7 @@ render_mode_2d :: proc()
rl.BeginMode2D( project.workspace.cam )
draw_text( "This is text in world space", { 0, 200 }, 16.0 )
// draw_text( "This is text in world space", { 0, 200 }, 16.0 )
cam_zoom_ratio := 1.0 / cam.zoom

View File

@ -200,8 +200,8 @@ update :: proc( delta_time : f64 ) -> b32
default_layout := UI_Layout {
anchor = {},
// alignment = { 0.0, 0.5 },
alignment = { 0.5, 0.5 },
text_alignment = { 0.5, 0.5 },
alignment = { 0.0, 0.0 },
text_alignment = { 0.0, 0.0 },
// alignment = { 1.0, 1.0 },
// corner_radii = { 0.3, 0.3, 0.3, 0.3 },
pos = { 0, 0 },
@ -230,7 +230,78 @@ update :: proc( delta_time : f64 ) -> b32
config.ui_resize_border_width = 2.5
test_draggable()
test_text_box()
// test_text_box()
// Whitespace AST test
when true
{
alloc_error : AllocatorError
text := str_intern( "Lorem ipsum dolor sit amet")
debug.lorem_parse, alloc_error = pws_parser_parse( text.str, frame_allocator() )
verify( alloc_error == .None, "Faield to parse due to allocation failure" )
text_space := str_intern( " " )
text_tab := str_intern( "\t")
layout_text := default_layout
// index := 0
widgets : Array(UI_Widget)
widgets, alloc_error = array_init( UI_Widget, frame_allocator() )
widget_ptr := & widgets
label_id := 0
for line in array_to_slice_num( debug.lorem_parse.lines )
{
head := line.first
for ; head != nil;
{
ui_style_theme_set_layout( layout_text )
widget : UI_Widget
// We're assuming PWS_Token for now...
// Eventually I'm going to flatten this, it's not worth doing it the way I am...
#partial switch head.type
{
case .Visible:
label := str_intern( str_fmt_alloc( "%v %v", head.content.str, label_id, label_id ))
widget = ui_text( head.content.str, head.content )
label_id += 1
layout_text.pos.x += widget.style.layout.size.x
case .Spaces:
label := str_intern( str_fmt_alloc( "%v %v%v", "space", label_id, label_id ))
widget := ui_text( label.str, text_space, {} )
widget.style.layout.size = Vec2 { 20, 30 }
label_id += 1
for idx in 0 ..< len( head.content.runes )
{
widget.style.layout.size.x += widget.style.layout.size.x
}
layout_text.pos.x += widget.style.layout.size.x
case .Tabs:
label := str_intern( str_fmt_alloc( "%v %v%v", "tab", label_id, label_id ))
widget := ui_text( label.str, text_tab, {} )
label_id += 1
for idx in 0 ..< len( head.content.runes )
{
widget.style.layout.size.x += widget.style.layout.size.x
}
layout_text.pos.x += widget.style.layout.size.x
}
array_append( widget_ptr, widget )
head = head.next
}
}
// runtime.trap()
}
}
//endregion Imgui Tick

View File

@ -23,9 +23,21 @@ test_draggable :: proc()
state := get_state(); using state
ui := ui_context
draggable_layout := UI_Layout {
anchor = {},
// alignment = { 0.0, 0.5 },
alignment = { 0.5, 0.5 },
text_alignment = { 0.0, 0.0 },
// alignment = { 1.0, 1.0 },
// corner_radii = { 0.3, 0.3, 0.3, 0.3 },
pos = { 0, 0 },
size = { 200, 200 },
}
ui_style_theme_set_layout( draggable_layout )
draggable := ui_widget( "Draggable Box!", UI_BoxFlags { .Mouse_Clickable, .Mouse_Resizable } )
if draggable.first_frame {
debug.draggable_box_pos = draggable.style.layout.pos
debug.draggable_box_pos = draggable.style.layout.pos + { 0, -100 }
debug.draggable_box_size = draggable.style.layout.size
}

View File

@ -20,7 +20,7 @@ ui_button :: proc( label : string, flags : UI_BoxFlags = {} ) -> (btn : UI_Widge
return
}
ui_text :: proc( label : string, content : StringCached, font_size : f32 = 24, font := Font_Default, flags : UI_BoxFlags ) -> UI_Widget
ui_text :: proc( label : string, content : StringCached, font_size : f32 = 30, font := Font_Default, flags : UI_BoxFlags = {} ) -> UI_Widget
{
state := get_state(); using state
@ -30,7 +30,7 @@ ui_text :: proc( label : string, content : StringCached, font_size : f32 = 24, f
}
text_size := measure_text_size( content.str, font, font_size, 0 )
box := ui_box_make( flags, "TEXT BOX!" )
box := ui_box_make( flags, label )
signal := ui_signal_from_box( box )
box.text = content

1
examples/Lorem Ipsum.txt Normal file
View File

@ -0,0 +1 @@
Lorem ipsum dolor sit amet, consectetur adipiscing elit, sed do eiusmod tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.

View File

@ -93,6 +93,7 @@ $msvc_link_default_base_address = 0x180000000
push-location $path_root
$update_deps = join-path $path_scripts 'update_deps.ps1'
$odin_compiler = join-path $path_odin 'odin.exe'
$raddbg = "C:/dev/raddbg/raddbg.exe"
function Invoke-WithColorCodedOutput { param( [scriptblock] $command )
& $command 2>&1 | ForEach-Object {
@ -152,22 +153,30 @@ push-location $path_root
$build_args += '.'
$build_args += $flag_build_mode_dll
$build_args += $flag_output_path + $module_dll
$build_args += ($flag_collection + $pkg_collection_thirdparty)
$build_args += $flag_use_separate_modules
$build_args += $flag_thread_count + $CoreCount_Physical
# $build_args += ($flag_collection + $pkg_collection_thirdparty)
# $build_args += $flag_use_separate_modules
# $build_args += $flag_thread_count + $CoreCount_Physical
$build_args += $flag_optimize_none
# $build_args += $flag_optimize_minimal
$build_args += $flag_debug
$build_args += $flag_pdb_name + $pdb
$build_args += $flag_subsystem + 'windows'
# $build_args += $flag_show_system_calls
$build_args += $flag_show_timings
$build_args += ($flag_extra_linker_flags + $linker_args )
# $build_args += ($flag_extra_linker_flags + $linker_args )
$raddbg_args = @()
$raddbg_args += $odin_compiler
$raddbg_args += $build_args
if ( Test-Path $module_dll) {
$module_dll_pre_build_hash = get-filehash -path $module_dll -Algorithm MD5
}
# write-host $build_args
Invoke-WithColorCodedOutput -command { & $odin_compiler $build_args }
# Invoke-WithColorCodedOutput -command { & $raddbg "$odin_compiler" "$build_args" }
if ( Test-Path $module_dll ) {
$module_dll_post_build_hash = get-filehash -path $module_dll -Algorithm MD5
@ -213,14 +222,14 @@ push-location $path_root
$build_args += $command_build
$build_args += './host'
$build_args += $flag_output_path + $executable
$build_args += ($flag_collection + $pkg_collection_thirdparty)
$build_args += $flag_use_separate_modules
$build_args += $flag_thread_count + $CoreCount_Physical
# $build_args += ($flag_collection + $pkg_collection_thirdparty)
# $build_args += $flag_use_separate_modules
# $build_args += $flag_thread_count + $CoreCount_Physical
$build_args += $flag_optimize_none
$build_args += $flag_debug
$build_args += $flag_pdb_name + $pdb
$build_args += $flag_subsystem + 'windows'
$build_args += ($flag_extra_linker_flags + $linker_args )
# $build_args += ($flag_extra_linker_flags + $linker_args )
$build_args += $flag_show_timings
# $build_args += $flag_show_system_call

View File

@ -27,12 +27,13 @@ if (Test-Path -Path $path_odin)
# Get the latest local and remote commit hashes for the current branch
$localCommit = git -C $path_odin rev-parse HEAD
$remoteCommit = git -C $path_odin rev-parse '@{u}'
if ($localCommit -ne $remoteCommit)
# if ( $true -or $localCommit -ne $remoteCommit)
if ( $localCommit -ne $remoteCommit)
{
Write-Host "Odin repository is out-of-date. Pulling changes and rebuilding..."
git -C $path_odin pull
push-location $path_odin
& .\build.bat
& .\build.bat debug
pop-location
$binaries_dirty = $true