Compare commits

...

3 Commits

11 changed files with 94 additions and 51 deletions

View File

@@ -12,7 +12,7 @@ file_copy_sync :: proc( path_src, path_dst: string, allocator := context.allocat
{
path_info, result := file_status( path_src, allocator )
if result != os.ERROR_NONE {
-logf("Could not get file info: %v", result, LogLevel.Error )
+log_fmt("Could not get file info: %v", result, LogLevel.Error )
return false
}
file_size = path_info.size
@@ -20,14 +20,14 @@ file_copy_sync :: proc( path_src, path_dst: string, allocator := context.allocat
src_content, result := os.read_entire_file( path_src, allocator )
if ! result {
-logf( "Failed to read file to copy: %v", path_src, LogLevel.Error )
+log_fmt( "Failed to read file to copy: %v", path_src, LogLevel.Error )
runtime.debug_trap()
return false
}
result = os.write_entire_file( path_dst, src_content, false )
if ! result {
-logf( "Failed to copy file: %v", path_dst, LogLevel.Error )
+log_fmt( "Failed to copy file: %v", path_dst, LogLevel.Error )
runtime.debug_trap()
return false
}

View File

@@ -141,7 +141,7 @@ hmap_chained_get :: proc( using self : HMapChained($Type), key : u64) -> ^Type
{
if slot.occupied && slot.key == key {
if self.dbg_name != "" && self.tracker.entries.header != nil {
-logf( "%v: Retrieved %v in lookup[%v] which shows key as %v", self.dbg_name, key, hash_index, slot.key )
+log_fmt( "%v: Retrieved %v in lookup[%v] which shows key as %v", self.dbg_name, key, hash_index, slot.key )
}
return & slot.value
}
@@ -223,13 +223,13 @@ hmap_chained_set :: proc( self : HMapChained($Type), key : u64, value : Type ) -
return & surface_slot.value, error
}
-if ! surface_slot.occupied
+if ! surface_slot.occupied || surface_slot.key == key
{
surface_slot.key = key
surface_slot.value = value
surface_slot.occupied = true
if dbg_name != "" && tracker.entries.header != nil {
-logf( "%v: Set %v in lookup[%v]", self.dbg_name, key, hash_index )
+log_fmt( "%v: Set %v in lookup[%v]", self.dbg_name, key, hash_index )
}
return & surface_slot.value, .None
@@ -257,13 +257,13 @@ hmap_chained_set :: proc( self : HMapChained($Type), key : u64, value : Type ) -
}
}
-if ! slot.next.occupied
+if ! slot.next.occupied || surface_slot.key == key
{
slot.next.key = key
slot.next.value = value
slot.next.occupied = true
if dbg_name != "" && tracker.entries.header != nil {
-logf( "%v: Set %v in lookup[%v] nest_id: %v", self.dbg_name, key, hash_index, nest_id )
+log_fmt( "%v: Set %v in lookup[%v] nest_id: %v", self.dbg_name, key, hash_index, nest_id )
}
return & slot.next.value, .None
}

View File

@@ -8,14 +8,24 @@ import str "core:strings"
import "core:time"
import core_log "core:log"
-Max_Logger_Message_Width :: 180
+Max_Logger_Message_Width :: 160
LogLevel :: core_log.Level
+LoggerEntry :: struct {
+text : string,
+timestamp : string,
+level : string,
+location : string,
+}
Logger :: struct {
file_path : string,
file : os.Handle,
id : string,
+varena : VArena,
+entries : Array(LoggerEntry),
}
to_odin_logger :: proc( logger : ^ Logger ) -> core_log.Logger {
@@ -37,6 +47,27 @@ logger_init :: proc( logger : ^ Logger, id : string, file_path : string, file :
logger.file_path = file_path
logger.id = id
+LOGGER_VARENA_BASE_ADDRESS : uintptr = 2 * Terabyte
+@static vmem_init_counter : uintptr = 0
+when true {
+alloc_error : AllocatorError
+// logger.varena, alloc_error = varena_init(
+// LOGGER_VARENA_BASE_ADDRESS + vmem_init_counter * 250 * Megabyte,
+// 1 * Megabyte,
+// 128 * Kilobyte,
+// growth_policy = nil,
+// allow_any_resize = true,
+// dbg_name = "logger varena",
+// enable_mem_tracking = false )
+// verify( alloc_error == .None, "Failed to allocate logger's virtual arena")
+vmem_init_counter += 1
+// TODO(Ed): Figure out another solution here...
+// logger.entries, alloc_error = array_init(Array(LoggerEntry), 8192, runtime.heap_allocator())
+// verify( alloc_error == .None, "Failed to allocate logger's entries array")
+}
context.logger = { logger_interface, logger, core_log.Level.Debug, core_log.Default_File_Logger_Opts }
log("Initialized Logger")
when false {
@@ -60,8 +91,10 @@ logger_interface :: proc(
first_line_length := len(text) > Max_Logger_Message_Width ? Max_Logger_Message_Width : len(text)
first_line := transmute(string) text[ 0 : first_line_length ]
// str_fmt_builder( & builder, "%-s ", Max_Logger_Message_Width, first_line )
-str_fmt_builder( & builder, "%-180s ", first_line )
+str_fmt_builder( & builder, "%s ", first_line )
// str_fmt_builder( & builder, "%-s ", first_line )
// Signature
{
@@ -84,6 +117,7 @@ logger_interface :: proc(
str_fmt_builder( & builder, "] ")
}
}
core_log.do_level_header( options, & builder, level )
if logger.id != "" {
@@ -125,7 +159,7 @@ log :: proc( msg : string, level := LogLevel.Info, loc := #caller_location ) {
core_log.log( level, msg, location = loc )
}
-logf :: proc( fmt : string, args : ..any, level := LogLevel.Info, loc := #caller_location ) {
+log_fmt :: proc( fmt : string, args : ..any, level := LogLevel.Info, loc := #caller_location ) {
temp_arena : Arena; arena_init(& temp_arena, Logger_Allocator_Buffer[:])
context.allocator = arena_allocator(& temp_arena)
context.temp_allocator = arena_allocator(& temp_arena)

View File

@@ -25,7 +25,7 @@ memtracker_clear :: proc ( tracker : MemoryTracker ) {
return
}
-logf("Clearing tracker: %v", tracker.name)
+log_fmt("Clearing tracker: %v", tracker.name)
memtracker_dump_entries(tracker);
array_clear(tracker.entries)
}
@@ -75,12 +75,12 @@ memtracker_register :: proc( tracker : ^MemoryTracker, new_entry : MemoryTracker
memtracker_dump_entries(tracker ^)
}
array_append_at( & tracker.entries, new_entry, idx )
-logf("Registered: %v -> %v | %v", new_entry.start, new_entry.end, tracker.name)
+log_fmt("Registered: %v -> %v | %v", new_entry.start, new_entry.end, tracker.name)
return
}
array_append( & tracker.entries, new_entry )
-logf("Registered: %v -> %v | %v", new_entry.start, new_entry.end, tracker.name )
+log_fmt("Registered: %v -> %v | %v", new_entry.start, new_entry.end, tracker.name )
}
memtracker_register_auto_name :: proc( tracker : ^MemoryTracker, start, end : rawptr )
@@ -118,7 +118,7 @@ memtracker_unregister :: proc( tracker : MemoryTracker, to_remove : MemoryTracke
entry := & entries[idx]
if entry.start == to_remove.start {
if (entry.end == to_remove.end || to_remove.end == nil) {
-logf("Unregistered: %v -> %v | %v", to_remove.start, to_remove.end, tracker.name );
+log_fmt("Unregistered: %v -> %v | %v", to_remove.start, to_remove.end, tracker.name );
array_remove_at(tracker.entries, idx)
return
}
@@ -166,6 +166,6 @@ memtracker_dump_entries :: proc( tracker : MemoryTracker )
log( "Dumping Memory Tracker:")
for idx in 0 ..< tracker.entries.num {
entry := & tracker.entries.data[idx]
-logf("%v -> %v", entry.start, entry.end)
+log_fmt("%v -> %v", entry.start, entry.end)
}
}

View File

@@ -231,14 +231,14 @@ startup :: proc( prof : ^SpallProfiler, persistent_mem, frame_mem, transient_mem
backend := sokol_gfx.query_backend()
switch backend
{
-case .D3D11: logf("sokol_gfx: using D3D11 backend")
-case .GLCORE, .GLES3: logf("sokol_gfx: using GL backend")
+case .D3D11: log_fmt("sokol_gfx: using D3D11 backend")
+case .GLCORE, .GLES3: log_fmt("sokol_gfx: using GL backend")
case .METAL_MACOS, .METAL_IOS, .METAL_SIMULATOR:
-logf("sokol_gfx: using Metal backend")
+log_fmt("sokol_gfx: using Metal backend")
-case .WGPU: logf("sokol_gfx: using WebGPU backend")
-case .DUMMY: logf("sokol_gfx: using dummy backend")
+case .WGPU: log_fmt("sokol_gfx: using WebGPU backend")
+case .DUMMY: log_fmt("sokol_gfx: using dummy backend")
}
render_data.pass_actions.bg_clear_black.colors[0] = sokol_gfx.Color_Attachment_Action {
@@ -280,11 +280,11 @@ startup :: proc( prof : ^SpallProfiler, persistent_mem, frame_mem, transient_mem
// path_open_sans := strings.concatenate( { Path_Assets, "OpenSans-Regular.ttf" } )
// font_open_sans = font_load( path_open_sans, 16.0, "OpenSans" )
-path_noto_sans := strings.concatenate( { Path_Assets, "NotoSans-Regular.ttf" } )
-font_noto_sans = font_load( path_noto_sans, 16.0, "NotoSans" )
+// path_noto_sans := strings.concatenate( { Path_Assets, "NotoSans-Regular.ttf" } )
+// font_noto_sans = font_load( path_noto_sans, 16.0, "NotoSans" )
-// path_neodgm_code := strings.concatenate( { Path_Assets, "neodgm_code.ttf"} )
-// font_neodgm_code = font_load( path_neodgm_code, 32.0, "NeoDunggeunmo Code" )
+path_neodgm_code := strings.concatenate( { Path_Assets, "neodgm_code.ttf"} )
+font_neodgm_code = font_load( path_neodgm_code, 32.0, "NeoDunggeunmo Code" )
// path_rec_mono_linear := strings.concatenate( { Path_Assets, "RecMonoLinear-Regular-1.084.ttf" })
// font_rec_mono_linear = font_load( path_rec_mono_linear, 16.0, "RecMonoLinear Regular" )
@@ -298,7 +298,7 @@ startup :: proc( prof : ^SpallProfiler, persistent_mem, frame_mem, transient_mem
// path_arial_unicode_ms := strings.concatenate( { Path_Assets, "Arial Unicode MS.ttf" } )
// font_arial_unicode_ms = font_load( path_arial_unicode_ms, 16.0, "Arial_Unicode_MS" )
-default_font = font_noto_sans
+default_font = font_neodgm_code
log( "Default font loaded" )
}

View File

@@ -93,7 +93,7 @@ sokol_app_log_callback :: proc "c" (
}
cloned_tag := str.clone_from_cstring(tag, context.temp_allocator)
-logf( "%-80s %s::%v", cloned_msg, cloned_tag, line_nr, level = odin_level )
+log_fmt( "%-80s %s::%v", cloned_msg, cloned_tag, line_nr, level = odin_level )
}
// TODO(Ed): Does this need to be queued to a separate thread?
@@ -113,8 +113,8 @@ sokol_app_event_callback :: proc "c" (sokol_event : ^sokol_app.Event)
switch sokol_event.type
{
case .INVALID:
-logf("sokol_app - event: INVALID?")
-logf("%v", sokol_event)
+log_fmt("sokol_app - event: INVALID?")
+log_fmt("%v", sokol_event)
case .KEY_DOWN:
if sokol_event.key_repeat do return
@@ -223,8 +223,8 @@ sokol_app_event_callback :: proc "c" (sokol_event : ^sokol_app.Event)
sokol_app.consume_event()
case .DISPLAY_CHANGED:
-logf("sokol_app - event: Display changed")
-logf("refresh rate: %v", sokol_app.refresh_rate())
+log_fmt("sokol_app - event: Display changed")
+log_fmt("refresh rate: %v", sokol_app.refresh_rate())
monitor_refresh_hz := sokol_app.refresh_rate()
sokol_app.consume_event()
}
@@ -274,7 +274,7 @@ sokol_gfx_log_callback :: proc "c" (
}
cloned_tag := str.clone_from_cstring(tag, context.temp_allocator)
-logf( "%-80s %s::%v", cloned_msg, cloned_tag, line_nr, level = odin_level )
+log_fmt( "%-80s %s::%v", cloned_msg, cloned_tag, line_nr, level = odin_level )
}
#endregion("Sokol GFX")

View File

@@ -6,7 +6,7 @@ import ve "codebase:font/VEFontCache"
import sokol_gfx "thirdparty:sokol/gfx"
Font_Provider_Use_Freetype :: false
-Font_Largest_Px_Size :: 154
+Font_Largest_Px_Size :: 72
Font_Size_Interval :: 2
Font_Default :: FontID { 0, "" }

View File

@@ -262,7 +262,7 @@ import "codebase:grime"
to_odin_logger :: grime.to_odin_logger
logger_init :: grime.logger_init
log :: grime.log
-logf :: grime.logf
+log_fmt :: grime.log_fmt
// memory
MemoryTracker :: grime.MemoryTracker
@@ -398,6 +398,10 @@ get_bounds :: proc {
view_get_bounds,
}
+join :: proc {
+join_range2,
+}
inverse_mag :: proc {
inverse_mag_vec3,
// inverse_mag_rotor3,
@@ -535,6 +539,10 @@ remove_at :: proc {
grime.array_remove_at,
}
+// size :: proc {
+// size_range2,
+// }
scope :: proc {
ui_layout_scope_via_layout,
ui_layout_scope_via_combo,

View File

@@ -120,7 +120,7 @@ PWS_LexerData :: struct {
pws_parser_lex :: proc ( text : string, allocator : Allocator ) -> ( PWS_LexResult, AllocatorError )
{
bytes := transmute([]byte) text
-log( str_fmt( "lexing: %v ...", (len(text) > 30 ? transmute(string) bytes[ :30] : text) ))
+// log( str_fmt( "lexing: %v ...", (len(text) > 30 ? transmute(string) bytes[ :30] : text) ))
profile(#procedure)
using lexer : PWS_LexerData
@@ -256,7 +256,7 @@ pws_parser_parse :: proc( text : string, allocator : Allocator ) -> ( PWS_ParseR
tokens = lex.tokens
-log( str_fmt( "parsing: %v ...", (len(text) > 30 ? transmute(string) bytes[ :30] : text) ))
+// log( str_fmt( "parsing: %v ...", (len(text) > 30 ? transmute(string) bytes[ :30] : text) ))
// TODO(Ed): Change this to use a node pool
nodes, alloc_error = make( Array(PWS_AST), PWS_NodeArray_ReserveSize, allocator )

View File

@@ -63,7 +63,8 @@ UI_Layout_Stack_Size :: 512
UI_Style_Stack_Size :: 512
UI_Parent_Stack_Size :: 512
// UI_Built_Boxes_Array_Size :: 8
-UI_Built_Boxes_Array_Size :: 128 * Kilobyte
+UI_Built_Boxes_Array_Size :: 56 * Kilobyte
+UI_BoxCache_TableSize :: 4 * Kilobyte
UI_RenderEntry :: struct {
info : UI_RenderBoxInfo,
@@ -72,7 +73,7 @@ UI_RenderEntry :: struct {
layer_id : i32,
}
-UI_RenderLayer :: DLL_NodeFL(UI_RenderEntry)\
+UI_RenderLayer :: DLL_NodeFL(UI_RenderEntry)
UI_RenderBoxInfo :: struct {
using computed : UI_Computed,
@@ -99,9 +100,9 @@ UI_State :: struct {
built_box_count : i32,
-caches : [2] HMapZPL( UI_Box ),
-prev_cache : ^HMapZPL( UI_Box ),
-curr_cache : ^HMapZPL( UI_Box ),
+caches : [2] HMapChained( UI_Box ),
+prev_cache : ^HMapChained( UI_Box ),
+curr_cache : ^HMapChained( UI_Box ),
// For rendering via a set of layers organized into a single command list
// render_queue_builder : SubArena,
@@ -143,7 +144,7 @@ ui_startup :: proc( ui : ^ UI_State, cache_allocator : Allocator /* , cache_rese
ui^ = {}
for & cache in ui.caches {
-box_cache, allocation_error := make( HMapZPL(UI_Box), UI_Built_Boxes_Array_Size, cache_allocator )
+box_cache, allocation_error := make( HMapChained(UI_Box), UI_BoxCache_TableSize, cache_allocator )
verify( allocation_error == AllocatorError.None, "Failed to allocate box cache" )
cache = box_cache
}
@@ -165,7 +166,7 @@ ui_reload :: proc( ui : ^ UI_State, cache_allocator : Allocator )
{
// We need to repopulate Allocator references
for & cache in ui.caches {
-hmap_zpl_reload( & cache, cache_allocator)
+hmap_chained_reload( cache, cache_allocator)
}
ui.render_queue.backing = cache_allocator
ui.render_list.backing = cache_allocator
@@ -454,7 +455,7 @@ ui_hash_part_from_key_string :: proc ( content : string ) -> string {
ui_key_from_string :: #force_inline proc "contextless" ( value : string ) -> UI_Key
{
// profile(#procedure)
-USE_RAD_DEBUGGERS_METHOD :: true
+USE_RAD_DEBUGGERS_METHOD :: false
key : UI_Key

View File

@@ -61,8 +61,8 @@ ui_box_equal :: #force_inline proc "contextless" ( a, b : ^ UI_Box ) -> b32 {
return result
}
-ui_box_from_key :: #force_inline proc ( cache : ^HMapZPL(UI_Box), key : UI_Key ) -> (^UI_Box) {
-return hmap_zpl_get( cache, cast(u64) key )
+ui_box_from_key :: #force_inline proc ( cache : ^HMapChained(UI_Box), key : UI_Key ) -> (^UI_Box) {
+return hmap_chained_get( cache ^, cast(u64) key )
}
ui_box_make :: proc( flags : UI_BoxFlags, label : string ) -> (^ UI_Box)
@@ -72,7 +72,7 @@ ui_box_make :: proc( flags : UI_BoxFlags, label : string ) -> (^ UI_Box)
key := ui_key_from_string( label )
curr_box : (^ UI_Box)
-prev_box := hmap_zpl_get( prev_cache, cast(u64) key )
+prev_box := hmap_chained_get( prev_cache ^, cast(u64) key )
{
// profile("Assigning current box")
set_result : ^ UI_Box
@@ -80,13 +80,13 @@ ui_box_make :: proc( flags : UI_BoxFlags, label : string ) -> (^ UI_Box)
if prev_box != nil
{
// Previous history was found, copy over previous state.
-set_result, set_error = hmap_zpl_set( curr_cache, cast(u64) key, (prev_box ^) )
+set_result, set_error = hmap_chained_set( curr_cache ^, cast(u64) key, (prev_box ^) )
}
else {
box : UI_Box
box.key = key
box.label = str_intern( label )
-set_result, set_error = hmap_zpl_set( curr_cache, cast(u64) key, box )
+set_result, set_error = hmap_chained_set( curr_cache ^, cast(u64) key, box )
}
verify( set_error == AllocatorError.None, "Failed to set hmap_zpl due to allocator error" )
@@ -114,7 +114,7 @@ ui_box_make :: proc( flags : UI_BoxFlags, label : string ) -> (^ UI_Box)
return curr_box
}
-ui_prev_cached_box :: #force_inline proc( box : ^UI_Box ) -> ^UI_Box { return hmap_zpl_get( ui_context().prev_cache, cast(u64) box.key ) }
+ui_prev_cached_box :: #force_inline proc( box : ^UI_Box ) -> ^UI_Box { return hmap_chained_get( ui_context().prev_cache ^, cast(u64) box.key ) }
// TODO(Ed): Rename to ui_box_tranverse_view_next
// Traversal prioritizes immediate children