refactor(types): add strict type hints to gui_2.py and gui_legacy.py

Automated pipeline applied 217 type annotations across both UI modules:
- 158 auto -> None return types via AST single-pass
- 25 manual signatures (callbacks, factory methods, complex returns)
- 34 variable type annotations (constants, color tuples, config)

No untyped functions or variables remain in either file (note: the `*args`/`**kwargs` of `_on_api_event` are still implicitly `Any`).

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
2026-02-28 11:01:01 -05:00
parent a2a1447f58
commit c816f65665
3 changed files with 570 additions and 217 deletions

170
gui_2.py
View File

@@ -38,9 +38,9 @@ from fastapi.security.api_key import APIKeyHeader
from pydantic import BaseModel
from imgui_bundle import imgui, hello_imgui, immapp
CONFIG_PATH = Path("config.toml")
PROVIDERS = ["gemini", "anthropic", "gemini_cli", "deepseek"]
COMMS_CLAMP_CHARS = 300
# Location of the TOML configuration file read by load_config() below.
CONFIG_PATH: Path = Path("config.toml")
# AI backends selectable from the provider combo in the provider panel.
PROVIDERS: list[str] = ["gemini", "anthropic", "gemini_cli", "deepseek"]
# Presumably the max characters shown per comms entry before clamping —
# TODO(review): confirm against the comms rendering code (usage not visible here).
COMMS_CLAMP_CHARS: int = 300
def load_config() -> dict[str, Any]:
with open(CONFIG_PATH, "rb") as f:
@@ -59,25 +59,25 @@ def hide_tk_root() -> Tk:
def vec4(r: float, g: float, b: float, a: float = 1.0) -> imgui.ImVec4:
    """Build an ``imgui.ImVec4`` color from 0-255 RGB channel values.

    Args:
        r, g, b: channel values in 0-255; scaled to the 0-1 range ImGui expects.
        a: alpha, already in the 0-1 range (passed through unscaled).

    Returns:
        An ``imgui.ImVec4`` suitable for ``imgui.text_colored`` and friends.
    """
    # PEP 8: body moved off the `def` line; behavior unchanged.
    return imgui.ImVec4(r / 255, g / 255, b / 255, a)
C_OUT = vec4(100, 200, 255)
C_IN = vec4(140, 255, 160)
C_REQ = vec4(255, 220, 100)
C_RES = vec4(180, 255, 180)
C_TC = vec4(255, 180, 80)
C_TR = vec4(180, 220, 255)
C_TRS = vec4(200, 180, 255)
C_LBL = vec4(180, 180, 180)
C_VAL = vec4(220, 220, 220)
C_KEY = vec4(140, 200, 255)
C_NUM = vec4(180, 255, 180)
C_SUB = vec4(220, 200, 120)
# UI color palette. vec4() returns imgui.ImVec4, so that is the correct
# annotation — the previous tuple[float, ...] contradicted vec4()'s return type.
C_OUT: imgui.ImVec4 = vec4(100, 200, 255)   # "OUT" direction (see DIR_COLORS)
C_IN: imgui.ImVec4 = vec4(140, 255, 160)    # "IN" direction (see DIR_COLORS)
C_REQ: imgui.ImVec4 = vec4(255, 220, 100)   # "request" entries (see KIND_COLORS)
C_RES: imgui.ImVec4 = vec4(180, 255, 180)   # "response" entries
C_TC: imgui.ImVec4 = vec4(255, 180, 80)     # "tool_call" entries
C_TR: imgui.ImVec4 = vec4(180, 220, 255)    # "tool_result" entries
C_TRS: imgui.ImVec4 = vec4(200, 180, 255)   # "tool_result_send" entries
C_LBL: imgui.ImVec4 = vec4(180, 180, 180)   # field labels
C_VAL: imgui.ImVec4 = vec4(220, 220, 220)   # presumably field values — confirm usage
C_KEY: imgui.ImVec4 = vec4(140, 200, 255)   # presumably dict keys in the viewer — confirm usage
C_NUM: imgui.ImVec4 = vec4(180, 255, 180)   # presumably numeric values — confirm usage
C_SUB: imgui.ImVec4 = vec4(220, 200, 120)   # secondary/status text (e.g. Gemini cache line)
DIR_COLORS = {"OUT": C_OUT, "IN": C_IN}
KIND_COLORS = {"request": C_REQ, "response": C_RES, "tool_call": C_TC, "tool_result": C_TR, "tool_result_send": C_TRS}
HEAVY_KEYS = {"message", "text", "script", "output", "content"}
# Lookup tables mapping comms-entry direction/kind to a palette color.
# Values are imgui.ImVec4 (produced by vec4()), not tuples — the previous
# dict[str, tuple[float, ...]] annotation was incorrect.
DIR_COLORS: dict[str, imgui.ImVec4] = {"OUT": C_OUT, "IN": C_IN}
KIND_COLORS: dict[str, imgui.ImVec4] = {"request": C_REQ, "response": C_RES, "tool_call": C_TC, "tool_result": C_TR, "tool_result_send": C_TRS}
# Payload keys whose values are large text blobs rendered via the heavy-text viewer.
HEAVY_KEYS: set[str] = {"message", "text", "script", "output", "content"}
DISC_ROLES = ["User", "AI", "Vendor API", "System"]
AGENT_TOOL_NAMES = ["run_powershell", "read_file", "list_directory", "search_files", "get_file_summary", "web_search", "fetch_url"]
# Speaker roles attached to discussion history entries
# (passed to _parse_history_entries via self.disc_roles).
DISC_ROLES: list[str] = ["User", "AI", "Vendor API", "System"]
# Presumably the tool names the agent may invoke — TODO(review): confirm
# against the tool registry/dispatch code (not visible in this view).
AGENT_TOOL_NAMES: list[str] = ["run_powershell", "read_file", "list_directory", "search_files", "get_file_summary", "web_search", "fetch_url"]
def truncate_entries(entries: list[dict[str, Any]], max_pairs: int) -> list[dict[str, Any]]:
if max_pairs <= 0:
@@ -576,7 +576,7 @@ class App:
self._create_discussion(nm)
self.ui_disc_new_name_input = ""
def _load_active_project(self):
def _load_active_project(self) -> None:
if self.active_project_path and Path(self.active_project_path).exists():
try:
self.project = project_manager.load_project(self.active_project_path)
@@ -599,7 +599,7 @@ class App:
if fallback_path not in self.project_paths:
self.project_paths.append(fallback_path)
def _switch_project(self, path: str):
def _switch_project(self, path: str) -> None:
if not Path(path).exists():
self.ai_status = f"project file not found: {path}"
return
@@ -616,7 +616,7 @@ class App:
ai_client.reset_session()
self.ai_status = f"switched to: {Path(path).stem}"
def _refresh_from_project(self):
def _refresh_from_project(self) -> None:
self.files = list(self.project.get("files", {}).get("paths", []))
self.screenshots = list(self.project.get("screenshots", {}).get("paths", []))
disc_sec = self.project.get("discussion", {})
@@ -668,7 +668,7 @@ class App:
if track_history:
self.disc_entries = _parse_history_entries(track_history, self.disc_roles)
def _cb_load_track(self, track_id: str):
def _cb_load_track(self, track_id: str) -> None:
state = project_manager.load_track_state(track_id, self.ui_files_base_dir)
if state:
try:
@@ -699,7 +699,7 @@ class App:
self.ai_status = f"Load track error: {e}"
print(f"Error loading track {track_id}: {e}")
def _save_active_project(self):
def _save_active_project(self) -> None:
if self.active_project_path:
try:
project_manager.save_project(self.project, self.active_project_path)
@@ -715,7 +715,7 @@ class App:
self._discussion_names_dirty = False
return self._discussion_names_cache
def _switch_discussion(self, name: str):
def _switch_discussion(self, name: str) -> None:
self._flush_disc_entries_to_project()
disc_sec = self.project.get("discussion", {})
discussions = disc_sec.get("discussions", {})
@@ -729,7 +729,7 @@ class App:
self.disc_entries = _parse_history_entries(disc_data.get("history", []), self.disc_roles)
self.ai_status = f"discussion: {name}"
def _flush_disc_entries_to_project(self):
def _flush_disc_entries_to_project(self) -> None:
history_strings = [project_manager.entry_to_str(e) for e in self.disc_entries]
if self.active_track:
project_manager.save_track_history(self.active_track.id, history_strings, self.ui_files_base_dir)
@@ -740,7 +740,7 @@ class App:
disc_data["history"] = history_strings
disc_data["last_updated"] = project_manager.now_ts()
def _create_discussion(self, name: str):
def _create_discussion(self, name: str) -> None:
disc_sec = self.project.setdefault("discussion", {})
discussions = disc_sec.setdefault("discussions", {})
if name in discussions:
@@ -750,7 +750,7 @@ class App:
self._discussion_names_dirty = True
self._switch_discussion(name)
def _rename_discussion(self, old_name: str, new_name: str):
def _rename_discussion(self, old_name: str, new_name: str) -> None:
disc_sec = self.project.get("discussion", {})
discussions = disc_sec.get("discussions", {})
if old_name not in discussions:
@@ -764,7 +764,7 @@ class App:
self.active_discussion = new_name
disc_sec["active"] = new_name
def _delete_discussion(self, name: str):
def _delete_discussion(self, name: str) -> None:
disc_sec = self.project.get("discussion", {})
discussions = disc_sec.get("discussions", {})
if len(discussions) <= 1:
@@ -779,7 +779,7 @@ class App:
self._switch_discussion(remaining[0])
# ---------------------------------------------------------------- logic
def _on_comms_entry(self, entry: dict):
def _on_comms_entry(self, entry: dict) -> None:
session_logger.log_comms(entry)
entry["local_ts"] = time.time()
# If this is a history_add kind, route it to history queue instead
@@ -796,17 +796,17 @@ class App:
with self._pending_comms_lock:
self._pending_comms.append(entry)
def _on_tool_log(self, script: str, result: str):
def _on_tool_log(self, script: str, result: str) -> None:
    """Persist a tool call via session_logger and queue it for GUI display.

    Args:
        script: the script/command text that was executed.
        result: the captured output of the execution.
    """
    session_logger.log_tool_call(script, result, None)
    # Appended under lock — this callback may fire from a worker thread while
    # the GUI frame loop drains _pending_tool_calls (the lock implies shared access).
    with self._pending_tool_calls_lock:
        self._pending_tool_calls.append((script, result, time.time()))
def _on_api_event(self, *args, **kwargs):
def _on_api_event(self, *args, **kwargs) -> None:
payload = kwargs.get("payload", {})
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.append({"action": "refresh_api_metrics", "payload": payload})
def _on_performance_alert(self, message: str):
def _on_performance_alert(self, message: str) -> None:
"""Called by PerformanceMonitor when a threshold is exceeded."""
alert_text = f"[PERFORMANCE ALERT] {message}. Please consider optimizing recent changes or reducing load."
# Inject into history as a 'System' message
@@ -817,7 +817,7 @@ class App:
"ts": project_manager.now_ts()
})
def _process_pending_gui_tasks(self):
def _process_pending_gui_tasks(self) -> None:
if not self._pending_gui_tasks:
return
with self._pending_gui_tasks_lock:
@@ -932,7 +932,7 @@ class App:
except Exception as e:
print(f"Error executing GUI task: {e}")
def _handle_approve_script(self):
def _handle_approve_script(self) -> None:
"""Logic for approving a pending script via API hooks."""
print("[DEBUG] _handle_approve_script called")
with self._pending_dialog_lock:
@@ -946,7 +946,7 @@ class App:
else:
print("[DEBUG] No pending dialog to approve")
def _handle_reject_script(self):
def _handle_reject_script(self) -> None:
"""Logic for rejecting a pending script via API hooks."""
print("[DEBUG] _handle_reject_script called")
with self._pending_dialog_lock:
@@ -960,7 +960,7 @@ class App:
else:
print("[DEBUG] No pending dialog to reject")
def _handle_mma_respond(self, approved: bool, payload: str = None, abort: bool = False, prompt: str = None, context_md: str = None):
def _handle_mma_respond(self, approved: bool, payload: str = None, abort: bool = False, prompt: str = None, context_md: str = None) -> None:
if self._pending_mma_approval:
dlg = self._pending_mma_approval.get("dialog_container", [None])[0]
if dlg:
@@ -985,7 +985,7 @@ class App:
dlg._condition.notify_all()
self._pending_mma_spawn = None
def _handle_approve_ask(self):
def _handle_approve_ask(self) -> None:
"""Responds with approval for a pending /api/ask request."""
if not self._ask_request_id: return
request_id = self._ask_request_id
@@ -1003,7 +1003,7 @@ class App:
self._ask_request_id = None
self._ask_tool_data = None
def _handle_reject_ask(self):
def _handle_reject_ask(self) -> None:
"""Responds with rejection for a pending /api/ask request."""
if not self._ask_request_id: return
request_id = self._ask_request_id
@@ -1021,7 +1021,7 @@ class App:
self._ask_request_id = None
self._ask_tool_data = None
def _handle_reset_session(self):
def _handle_reset_session(self) -> None:
"""Logic for resetting the AI session."""
ai_client.reset_session()
ai_client.clear_comms_log()
@@ -1037,7 +1037,7 @@ class App:
self.ai_response = ""
self.ui_ai_input = ""
def _handle_md_only(self):
def _handle_md_only(self) -> None:
"""Logic for the 'MD Only' action."""
try:
md, path, *_ = self._do_generate()
@@ -1049,7 +1049,7 @@ class App:
except Exception as e:
self.ai_status = f"error: {e}"
def _handle_generate_send(self):
def _handle_generate_send(self) -> None:
"""Logic for the 'Gen + Send' action."""
try:
md, path, file_items, stable_md, disc_text = self._do_generate()
@@ -1076,13 +1076,13 @@ class App:
self._loop
)
def _run_event_loop(self):
def _run_event_loop(self) -> None:
"""Runs the internal asyncio event loop."""
asyncio.set_event_loop(self._loop)
self._loop.create_task(self._process_event_queue())
self._loop.run_forever()
def shutdown(self):
def shutdown(self) -> None:
"""Cleanly shuts down the app's background tasks."""
if self._loop.is_running():
self._loop.call_soon_threadsafe(self._loop.stop)
@@ -1094,7 +1094,7 @@ class App:
if self.models_thread and self.models_thread.is_alive():
self.models_thread.join(timeout=1.0)
async def _process_event_queue(self):
async def _process_event_queue(self) -> None:
"""Listens for and processes events from the AsyncEventQueue."""
while True:
event_name, payload = await self.event_queue.get()
@@ -1115,7 +1115,7 @@ class App:
"payload": payload
})
def _handle_request_event(self, event: events.UserRequestEvent):
def _handle_request_event(self, event: events.UserRequestEvent) -> None:
"""Processes a UserRequestEvent by calling the AI client."""
if self.ui_auto_add_history:
with self._pending_history_adds_lock:
@@ -1147,14 +1147,14 @@ class App:
self._loop
)
def _test_callback_func_write_to_file(self, data: str):
def _test_callback_func_write_to_file(self, data: str) -> None:
"""A dummy function that a custom_callback would execute for testing."""
# Note: This file path is relative to where the test is run.
# This is for testing purposes only.
with open("temp_callback_output.txt", "w") as f:
f.write(data)
def _recalculate_session_usage(self):
def _recalculate_session_usage(self) -> None:
usage = {"input_tokens": 0, "output_tokens": 0, "cache_read_input_tokens": 0, "cache_creation_input_tokens": 0, "total_tokens": 0, "last_latency": 0.0}
for entry in ai_client.get_comms_log():
if entry.get("kind") == "response" and "usage" in entry.get("payload", {}):
@@ -1164,7 +1164,7 @@ class App:
usage[k] += u.get(k, 0) or 0
self.session_usage = usage
def _refresh_api_metrics(self, payload: dict, md_content: str | None = None):
def _refresh_api_metrics(self, payload: dict, md_content: str | None = None) -> None:
if "latency" in payload:
self.session_usage["last_latency"] = payload["latency"]
self._recalculate_session_usage()
@@ -1184,7 +1184,7 @@ class App:
size_bytes = cache_stats.get("total_size_bytes", 0)
self._gemini_cache_text = f"Gemini Caches: {count} ({size_bytes / 1024:.1f} KB)"
def cb_load_prior_log(self):
def cb_load_prior_log(self) -> None:
root = hide_tk_root()
path = filedialog.askopenfilename(
title="Load Session Log",
@@ -1249,7 +1249,7 @@ class App:
self.ai_status = "powershell done, awaiting AI..."
return output
def resolve_pending_action(self, action_id: str, approved: bool):
def resolve_pending_action(self, action_id: str, approved: bool) -> bool:
"""Resolves a pending PowerShell script confirmation by its ID.
Args:
@@ -1276,7 +1276,7 @@ class App:
return True
return False
def _append_tool_log(self, script: str, result: str):
def _append_tool_log(self, script: str, result: str) -> None:
self._tool_log.append((script, result, time.time()))
self.ui_last_script_text = script
self.ui_last_script_output = result
@@ -1285,7 +1285,7 @@ class App:
if self.ui_auto_scroll_tool_calls:
self._scroll_tool_calls_to_bottom = True
def _flush_to_project(self):
def _flush_to_project(self) -> None:
proj = self.project
proj.setdefault("output", {})["output_dir"] = self.ui_output_dir
proj.setdefault("files", {})["base_dir"] = self.ui_files_base_dir
@@ -1320,7 +1320,7 @@ class App:
else:
mma_sec["active_track"] = None
def _flush_to_config(self):
def _flush_to_config(self) -> None:
self.config["ai"] = {
"provider": self.current_provider,
"model": self.current_model,
@@ -1352,7 +1352,7 @@ class App:
discussion_text = aggregate.build_discussion_text(history)
return full_md, path, file_items, stable_md, discussion_text
def _fetch_models(self, provider: str):
def _fetch_models(self, provider: str) -> None:
self.ai_status = "fetching models..."
def do_fetch():
@@ -1369,13 +1369,13 @@ class App:
self.models_thread.start()
# ---------------------------------------------------------------- helpers
def _render_text_viewer(self, label: str, content: str):
def _render_text_viewer(self, label: str, content: str) -> None:
if imgui.button("[+]##" + str(id(content))):
self.show_text_viewer = True
self.text_viewer_title = label
self.text_viewer_content = content
def _render_heavy_text(self, label: str, content: str):
def _render_heavy_text(self, label: str, content: str) -> None:
imgui.text_colored(C_LBL, f"{label}:")
imgui.same_line()
if imgui.button("[+]##" + label):
@@ -1400,7 +1400,7 @@ class App:
imgui.text(content if content else "(empty)")
# ---------------------------------------------------------------- gui
def _show_menus(self):
def _show_menus(self) -> None:
if imgui.begin_menu("Windows"):
for w in self.show_windows.keys():
_, self.show_windows[w] = imgui.menu_item(w, "", self.show_windows[w])
@@ -1429,7 +1429,7 @@ class App:
self.ai_status = f"error: {e}"
imgui.end_menu()
def _gui_func(self):
def _gui_func(self) -> None:
try:
self.perf_monitor.start_frame()
# Process GUI task queue
@@ -1817,7 +1817,7 @@ class App:
import traceback
traceback.print_exc()
def _render_projects_panel(self):
def _render_projects_panel(self) -> None:
proj_name = self.project.get("project", {}).get("name", Path(self.active_project_path).stem)
imgui.text_colored(C_IN, f"Active: {proj_name}")
imgui.separator()
@@ -1910,7 +1910,7 @@ class App:
if imgui.button('Plan Epic (Tier 1)', imgui.ImVec2(-1, 0)):
self._cb_plan_epic()
def _cb_plan_epic(self):
def _cb_plan_epic(self) -> None:
def _bg_task():
try:
self.ai_status = "Planning Epic (Tier 1)..."
@@ -1937,14 +1937,14 @@ class App:
print(f"ERROR in _cb_plan_epic background task: {e}")
threading.Thread(target=_bg_task, daemon=True).start()
def _cb_accept_tracks(self):
def _cb_accept_tracks(self) -> None:
def _bg_task():
for track_data in self.proposed_tracks:
self._start_track_logic(track_data)
self.ai_status = "Tracks accepted and execution started."
threading.Thread(target=_bg_task, daemon=True).start()
def _cb_start_track(self, user_data=None):
def _cb_start_track(self, user_data: Any = None) -> None:
idx = 0
if isinstance(user_data, int):
idx = user_data
@@ -1956,7 +1956,7 @@ class App:
threading.Thread(target=lambda: self._start_track_logic(track_data), daemon=True).start()
self.ai_status = f"Track '{title}' started."
def _start_track_logic(self, track_data):
def _start_track_logic(self, track_data: dict[str, Any]) -> None:
try:
goal = track_data.get("goal", "")
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
@@ -2019,7 +2019,7 @@ class App:
self.ai_status = f"Track start error: {e}"
print(f"ERROR in _start_track_logic: {e}")
def _render_track_proposal_modal(self):
def _render_track_proposal_modal(self) -> None:
if self._show_track_proposal_modal:
imgui.open_popup("Track Proposal")
if imgui.begin_popup_modal("Track Proposal", True, imgui.WindowFlags_.always_auto_resize)[0]:
@@ -2044,7 +2044,7 @@ class App:
imgui.close_current_popup()
imgui.end_popup()
def _render_log_management(self):
def _render_log_management(self) -> None:
exp, self.show_windows["Log Management"] = imgui.begin("Log Management", self.show_windows["Log Management"])
if not exp:
imgui.end()
@@ -2103,7 +2103,7 @@ class App:
imgui.end_table()
imgui.end()
def _render_files_panel(self):
def _render_files_panel(self) -> None:
imgui.text("Base Dir")
ch, self.ui_files_base_dir = imgui.input_text("##f_base", self.ui_files_base_dir)
imgui.same_line()
@@ -2135,7 +2135,7 @@ class App:
r.destroy()
if d: self.files.append(str(Path(d) / "**" / "*"))
def _render_screenshots_panel(self):
def _render_screenshots_panel(self) -> None:
imgui.text("Base Dir")
ch, self.ui_shots_base_dir = imgui.input_text("##s_base", self.ui_shots_base_dir)
imgui.same_line()
@@ -2164,7 +2164,7 @@ class App:
for p in paths:
if p not in self.screenshots: self.screenshots.append(p)
def _render_discussion_panel(self):
def _render_discussion_panel(self) -> None:
# THINKING indicator
is_thinking = self.ai_status in ["sending..."]
if is_thinking:
@@ -2354,7 +2354,7 @@ class App:
self._scroll_disc_to_bottom = False
imgui.end_child()
def _render_provider_panel(self):
def _render_provider_panel(self) -> None:
imgui.text("Provider")
if imgui.begin_combo("##prov", self.current_provider):
for p in PROVIDERS:
@@ -2413,7 +2413,7 @@ class App:
if self._gemini_cache_text:
imgui.text_colored(C_SUB, self._gemini_cache_text)
def _render_message_panel(self):
def _render_message_panel(self) -> None:
# LIVE indicator
is_live = self.ai_status in ["running powershell...", "fetching url...", "searching web...", "powershell done, awaiting AI..."]
if is_live:
@@ -2446,7 +2446,7 @@ class App:
if self.ui_ai_input:
self.disc_entries.append({"role": "User", "content": self.ui_ai_input, "collapsed": False, "ts": project_manager.now_ts()})
def _render_response_panel(self):
def _render_response_panel(self) -> None:
if self._trigger_blink:
self._trigger_blink = False
self._is_blinking = True
@@ -2482,7 +2482,7 @@ class App:
if is_blinking:
imgui.pop_style_color(2)
def _cb_ticket_retry(self, ticket_id):
def _cb_ticket_retry(self, ticket_id: str) -> None:
for t in self.active_tickets:
if t.get('id') == ticket_id:
t['status'] = 'todo'
@@ -2492,7 +2492,7 @@ class App:
self._loop
)
def _cb_ticket_skip(self, ticket_id):
def _cb_ticket_skip(self, ticket_id: str) -> None:
for t in self.active_tickets:
if t.get('id') == ticket_id:
t['status'] = 'skipped'
@@ -2502,7 +2502,7 @@ class App:
self._loop
)
def _render_mma_dashboard(self):
def _render_mma_dashboard(self) -> None:
# 1. Track Browser
imgui.text("Track Browser")
if imgui.begin_table("mma_tracks_table", 4, imgui.TableFlags_.borders | imgui.TableFlags_.row_bg | imgui.TableFlags_.resizable):
@@ -2595,7 +2595,7 @@ class App:
else:
imgui.text_disabled("No active MMA track.")
def _render_ticket_dag_node(self, ticket, tickets_by_id, children_map, rendered):
def _render_ticket_dag_node(self, ticket: Ticket, tickets_by_id: dict[str, Ticket], children_map: dict[str, list[str]], rendered: set[str]) -> None:
tid = ticket.get('id', '??')
target = ticket.get('target_file', 'general')
status = ticket.get('status', 'pending').upper()
@@ -2656,7 +2656,7 @@ class App:
imgui.text_disabled(" (shown above)")
imgui.tree_pop()
def _render_tool_calls_panel(self):
def _render_tool_calls_panel(self) -> None:
imgui.text("Tool call history")
imgui.same_line()
if imgui.button("Clear##tc"):
@@ -2728,7 +2728,7 @@ class App:
self._scroll_tool_calls_to_bottom = False
imgui.end_child()
def _render_comms_history_panel(self):
def _render_comms_history_panel(self) -> None:
imgui.text_colored(vec4(200, 220, 160), f"Status: {self.ai_status}")
imgui.same_line()
if imgui.button("Clear##comms"):
@@ -2855,14 +2855,14 @@ class App:
if self.is_viewing_prior_session:
imgui.pop_style_color()
def _render_system_prompts_panel(self):
def _render_system_prompts_panel(self) -> None:
imgui.text("Global System Prompt (all projects)")
ch, self.ui_global_system_prompt = imgui.input_text_multiline("##gsp", self.ui_global_system_prompt, imgui.ImVec2(-1, 100))
imgui.separator()
imgui.text("Project System Prompt")
ch, self.ui_project_system_prompt = imgui.input_text_multiline("##psp", self.ui_project_system_prompt, imgui.ImVec2(-1, 100))
def _render_theme_panel(self):
def _render_theme_panel(self) -> None:
exp, self.show_windows["Theme"] = imgui.begin("Theme", self.show_windows["Theme"])
if exp:
imgui.text("Palette")
@@ -2901,15 +2901,15 @@ class App:
if ch: theme.set_scale(scale)
imgui.end()
def _load_fonts(self):
def _load_fonts(self) -> None:
font_path, font_size = theme.get_font_loading_params()
if font_path and Path(font_path).exists():
hello_imgui.load_font(font_path, font_size)
def _post_init(self):
def _post_init(self) -> None:
theme.apply_current()
def run(self):
def run(self) -> None:
"""Initializes the ImGui runner and starts the main application loop."""
if "--headless" in sys.argv:
print("Headless mode active")
@@ -2949,7 +2949,7 @@ class App:
save_config(self.config)
session_logger.close_session()
def main():
def main() -> None:
    """Program entry point: build the App and hand control to its run loop."""
    App().run()