fix(logging): Update GUI and controller to use correct session log paths and fix syntax errors.

This commit is contained in:
2026-03-06 14:22:41 -05:00
parent 0de50e216b
commit 2cfd0806cf
2 changed files with 86 additions and 159 deletions

View File

@@ -125,7 +125,6 @@ class AppController:
self._pending_gui_tasks_lock: threading.Lock = threading.Lock()
self._pending_dialog_lock: threading.Lock = threading.Lock()
self._api_event_queue_lock: threading.Lock = threading.Lock()
self.config: Dict[str, Any] = {}
self.project: Dict[str, Any] = {}
self.active_project_path: str = ""
@@ -135,19 +134,15 @@ class AppController:
self.disc_roles: List[str] = []
self.files: List[str] = []
self.screenshots: List[str] = []
self.event_queue: events.SyncEventQueue = events.SyncEventQueue()
self._loop_thread: Optional[threading.Thread] = None
self.tracks: List[Dict[str, Any]] = []
self.active_track: Optional[models.Track] = None
self.active_tickets: List[Dict[str, Any]] = []
self.mma_streams: Dict[str, str] = {}
self.mma_status: str = "idle"
self._tool_log: List[Dict[str, Any]] = []
self._comms_log: List[Dict[str, Any]] = []
self.session_usage: Dict[str, Any] = {
"input_tokens": 0,
"output_tokens": 0,
@@ -155,31 +150,26 @@ class AppController:
"cache_creation_input_tokens": 0,
"last_latency": 0.0
}
self.mma_tier_usage: Dict[str, Dict[str, Any]] = {
"Tier 1": {"input": 0, "output": 0, "model": "gemini-3.1-pro-preview"},
"Tier 2": {"input": 0, "output": 0, "model": "gemini-3-flash-preview"},
"Tier 3": {"input": 0, "output": 0, "model": "gemini-2.5-flash-lite"},
"Tier 4": {"input": 0, "output": 0, "model": "gemini-2.5-flash-lite"},
}
self.perf_monitor: performance_monitor.PerformanceMonitor = performance_monitor.PerformanceMonitor()
self._pending_gui_tasks: List[Dict[str, Any]] = []
self._api_event_queue: List[Dict[str, Any]] = []
# Pending dialogs state moved from App
self._pending_dialog: Optional[ConfirmDialog] = None
self._pending_dialog_open: bool = False
self._pending_actions: Dict[str, ConfirmDialog] = {}
self._pending_ask_dialog: bool = False
# AI settings state
self._current_provider: str = "gemini"
self._current_model: str = "gemini-2.5-flash-lite"
self.temperature: float = 0.0
self.max_tokens: int = 8192
self.history_trunc_limit: int = 8000
# UI-related state moved to controller
self.ui_ai_input: str = ""
self.ui_disc_new_name_input: str = ""
@@ -195,7 +185,6 @@ class AppController:
self.ui_new_ticket_desc: str = ""
self.ui_new_ticket_target: str = ""
self.ui_new_ticket_deps: str = ""
self.ui_output_dir: str = ""
self.ui_files_base_dir: str = ""
self.ui_shots_base_dir: str = ""
@@ -208,7 +197,6 @@ class AppController:
self.ui_auto_add_history: bool = False
self.ui_global_system_prompt: str = ""
self.ui_agent_tools: Dict[str, bool] = {}
self.available_models: List[str] = []
self.proposed_tracks: List[Dict[str, Any]] = []
self._show_track_proposal_modal: bool = False
@@ -231,7 +219,6 @@ class AppController:
self._perf_last_update: float = 0.0
self._autosave_interval: float = 60.0
self._last_autosave: float = time.time()
# More state moved from App
self._ask_dialog_open: bool = False
self._ask_request_id: Optional[str] = None
@@ -248,7 +235,6 @@ class AppController:
self._mma_spawn_edit_mode: bool = False
self._mma_spawn_prompt: str = ''
self._mma_spawn_context: str = ''
self._trigger_blink: bool = False
self._is_blinking: bool = False
self._blink_start_time: float = 0.0
@@ -272,7 +258,6 @@ class AppController:
self.prior_session_entries: List[Dict[str, Any]] = []
self.test_hooks_enabled: bool = ("--enable-test-hooks" in sys.argv) or (os.environ.get("SLOP_TEST_HOOKS") == "1")
self.ui_manual_approve: bool = False
self._settable_fields: Dict[str, str] = {
'ai_input': 'ui_ai_input',
'project_git_dir': 'ui_project_git_dir',
@@ -298,7 +283,6 @@ class AppController:
'ui_new_track_desc': 'ui_new_track_desc',
'manual_approve': 'ui_manual_approve'
}
self._gettable_fields = dict(self._settable_fields)
self._gettable_fields.update({
'ui_focus_agent': 'ui_focus_agent',
@@ -313,7 +297,6 @@ class AppController:
'operations_live_indicator': 'operations_live_indicator',
'prior_session_indicator': 'prior_session_indicator'
})
self._init_actions()
@property
@@ -350,6 +333,7 @@ class AppController:
'_test_callback_func_write_to_file': self._test_callback_func_write_to_file,
'_set_env_var': lambda k, v: os.environ.update({k: v})
}
def _update_gcli_adapter(self, path: str) -> None:
sys.stderr.write(f"[DEBUG] _update_gcli_adapter called with: {path}\n")
sys.stderr.flush()
@@ -445,7 +429,6 @@ class AppController:
payload = task.get("payload")
if not isinstance(payload, dict):
payload = task # Fallback to task if payload missing or wrong type
self.mma_status = payload.get("status", "idle")
self.active_tier = payload.get("active_tier")
self.mma_tier_usage = payload.get("tier_usage", self.mma_tier_usage)
@@ -604,23 +587,18 @@ class AppController:
self.temperature = ai_cfg.get("temperature", 0.0)
self.max_tokens = ai_cfg.get("max_tokens", 8192)
self.history_trunc_limit = ai_cfg.get("history_trunc_limit", 8000)
projects_cfg = self.config.get("projects", {})
self.project_paths = list(projects_cfg.get("paths", []))
self.active_project_path = projects_cfg.get("active", "")
self._load_active_project()
self.files = list(self.project.get("files", {}).get("paths", []))
self.screenshots = list(self.project.get("screenshots", {}).get("paths", []))
disc_sec = self.project.get("discussion", {})
self.disc_roles = list(disc_sec.get("roles", list(models.DISC_ROLES)))
self.active_discussion = disc_sec.get("active", "main")
disc_data = disc_sec.get("discussions", {}).get(self.active_discussion, {})
with self._disc_entries_lock:
self.disc_entries = models.parse_history_entries(disc_data.get("history", []), self.disc_roles)
# UI state
self.ui_output_dir = self.project.get("output", {}).get("output_dir", "./md_gen")
self.ui_files_base_dir = self.project.get("files", {}).get("base_dir", ".")
@@ -635,7 +613,6 @@ class AppController:
self.ui_summary_only = proj_meta.get("summary_only", False)
self.ui_auto_add_history = disc_sec.get("auto_add", False)
self.ui_global_system_prompt = self.config.get("ai", {}).get("system_prompt", "")
_default_windows = {
"Context Hub": True,
"Files & Media": True,
@@ -653,10 +630,8 @@ class AppController:
}
saved = self.config.get("gui", {}).get("show_windows", {})
self.show_windows = {k: saved.get(k, v) for k, v in _default_windows.items()}
agent_tools_cfg = self.project.get("agent", {}).get("tools", {})
self.ui_agent_tools = {t: agent_tools_cfg.get(t, True) for t in models.AGENT_TOOL_NAMES}
label = self.project.get("project", {}).get("name", "")
session_logger.open_session(label=label)
@@ -664,7 +639,7 @@ class AppController:
root = hide_tk_root()
path = filedialog.askopenfilename(
title="Load Session Log",
initialdir="logs",
initialdir="logs/sessions",
filetypes=[("Log/JSONL", "*.log *.jsonl"), ("All Files", "*.*")]
)
root.destroy()
@@ -713,20 +688,21 @@ class AppController:
def _prune_old_logs(self) -> None:
    """Asynchronously prunes old insignificant logs on startup.

    Spawns a daemon thread so application startup is never blocked by
    filesystem work; any pruning failure is reported and swallowed.
    """
    def run_prune() -> None:
        try:
            # Local imports keep these modules off the startup path
            # unless pruning actually runs.
            from src import log_registry
            from src import log_pruner
            # Session logs live under logs/sessions (one directory per
            # session); the registry sits alongside them.
            registry = log_registry.LogRegistry("logs/sessions/log_registry.toml")
            pruner = log_pruner.LogPruner(registry, "logs/sessions")
            pruner.prune()
        except Exception as e:
            # Best-effort: a pruning failure must never break startup.
            print(f"Error during log pruning: {e}")
    thread = threading.Thread(target=run_prune, daemon=True)
    thread.start()
def _fetch_models(self, provider: str) -> None:
self._set_status("fetching models...")
def do_fetch() -> None:
try:
models_list = ai_client.list_models(provider)
@@ -777,12 +753,12 @@ class AppController:
ai_client.events.on("request_start", lambda **kw: self._on_api_event("request_start", **kw))
ai_client.events.on("response_received", lambda **kw: self._on_api_event("response_received", **kw))
ai_client.events.on("tool_execution", lambda **kw: self._on_api_event("tool_execution", **kw))
self.hook_server = api_hooks.HookServer(app if app else self)
self.hook_server.start()
def _run_event_loop(self):
"""Internal loop runner."""
def queue_fallback() -> None:
while True:
try:
@@ -792,7 +768,6 @@ class AppController:
self._process_pending_history_adds()
except: pass
time.sleep(0.1)
fallback_thread = threading.Thread(target=queue_fallback, daemon=True)
fallback_thread.start()
self._process_event_queue()
@@ -808,9 +783,7 @@ class AppController:
time.sleep(0.01) # Measurable frame time
self.perf_monitor.end_frame()
time.sleep(0.006) # Aim for ~60 FPS total
threading.Thread(target=tick_perf, daemon=True).start()
while True:
event_name, payload = self.event_queue.get()
sys.stderr.write(f"[DEBUG] _process_event_queue got event: {event_name} with payload: {str(payload)[:100]}\n")
@@ -849,6 +822,7 @@ class AppController:
if self.test_hooks_enabled:
with self._api_event_queue_lock:
self._api_event_queue.append({"type": "response", "payload": payload})
def _handle_request_event(self, event: events.UserRequestEvent) -> None:
"""Processes a UserRequestEvent by calling the AI client."""
ai_client.set_current_tier(None) # Ensure main discussion is untagged
@@ -860,10 +834,8 @@ class AppController:
"collapsed": False,
"ts": project_manager.now_ts()
})
# Clear response area for new turn
self.ai_response = ""
csp = filter(bool, [self.ui_global_system_prompt.strip(), self.ui_project_system_prompt.strip()])
ai_client.set_custom_system_prompt("\n\n".join(csp))
ai_client.set_model_params(self.temperature, self.max_tokens, self.history_trunc_limit)
@@ -946,6 +918,7 @@ class AppController:
if self.test_hooks_enabled:
with self._api_event_queue_lock:
self._api_event_queue.append({"type": event_name, "payload": payload})
def _on_performance_alert(self, message: str) -> None:
alert_text = f"[PERFORMANCE ALERT] {message}. Please consider optimizing recent changes or reducing load."
with self._pending_history_adds_lock:
@@ -966,7 +939,6 @@ class AppController:
self._append_tool_log(script, output)
self._set_status("powershell done, awaiting AI...")
return output
sys.stderr.write("[DEBUG] Creating ConfirmDialog.\n")
sys.stderr.flush()
dialog = ConfirmDialog(script, base_dir)
@@ -977,7 +949,6 @@ class AppController:
else:
with self._pending_dialog_lock:
self._pending_dialog = dialog
if self.test_hooks_enabled and hasattr(self, '_api_event_queue'):
with self._api_event_queue_lock:
self._api_event_queue.append({
@@ -989,7 +960,6 @@ class AppController:
})
sys.stderr.write(f"[DEBUG] Appended script_confirmation_required to _api_event_queue. ID={dialog._uid}\n")
sys.stderr.flush()
sys.stderr.write(f"[DEBUG] Waiting for dialog ID={dialog._uid}...\n")
sys.stderr.flush()
approved, final_script = dialog.wait()
@@ -1034,6 +1004,7 @@ class AppController:
dialog._condition.notify_all()
return True
return False
@property
def current_provider(self) -> str:
return self._current_provider
@@ -1063,7 +1034,6 @@ class AppController:
def create_api(self) -> FastAPI:
"""Creates and configures the FastAPI application for headless mode."""
api = FastAPI(title="Manual Slop Headless API")
API_KEY_NAME = "X-API-KEY"
api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
@@ -1244,27 +1214,28 @@ class AppController:
@api.get("/api/v1/sessions", dependencies=[Depends(get_api_key)])
def list_sessions() -> list[str]:
    """Lists all session IDs.

    Each session is a directory under logs/sessions; the directory
    name is the session ID. Returns an empty list when the log root
    does not exist yet (fresh install, nothing logged).
    """
    log_dir = Path("logs/sessions")
    if not log_dir.exists():
        return []
    return [d.name for d in log_dir.iterdir() if d.is_dir()]
@api.get("/api/v1/sessions/{session_id}", dependencies=[Depends(get_api_key)])
def get_session(session_id: str) -> dict[str, Any]:
    """Returns the content of the comms.log for a specific session.

    Raises:
        HTTPException: 400 if the session ID contains path components
            (traversal attempt); 404 if no comms.log exists for it.
    """
    # session_id comes straight from the URL; reject anything that is
    # not a plain directory name so "../" cannot escape logs/sessions.
    if session_id != Path(session_id).name or session_id in (".", ".."):
        raise HTTPException(status_code=400, detail="Invalid session id")
    log_path = Path("logs/sessions") / session_id / "comms.log"
    if not log_path.exists():
        raise HTTPException(status_code=404, detail="Session log not found")
    return {"id": session_id, "content": log_path.read_text(encoding="utf-8", errors="replace")}
@api.delete("/api/v1/sessions/{session_id}", dependencies=[Depends(get_api_key)])
def delete_session(session_id: str) -> dict[str, str]:
    """Deletes a specific session directory (recursively).

    Raises:
        HTTPException: 400 if the session ID contains path components
            (traversal attempt); 404 if the session directory is missing.
    """
    # Guard before rmtree: a session_id like "../.." would otherwise
    # let a caller delete arbitrary directories.
    if session_id != Path(session_id).name or session_id in (".", ".."):
        raise HTTPException(status_code=400, detail="Invalid session id")
    log_path = Path("logs/sessions") / session_id
    if not log_path.exists() or not log_path.is_dir():
        raise HTTPException(status_code=404, detail="Session directory not found")
    import shutil  # local import mirrors the original; only needed on delete
    shutil.rmtree(log_path)
    return {"status": "deleted"}
@api.get("/api/v1/context", dependencies=[Depends(get_api_key)])
@@ -1288,7 +1259,6 @@ class AppController:
def token_stats() -> dict[str, Any]:
"""Returns current token usage and budget statistics."""
return self._token_stats
return api
def _cb_new_project_automated(self, user_data: Any) -> None:
@@ -1327,6 +1297,7 @@ class AppController:
return
self._refresh_from_project()
self._set_status(f"switched to: {Path(path).stem}")
def _refresh_from_project(self) -> None:
self.files = list(self.project.get("files", {}).get("paths", []))
self.screenshots = list(self.project.get("screenshots", {}).get("paths", []))
@@ -1559,7 +1530,6 @@ class AppController:
discussions = disc_sec.get("discussions", {})
for d_name in discussions:
discussions[d_name]["history"] = []
self._set_status("session reset")
self.ai_response = ""
self.ui_ai_input = ""
@@ -1568,7 +1538,6 @@ class AppController:
self._current_provider = "gemini"
self._current_model = "gemini-2.5-flash-lite"
ai_client.set_provider(self._current_provider, self._current_model)
with self._pending_history_adds_lock:
self._pending_history_adds.clear()
with self._api_event_queue_lock:
@@ -1578,6 +1547,7 @@ class AppController:
def _handle_md_only(self) -> None:
"""Logic for the 'MD Only' action."""
def worker():
try:
md, path, *_ = self._do_generate()
@@ -1592,6 +1562,7 @@ class AppController:
def _handle_generate_send(self) -> None:
"""Logic for the 'Gen + Send' action."""
def worker():
sys.stderr.write("[DEBUG] _handle_generate_send worker started\n")
sys.stderr.flush()
@@ -1601,7 +1572,6 @@ class AppController:
self.last_md = md
self.last_md_path = path
self.last_file_items = file_items
self._set_status("sending...")
user_msg = self.ui_ai_input
base_dir = self.ui_files_base_dir
@@ -1640,14 +1610,12 @@ class AppController:
if "latency" in payload:
self.session_usage["last_latency"] = payload["latency"]
self._recalculate_session_usage()
if md_content is not None:
stats = ai_client.get_token_stats(md_content)
# Ensure compatibility if keys are named differently
if "total_tokens" in stats and "estimated_prompt_tokens" not in stats:
stats["estimated_prompt_tokens"] = stats["total_tokens"]
self._token_stats = stats
cache_stats = payload.get("cache_stats")
if cache_stats:
count = cache_stats.get("cache_count", 0)
@@ -1736,6 +1704,7 @@ class AppController:
_t1_resp = [e for e in _t1_new if e.get("direction") == "IN" and e.get("kind") == "response"]
_t1_in = sum(e.get("payload", {}).get("usage", {}).get("input_tokens", 0) for e in _t1_resp)
_t1_out = sum(e.get("payload", {}).get("usage", {}).get("output_tokens", 0) for e in _t1_resp)
def _push_t1_usage(i: int, o: int) -> None:
self.mma_tier_usage["Tier 1"]["input"] += i
self.mma_tier_usage["Tier 1"]["output"] += o
@@ -1764,13 +1733,13 @@ class AppController:
def _cb_accept_tracks(self) -> None:
self._show_track_proposal_modal = False
def _bg_task() -> None:
sys.stderr.write("[DEBUG] _cb_accept_tracks _bg_task started\n")
# Generate skeletons once
self._set_status("Phase 2: Generating skeletons for all tracks...")
sys.stderr.write("[DEBUG] Creating ASTParser...\n")
parser = ASTParser(language="python")
generated_skeletons = ""
try:
# Use a local copy of files to avoid concurrent modification issues
@@ -1790,7 +1759,6 @@ class AppController:
sys.stderr.write(f"[DEBUG] Error in scan loop: {e}\n")
self._set_status(f"Error generating skeletons: {e}")
return # Exit if skeleton generation fails
sys.stderr.write("[DEBUG] Skeleton generation complete. Starting tracks...\n")
# Now loop through tracks and call _start_track_logic with generated skeletons
total_tracks = len(self.proposed_tracks)
@@ -1798,7 +1766,6 @@ class AppController:
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
self._set_status(f"Processing track {i+1} of {total_tracks}: '{title}'...")
self._start_track_logic(track_data, skeletons_str=generated_skeletons) # Pass skeletons
sys.stderr.write("[DEBUG] All tracks started. Refreshing...\n")
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.append({'action': 'refresh_from_project'}) # Ensure UI refresh after tracks are started
@@ -1812,7 +1779,6 @@ class AppController:
# Ensure it's loaded as active
if not self.active_track or self.active_track.id != track_id:
self._cb_load_track(track_id)
if self.active_track:
# Use the active track object directly to start execution
self._set_mma_status("running")
@@ -1822,7 +1788,6 @@ class AppController:
threading.Thread(target=engine.run, kwargs={"md_content": full_md}, daemon=True).start()
self._set_status(f"Track '{self.active_track.description}' started.")
return
idx = 0
if isinstance(user_data, int):
idx = user_data
@@ -1839,9 +1804,7 @@ class AppController:
goal = track_data.get("goal", "")
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
self._set_status(f"Phase 2: Generating tickets for {title}...")
skeletons = skeletons_str or "" # Use provided skeletons or empty
self._set_status("Phase 2: Calling Tech Lead...")
_t2_baseline = len(ai_client.get_comms_log())
raw_tickets = conductor_tech_lead.generate_tickets(goal, skeletons)
@@ -1853,14 +1816,12 @@ class AppController:
def _push_t2_usage(i: int, o: int) -> None:
self.mma_tier_usage["Tier 2"]["input"] += i
self.mma_tier_usage["Tier 2"]["output"] += o
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.append({
"action": "custom_callback",
"callback": _push_t2_usage,
"args": [_t2_in, _t2_out]
})
if not raw_tickets:
self._set_status(f"Error: No tickets generated for track: {title}")
print(f"Warning: No tickets generated for track: {title}")
@@ -1889,12 +1850,10 @@ class AppController:
meta = models.Metadata(id=track_id, name=title, status="todo", created_at=datetime.now(), updated_at=datetime.now())
state = models.TrackState(metadata=meta, discussion=[], tasks=tickets)
project_manager.save_track_state(track_id, state, self.ui_files_base_dir)
# Add to memory and notify UI
self.tracks.append({"id": track_id, "title": title, "status": "todo"})
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.append({'action': 'refresh_from_project'})
# 4. Initialize ConductorEngine and run loop
engine = multi_agent_conductor.ConductorEngine(track, self.event_queue, auto_queue=not self.mma_step_mode)
# Use current full markdown context for the track execution
@@ -1979,7 +1938,6 @@ class AppController:
# Sync active_tickets (list of dicts) back to active_track.tickets (list of models.Ticket objects)
self.active_track.tickets = [models.Ticket.from_dict(t) for t in self.active_tickets]
# Save the state to disk
existing = project_manager.load_track_state(self.active_track.id, self.ui_files_base_dir)
meta = models.Metadata(
id=self.active_track.id,

View File

@@ -91,7 +91,6 @@ class App:
self.controller.PROVIDERS = PROVIDERS
self.controller.init_state()
self.controller.start_services(self)
# Aliases for controller-owned locks
self._send_thread_lock = self.controller._send_thread_lock
self._disc_entries_lock = self.controller._disc_entries_lock
@@ -101,7 +100,6 @@ class App:
self._pending_gui_tasks_lock = self.controller._pending_gui_tasks_lock
self._pending_dialog_lock = self.controller._pending_dialog_lock
self._api_event_queue_lock = self.controller._api_event_queue_lock
self._discussion_names_cache: list[str] = []
self._discussion_names_dirty: bool = True
@@ -145,9 +143,7 @@ class App:
@current_model.setter
def current_model(self, value: str) -> None:
self.controller.current_model = value
# ---------------------------------------------------------------- project loading
# ---------------------------------------------------------------- logic
def shutdown(self) -> None:
@@ -160,13 +156,6 @@ class App:
os.makedirs("tests/artifacts", exist_ok=True)
with open("tests/artifacts/temp_callback_output.txt", "w") as f:
f.write(data)
# ---------------------------------------------------------------- helpers
def _render_text_viewer(self, label: str, content: str) -> None:
@@ -738,10 +727,6 @@ class App:
if imgui.button('Plan Epic (Tier 1)', imgui.ImVec2(-1, 0)):
self._cb_plan_epic()
def _render_track_proposal_modal(self) -> None:
if self._show_track_proposal_modal:
imgui.open_popup("Track Proposal")
@@ -788,7 +773,7 @@ class App:
if not exp:
imgui.end()
return
registry = log_registry.LogRegistry("logs/log_registry.toml")
registry = log_registry.LogRegistry("logs/sessions/log_registry.toml")
sessions = registry.data
if imgui.begin_table("sessions_table", 7, imgui.TableFlags_.borders | imgui.TableFlags_.row_bg | imgui.TableFlags_.resizable):
imgui.table_setup_column("Session ID")
@@ -1218,7 +1203,6 @@ class App:
toks = msg.get("tokens", 0)
imgui.text_disabled(f" [{role}] ~{toks:,} tokens")
shown += 1
imgui.separator()
if ai_client._provider == "gemini":
if ai_client._gemini_cache is not None:
@@ -1307,11 +1291,6 @@ class App:
if is_blinking:
imgui.pop_style_color(2)
def _render_tool_calls_panel(self) -> None:
imgui.text("Tool call history")
imgui.same_line()
@@ -1380,7 +1359,6 @@ class App:
in_t = stats.get('input', 0)
out_t = stats.get('output', 0)
total_cost += cost_tracker.estimate_cost(model, in_t, out_t)
imgui.text("Track:")
imgui.same_line()
imgui.text_colored(C_VAL, track_name)
@@ -1402,7 +1380,6 @@ class App:
elif self.mma_status == "error": status_col = imgui.ImVec4(1, 0, 0, 1)
imgui.text_colored(status_col, self.mma_status.upper())
imgui.separator()
# 0. Conductor Setup
if imgui.collapsing_header("Conductor Setup"):
if imgui.button("Run Setup Scan"):
@@ -1449,7 +1426,6 @@ class App:
if imgui.button(f"Load##{track.get('id')}"):
self._cb_load_track(str(track.get("id") or ""))
imgui.end_table()
# 1b. New Track Form
imgui.text("Create New Track")
changed_n, self.ui_new_track_name = imgui.input_text("Name##new_track", self.ui_new_track_name)
@@ -1465,7 +1441,6 @@ class App:
self._cb_create_track(self.ui_new_track_name, self.ui_new_track_desc, self.ui_new_track_type)
self.ui_new_track_name = ""
self.ui_new_track_desc = ""
imgui.separator()
# 2. Global Controls
changed, self.mma_step_mode = imgui.checkbox("Step Mode (HITL)", self.mma_step_mode)
@@ -1532,7 +1507,6 @@ class App:
cost = cost_tracker.estimate_cost(model, in_t, out_t)
total_cost += cost
imgui.text(f"${cost:,.4f}")
# Total Row
imgui.table_next_row()
imgui.table_set_bg_color(imgui.TableBgTarget_.row_bg0, imgui.get_color_u32(imgui.Col_.plot_lines_hovered))
@@ -1582,7 +1556,6 @@ class App:
rendered: set[str] = set()
for root in roots:
self._render_ticket_dag_node(root, tickets_by_id, children_map, rendered)
# 5. Add Ticket Form
imgui.separator()
if imgui.button("Add Ticket"):
@@ -1599,7 +1572,6 @@ class App:
self.ui_new_ticket_desc = ""
self.ui_new_ticket_target = ""
self.ui_new_ticket_deps = ""
if self._show_add_ticket_form:
imgui.begin_child("add_ticket_form", imgui.ImVec2(-1, 220), True)
imgui.text_colored(C_VAL, "New Ticket Details")
@@ -1607,7 +1579,6 @@ class App:
_, self.ui_new_ticket_desc = imgui.input_text_multiline("Description##new_ticket", self.ui_new_ticket_desc, imgui.ImVec2(-1, 60))
_, self.ui_new_ticket_target = imgui.input_text("Target File##new_ticket", self.ui_new_ticket_target)
_, self.ui_new_ticket_deps = imgui.input_text("Depends On (IDs, comma-separated)##new_ticket", self.ui_new_ticket_deps)
if imgui.button("Create"):
new_ticket = {
"id": self.ui_new_ticket_id,
@@ -1871,7 +1842,6 @@ class App:
def _render_theme_panel(self) -> None:
exp, opened = imgui.begin("Theme", self.show_windows["Theme"])
self.show_windows["Theme"] = bool(opened)
if exp:
imgui.text("Palette")
cp = theme.get_current_palette()
@@ -1919,7 +1889,6 @@ class App:
def run(self) -> None:
"""Initializes the ImGui runner and starts the main application loop."""
if "--headless" in sys.argv:
print("Headless mode active")
self._fetch_models(self.current_provider)