WIP: PAIN2
This commit is contained in:
@@ -69,6 +69,10 @@ class ApiHookClient:
|
||||
"""Pushes an event to the GUI's SyncEventQueue via the /api/gui endpoint."""
|
||||
return self._make_request('POST', '/api/gui', data=payload) or {}
|
||||
|
||||
def push_event(self, action: str, payload: dict) -> dict[str, Any]:
    """Convenience to push a GUI task.

    Builds a task dict whose ``action`` key may be overridden by an
    ``action`` key already present in *payload* (same precedence as the
    ``{"action": ..., **payload}`` merge) and posts it to /api/gui.
    """
    task: dict[str, Any] = {"action": action}
    task.update(payload)
    return self.post_gui(task)
|
||||
|
||||
def click(self, item: str, user_data: Any = None) -> dict[str, Any]:
    """Simulates a button click.

    Posts a ``click`` action for *item*; ``user_data`` is forwarded
    verbatim (``None`` when the caller supplies nothing).
    """
    event: dict[str, Any] = {
        "action": "click",
        "item": item,
        "user_data": user_data,
    }
    return self.post_gui(event)
|
||||
@@ -127,14 +131,14 @@ class ApiHookClient:
|
||||
"""Retrieves performance and diagnostic metrics."""
|
||||
return self._make_request('GET', '/api/gui/diagnostics') or {}
|
||||
|
||||
def get_performance(self) -> dict[str, Any]:
    """Convenience for test_visual_sim_gui_ux.py.

    Wraps the GUI diagnostics payload under a single ``performance`` key.
    """
    return {"performance": self.get_gui_diagnostics()}
|
||||
|
||||
def get_mma_status(self) -> dict[str, Any]:
    """Convenience to get the current MMA engine status. Returns FULL state.

    The earlier implementation returned only a hand-picked subset
    (``mma_status`` / ``ai_status`` / ``active_tier``); a bad merge left
    that old body in place followed by an unreachable second docstring
    and return. Keep only the intended behavior: return the complete
    GUI state, which is a superset of the old dict's fields.
    """
    return self.get_gui_state()
|
||||
|
||||
def get_node_status(self, node_id: str) -> dict[str, Any]:
|
||||
"""Retrieves status for a specific node in the MMA DAG."""
|
||||
|
||||
@@ -308,7 +308,12 @@ class AppController:
|
||||
self._gettable_fields.update({
|
||||
'ui_focus_agent': 'ui_focus_agent',
|
||||
'active_discussion': 'active_discussion',
|
||||
'_track_discussion_active': '_track_discussion_active'
|
||||
'_track_discussion_active': '_track_discussion_active',
|
||||
'proposed_tracks': 'proposed_tracks',
|
||||
'mma_streams': 'mma_streams',
|
||||
'active_track': 'active_track',
|
||||
'active_tickets': 'active_tickets',
|
||||
'tracks': 'tracks'
|
||||
})
|
||||
|
||||
self._init_actions()
|
||||
@@ -343,6 +348,14 @@ class AppController:
|
||||
else:
|
||||
ai_client._gemini_cli_adapter.binary_path = str(path)
|
||||
|
||||
def _set_status(self, status: str) -> None:
|
||||
"""Thread-safe update of ai_status via the GUI task queue."""
|
||||
with self._pending_gui_tasks_lock:
|
||||
self._pending_gui_tasks.append({
|
||||
"action": "set_ai_status",
|
||||
"payload": status
|
||||
})
|
||||
|
||||
def _process_pending_gui_tasks(self) -> None:
|
||||
if not self._pending_gui_tasks:
|
||||
return
|
||||
@@ -357,6 +370,10 @@ class AppController:
|
||||
# ...
|
||||
if action == "refresh_api_metrics":
|
||||
self._refresh_api_metrics(task.get("payload", {}), md_content=self.last_md or None)
|
||||
elif action == "set_ai_status":
|
||||
self.ai_status = task.get("payload", "")
|
||||
sys.stderr.write(f"[DEBUG] Updated ai_status via task to: {self.ai_status}\n")
|
||||
sys.stderr.flush()
|
||||
elif action == "handle_ai_response":
|
||||
payload = task.get("payload", {})
|
||||
text = payload.get("text", "")
|
||||
@@ -760,7 +777,7 @@ class AppController:
|
||||
sys.stderr.flush()
|
||||
while True:
|
||||
event_name, payload = self.event_queue.get()
|
||||
sys.stderr.write(f"[DEBUG] _process_event_queue got event: {event_name}\n")
|
||||
sys.stderr.write(f"[DEBUG] _process_event_queue got event: {event_name} with payload: {str(payload)[:100]}\n")
|
||||
sys.stderr.flush()
|
||||
if event_name == "shutdown":
|
||||
break
|
||||
@@ -1027,10 +1044,21 @@ class AppController:
|
||||
"""Returns the current GUI state for specific fields."""
|
||||
gettable = getattr(self, "_gettable_fields", {})
|
||||
state = {}
|
||||
import dataclasses
|
||||
for key, attr in gettable.items():
|
||||
state[key] = getattr(self, attr, None)
|
||||
val = getattr(self, attr, None)
|
||||
if dataclasses.is_dataclass(val):
|
||||
state[key] = dataclasses.asdict(val)
|
||||
else:
|
||||
state[key] = val
|
||||
return state
|
||||
|
||||
@api.post("/api/gui", dependencies=[Depends(get_api_key)])
def post_gui(req: dict) -> dict[str, str]:
    """Pushes a GUI task to the event queue.

    The raw request body is forwarded unchanged to the controller's
    event loop as a ``gui_task`` event; actual handling happens later
    on the GUI thread, so this endpoint only acknowledges enqueueing.
    """
    self.event_queue.put("gui_task", req)
    return {"status": "queued"}
|
||||
|
||||
@api.get("/status", dependencies=[Depends(get_api_key)])
|
||||
def status() -> dict[str, Any]:
|
||||
"""Returns the current status of the application."""
|
||||
@@ -1652,35 +1680,37 @@ class AppController:
|
||||
self._show_track_proposal_modal = False
|
||||
def _bg_task() -> None:
|
||||
# Generate skeletons once
|
||||
self.ai_status = "Phase 2: Generating skeletons for all tracks..."
|
||||
self._set_status("Phase 2: Generating skeletons for all tracks...")
|
||||
parser = file_cache.ASTParser(language="python")
|
||||
generated_skeletons = ""
|
||||
try:
|
||||
for i, file_path in enumerate(self.files):
|
||||
try:
|
||||
self.ai_status = f"Phase 2: Scanning files ({i+1}/{len(self.files)})..."
|
||||
abs_path = Path(self.ui_files_base_dir) / file_path
|
||||
if abs_path.exists() and abs_path.suffix == ".py":
|
||||
with open(abs_path, "r", encoding="utf-8") as f:
|
||||
code = f.read()
|
||||
generated_skeletons += f"\\nFile: {file_path}\\n{parser.get_skeleton(code)}\\n"
|
||||
except Exception as e:
|
||||
print(f"Error parsing skeleton for {file_path}: {e}")
|
||||
# Use a local copy of files to avoid concurrent modification issues
|
||||
files_to_scan = list(self.files)
|
||||
for i, file_path in enumerate(files_to_scan):
|
||||
try:
|
||||
self._set_status(f"Phase 2: Scanning files ({i+1}/{len(files_to_scan)})...")
|
||||
abs_path = Path(self.ui_files_base_dir) / file_path
|
||||
if abs_path.exists() and abs_path.suffix == ".py":
|
||||
with open(abs_path, "r", encoding="utf-8") as f:
|
||||
code = f.read()
|
||||
generated_skeletons += f"\nFile: {file_path}\n{parser.get_skeleton(code)}\n"
|
||||
except Exception as e:
|
||||
print(f"Error parsing skeleton for {file_path}: {e}")
|
||||
except Exception as e:
|
||||
self.ai_status = f"Error generating skeletons: {e}"
|
||||
print(f"Error generating skeletons: {e}")
|
||||
return # Exit if skeleton generation fails
|
||||
self._set_status(f"Error generating skeletons: {e}")
|
||||
print(f"Error generating skeletons: {e}")
|
||||
return # Exit if skeleton generation fails
|
||||
|
||||
# Now loop through tracks and call _start_track_logic with generated skeletons
|
||||
total_tracks = len(self.proposed_tracks)
|
||||
for i, track_data in enumerate(self.proposed_tracks):
|
||||
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
|
||||
self.ai_status = f"Processing track {i+1} of {total_tracks}: '{title}'..."
|
||||
self._set_status(f"Processing track {i+1} of {total_tracks}: '{title}'...")
|
||||
self._start_track_logic(track_data, skeletons_str=generated_skeletons) # Pass skeletons
|
||||
|
||||
with self._pending_gui_tasks_lock:
|
||||
self._pending_gui_tasks.append({'action': 'refresh_from_project'}) # Ensure UI refresh after tracks are started
|
||||
self.ai_status = f"All {total_tracks} tracks accepted and execution started."
|
||||
self._pending_gui_tasks.append({'action': 'refresh_from_project'}) # Ensure UI refresh after tracks are started
|
||||
self._set_status(f"All {total_tracks} tracks accepted and execution started.")
|
||||
threading.Thread(target=_bg_task, daemon=True).start()
|
||||
|
||||
def _cb_start_track(self, user_data: Any = None) -> None:
|
||||
@@ -1716,39 +1746,34 @@ class AppController:
|
||||
try:
|
||||
goal = track_data.get("goal", "")
|
||||
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
|
||||
self.ai_status = f"Phase 2: Generating tickets for {title}..."
|
||||
self._set_status(f"Phase 2: Generating tickets for {title}...")
|
||||
|
||||
skeletons = "" # Initialize skeletons variable
|
||||
if skeletons_str is None: # Only generate if not provided
|
||||
# 1. Get skeletons for context
|
||||
parser = file_cache.ASTParser(language="python")
|
||||
for i, file_path in enumerate(self.files):
|
||||
try:
|
||||
self.ai_status = f"Phase 2: Scanning files ({i+1}/{len(self.files)})..."
|
||||
abs_path = Path(self.ui_files_base_dir) / file_path
|
||||
if abs_path.exists() and abs_path.suffix == ".py":
|
||||
with open(abs_path, "r", encoding="utf-8") as f:
|
||||
code = f.read()
|
||||
skeletons += f"\\nFile: {file_path}\\n{parser.get_skeleton(code)}\\n"
|
||||
except Exception as e:
|
||||
print(f"Error parsing skeleton for {file_path}: {e}")
|
||||
else:
|
||||
skeletons = skeletons_str # Use provided skeletons
|
||||
skeletons = skeletons_str or "" # Use provided skeletons or empty
|
||||
|
||||
self.ai_status = "Phase 2: Calling Tech Lead..."
|
||||
self._set_status("Phase 2: Calling Tech Lead...")
|
||||
_t2_baseline = len(ai_client.get_comms_log())
|
||||
raw_tickets = conductor_tech_lead.generate_tickets(goal, skeletons)
|
||||
_t2_new = ai_client.get_comms_log()[_t2_baseline:]
|
||||
_t2_resp = [e for e in _t2_new if e.get("direction") == "IN" and e.get("kind") == "response"]
|
||||
_t2_in = sum(e.get("payload", {}).get("usage", {}).get("input_tokens", 0) for e in _t2_resp)
|
||||
_t2_out = sum(e.get("payload", {}).get("usage", {}).get("output_tokens", 0) for e in _t2_resp)
|
||||
self.mma_tier_usage["Tier 2"]["input"] += _t2_in
|
||||
self.mma_tier_usage["Tier 2"]["output"] += _t2_out
|
||||
|
||||
def _push_t2_usage(i: int, o: int) -> None:
|
||||
self.mma_tier_usage["Tier 2"]["input"] += i
|
||||
self.mma_tier_usage["Tier 2"]["output"] += o
|
||||
|
||||
with self._pending_gui_tasks_lock:
|
||||
self._pending_gui_tasks.append({
|
||||
"action": "custom_callback",
|
||||
"callback": _push_t2_usage,
|
||||
"args": [_t2_in, _t2_out]
|
||||
})
|
||||
|
||||
if not raw_tickets:
|
||||
self.ai_status = f"Error: No tickets generated for track: {title}"
|
||||
self._set_status(f"Error: No tickets generated for track: {title}")
|
||||
print(f"Warning: No tickets generated for track: {title}")
|
||||
return
|
||||
self.ai_status = "Phase 2: Sorting tickets..."
|
||||
self._set_status("Phase 2: Sorting tickets...")
|
||||
try:
|
||||
sorted_tickets_data = conductor_tech_lead.topological_sort(raw_tickets)
|
||||
except ValueError as e:
|
||||
@@ -1781,7 +1806,7 @@ class AppController:
|
||||
# Start the engine in a separate thread
|
||||
threading.Thread(target=engine.run, kwargs={"md_content": full_md}, daemon=True).start()
|
||||
except Exception as e:
|
||||
self.ai_status = f"Track start error: {e}"
|
||||
self._set_status(f"Track start error: {e}")
|
||||
print(f"ERROR in _start_track_logic: {e}")
|
||||
|
||||
def _cb_ticket_retry(self, ticket_id: str) -> None:
|
||||
|
||||
@@ -927,6 +927,10 @@ def dispatch(tool_name: str, tool_input: dict[str, Any]) -> str:
|
||||
return get_tree(path, int(tool_input.get("max_depth", 2)))
|
||||
return f"ERROR: unknown MCP tool '{tool_name}'"
|
||||
|
||||
def get_tool_schemas() -> list[dict[str, Any]]:
    """Returns the list of tool specifications for the AI.

    A shallow copy is returned so callers cannot mutate the
    module-level spec table.
    """
    return [spec for spec in MCP_TOOL_SPECS]
|
||||
|
||||
|
||||
# ------------------------------------------------------------------ tool schema helpers
|
||||
# These are imported by ai_client.py to build provider-specific declarations.
|
||||
|
||||
Reference in New Issue
Block a user