refactor(phase5): Comprehensive stabilisation pass. De-duplicated App/Controller state, hardened session reset, and updated integration tests with deterministic polling.

This commit is contained in:
2026-05-09 16:55:45 -04:00
parent d1cc019640
commit b958fa2819
16 changed files with 351 additions and 383 deletions
+42 -28
View File
@@ -261,6 +261,9 @@ class AppController:
self.ui_gemini_cli_path: str = "gemini"
self.ui_word_wrap: bool = True
self.ui_auto_add_history: bool = False
self.ui_separate_message_panel: bool = False
self.ui_separate_response_panel: bool = False
self.ui_separate_tool_calls_panel: bool = False
self.ui_active_tool_preset: str | None = None
self.ui_global_system_prompt: str = ""
self.ui_base_system_prompt: str = ""
@@ -560,6 +563,11 @@ class AppController:
def thinking_indicator(self) -> bool:
    """Return True while an AI request is in flight.

    The indicator is driven purely off ``self.ai_status``: it is on for
    the two transient request states and off for everything else.
    """
    busy_states = ("sending...", "streaming...")
    return self.ai_status in busy_states
@property
def summary_cache(self) -> Any:
    """Expose the module-level summary cache owned by ``src.summarize``.

    Imported lazily inside the getter (matching the original) so that the
    summarize module is only loaded when the cache is actually accessed.
    NOTE(review): this reaches into a private module attribute
    (``_summary_cache``); the module presumably has no public accessor —
    confirm before widening usage.
    """
    from src.summarize import _summary_cache
    return _summary_cache
@property
def rag_enabled(self) -> bool:
    """Whether RAG is enabled; False whenever no rag_config is loaded.

    Uses the truthiness of ``rag_config`` (not an ``is None`` check),
    matching the original ternary's semantics exactly.
    """
    cfg = self.rag_config
    if not cfg:
        return False
    return cfg.enabled
@@ -673,6 +681,7 @@ class AppController:
'btn_reset_base_prompt': self._cb_reset_base_prompt,
'btn_show_base_prompt_diff': self._cb_show_base_prompt_diff,
'btn_rebuild_rag_index': self._rebuild_rag_index,
'btn_clear_summary_cache': self._handle_clear_summary_cache,
}
self._predefined_callbacks: dict[str, Callable[..., Any]] = {
'_test_callback_func_write_to_file': self._test_callback_func_write_to_file,
@@ -1013,8 +1022,6 @@ class AppController:
self.mma_streams[stream_id] = ""
self.mma_streams[stream_id] += f"[BEAD UPDATE] {bead_id} -> status: {status}\n"
except Exception as e:
sys.stderr.write(f"[DEBUG] Error executing GUI task: {e}\n{traceback.format_exc()}\n")
sys.stderr.flush()
print(f"Error executing GUI task: {e}")
def _process_pending_history_adds(self) -> None:
@@ -1107,6 +1114,9 @@ class AppController:
self.ui_separate_tier2 = False
self.ui_separate_tier3 = False
self.ui_separate_tier4 = False
self.ui_separate_message_panel = False
self.ui_separate_response_panel = False
self.ui_separate_tool_calls_panel = False
self.ui_separate_external_tools = False
self.config = models.load_config()
path_info = paths.get_full_path_info()
@@ -1124,6 +1134,9 @@ class AppController:
self.project_paths = list(projects_cfg.get("paths", []))
self.active_project_path = projects_cfg.get("active", "")
self._load_active_project()
if not self.project or not isinstance(self.project, dict) or "project" not in self.project:
name = Path(self.active_project_path).stem if self.active_project_path else "unnamed"
self.project = project_manager.default_project(name)
self.workspace_manager = workspace_manager.WorkspaceManager(project_root=Path(self.active_project_path).parent if self.active_project_path else None)
self.workspace_profiles = self.workspace_manager.load_all_profiles()
# Deserialize FileItems in files.paths
@@ -1203,6 +1216,9 @@ class AppController:
self.ui_project_preset_name = proj_meta.get("active_preset")
gui_cfg = self.config.get("gui", {})
self.ui_separate_message_panel = gui_cfg.get('separate_message_panel', False)
self.ui_separate_response_panel = gui_cfg.get('separate_response_panel', False)
self.ui_separate_tool_calls_panel = gui_cfg.get('separate_tool_calls_panel', False)
self.ui_auto_switch_layout = gui_cfg.get("auto_switch_layout", False)
self.ui_tier_layout_bindings = gui_cfg.get("tier_layout_bindings", {"Tier 1": "", "Tier 2": "", "Tier 3": "", "Tier 4": ""})
from src import bg_shader
@@ -1531,7 +1547,6 @@ class AppController:
try:
self.all_available_models[p] = ai_client.list_models(p)
except Exception as e:
sys.stderr.write(f"[DEBUG] Error fetching models for {p}: {e}\n")
self.all_available_models[p] = []
models_list = self.all_available_models.get(provider, [])
@@ -2275,6 +2290,10 @@ class AppController:
summarize._summary_cache.clear()
self._push_mma_state_update()
def _handle_clear_summary_cache(self, user_data: Any = None) -> None:
self.summary_cache.clear()
self.ai_status = 'summary cache cleared'
def _cb_show_base_prompt_diff(self, user_data=None) -> None:
"""
[C: src/gui_2.py:App._render_system_prompts_panel]
@@ -2425,7 +2444,6 @@ class AppController:
"""
[C: src/gui_2.py:App._render_system_prompts_panel]
"""
print(f"[DEBUG] _apply_preset: name={name}, scope={scope}")
if name == "None":
if scope == "global":
self.ui_global_preset_name = ""
@@ -2434,7 +2452,6 @@ class AppController:
return
preset = self.presets.get(name)
if not preset:
print(f"[DEBUG] _apply_preset: preset {name} not found in {list(self.presets.keys())}")
return
if scope == "global":
self.ui_global_system_prompt = preset.system_prompt
@@ -2447,7 +2464,6 @@ class AppController:
"""
[C: src/gui_2.py:App._render_preset_manager_content]
"""
print(f"[DEBUG] _cb_save_preset: name={name}, scope={scope}")
if not name or not name.strip():
raise ValueError("Preset name cannot be empty or whitespace.")
preset = models.Preset(
@@ -2456,7 +2472,6 @@ class AppController:
)
self.preset_manager.save_preset(preset, scope)
self.presets = self.preset_manager.load_all()
print(f"[DEBUG] _cb_save_preset: saved {name}, total presets now {len(self.presets)}")
def _cb_delete_preset(self, name, scope):
"""
@@ -2746,6 +2761,14 @@ class AppController:
self.ui_ai_input = ""
self.ui_manual_approve = False
self.ui_auto_add_history = False
self.active_track = None
self.active_tier = None
self.mma_status = 'idle'
self.proposed_tracks = []
self.active_tickets = []
self.engines.clear()
self.mma_streams.clear()
self._worker_status.clear()
self._current_provider = "gemini"
self._current_model = "gemini-2.5-flash-lite"
ai_client.set_provider(self._current_provider, self._current_model)
@@ -2755,6 +2778,17 @@ class AppController:
self._api_event_queue.clear()
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.clear()
self.ui_use_default_base_prompt = True
self.ui_global_system_prompt = ''
self.ui_base_system_prompt = ''
self.ui_project_system_prompt = ''
self.ui_project_main_context = ''
self.ui_active_persona = ''
self.ui_active_tool_preset = None
self.ui_active_bias_profile = None
self.temperature = 0.0
self.top_p = 1.0
self.max_tokens = 8192
def _handle_md_only(self) -> None:
"""
@@ -2789,8 +2823,6 @@ class AppController:
"""
[C: tests/test_symbol_parsing.py:test_handle_generate_send_appends_definitions, tests/test_symbol_parsing.py:test_handle_generate_send_no_symbols]
"""
sys.stderr.write("[DEBUG] _handle_generate_send worker started\n")
sys.stderr.flush()
try:
md, path, file_items, stable_md, disc_text = self._do_generate()
self._last_stable_md = stable_md
@@ -2819,8 +2851,6 @@ class AppController:
user_msg += f'\n\n[Definition: {symbol} from {file_path} (line {line})]\n```python\n{definition}\n```'
base_dir = self.active_project_root
sys.stderr.write(f"[DEBUG] _do_generate success. Prompt: {user_msg[:50]}...\n")
sys.stderr.flush()
# Prepare event payload
event_payload = events.UserRequestEvent(
prompt=user_msg,
@@ -2831,11 +2861,7 @@ class AppController:
)
# Push to async queue
self.event_queue.put("user_request", event_payload)
sys.stderr.write("[DEBUG] Enqueued user_request event\n")
sys.stderr.flush()
except Exception as e:
sys.stderr.write(f"[DEBUG] _do_generate ERROR: {e}\n{traceback.format_exc()}\n")
sys.stderr.flush()
self.ai_status = f"generate error: {e}"
threading.Thread(target=worker, daemon=True).start()
@@ -3035,13 +3061,9 @@ class AppController:
[C: src/gui_2.py:App._render_mma_dashboard, tests/test_mma_orchestration_gui.py:test_cb_plan_epic_launches_thread]
"""
def _bg_task() -> None:
sys.stderr.write("[DEBUG] _cb_plan_epic _bg_task started\n")
sys.stderr.flush()
try:
self.ai_status = "Planning Epic (Tier 1)..."
history = orchestrator_pm.get_track_history_summary()
sys.stderr.write(f"[DEBUG] History summary length: {len(history)}\n")
sys.stderr.flush()
proj = project_manager.load_project(self.active_project_path)
flat = project_manager.flat_config(self.project)
file_items = aggregate.build_file_items(Path(self.active_project_root), flat.get("files", {}).get("paths", []))
@@ -3086,16 +3108,13 @@ class AppController:
self._show_track_proposal_modal = False
def _bg_task() -> None:
sys.stderr.write("[DEBUG] _cb_accept_tracks _bg_task started\n")
# Generate skeletons once
self.ai_status = "Phase 2: Generating skeletons for all tracks..."
sys.stderr.write("[DEBUG] Creating ASTParser...\n")
parser = ASTParser(language="python")
generated_skeletons = ""
try:
# Use a local copy of files to avoid concurrent modification issues
files_to_scan = list(self.files)
sys.stderr.write(f"[DEBUG] Scanning {len(files_to_scan)} files for skeletons...\n")
for i, file_path in enumerate(files_to_scan):
try:
self.ai_status = f"Phase 2: Scanning files ({i+1}/{len(files_to_scan)})..."
@@ -3105,19 +3124,16 @@ class AppController:
code = f.read()
generated_skeletons += f"\nFile: {file_path}\n{parser.get_skeleton(code)}\n"
except Exception as e:
sys.stderr.write(f"[DEBUG] Error parsing skeleton for {file_path}: {e}\n")
pass
except Exception as e:
sys.stderr.write(f"[DEBUG] Error in scan loop: {e}\n")
self.ai_status = f"Error generating skeletons: {e}"
return # Exit if skeleton generation fails
sys.stderr.write("[DEBUG] Skeleton generation complete. Starting tracks...\n")
# Now loop through tracks and call _start_track_logic with generated skeletons
total_tracks = len(self.proposed_tracks)
for i, track_data in enumerate(self.proposed_tracks):
title = track_data.get("title") or track_data.get("goal", "Untitled Track")
self.ai_status = f"Processing track {i+1} of {total_tracks}: '{title}'..."
self._start_track_logic(track_data, skeletons_str=generated_skeletons) # Pass skeletons
sys.stderr.write("[DEBUG] All tracks started. Refreshing...\n")
with self._pending_gui_tasks_lock:
self._pending_gui_tasks.append({'action': 'refresh_from_project'}) # Ensure UI refresh after tracks are started
self.ai_status = f"All {total_tracks} tracks accepted and execution started."
@@ -3135,7 +3151,6 @@ class AppController:
self._cb_load_track(track_id)
if self.active_track and self.active_track.id == track_id:
# Use the active track object directly to start execution
print(f"[DEBUG] _cb_start_track: track_id={self.active_track.id}, desc={self.active_track.description}")
self.mma_status = "running"
engine = multi_agent_conductor.ConductorEngine(self.active_track, self.event_queue, auto_queue=not self.mma_step_mode)
self.engines[self.active_track.id] = engine
@@ -3145,7 +3160,6 @@ class AppController:
self.ai_status = f"Track '{self.active_track.description}' started."
elif self.active_track and self.active_track.id != track_id:
# load_track failed but active_track is still wrong - reload explicitly
print(f"[DEBUG] _cb_start_track: load failed, trying reload track_id={track_id}")
self._cb_load_track(track_id)
if self.active_track and self.active_track.id == track_id:
self.mma_status = "running"