chore(mma): Implement visual simulation for Epic planning and fix UI refresh

2026-02-28 21:07:46 -05:00
parent 3d861ecf08
commit d65fa79e26
11 changed files with 237 additions and 28 deletions
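Note: the mock provider path added below can be exercised without a live backend by patching the module globals it reads. A minimal test sketch, assuming the changed file is importable as providers and that _model and _custom_system_prompt are plain module-level names (the import path and test name are hypothetical, not part of this commit):

# Hypothetical test sketch; the module name "providers" and the patched globals are assumptions.
import json
from unittest import mock

import providers  # assumed import path for the file changed in this commit

def test_mock_epic_planning_returns_json():
    with mock.patch.object(providers, "_model", "mock"), \
         mock.patch.object(providers, "_custom_system_prompt", "tier1_epic_init"):
        raw = providers.send(md_content="", user_message="plan the epic")
    tracks = json.loads(raw)
    assert len(tracks) == 2 and tracks[0]["type"] == "epic"

With _custom_system_prompt set to a string containing 'tier2_sprint_planning' instead, the same call returns the mock sprint tickets.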


@@ -1614,17 +1614,23 @@ def run_tier4_analysis(stderr: str) -> str:
        return f"[QA ANALYSIS FAILED] {e}"

# ------------------------------------------------------------------ unified send
import json
from typing import Any, Callable, Optional, List
# Assuming _model, _system_prompt, _provider, _send_lock are module-level variables
# and the _send_xxx functions are also defined at module level.
def send(
    md_content: str,
    user_message: str,
    base_dir: str = ".",
    file_items: list[dict[str, Any]] | None = None,
    discussion_history: str = "",
    stream: bool = False,
    pre_tool_callback: Optional[Callable[[str], bool]] = None,
    qa_callback: Optional[Callable[[str], str]] = None,
) -> str:
"""
"""
Send a message to the active provider.
md_content : aggregated markdown string (for Gemini: stable content only,
@@ -1639,16 +1645,72 @@ def send(
    pre_tool_callback : Optional callback (payload: str) -> bool called before tool execution
    qa_callback : Optional callback (stderr: str) -> str called for Tier 4 error analysis
    """
    # --- START MOCK LOGIC ---
    # Assuming _model, _custom_system_prompt, and _system_prompt are module-level variables.
    # If _model is not 'mock', proceed to the original provider logic below.
    if _model == 'mock':
        mock_response_content = None
        # Use _custom_system_prompt for keyword detection
        current_system_prompt = _custom_system_prompt  # Assuming _custom_system_prompt is accessible and defined
        if 'tier1_epic_init' in current_system_prompt:
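            # Two mock epics matching the Tier 1 track schema
            # (id, type, module, persona, severity, goal, acceptance_criteria).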
            mock_response_content = [
                {
                    "id": "mock-track-1",
                    "type": "epic",
                    "module": "conductor",
                    "persona": "Tier 1 Orchestrator",
                    "severity": "high",
                    "goal": "Initialize a new track.",
                    "acceptance_criteria": "Track created successfully with required fields."
                },
                {
                    "id": "mock-track-2",
                    "type": "epic",
                    "module": "conductor",
                    "persona": "Tier 1 Orchestrator",
                    "severity": "medium",
                    "goal": "Initialize another track.",
                    "acceptance_criteria": "Second track created successfully."
                }
            ]
        elif 'tier2_sprint_planning' in current_system_prompt:
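            # Two mock tickets matching the Tier 2 sprint-planning schema;
            # mock-ticket-2 depends on mock-ticket-1 to simulate a dependency chain.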
            mock_response_content = [
                {
                    "id": "mock-ticket-1",
                    "type": "story",
                    "goal": "Implement feature X.",
                    "target_file": "src/feature_x.py",
                    "depends_on": [],
                    "context_requirements": ["requirements.txt", "main.py"]
                },
                {
                    "id": "mock-ticket-2",
                    "type": "bug",
                    "goal": "Fix bug Y.",
                    "target_file": "src/bug_y.py",
                    "depends_on": ["mock-ticket-1"],
                    "context_requirements": ["tests/test_bug_y.py"]
                }
            ]
        else:
            mock_response_content = "Mock AI Response"
        # The function is typed to return 'str', so we return a JSON string.
        # Ensure 'json' is imported at the module level.
        return json.dumps(mock_response_content)
    # --- END MOCK LOGIC ---
    with _send_lock:
        if _provider == "gemini":
            return _send_gemini(md_content, user_message, base_dir, file_items, discussion_history, pre_tool_callback, qa_callback)
        elif _provider == "gemini_cli":
            return _send_gemini_cli(md_content, user_message, base_dir, file_items, discussion_history, pre_tool_callback, qa_callback)
        elif _provider == "anthropic":
            return _send_anthropic(md_content, user_message, base_dir, file_items, discussion_history, pre_tool_callback, qa_callback)
        elif _provider == "deepseek":
            return _send_deepseek(md_content, user_message, base_dir, file_items, discussion_history, stream=stream, pre_tool_callback=pre_tool_callback, qa_callback=qa_callback)
        raise ValueError(f"unknown provider: {_provider}")
def get_history_bleed_stats(md_content: str | None = None) -> dict[str, Any]:
"""