checkpoint: mma_orchestrator track

This commit is contained in:
2026-02-26 22:59:26 -05:00
parent f05fa3d340
commit d087a20f7b
17 changed files with 930 additions and 73 deletions

View File

@@ -6,7 +6,64 @@ import aggregate
import summarize
from pathlib import Path
def generate_tracks(user_request: str, project_config: dict, file_items: list[dict]) -> list[dict]:
CONDUCTOR_PATH = Path("conductor")


def get_track_history_summary() -> str:
    """
    Build a human-readable summary of past and current work tracks.

    Scans ``conductor/archive/`` and ``conductor/tracks/`` for track
    directories. For each one, reads the title and status from
    ``metadata.json`` and the "## Overview" section (or, failing that, a
    leading snippet) from ``spec.md``. Missing or unreadable files fall
    back to the directory name / "unknown" / "No overview available."

    Returns:
        One entry per track ("Track: ...\\nStatus: ...\\nOverview: ...\\n---"),
        joined by newlines, or the string "No previous tracks found."
        when neither directory contains any track.
    """
    summary_parts = []
    paths_to_scan = []
    for root in (CONDUCTOR_PATH / "archive", CONDUCTOR_PATH / "tracks"):
        if root.exists():
            paths_to_scan.extend(root.iterdir())
    # Sort so the summary is deterministic regardless of filesystem order.
    for track_dir in sorted(paths_to_scan):
        if not track_dir.is_dir():
            continue
        metadata_file = track_dir / "metadata.json"
        spec_file = track_dir / "spec.md"
        # Fallbacks used when metadata/spec files are missing or corrupt.
        title = track_dir.name
        status = "unknown"
        overview = "No overview available."
        if metadata_file.exists():
            try:
                meta = json.loads(metadata_file.read_text(encoding="utf-8"))
                title = meta.get("title", title)
                status = meta.get("status", status)
            except (OSError, ValueError):
                # Best-effort: a corrupt or unreadable metadata.json must not
                # abort the whole summary; keep the fallback values.
                # (json.JSONDecodeError is a subclass of ValueError.)
                pass
        if spec_file.exists():
            try:
                content = spec_file.read_text(encoding="utf-8")
                if "## Overview" in content:
                    # Extract only the body of the Overview section, up to
                    # the next "##" heading.
                    overview = content.split("## Overview")[1].split("##")[0].strip()
                else:
                    # No Overview heading: just take a snippet of the beginning.
                    overview = content[:200] + "..."
            except OSError:
                # Unreadable spec.md: keep the "No overview available." fallback.
                pass
        summary_parts.append(
            f"Track: {title}\nStatus: {status}\nOverview: {overview}\n---"
        )
    if not summary_parts:
        return "No previous tracks found."
    return "\n".join(summary_parts)
def generate_tracks(user_request: str, project_config: dict, file_items: list[dict], history_summary: str = None) -> list[dict]:
"""
Tier 1 (Strategic PM) call.
Analyzes the project state and user request to generate a list of Tracks.
@@ -16,42 +73,49 @@ def generate_tracks(user_request: str, project_config: dict, file_items: list[di
# 2. Construct Prompt
system_prompt = mma_prompts.PROMPTS.get("tier1_epic_init")
user_message = (
f"### USER REQUEST:
{user_request}
"
f"### REPOSITORY MAP:
{repo_map}
"
"Please generate the implementation tracks for this request."
)
# 3. Call Tier 1 Model (Strategic - Pro)
# Note: We use gemini-1.5-pro or similar high-reasoning model for Tier 1
response = ai_client.send(
md_content="", # We pass everything in user_message for clarity
user_message=user_message,
system_prompt=system_prompt,
model_name="gemini-1.5-pro" # Strategic Tier
)
user_message_parts = [
f"### USER REQUEST:\n{user_request}\n",
f"### REPOSITORY MAP:\n{repo_map}\n"
]
if history_summary:
user_message_parts.append(f"### TRACK HISTORY:\n{history_summary}\n")
user_message_parts.append("Please generate the implementation tracks for this request.")
user_message = "\n".join(user_message_parts)
# Set custom system prompt for this call
old_system_prompt = ai_client._custom_system_prompt
ai_client.set_custom_system_prompt(system_prompt)
# 4. Parse JSON Output
try:
# The prompt asks for a JSON array. We need to extract it if the AI added markdown blocks.
json_match = response.strip()
if "```json" in json_match:
json_match = json_match.split("```json")[1].split("```")[0].strip()
elif "```" in json_match:
json_match = json_match.split("```")[1].split("```")[0].strip()
tracks = json.loads(json_match)
return tracks
except Exception as e:
print(f"Error parsing Tier 1 response: {e}")
print(f"Raw response: {response}")
return []
# 3. Call Tier 1 Model (Strategic - Pro)
# Note: We use gemini-1.5-pro or similar high-reasoning model for Tier 1
response = ai_client.send(
md_content="", # We pass everything in user_message for clarity
user_message=user_message
)
# 4. Parse JSON Output
try:
# The prompt asks for a JSON array. We need to extract it if the AI added markdown blocks.
json_match = response.strip()
if "```json" in json_match:
json_match = json_match.split("```json")[1].split("```")[0].strip()
elif "```" in json_match:
json_match = json_match.split("```")[1].split("```")[0].strip()
tracks = json.loads(json_match)
return tracks
except Exception as e:
print(f"Error parsing Tier 1 response: {e}")
print(f"Raw response: {response}")
return []
finally:
# Restore old system prompt
ai_client.set_custom_system_prompt(old_system_prompt)
if __name__ == "__main__":
# Quick CLI test
@@ -61,5 +125,6 @@ if __name__ == "__main__":
file_items = aggregate.build_file_items(Path("."), flat.get("files", {}).get("paths", []))
print("Testing Tier 1 Track Generation...")
tracks = generate_tracks("Implement a basic unit test for the ai_client.py module.", flat, file_items)
history = get_track_history_summary()
tracks = generate_tracks("Implement a basic unit test for the ai_client.py module.", flat, file_items, history_summary=history)
print(json.dumps(tracks, indent=2))