feat(ui): Enhanced context control with per-file flags and Gemini cache awareness

This commit was made on:
2026-03-07 12:13:08 -05:00
parent 61f331aee6
commit d7a6ba7e51
10 changed files with 383 additions and 28 deletions

View File

@@ -147,6 +147,7 @@ class AppController:
self._tool_log: List[Dict[str, Any]] = []
self._tool_stats: Dict[str, Dict[str, Any]] = {} # {tool_name: {"count": 0, "total_time_ms": 0.0, "failures": 0}}
self._cached_cache_stats: Dict[str, Any] = {} # Pre-computed cache stats for GUI
self._cached_files: List[str] = []
self._token_history: List[Dict[str, Any]] = [] # Token usage over time [{"time": t, "input": n, "output": n, "model": s}, ...]
self._session_start_time: float = time.time() # For calculating burn rate
self._ticket_start_times: dict[str, float] = {}
@@ -702,10 +703,19 @@ class AppController:
self.project_paths = list(projects_cfg.get("paths", []))
self.active_project_path = projects_cfg.get("active", "")
self._load_active_project()
self.files = list(self.project.get("files", {}).get("paths", []))
# Deserialize FileItems in files.paths
raw_paths = self.project.get("files", {}).get("paths", [])
self.files = []
for p in raw_paths:
if isinstance(p, models.FileItem):
self.files.append(p)
elif isinstance(p, dict):
self.files.append(models.FileItem.from_dict(p))
else:
self.files.append(models.FileItem(path=str(p)))
self.screenshots = list(self.project.get("screenshots", {}).get("paths", []))
disc_sec = self.project.get("discussion", {})
self.disc_roles = list(disc_sec.get("roles", ["User", "AI", "Vendor API", "System"]))
self.disc_roles = list(disc_sec.get("roles", ["User", "AI", "Vendor API", "System", "Reasoning", "Context"]))
self.active_discussion = disc_sec.get("active", "main")
disc_data = disc_sec.get("discussions", {}).get(self.active_discussion, {})
with self._disc_entries_lock:
@@ -1804,6 +1814,9 @@ class AppController:
if k in usage:
usage[k] += u.get(k, 0) or 0
self.session_usage = usage
# Update cached files list
stats = ai_client.get_gemini_cache_stats()
self._cached_files = stats.get("cached_files", [])
def _refresh_api_metrics(self, payload: dict[str, Any], md_content: str | None = None) -> None:
if "latency" in payload: