docs(conductor): Synchronize docs for track 'GUI Performance Profiling & Optimization'
This commit is contained in:
@@ -167,7 +167,8 @@ class AppController:
|
||||
"Tier 3": {"input": 0, "output": 0, "provider": "gemini", "model": "gemini-2.5-flash-lite"},
|
||||
"Tier 4": {"input": 0, "output": 0, "provider": "gemini", "model": "gemini-2.5-flash-lite"},
|
||||
}
|
||||
self.perf_monitor: performance_monitor.PerformanceMonitor = performance_monitor.PerformanceMonitor()
|
||||
self.perf_monitor: performance_monitor.PerformanceMonitor = performance_monitor.get_monitor()
|
||||
self._perf_profiling_enabled: bool = False
|
||||
self._pending_gui_tasks: List[Dict[str, Any]] = []
|
||||
self._api_event_queue: List[Dict[str, Any]] = []
|
||||
# Pending dialogs state moved from App
|
||||
@@ -354,6 +355,15 @@ class AppController:
|
||||
except Exception as e:
|
||||
self._inject_preview = f"Error reading file: {e}"
|
||||
|
||||
@property
def perf_profiling_enabled(self) -> bool:
    """Whether performance profiling instrumentation is currently enabled."""
    return self._perf_profiling_enabled

@perf_profiling_enabled.setter
def perf_profiling_enabled(self, enabled: bool) -> None:
    """Set the profiling flag and mirror it onto the performance monitor.

    Keeps ``self.perf_monitor.enabled`` in sync so the monitor starts/stops
    recording together with the controller-level flag.
    """
    self.perf_monitor.enabled = enabled
    self._perf_profiling_enabled = enabled
|
||||
|
||||
@property
def thinking_indicator(self) -> bool:
    """True while the AI is actively servicing a request.

    Derived from ``self.ai_status``: the indicator is on exactly when the
    status is in one of the two in-flight states.
    """
    busy_states = {"sending...", "streaming..."}
    return self.ai_status in busy_states
|
||||
@@ -911,6 +921,7 @@ class AppController:
|
||||
|
||||
def _run_event_loop(self):
|
||||
"""Internal loop runner."""
|
||||
if getattr(self, 'perf_profiling_enabled', False): self.perf_monitor.start_component("_run_event_loop")
|
||||
|
||||
def queue_fallback() -> None:
|
||||
while True:
|
||||
@@ -924,6 +935,7 @@ class AppController:
|
||||
fallback_thread = threading.Thread(target=queue_fallback, daemon=True)
|
||||
fallback_thread.start()
|
||||
self._process_event_queue()
|
||||
if getattr(self, 'perf_profiling_enabled', False): self.perf_monitor.end_component("_run_event_loop")
|
||||
|
||||
def _process_event_queue(self) -> None:
|
||||
"""Listens for and processes events from the SyncEventQueue."""
|
||||
@@ -983,6 +995,7 @@ class AppController:
|
||||
|
||||
def _handle_request_event(self, event: events.UserRequestEvent) -> None:
|
||||
"""Processes a UserRequestEvent by calling the AI client."""
|
||||
if getattr(self, 'perf_profiling_enabled', False): self.perf_monitor.start_component("_handle_request_event")
|
||||
ai_client.set_current_tier(None) # Ensure main discussion is untagged
|
||||
if self.ui_auto_add_history:
|
||||
with self._pending_history_adds_lock:
|
||||
@@ -1025,6 +1038,7 @@ class AppController:
|
||||
sys.stderr.write(f"[DEBUG] _handle_request_event ERROR: {e}\n{traceback.format_exc()}\n")
|
||||
sys.stderr.flush()
|
||||
self.event_queue.put("response", {"text": f"ERROR: {e}", "status": "error", "role": "System"})
|
||||
if getattr(self, 'perf_profiling_enabled', False): self.perf_monitor.end_component("_handle_request_event")
|
||||
|
||||
def _on_ai_stream(self, text: str) -> None:
|
||||
"""Handles streaming text from the AI."""
|
||||
|
||||
Reference in New Issue
Block a user