feat(perf): Add get_ui_performance AI tool
This commit is contained in:
1
gui.py
1
gui.py
@@ -501,6 +501,7 @@ class App:
|
||||
ai_client.confirm_and_run_callback = self._confirm_and_run
|
||||
ai_client.comms_log_callback = self._on_comms_entry
|
||||
ai_client.tool_log_callback = self._on_tool_log
|
||||
mcp_client.perf_monitor_callback = self.perf_monitor.get_metrics
|
||||
|
||||
# ---------------------------------------------------------------- project loading
|
||||
|
||||
|
||||
@@ -45,6 +45,9 @@ _allowed_paths: set[Path] = set()
|
||||
_base_dirs: set[Path] = set()
|
||||
_primary_base_dir: Path | None = None
|
||||
|
||||
# Injected by gui.py - returns a dict of performance metrics
|
||||
perf_monitor_callback = None
|
||||
|
||||
|
||||
def configure(file_items: list[dict], extra_base_dirs: list[str] | None = None):
|
||||
"""
|
||||
@@ -300,11 +303,27 @@ def fetch_url(url: str) -> str:
|
||||
return full_text
|
||||
except Exception as e:
|
||||
return f"ERROR fetching URL '{url}': {e}"
|
||||
|
||||
|
||||
def get_ui_performance() -> str:
    """Return a human-readable snapshot of the current UI performance metrics.

    Relies on the module-level ``perf_monitor_callback`` injected by gui.py,
    which returns a dict of metrics (FPS, frame time, CPU usage, input lag).
    On any failure an ``ERROR: ...`` string is returned instead of raising,
    so the AI tool layer always gets a plain-text result.
    """
    if perf_monitor_callback is None:
        return "ERROR: Performance monitor callback not registered."
    try:
        snapshot = perf_monitor_callback()
        # Strip dict punctuation ({, }, ') in one pass so the AI sees
        # plain "key: value, key: value" text instead of a repr().
        readable = str(snapshot).translate(str.maketrans("", "", "{}'"))
        return f"UI Performance Snapshot:\n{readable}"
    except Exception as e:
        return f"ERROR: Failed to retrieve UI performance: {str(e)}"
|
||||
|
||||
|
||||
# ------------------------------------------------------------------ tool dispatch
|
||||
|
||||
|
||||
# Names of tools the AI may invoke via dispatch(); must stay in sync with
# MCP_TOOL_SPECS below. (The diff residue left two assignments here — the
# first was dead code, immediately overwritten by the second; keep only the
# final, complete set.)
TOOL_NAMES = {
    "read_file",
    "list_directory",
    "search_files",
    "get_file_summary",
    "web_search",
    "fetch_url",
    "get_ui_performance",
}
|
||||
|
||||
|
||||
def dispatch(tool_name: str, tool_input: dict) -> str:
|
||||
@@ -323,6 +342,8 @@ def dispatch(tool_name: str, tool_input: dict) -> str:
|
||||
return web_search(tool_input.get("query", ""))
|
||||
if tool_name == "fetch_url":
|
||||
return fetch_url(tool_input.get("url", ""))
|
||||
if tool_name == "get_ui_performance":
|
||||
return get_ui_performance()
|
||||
return f"ERROR: unknown MCP tool '{tool_name}'"
|
||||
|
||||
|
||||
@@ -420,17 +441,11 @@ MCP_TOOL_SPECS = [
|
||||
}
|
||||
},
|
||||
{
|
||||
"name": "fetch_url",
|
||||
"description": "Fetch a webpage and extract its text content, removing HTML tags and scripts. Useful for reading documentation or articles found via web_search.",
|
||||
"name": "get_ui_performance",
|
||||
"description": "Get a snapshot of the current UI performance metrics, including FPS, Frame Time (ms), CPU usage (%), and Input Lag (ms). Use this to diagnose UI slowness or verify that your changes haven't degraded the user experience.",
|
||||
"parameters": {
|
||||
"type": "object",
|
||||
"properties": {
|
||||
"url": {
|
||||
"type": "string",
|
||||
"description": "The URL to fetch."
|
||||
}
|
||||
},
|
||||
"required": ["url"]
|
||||
"properties": {}
|
||||
}
|
||||
},
|
||||
}
|
||||
]
|
||||
|
||||
32
tests/test_mcp_perf_tool.py
Normal file
32
tests/test_mcp_perf_tool.py
Normal file
@@ -0,0 +1,32 @@
|
||||
import unittest
|
||||
from unittest.mock import MagicMock
|
||||
import mcp_client
|
||||
|
||||
class TestMCPPerfTool(unittest.TestCase):
    """Tests for the get_ui_performance MCP tool in mcp_client."""

    def test_get_ui_performance_dispatch(self):
        """dispatch() formats the callback's metrics dict as 'key: value' text."""
        mock_metrics = {
            'last_frame_time_ms': 16.6,
            'fps': 60.0,
            'cpu_percent': 15.5,
            'input_lag_ms': 5.0
        }
        # Save and restore the module-level callback so this test does not
        # leak a MagicMock into other tests (previously it was left mutated).
        original_callback = mcp_client.perf_monitor_callback
        self.addCleanup(
            setattr, mcp_client, 'perf_monitor_callback', original_callback
        )
        mcp_client.perf_monitor_callback = MagicMock(return_value=mock_metrics)

        result = mcp_client.dispatch("get_ui_performance", {})

        self.assertIn("UI Performance Snapshot:", result)
        self.assertIn("last_frame_time_ms: 16.6", result)
        self.assertIn("fps: 60.0", result)
        self.assertIn("cpu_percent: 15.5", result)
        self.assertIn("input_lag_ms: 5.0", result)

        mcp_client.perf_monitor_callback.assert_called_once()

    def test_tool_spec_exists(self):
        """The tool must be advertised in MCP_TOOL_SPECS so the AI can discover it."""
        spec_names = [spec["name"] for spec in mcp_client.MCP_TOOL_SPECS]
        self.assertIn("get_ui_performance", spec_names)
|
||||
|
||||
# Allow running this test module directly: python tests/test_mcp_perf_tool.py
if __name__ == '__main__':
    unittest.main()
|
||||
Reference in New Issue
Block a user