feat(perf): Expand instrumentation with context manager and extended metrics
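The call-site pattern is the same in every touched module: the function body is wrapped in the monitor's new scope() context manager, which drives the existing start_component()/end_component() pair on entry and exit. A minimal sketch of that pattern, with get_monitor imported as in the hunks below; the function and component name are illustrative, not a verbatim excerpt of the commit:

    from src.performance_monitor import get_monitor

    def build_report() -> str:
        # scope() returns a PerformanceScope: __enter__ starts the component
        # timer, __exit__ stops it, including when the body raises.
        with get_monitor().scope("build_report"):
            return "report"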
@@ -21,6 +21,7 @@ from src import summarize
 from src import project_manager
 from src import beads_client
 from src.file_cache import ASTParser
+from src.performance_monitor import get_monitor

 def find_next_increment(output_dir: Path, namespace: str) -> int:
     pattern = re.compile(rf"^{re.escape(namespace)}_(\d+)\.md$")
@@ -132,6 +133,7 @@ def build_file_items(base_dir: Path, files: list[str | dict[str, Any]]) -> list[
     auto_aggregate : bool
     force_full : bool
     """
+    with get_monitor().scope("build_file_items"):
     items: list[dict[str, Any]] = []
     for entry_raw in files:
         if isinstance(entry_raw, dict):
@@ -176,6 +178,7 @@ def build_summary_section(base_dir: Path, files: list[str | dict[str, Any]]) ->
     Build a compact summary section using summarize.py — one short block per file.
     Used as the initial <context> block instead of full file contents.
     """
+    with get_monitor().scope("build_summary_section"):
     items = build_file_items(base_dir, files)
     return summarize.build_summary_markdown(items)

@@ -297,6 +300,7 @@ def build_tier3_context(file_items: list[dict[str, Any]], screenshot_base_dir: P
     Tier 3 Context: Execution/Worker.
     Full content for focus_files and files with tier=3, summaries/skeletons for others.
     """
+    with get_monitor().scope("build_tier3_context"):
     parts = []
     if file_items:
         sections = []
@@ -336,6 +340,7 @@ def build_tier3_context(file_items: list[dict[str, Any]], screenshot_base_dir: P
     return "\n\n---\n\n".join(parts)

 def build_markdown(base_dir: Path, files: list[str | dict[str, Any]], screenshot_base_dir: Path, screenshots: list[str], history: list[str], summary_only: bool = False, execution_mode: str = "standard") -> str:
+    with get_monitor().scope("build_markdown"):
     parts = []
     # STATIC PREFIX: Files and Screenshots must go first to maximize Cache Hits
     if files:
@@ -28,6 +28,7 @@ See Also:
 """
 from typing import List
 from src.models import Ticket
+from src.performance_monitor import get_monitor

 class TrackDAG:
     """
@@ -87,6 +88,7 @@ class TrackDAG:
         Returns:
             True if a cycle is detected, False otherwise.
         """
+        with get_monitor().scope("dag_has_cycle"):
         visited = set()
         rec_stack = set()

@@ -119,6 +121,7 @@ class TrackDAG:
         Raises:
             ValueError: If a dependency cycle is detected.
         """
+        with get_monitor().scope("dag_topological_sort"):
         if self.has_cycle():
             raise ValueError("Dependency cycle detected")
         visited = set()
@@ -161,6 +164,7 @@ class ExecutionEngine:
         Returns:
             A list of ready Ticket objects.
         """
+        with get_monitor().scope("dag_tick"):
         self.dag.cascade_blocks()
         ready = self.dag.get_ready_tasks()
         return ready
@@ -185,4 +189,3 @@ class ExecutionEngine:
         ticket = self.dag.ticket_map.get(task_id)
         if ticket:
             ticket.status = status
-
@@ -2348,15 +2348,21 @@ class App:
         if self.perf_profiling_enabled:
             imgui.separator()
             imgui.text("Detailed Component Timings (Moving Average)")
-            if imgui.begin_table("comp_timings", 3, imgui.TableFlags_.borders):
+            if imgui.begin_table("comp_timings", 6, imgui.TableFlags_.borders):
                 imgui.table_setup_column("Component")
                 imgui.table_setup_column("Avg (ms)")
+                imgui.table_setup_column("Count")
+                imgui.table_setup_column("Max (ms)")
+                imgui.table_setup_column("Min (ms)")
                 imgui.table_setup_column("Graph")
                 imgui.table_headers_row()
                 for key, val in metrics.items():
                     if key.startswith("time_") and key.endswith("_ms") and not key.endswith("_avg"):
                         comp_name = key[5:-3]
                         avg_val = metrics.get(f"{key}_avg", val)
+                        count = int(metrics.get(f"count_{comp_name}", 0))
+                        max_val = metrics.get(f"max_{comp_name}_ms", 0.0)
+                        min_val = metrics.get(f"min_{comp_name}_ms", 0.0)
                         imgui.table_next_row()
                         imgui.table_next_column()
                         imgui.text(comp_name)
@@ -2366,6 +2372,12 @@ class App:
                         else:
                             imgui.text(f"{avg_val:.2f}")
                         imgui.table_next_column()
+                        imgui.text(f"{count}")
+                        imgui.table_next_column()
+                        imgui.text(f"{max_val:.2f}")
+                        imgui.table_next_column()
+                        imgui.text(f"{min_val:.2f}")
+                        imgui.table_next_column()
                         self.perf_show_graphs.setdefault(comp_name, False)
                         _, self.perf_show_graphs[comp_name] = imgui.checkbox(f"##g_{comp_name}", self.perf_show_graphs[comp_name])
                 imgui.end_table()
|
|||||||
_instance: Optional[PerformanceMonitor] = None
|
_instance: Optional[PerformanceMonitor] = None
|
||||||
|
|
||||||
|
|
||||||
|
class PerformanceScope:
|
||||||
|
"""Helper class for PerformanceMonitor.scope() context manager."""
|
||||||
|
def __init__(self, monitor: PerformanceMonitor, name: str) -> None:
|
||||||
|
self.monitor = monitor
|
||||||
|
self.name = name
|
||||||
|
def __enter__(self) -> PerformanceScope:
|
||||||
|
self.monitor.start_component(self.name)
|
||||||
|
return self
|
||||||
|
def __exit__(self, exc_type: Any, exc_val: Any, exc_tb: Any) -> None:
|
||||||
|
self.monitor.end_component(self.name)
|
||||||
|
|
||||||
|
|
||||||
def get_monitor() -> PerformanceMonitor:
|
def get_monitor() -> PerformanceMonitor:
|
||||||
global _instance
|
global _instance
|
||||||
if _instance is None:
|
if _instance is None:
|
||||||
@@ -90,6 +102,9 @@ class PerformanceMonitor:
|
|||||||
|
|
||||||
self._component_starts: dict[str, float] = {}
|
self._component_starts: dict[str, float] = {}
|
||||||
self._component_timings: dict[str, float] = {}
|
self._component_timings: dict[str, float] = {}
|
||||||
|
self._component_counts: dict[str, int] = {}
|
||||||
|
self._component_max: dict[str, float] = {}
|
||||||
|
self._component_min: dict[str, float] = {}
|
||||||
|
|
||||||
# Rolling history and running sums for O(1) average calculation
|
# Rolling history and running sums for O(1) average calculation
|
||||||
# deques are thread-safe for appends and pops.
|
# deques are thread-safe for appends and pops.
|
||||||
@@ -192,6 +207,11 @@ class PerformanceMonitor:
|
|||||||
elapsed = (now - start) * 1000
|
elapsed = (now - start) * 1000
|
||||||
with self._lock:
|
with self._lock:
|
||||||
self._component_timings[name] = elapsed
|
self._component_timings[name] = elapsed
|
||||||
|
self._component_counts[name] = self._component_counts.get(name, 0) + 1
|
||||||
|
if name not in self._component_max or elapsed > self._component_max[name]:
|
||||||
|
self._component_max[name] = elapsed
|
||||||
|
if name not in self._component_min or elapsed < self._component_min[name]:
|
||||||
|
self._component_min[name] = elapsed
|
||||||
self._add_to_history(f'comp_{name}', elapsed)
|
self._add_to_history(f'comp_{name}', elapsed)
|
||||||
|
|
||||||
def get_metrics(self) -> dict[str, float]:
|
def get_metrics(self) -> dict[str, float]:
|
||||||
@@ -203,6 +223,9 @@ class PerformanceMonitor:
|
|||||||
ilag = self._input_lag_ms
|
ilag = self._input_lag_ms
|
||||||
last_calc_fps = self._last_calculated_fps
|
last_calc_fps = self._last_calculated_fps
|
||||||
timings_snapshot = dict(self._component_timings)
|
timings_snapshot = dict(self._component_timings)
|
||||||
|
counts_snapshot = dict(self._component_counts)
|
||||||
|
max_snapshot = dict(self._component_max)
|
||||||
|
min_snapshot = dict(self._component_min)
|
||||||
|
|
||||||
metrics = {
|
metrics = {
|
||||||
'fps': fps,
|
'fps': fps,
|
||||||
@@ -217,6 +240,9 @@ class PerformanceMonitor:
|
|||||||
for name, elapsed in timings_snapshot.items():
|
for name, elapsed in timings_snapshot.items():
|
||||||
metrics[f'time_{name}_ms'] = elapsed
|
metrics[f'time_{name}_ms'] = elapsed
|
||||||
metrics[f'time_{name}_ms_avg'] = self._get_avg(f'comp_{name}')
|
metrics[f'time_{name}_ms_avg'] = self._get_avg(f'comp_{name}')
|
||||||
|
metrics[f'count_{name}'] = float(counts_snapshot.get(name, 0))
|
||||||
|
metrics[f'max_{name}_ms'] = max_snapshot.get(name, 0.0)
|
||||||
|
metrics[f'min_{name}_ms'] = min_snapshot.get(name, 0.0)
|
||||||
return metrics
|
return metrics
|
||||||
|
|
||||||
def get_history(self, key: str) -> List[float]:
|
def get_history(self, key: str) -> List[float]:
|
||||||
@@ -228,6 +254,10 @@ class PerformanceMonitor:
|
|||||||
return list(self._history[f'comp_{key}'])
|
return list(self._history[f'comp_{key}'])
|
||||||
return []
|
return []
|
||||||
|
|
||||||
|
def scope(self, name: str) -> PerformanceScope:
|
||||||
|
"""Returns a context manager for timing a component."""
|
||||||
|
return PerformanceScope(self, name)
|
||||||
|
|
||||||
def stop(self) -> None:
|
def stop(self) -> None:
|
||||||
self._stop_event.set()
|
self._stop_event.set()
|
||||||
if self._cpu_thread.is_alive():
|
if self._cpu_thread.is_alive():
|
||||||
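For reference, a consumer-side sketch of the keys a timed component now exposes through get_metrics(); it mirrors the monitor API above and the new tests below. The module path is assumed from the import lines in the earlier hunks and the component name is illustrative:

    import time
    from src.performance_monitor import PerformanceMonitor

    pm = PerformanceMonitor()
    pm.enabled = True

    with pm.scope("demo"):          # illustrative component name
        time.sleep(0.01)

    m = pm.get_metrics()
    # Pre-existing keys: last elapsed time and rolling average.
    print(m["time_demo_ms"], m["time_demo_ms_avg"])
    # Keys added by this commit: invocation count, max and min elapsed time.
    print(m["count_demo"], m["max_demo_ms"], m["min_demo_ms"])

    pm.stop()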
@@ -26,3 +26,57 @@ def test_perf_monitor_component_timing() -> None:
     metrics = pm.get_metrics()
     assert metrics['time_test_comp_ms'] >= 10.0
     pm.stop()
+
+def test_perf_monitor_scope_context_manager() -> None:
+    pm = PerformanceMonitor()
+    pm.enabled = True
+
+    # Test normal usage
+    with pm.scope("test_scope"):
+        time.sleep(0.01)
+
+    metrics = pm.get_metrics()
+    assert metrics['time_test_scope_ms'] >= 10.0
+
+    # Test exception handling
+    try:
+        with pm.scope("test_error"):
+            time.sleep(0.01)
+            raise ValueError("test error")
+    except ValueError:
+        pass
+
+    metrics = pm.get_metrics()
+    # Component should still be finished, so timing should be recorded
+    assert metrics['time_test_error_ms'] >= 10.0
+
+    pm.stop()
+
+def test_perf_monitor_extended_metrics() -> None:
+    pm = PerformanceMonitor()
+    pm.enabled = True
+
+    # 1st call: 10ms
+    pm.start_component("test_comp")
+    time.sleep(0.01)
+    pm.end_component("test_comp")
+
+    # 2nd call: 30ms
+    pm.start_component("test_comp")
+    time.sleep(0.03)
+    pm.end_component("test_comp")
+
+    # 3rd call: 20ms
+    pm.start_component("test_comp")
+    time.sleep(0.02)
+    pm.end_component("test_comp")
+
+    metrics = pm.get_metrics()
+    assert metrics['count_test_comp'] == 3.0
+    assert metrics['max_test_comp_ms'] >= 30.0
+    assert metrics['min_test_comp_ms'] >= 10.0
+    assert metrics['min_test_comp_ms'] < 20.0
+
+    pm.stop()
+
+