# manual_slop/tests/test_perf_aggregate.py
import time
from pathlib import Path
from src.aggregate import build_tier3_context
from src.performance_monitor import get_monitor
def test_build_tier3_context_scaling():
    """Scaling check for build_tier3_context.

    Feeds 500 mock file items (100 of them marked as focus files) through
    build_tier3_context, times the call under the performance monitor, and
    verifies that focus files keep full content while non-focus .py files
    are reduced to AST skeletons.
    """
    monitor = get_monitor()
    monitor.enabled = True

    # 1. Build 500 mock file items, each containing one tiny function.
    mock_items = []
    for idx in range(500):
        file_path = Path(f"src/file_{idx}.py")
        mock_items.append(
            {
                "path": file_path,
                "entry": str(file_path),
                "content": f"def func_{idx}():\n \"\"\"Docstring for {idx}\"\"\"\n pass\n",
                "auto_aggregate": True,
                "tier": 0,
            }
        )

    # 2. The first 100 files are the focus set.
    focus = [str(Path(f"src/file_{idx}.py")) for idx in range(100)]

    # 3. Time the aggregation under the monitor's scope.
    with monitor.scope("test_build_tier3_context_scaling"):
        started = time.perf_counter()
        # screenshot_base_dir, screenshots, history are empty Path/lists for this test
        result = build_tier3_context(mock_items, Path("assets"), [], [], focus)
        finished = time.perf_counter()
        elapsed_ms = (finished - started) * 1000
        print(f"build_tier3_context took {elapsed_ms:.2f} ms for 500 items and 100 focus files")

    # 4. Focus files must appear with full (non-skeletonized) content.
    for idx in range(100):
        entry_name = str(Path(f"src/file_{idx}.py"))
        assert f"### `{entry_name}`" in result
        assert f"def func_{idx}():" in result
        assert "pass" in result

    # Non-focus files should be skeletonized — sample a handful.
    for idx in range(100, 110):
        entry_name = str(Path(f"src/file_{idx}.py"))
        # Non-focus .py files get an "(AST Skeleton)" suffix in their header.
        header = f"### `{entry_name}` (AST Skeleton)"
        assert header in result
        assert f"def func_{idx}():" in result
        assert f"\"\"\"Docstring for {idx}\"\"\"" in result
        # The skeleton must have stripped the body 'pass' from this file's section.
        section = result.split(header)[1].split("###")[0]
        assert "pass" not in section

    # 5. The monitor should have recorded a timing metric for the scope.
    metrics = monitor.get_metrics()
    assert "time_test_build_tier3_context_scaling_ms" in metrics
    print(f"Recorded metric: {metrics['time_test_build_tier3_context_scaling_ms']:.2f} ms")
if __name__ == "__main__":
    # Allow running this test directly as a script, outside pytest.
    try:
        test_build_tier3_context_scaling()
        print("SUCCESS")
    except Exception as e:
        import traceback

        traceback.print_exc()
        print(f"FAILED: {e}")
        # Use SystemExit instead of the bare `exit()` builtin: `exit` is a
        # `site`-module convenience intended for the interactive prompt and is
        # absent when Python runs with -S or in some embedded environments.
        raise SystemExit(1)