"""Integration tests for GUI UX event routing and track creation.

Covers the changes in this iteration:
- Live Worker Streaming: wire ai_client.comms_log_callback to Tier 3 streams.
- Parallel DAG Execution using asyncio.gather for non-dependent tickets.
- Automatic Retry with Model Escalation (Flash-Lite -> Flash -> Pro).
- Tier Model Configuration UI in the MMA Dashboard with project TOML persistence.
- Fix FPS reporting in PerformanceMonitor to prevent transient 0.0 values.
- Update Ticket model with retry_count and dictionary-like access.
- Stabilize Gemini CLI integration tests; handle script approval events in simulations.
- Finalize and verify all 6 phases of the implementation plan.
"""
# Standard library first, third-party next, then the local test client.
import json
import os
import sys
import time

import pytest

# Make the project root importable so the local hook client resolves
# when pytest is launched from the tests directory.
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))

from api_hook_client import ApiHookClient
@pytest.mark.integration
@pytest.mark.timeout(60)
def test_gui_ux_event_routing(live_gui) -> None:
    """Exercise the GUI hook API end to end against the live GUI fixture.

    Verifies three things in sequence: streaming text appends reach the
    named MMA stream, state updates (status / tier usage) round-trip
    through the status endpoint, and the reported FPS stays at or above
    the 30.0 floor.
    """
    client = ApiHookClient()
    assert client.wait_for_server(timeout=15), "Hook server did not start"

    # ------------------------------------------------------------------
    # 1. Streaming event routing
    # ------------------------------------------------------------------
    print("[SIM] Testing Streaming Event Routing...")
    stream_id = "Tier 3 (Worker): T-SIM-001"

    # push_event POSTs to /api/gui with action=mma_stream_append,
    # as defined in App._process_pending_gui_tasks.
    for chunk, pause in (('Hello ', 0.5), ('World!', 1.0)):
        client.push_event('mma_stream_append', {'stream_id': stream_id, 'text': chunk})
        time.sleep(pause)

    streams = client.get_mma_status().get('mma_streams', {})
    assert streams.get(stream_id) == 'Hello World!', f"Streaming failed: {streams.get(stream_id)}"
    print("[SIM] Streaming event routing verified.")

    # ------------------------------------------------------------------
    # 2. State update (usage/cost) routing
    # ------------------------------------------------------------------
    print("[SIM] Testing State Update Routing...")
    usage = {
        "Tier 1": {"input": 1000, "output": 500, "model": "gemini-3.1-pro-preview"},
        "Tier 2": {"input": 2000, "output": 1000, "model": "gemini-3-flash-preview"},
    }

    client.push_event('mma_state_update', {
        'status': 'simulating',
        'tier_usage': usage,
        'tickets': [],
    })
    time.sleep(1.0)

    status = client.get_mma_status()
    assert status.get('mma_status') == 'simulating'
    # The app merges or replaces usage; inspect whatever came back.
    received_usage = status.get('mma_tier_usage', {})
    assert received_usage.get('Tier 1', {}).get('input') == 1000
    assert received_usage.get('Tier 2', {}).get('model') == 'gemini-3-flash-preview'
    print("[SIM] State update routing verified.")

    # ------------------------------------------------------------------
    # 3. Performance
    # ------------------------------------------------------------------
    print("[SIM] Testing Performance...")
    # Let at least one second of frame data accumulate so the FPS
    # calculation has real samples to average over.
    time.sleep(2.0)
    perf_data = client.get_performance()
    assert perf_data is not None, "Failed to retrieve performance metrics"

    perf = perf_data.get('performance', {})
    fps = perf.get('fps', 0.0)
    total_frames = perf.get('total_frames', 0)
    print(f"[SIM] Current FPS: {fps}, Total Frames: {total_frames}")
    assert fps >= 30.0, f"Performance degradation: {fps} FPS < 30.0 (Total Frames: {total_frames})"
    print("[SIM] Performance verified.")
@pytest.mark.integration
@pytest.mark.timeout(60)
def test_gui_track_creation(live_gui) -> None:
    """Drive the track-creation form through the GUI hook API and verify
    that a track directory with a valid metadata.json appears on disk.

    Fixes over the previous version: `conductor/tracks/` is asserted to
    exist before listing (a missing dir previously surfaced as an opaque
    FileNotFoundError), and metadata.json is read with an explicit UTF-8
    encoding instead of the locale default.
    """
    client = ApiHookClient()
    assert client.wait_for_server(timeout=15), "Hook server did not start"

    print("[SIM] Testing Track Creation via GUI...")
    track_name = 'UX_SIM_TEST'
    track_desc = 'Simulation testing for GUI UX'
    track_type = 'feature'

    # Fill the new-track form fields, then click the create button.
    client.set_value('ui_new_track_name', track_name)
    client.set_value('ui_new_track_desc', track_desc)
    client.set_value('ui_new_track_type', track_type)

    client.click('btn_mma_create_track')
    time.sleep(2.0)

    tracks_dir = 'conductor/tracks/'
    # Fail with a clear message if track creation never produced the
    # tracks root at all (os.listdir would otherwise raise).
    assert os.path.isdir(tracks_dir), f"Tracks directory missing: {tracks_dir}"

    found = False
    # The implementation lowercases the name and replaces spaces with
    # underscores when building the directory name.
    search_prefix = track_name.lower().replace(' ', '_')

    for entry in os.listdir(tracks_dir):
        if entry.startswith(search_prefix) and os.path.isdir(os.path.join(tracks_dir, entry)):
            found = True
            metadata_path = os.path.join(tracks_dir, entry, 'metadata.json')
            assert os.path.exists(metadata_path), f"metadata.json missing in {entry}"

            # Explicit encoding: JSON is presumably written as UTF-8 —
            # avoids locale-dependent decode failures on other machines.
            with open(metadata_path, 'r', encoding='utf-8') as f:
                meta = json.load(f)

            assert meta.get('status') == 'new'
            assert meta.get('title') == track_name
            print(f"[SIM] Verified track directory: {entry}")
            break

    assert found, f"Track directory starting with {search_prefix} not found."
    print("[SIM] Track creation verified.")
if __name__ == "__main__":
    # Intentional no-op: these tests require the pytest `live_gui`
    # fixture and must be run via pytest, not executed directly.
    pass