feat(mma): Decouple UI from API calls using UserRequestEvent and AsyncEventQueue

This commit is contained in:
2026-02-26 20:45:23 -05:00
parent 5206c7c569
commit 68861c0744
5 changed files with 208 additions and 54 deletions

View File

@@ -63,3 +63,23 @@ class AsyncEventQueue:
A tuple containing (event_name, payload).
"""
return await self._queue.get()
class UserRequestEvent:
    """Payload describing a single user request to the AI backend.

    Carries everything the request handler needs: the user's prompt, the
    pre-rendered stable markdown context, the selected file items, the
    discussion text, and the base directory for file resolution.
    """

    def __init__(self, prompt: str, stable_md: str, file_items: List[Any], disc_text: str, base_dir: str):
        self.prompt = prompt
        self.stable_md = stable_md
        self.file_items = file_items
        self.disc_text = disc_text
        self.base_dir = base_dir

    def to_dict(self) -> Dict[str, Any]:
        """Return a plain-dict snapshot of all event fields."""
        # The instance __dict__ holds exactly the five constructor fields,
        # in insertion order; copy it so callers can't mutate our state.
        return dict(vars(self))

155
gui_2.py
View File

@@ -1,6 +1,7 @@
# gui_2.py # gui_2.py
import tomli_w import tomli_w
import threading import threading
import asyncio
import time import time
import math import math
import json import json
@@ -10,6 +11,7 @@ import uuid
import requests import requests
from pathlib import Path from pathlib import Path
from tkinter import filedialog, Tk from tkinter import filedialog, Tk
from typing import Optional, Callable
import aggregate import aggregate
import ai_client import ai_client
from ai_client import ProviderError from ai_client import ProviderError
@@ -109,11 +111,15 @@ class ConfirmDialog:
return self._approved, self._script return self._approved, self._script
class App: class ManualSlopGUI:
"""The main ImGui interface orchestrator for Manual Slop.""" """The main ImGui interface orchestrator for Manual Slop."""
def __init__(self): def __init__(self):
self.config = load_config() self.config = load_config()
self.event_queue = events.AsyncEventQueue()
self._loop = asyncio.new_event_loop()
self._loop_thread = threading.Thread(target=self._run_event_loop, daemon=True)
self._loop_thread.start()
ai_cfg = self.config.get("ai", {}) ai_cfg = self.config.get("ai", {})
self._current_provider: str = ai_cfg.get("provider", "gemini") self._current_provider: str = ai_cfg.get("provider", "gemini")
@@ -806,6 +812,21 @@ class App:
if action == "refresh_api_metrics": if action == "refresh_api_metrics":
self._refresh_api_metrics(task.get("payload", {})) self._refresh_api_metrics(task.get("payload", {}))
elif action == "handle_ai_response":
payload = task.get("payload", {})
self.ai_response = payload.get("text", "")
self.ai_status = payload.get("status", "done")
self._trigger_blink = True
if self.ui_auto_add_history:
role = payload.get("role", "AI")
with self._pending_history_adds_lock:
self._pending_history_adds.append({
"role": role,
"content": self.ai_response,
"collapsed": False,
"ts": project_manager.now_ts()
})
elif action == "set_value": elif action == "set_value":
item = task.get("item") item = task.get("item")
value = task.get("value") value = task.get("value")
@@ -944,61 +965,89 @@ class App:
def _handle_generate_send(self): def _handle_generate_send(self):
"""Logic for the 'Gen + Send' action.""" """Logic for the 'Gen + Send' action."""
send_busy = False try:
with self._send_thread_lock: md, path, file_items, stable_md, disc_text = self._do_generate()
if self.send_thread and self.send_thread.is_alive(): self.last_md = md
send_busy = True self.last_md_path = path
self.last_file_items = file_items
except Exception as e:
self.ai_status = f"generate error: {e}"
return
if not send_busy: self.ai_status = "sending..."
try: user_msg = self.ui_ai_input
md, path, file_items, stable_md, disc_text = self._do_generate() base_dir = self.ui_files_base_dir
self.last_md = md
self.last_md_path = path
self.last_file_items = file_items
except Exception as e:
self.ai_status = f"generate error: {e}"
return
self.ai_status = "sending..." # Prepare event payload
user_msg = self.ui_ai_input event_payload = events.UserRequestEvent(
base_dir = self.ui_files_base_dir prompt=user_msg,
csp = filter(bool, [self.ui_global_system_prompt.strip(), self.ui_project_system_prompt.strip()]) stable_md=stable_md,
ai_client.set_custom_system_prompt("\n\n".join(csp)) file_items=file_items,
ai_client.set_model_params(self.temperature, self.max_tokens, self.history_trunc_limit) disc_text=disc_text,
ai_client.set_agent_tools(self.ui_agent_tools) base_dir=base_dir
send_md = stable_md )
send_disc = disc_text
def do_send(): # Push to async queue
if self.ui_auto_add_history: asyncio.run_coroutine_threadsafe(
with self._pending_history_adds_lock: self.event_queue.put("user_request", event_payload),
self._pending_history_adds.append({"role": "User", "content": user_msg, "collapsed": False, "ts": project_manager.now_ts()}) self._loop
try: )
resp = ai_client.send(send_md, user_msg, base_dir, self.last_file_items, send_disc)
self.ai_response = resp
self.ai_status = "done"
self._trigger_blink = True
if self.ui_auto_add_history:
with self._pending_history_adds_lock:
self._pending_history_adds.append({"role": "AI", "content": resp, "collapsed": False, "ts": project_manager.now_ts()})
except ProviderError as e:
self.ai_response = e.ui_message()
self.ai_status = "error"
self._trigger_blink = True
if self.ui_auto_add_history:
with self._pending_history_adds_lock:
self._pending_history_adds.append({"role": "Vendor API", "content": self.ai_response, "collapsed": False, "ts": project_manager.now_ts()})
except Exception as e:
self.ai_response = f"ERROR: {e}"
self.ai_status = "error"
self._trigger_blink = True
if self.ui_auto_add_history:
with self._pending_history_adds_lock:
self._pending_history_adds.append({"role": "System", "content": self.ai_response, "collapsed": False, "ts": project_manager.now_ts()})
with self._send_thread_lock: def _run_event_loop(self):
self.send_thread = threading.Thread(target=do_send, daemon=True) """Runs the internal asyncio event loop."""
self.send_thread.start() asyncio.set_event_loop(self._loop)
self._loop.create_task(self._process_event_queue())
self._loop.run_forever()
async def _process_event_queue(self):
    """Listen forever on the AsyncEventQueue and dispatch each event.

    Runs inside the background asyncio loop. "user_request" events invoke
    the AI request handler; "response" events are forwarded to the GUI
    thread via the pending-tasks list (drained by the main-thread task
    dispatcher).
    """
    while True:
        event_name, payload = await self.event_queue.get()
        if event_name == "user_request":
            # _handle_request_event calls ai_client.send(), a blocking
            # network round-trip. Running it inline would freeze this
            # event loop (and delay the "response" events it schedules
            # with run_coroutine_threadsafe), so push it to a worker
            # thread and await its completion.
            await asyncio.to_thread(self._handle_request_event, payload)
        elif event_name == "response":
            # Hand the response to the GUI thread; ImGui state must only
            # be touched from the render thread, hence the task queue.
            with self._pending_gui_tasks_lock:
                self._pending_gui_tasks.append({
                    "action": "handle_ai_response",
                    "payload": payload,
                })
def _handle_request_event(self, event: events.UserRequestEvent):
    """Process a UserRequestEvent by calling the AI client.

    Optionally records the user prompt into pending history, configures
    the ai_client (system prompts, model params, agent tools), performs
    the send, and publishes exactly one "response" event back onto the
    queue — success or failure — for the GUI to consume.
    """
    if self.ui_auto_add_history:
        with self._pending_history_adds_lock:
            self._pending_history_adds.append({
                "role": "User",
                "content": event.prompt,
                "collapsed": False,
                "ts": project_manager.now_ts()
            })
    # Combine global + project system prompts, skipping blank entries.
    csp = filter(bool, [self.ui_global_system_prompt.strip(), self.ui_project_system_prompt.strip()])
    ai_client.set_custom_system_prompt("\n\n".join(csp))
    ai_client.set_model_params(self.temperature, self.max_tokens, self.history_trunc_limit)
    ai_client.set_agent_tools(self.ui_agent_tools)
    # Build exactly one response payload, then emit it once: the three
    # previous emit sites were identical apart from payload contents.
    # A missing "role" key means success; the GUI defaults it to "AI".
    try:
        resp = ai_client.send(event.stable_md, event.prompt, event.base_dir, event.file_items, event.disc_text)
        payload = {"text": resp, "status": "done"}
    except ProviderError as e:
        payload = {"text": e.ui_message(), "status": "error", "role": "Vendor API"}
    except Exception as e:
        payload = {"text": f"ERROR: {e}", "status": "error", "role": "System"}
    asyncio.run_coroutine_threadsafe(
        self.event_queue.put("response", payload),
        self._loop
    )
def _test_callback_func_write_to_file(self, data: str): def _test_callback_func_write_to_file(self, data: str):
"""A dummy function that a custom_callback would execute for testing.""" """A dummy function that a custom_callback would execute for testing."""
@@ -2546,7 +2595,7 @@ class App:
session_logger.close_session() session_logger.close_session()
def main(): def main():
app = App() app = ManualSlopGUI()
app.run() app.run()
if __name__ == "__main__": if __name__ == "__main__":

View File

@@ -20,6 +20,7 @@ dependencies = [
dev = [ dev = [
"pytest>=9.0.2", "pytest>=9.0.2",
"pytest-cov>=7.0.0", "pytest-cov>=7.0.0",
"pytest-asyncio>=0.25.3",
] ]
[tool.pytest.ini_options] [tool.pytest.ini_options]

View File

@@ -1115,7 +1115,12 @@ pygments==2.19.2 \
pytest==9.0.2 \ pytest==9.0.2 \
--hash=sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b \ --hash=sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b \
--hash=sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11 --hash=sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11
# via pytest-cov # via
# pytest-asyncio
# pytest-cov
pytest-asyncio==1.3.0 \
--hash=sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5 \
--hash=sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5
pytest-cov==7.0.0 \ pytest-cov==7.0.0 \
--hash=sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1 \ --hash=sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1 \
--hash=sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861 --hash=sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861
@@ -1249,6 +1254,7 @@ typing-extensions==4.15.0 \
# google-genai # google-genai
# pydantic # pydantic
# pydantic-core # pydantic-core
# pytest-asyncio
# starlette # starlette
# typing-inspection # typing-inspection
typing-inspection==0.4.2 \ typing-inspection==0.4.2 \

View File

@@ -0,0 +1,78 @@
import pytest
from unittest.mock import MagicMock, patch, AsyncMock
import asyncio
from gui_2 import ManualSlopGUI
from events import UserRequestEvent
@pytest.fixture
def mock_gui():
    """Yield a ManualSlopGUI whose external side effects are patched out.

    Config loading, project persistence, session logging, AI hook setup
    and model fetching are all stubbed so construction does no I/O.
    """
    stub_config = {
        "ai": {"provider": "gemini", "model": "model-1"},
        "projects": {"paths": [], "active": ""},
        "gui": {"show_windows": {}}
    }
    with (
        patch('gui_2.load_config', return_value=stub_config),
        patch('gui_2.project_manager.load_project', return_value={}),
        patch('gui_2.project_manager.migrate_from_legacy_config', return_value={}),
        patch('gui_2.project_manager.save_project'),
        patch('gui_2.session_logger.open_session'),
        patch('gui_2.ManualSlopGUI._init_ai_and_hooks'),
        patch('gui_2.ManualSlopGUI._fetch_models'),
    ):
        return ManualSlopGUI()
def test_handle_generate_send_pushes_event(mock_gui):
    """Gen+Send must wrap the generated context in a UserRequestEvent
    and schedule it on the async event queue instead of calling the
    API directly."""
    mock_gui._do_generate = MagicMock(
        return_value=("full_md", "path", [], "stable_md", "disc_text")
    )
    mock_gui.ui_ai_input = "test prompt"
    mock_gui.ui_files_base_dir = "."
    mock_gui.event_queue.put = MagicMock()

    # Stub the threadsafe scheduler: the coroutine handed to it is the
    # one produced by event_queue.put(), which is what we inspect below.
    with patch('asyncio.run_coroutine_threadsafe') as scheduler:
        mock_gui._handle_generate_send()

    assert scheduler.called
    mock_gui.event_queue.put.assert_called_once()
    (event_name, event), _kwargs = mock_gui.event_queue.put.call_args
    assert event_name == "user_request"
    assert isinstance(event, UserRequestEvent)
    assert event.prompt == "test prompt"
    assert event.stable_md == "stable_md"
    assert event.disc_text == "disc_text"
    assert event.base_dir == "."
def test_user_request_event_payload():
    """to_dict() must round-trip every constructor argument unchanged."""
    expected = {
        "prompt": "hello",
        "stable_md": "md",
        "file_items": [],
        "disc_text": "disc",
        "base_dir": "."
    }
    event = UserRequestEvent(**expected)
    assert event.to_dict() == expected
@pytest.mark.asyncio
async def test_async_event_queue():
    """put()/get() must preserve the event name and payload."""
    from events import AsyncEventQueue

    queue = AsyncEventQueue()
    await queue.put("test_event", {"data": 123})
    event_name, payload = await queue.get()
    assert event_name == "test_event"
    assert payload["data"] == 123