Botched: need to use a higher-reasoning model to fix this mess.

This commit is contained in:
2026-03-04 12:32:14 -05:00
parent 8d3fdb53d0
commit 01b3c26653
18 changed files with 180 additions and 132 deletions

View File

@@ -469,18 +469,7 @@ class AppController:
asyncio.set_event_loop(self._loop)
self._loop.create_task(self._process_event_queue())
# Fallback: process queues even if GUI thread is idling/stuck
async def queue_fallback() -> None:
    """Fallback drain loop: keep processing pending GUI tasks even if the
    GUI thread is idling or stuck.

    Note: in GUI mode, App._gui_func still calls these for low latency;
    this loop only guarantees progress when that path stalls.
    """
    while True:
        try:
            # Headless/fallback queue processing.
            # _process_pending_history_adds might need more care regarding
            # project state, so only the GUI-task queue is drained here.
            self._process_pending_gui_tasks()
        except Exception:
            # Best-effort by design: a failing drain must never kill the loop.
            # Was a bare `except:`, which also swallowed CancelledError (a
            # BaseException since 3.8) and blocked clean task cancellation
            # on shutdown; Exception keeps cancellation working.
            pass
        await asyncio.sleep(0.1)
self._loop.create_task(queue_fallback())
pass # Loop runs the process_event_queue task
self._loop.run_forever()
async def _process_event_queue(self) -> None:
@@ -671,100 +660,6 @@ class AppController:
return True
return False
def _process_pending_gui_tasks(self) -> None:
if not self._pending_gui_tasks:
return
with self._pending_gui_tasks_lock:
tasks = self._pending_gui_tasks[:]
self._pending_gui_tasks.clear()
for task in tasks:
try:
action = task.get("action")
if action == "refresh_api_metrics":
self._refresh_api_metrics(task.get("payload", {}))
elif action == "handle_ai_response":
payload = task.get("payload", {})
text = payload.get("text", "")
stream_id = payload.get("stream_id")
is_streaming = payload.get("status") == "streaming..."
if stream_id:
if is_streaming:
if stream_id not in self.mma_streams: self.mma_streams[stream_id] = ""
self.mma_streams[stream_id] += text
else:
self.mma_streams[stream_id] = text
if stream_id == "Tier 1":
if "status" in payload:
self.ai_status = payload["status"]
else:
if is_streaming:
self.ai_response += text
else:
self.ai_response = text
self.ai_status = payload.get("status", "done")
self._trigger_blink = True
if not stream_id:
self._token_stats_dirty = True
elif action == "mma_stream_append":
payload = task.get("payload", {})
stream_id = payload.get("stream_id")
text = payload.get("text", "")
if stream_id:
if stream_id not in self.mma_streams:
self.mma_streams[stream_id] = ""
self.mma_streams[stream_id] += text
elif action == "mma_state_update":
payload = task.get("payload", {})
self.mma_status = payload.get("status", "idle")
self.active_tier = payload.get("active_tier")
self.mma_tier_usage = payload.get("tier_usage", self.mma_tier_usage)
self.active_tickets = payload.get("tickets", [])
elif action == "mma_step_approval":
dlg = MMAApprovalDialog(str(task.get("ticket_id") or ""), str(task.get("payload") or ""))
self._pending_mma_approval = task
if "dialog_container" in task:
task["dialog_container"][0] = dlg
elif action == "mma_spawn_approval":
spawn_dlg = MMASpawnApprovalDialog(
str(task.get("ticket_id") or ""),
str(task.get("role") or ""),
str(task.get("prompt") or ""),
str(task.get("context_md") or "")
)
self._pending_mma_spawn = task
if "dialog_container" in task:
task["dialog_container"][0] = spawn_dlg
except Exception as e:
print(f"Error executing GUI task: {e}")
def stop_services(self):
    """Shut down the background asyncio loop and wait for its worker thread."""
    loop = self._loop
    if loop:
        # stop() is not thread-safe; schedule it onto the loop's own thread.
        loop.call_soon_threadsafe(loop.stop)
    worker = self._loop_thread
    if worker:
        # Bounded wait so shutdown cannot hang on a wedged loop thread.
        worker.join(timeout=2.0)
# --- provider selection -------------------------------------------------
# NOTE(review): this block comes from a diff view with indentation lost;
# the comments describe the apparent nesting — verify against the real file.
# Read accessor for the id of the AI provider currently in use.
@property
def current_provider(self) -> str:
return self._current_provider
@current_provider.setter
def current_provider(self, value: str) -> None:
# Setting the same provider again is a no-op; only react to real changes.
if value != self._current_provider:
self._current_provider = value
# Drop any live chat session before rewiring the client to the new provider.
ai_client.reset_session()
ai_client.set_provider(value, self.current_model)
if value == "gemini_cli":
# Lazily create the CLI adapter, or re-point an existing one at the
# currently configured binary path.
# NOTE(review): reaches into ai_client private state (_gemini_cli_adapter)
# — confirm this is the intended API rather than a setter on ai_client.
if not ai_client._gemini_cli_adapter:
ai_client._gemini_cli_adapter = ai_client.GeminiCliAdapter(binary_path=self.ui_gemini_cli_path)
else:
ai_client._gemini_cli_adapter.binary_path = self.ui_gemini_cli_path
# NOTE(review): unclear from the diff whether the hook server start is
# scoped to gemini_cli only or runs on every provider change — confirm.
if hasattr(self, 'hook_server'):
self.hook_server.start()
# Provider changed: clear the model list, refetch it, and reset token stats.
self.available_models = []
self._fetch_models(value)
self._token_stats = {}
self._token_stats_dirty = True
@property
def current_model(self) -> str:

View File

@@ -15,6 +15,10 @@ _ENV_CONFIG: dict = {}
def _load_env_config() -> dict:
"""Load mcp_env.toml from project root (sibling of this file or parent dir)."""
env_path = os.environ.get("SLOP_MCP_ENV")
if env_path and Path(env_path).exists():
with open(env_path, "rb") as f:
return tomllib.load(f)
candidates = [
Path(__file__).parent / "mcp_env.toml",
Path(__file__).parent.parent / "mcp_env.toml",