feat(ai_client): isolate current_tier in threading.local() so parallel agents are thread-safe

This commit is contained in:
2026-03-06 12:59:10 -05:00
parent 1fb6ebc4d0
commit 684a6d1d3b
11 changed files with 75 additions and 29 deletions

View File

@@ -86,8 +86,16 @@ comms_log_callback: Optional[Callable[[dict[str, Any]], None]] = None
# Injected by gui.py - called whenever a tool call completes.
tool_log_callback: Optional[Callable[[str, str], None]] = None
# Set by caller tiers before ai_client.send(); cleared in finally.
current_tier: Optional[str] = None
# Per-thread storage so concurrent agent tiers never clobber each other's tag.
_local_storage = threading.local()


def get_current_tier() -> Optional[str]:
    """Return the tier tag recorded for the calling thread, or None if unset."""
    try:
        return _local_storage.current_tier
    except AttributeError:
        # No tier has been set on this thread yet.
        return None


def set_current_tier(tier: Optional[str]) -> None:
    """Record *tier* as the calling thread's current tier tag (None clears it)."""
    setattr(_local_storage, "current_tier", tier)
# Increased to allow thorough code exploration before forcing a summary
MAX_TOOL_ROUNDS: int = 10
@@ -129,7 +137,6 @@ _comms_log: list[dict[str, Any]] = []
COMMS_CLAMP_CHARS: int = 300
def _append_comms(direction: str, kind: str, payload: dict[str, Any]) -> None:
global current_tier
entry: dict[str, Any] = {
"ts": datetime.datetime.now().strftime("%H:%M:%S"),
"direction": direction,
@@ -137,7 +144,7 @@ def _append_comms(direction: str, kind: str, payload: dict[str, Any]) -> None:
"provider": _provider,
"model": _model,
"payload": payload,
"source_tier": current_tier,
"source_tier": get_current_tier(),
}
_comms_log.append(entry)
if comms_log_callback is not None:
@@ -1231,7 +1238,7 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
tool_executed = True
if not tool_executed:
if name and name in mcp_client.TOOL_NAMES:
if b_name and b_name in mcp_client.TOOL_NAMES:
_append_comms("OUT", "tool_call", {"name": b_name, "id": b_id, "args": b_input})
if b_name in mcp_client.MUTATING_TOOLS and pre_tool_callback:

View File

@@ -851,7 +851,7 @@ class AppController:
self._api_event_queue.append({"type": "response", "payload": payload})
def _handle_request_event(self, event: events.UserRequestEvent) -> None:
"""Processes a UserRequestEvent by calling the AI client."""
ai_client.current_tier = None # Ensure main discussion is untagged
ai_client.set_current_tier(None) # Ensure main discussion is untagged
if self.ui_auto_add_history:
with self._pending_history_adds_lock:
self._pending_history_adds.append({
@@ -935,7 +935,7 @@ class AppController:
def _on_tool_log(self, script: str, result: str) -> None:
session_logger.log_tool_call(script, result, None)
source_tier = ai_client.current_tier
source_tier = ai_client.get_current_tier()
with self._pending_tool_calls_lock:
self._pending_tool_calls.append({"script": script, "result": result, "ts": time.time(), "source_tier": source_tier})

View File

@@ -20,7 +20,7 @@ def generate_tickets(track_brief: str, module_skeletons: str) -> list[dict[str,
# Set custom system prompt for this call
old_system_prompt = ai_client._custom_system_prompt
ai_client.set_custom_system_prompt(system_prompt or "")
ai_client.current_tier = "Tier 2"
ai_client.set_current_tier("Tier 2")
last_error = None
try:
for _ in range(3):
@@ -53,7 +53,7 @@ def generate_tickets(track_brief: str, module_skeletons: str) -> list[dict[str,
finally:
# Restore old system prompt and clear tier tag
ai_client.set_custom_system_prompt(old_system_prompt or "")
ai_client.current_tier = None
ai_client.set_current_tier(None)
from src.dag_engine import TrackDAG
from src.models import Ticket

View File

@@ -300,7 +300,7 @@ def run_worker_lifecycle(ticket: Ticket, context: WorkerContext, context_files:
old_comms_cb(entry)
ai_client.comms_log_callback = worker_comms_callback
ai_client.current_tier = "Tier 3"
ai_client.set_current_tier("Tier 3")
try:
comms_baseline = len(ai_client.get_comms_log())
response = ai_client.send(
@@ -313,7 +313,7 @@ def run_worker_lifecycle(ticket: Ticket, context: WorkerContext, context_files:
)
finally:
ai_client.comms_log_callback = old_comms_cb
ai_client.current_tier = None
ai_client.set_current_tier(None)
if event_queue:
# Push via "response" event type — _process_event_queue wraps this
# as {"action": "handle_ai_response", "payload": ...} for the GUI.