chore(conductor): Mark track 'Simulation Fidelity Enhancement' as complete.

This commit is contained in:
2026-03-06 13:38:15 -05:00
parent 0a63892395
commit ae237330e9
4 changed files with 64 additions and 37 deletions

View File

@@ -4,9 +4,10 @@ from typing import Any, Callable
import ai_client
class UserSimAgent:
def __init__(self, hook_client: Any, model: str = "gemini-2.5-flash-lite") -> None:
def __init__(self, hook_client: Any, model: str = "gemini-2.5-flash-lite", enable_delays: bool = True) -> None:
self.hook_client = hook_client
self.model = model
self.enable_delays = enable_delays
self.system_prompt = (
"You are a software engineer testing an AI coding assistant called 'Manual Slop'. "
"You want to build a small Python project and verify the assistant's capabilities. "
@@ -14,26 +15,46 @@ class UserSimAgent:
"Do not use markdown blocks for your main message unless you are providing code."
)
def calculate_reading_delay(self, text: str) -> float:
    """Estimate how long a human would spend reading *text*, in seconds.

    Assumes a 200 words-per-minute pace, halved (a skim rather than a
    careful read), then clamps the result into the [0.5, 5.0] range so
    the simulation never stalls on very long or very short messages.
    """
    words = len(text.split())
    # Keep the exact arithmetic order: wpm -> minutes -> seconds -> halved.
    seconds = (words / 200.0) * 60.0 * 0.5
    if seconds < 0.5:
        return 0.5
    if seconds > 5.0:
        return 5.0
    return seconds
def wait_to_read(self, text: str) -> None:
    """Block for a human-like reading delay proportional to *text* length.

    No-op when self.enable_delays is false, so automated runs stay fast.
    """
    if not self.enable_delays:
        return
    time.sleep(self.calculate_reading_delay(text))
def wait_to_think(self, probability: float = 0.2, min_delay: float = 2.0, max_delay: float = 5.0) -> None:
    """With the given *probability*, pause for a random "thinking" delay.

    The pause is drawn uniformly from [min_delay, max_delay] seconds.
    No-op when self.enable_delays is false (and then the RNG is not
    consulted at all, preserving any seeded random sequence).
    """
    if not self.enable_delays:
        return
    if random.random() >= probability:
        return
    time.sleep(random.uniform(min_delay, max_delay))
def simulate_typing(self, text: str, jitter_range: tuple[float, float] = (0.01, 0.05)) -> None:
    """Sleep in jittered increments to mimic a human typing *text*.

    Longer messages are "typed" in larger chunks with proportionally
    larger jitter so total wall-clock time stays reasonable; short
    messages get a per-character pause. No-op when self.enable_delays
    is false.
    """
    if not self.enable_delays:
        return
    low, high = jitter_range
    # Pick (chunk size, jitter multiplier) by message length to balance
    # speed and realism — same thresholds for all three tiers.
    if len(text) > 200:
        step, scale = 10, 3
    elif len(text) > 50:
        step, scale = 3, 1.5
    else:
        step, scale = 1, 1
    for _ in range(0, len(text), step):
        time.sleep(random.uniform(low * scale, high * scale))
def generate_response(self, conversation_history: list[dict]) -> str:
"""
Generates a human-like response based on the conversation history.
conversation_history: list of dicts with 'role' and 'content'
"""
# Format history for ai_client
# ai_client expects md_content and user_message.
# It handles its own internal history.
# We want the 'User AI' to have context of what the 'Assistant AI' said.
# For now, let's just use the last message from Assistant as the prompt.
Generates a human-like response based on the conversation history.
conversation_history: list of dicts with 'role' and 'content'
"""
last_ai_msg = ""
for entry in reversed(conversation_history):
if entry.get('role') == 'AI':
last_ai_msg = entry.get('content', '')
break
# We need to set a custom system prompt for the User Simulator
try:
ai_client.set_custom_system_prompt(self.system_prompt)
# We'll use a blank md_content for now as the 'User' doesn't need to read its own files
# via the same mechanism, but we could provide it if needed.
response = ai_client.send(md_content="", user_message=last_ai_msg)
finally:
ai_client.set_custom_system_prompt("")
@@ -41,8 +62,9 @@ class UserSimAgent:
def perform_action_with_delay(self, action_func: Callable, *args: Any, **kwargs: Any) -> Any:
    """
    Executes an action with a human-like delay if enabled.

    action_func: the callable to invoke.
    *args / **kwargs: forwarded unchanged to action_func.
    Returns whatever action_func returns.
    """
    # Short random pause before acting, to mimic human reaction time.
    # Skipped entirely when delays are disabled (fast automated runs).
    if self.enable_delays:
        delay = random.uniform(0.5, 2.0)
        time.sleep(delay)
    return action_func(*args, **kwargs)

View File

@@ -59,10 +59,12 @@ class WorkflowSimulator:
active_disc = self.client.get_value("active_discussion")
print(f"[DEBUG] Current active discussion in GUI: {active_disc}")
print(f"\n[USER]: {user_message}")
self.user_agent.simulate_typing(user_message)
self.client.set_value("ai_input", user_message)
self.client.click("btn_gen_send")
def wait_for_ai_response(self, timeout: int = 60) -> dict | None:
self.user_agent.wait_to_think(probability=0.1)
print("Waiting for AI response...", end="", flush=True)
start_time = time.time()
@@ -120,6 +122,7 @@ class WorkflowSimulator:
if is_complete:
content = last_ai_entry.get('content', '')
print(f"\n[AI]: {content[:100]}...")
self.user_agent.wait_to_read(content)
return last_ai_entry
if non_system_entries: