fix(ai_client): Restore AI text capture and fix tool declaration in Gemini generation loop.

This commit is contained in:
2026-03-06 13:47:22 -05:00
parent ae237330e9
commit 9d5b874c66

View File

@@ -107,7 +107,7 @@ _MAX_TOOL_OUTPUT_BYTES: int = 500_000
 _ANTHROPIC_CHUNK_SIZE: int = 120_000
 _SYSTEM_PROMPT: str = (
-    "You are a helpful coding assistant with access to a PowerShell tool and MCP tools (file access: read_file, list_directory, search_files, get_file_summary, web access: web_search, fetch_url). "
+    "You are a helpful coding assistant with access to a PowerShell tool (run_powershell) and MCP tools (file access: read_file, list_directory, search_files, get_file_summary, web access: web_search, fetch_url). "
     "When calling file/directory tools, always use the 'path' parameter for the target path. "
     "When asked to create or edit files, prefer targeted edits over full rewrites. "
     "Always explain what you are doing before invoking the tool.\n\n"
@@ -764,13 +764,16 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str,
                 r["output"] = val
     for r_idx in range(MAX_TOOL_ROUNDS + 2):
         events.emit("request_start", payload={"provider": "gemini", "model": _model, "round": r_idx})
-        if stream_callback:
-            # In 1.0.0, we use send_message with stream=True
-            config = types.GenerateContentConfig(
-                tools=[types.Tool(function_declarations=[types.FunctionDeclaration(**s) for s in mcp_client.get_tool_schemas()])] if enable_tools else [],
-                temperature=_temperature,
-                max_output_tokens=_max_tokens,
-            )
+        # Shared config for this round
+        td = _gemini_tool_declaration() if enable_tools else None
+        config = types.GenerateContentConfig(
+            tools=[td] if td else [],
+            temperature=_temperature,
+            max_output_tokens=_max_tokens,
+        )
+        if stream_callback:
             resp = _gemini_chat.send_message_stream(payload, config=config)
             txt_chunks: list[str] = []
             calls = []
@@ -796,17 +799,14 @@
                 }
                 final_resp = chunk
             txt = "".join(txt_chunks)
+            if txt: all_text.append(txt)
             # Final validation of response object for subsequent code
             resp = final_resp
             events.emit("response_received", payload={"provider": "gemini", "model": _model, "usage": usage, "round": r_idx})
         else:
-            config = types.GenerateContentConfig(
-                tools=[types.Tool(function_declarations=[types.FunctionDeclaration(**s) for s in mcp_client.get_tool_schemas()])] if enable_tools else [],
-                temperature=_temperature,
-                max_output_tokens=_max_tokens,
-            )
             resp = _gemini_chat.send_message(payload, config=config)
             txt = resp.text or ""
+            if txt: all_text.append(txt)
             calls = [p.function_call for c in resp.candidates if getattr(c, "content", None) for p in c.content.parts if p.function_call]
             usage = {
                 "input_tokens": getattr(resp.usage_metadata, "prompt_token_count", 0),