commit 32af96b365
parent f126cdcb21
Date:   2026-02-21 22:25:11 -05:00

2 changed files with 39 additions and 29 deletions


@@ -420,21 +420,26 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str, file_items:
                 )
             )
-        full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
+        payload_to_send = f"<context>\n{md_content}\n</context>\n\n{user_message}"
         _append_comms("OUT", "request", {
             "message": f"[context {len(md_content)} chars + user message {len(user_message)} chars]",
         })
-        response = _gemini_chat.send_message(full_message)
-        for round_idx in range(MAX_TOOL_ROUNDS):
+        all_text_parts = []
+        for round_idx in range(MAX_TOOL_ROUNDS + 1):
+            response = _gemini_chat.send_message(payload_to_send)
+            text_parts_raw = [
+                part.text
+                for candidate in response.candidates
+                for part in candidate.content.parts
+                if hasattr(part, "text") and part.text
+            ]
+            if text_parts_raw:
+                all_text_parts.append("\n".join(text_parts_raw))
             tool_calls = [
                 part.function_call
                 for candidate in response.candidates
@@ -468,6 +473,9 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str, file_items:
             if not tool_calls:
                 break
+            if round_idx >= MAX_TOOL_ROUNDS:
+                break
             function_responses = []
             sent_results_log = []
             for fc in tool_calls:
@@ -503,9 +511,6 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str, file_items:
                 )
                 sent_results_log.append({"tool_use_id": TOOL_NAME, "content": output})
-            if not function_responses:
-                break
             # Refresh file context after tool calls locally, but DO NOT inject as text part into Gemini.
             # Gemini strictly expects only function_responses in this array.
             if file_items:
@@ -515,15 +520,10 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str, file_items:
                 "results": sent_results_log
             })
-            response = _gemini_chat.send_message(function_responses)
-        text_parts = [
-            part.text
-            for candidate in response.candidates
-            for part in candidate.content.parts
-            if hasattr(part, "text") and part.text
-        ]
-        return "\n".join(text_parts)
+            payload_to_send = function_responses
+        final_text = "\n\n".join(all_text_parts)
+        return final_text if final_text.strip() else "(No text returned by the model)"
     except ProviderError:
         raise
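
Taken together, the _send_gemini hunks converge on a single send-collect-dispatch loop: text is harvested from every round rather than only the last response, and the extra iteration (MAX_TOOL_ROUNDS + 1) lets the model produce a wrap-up answer after the final tool batch. Below is a minimal sketch of that pattern, assuming the google-generativeai SDK; send_with_tools, run_tool, and the value of MAX_TOOL_ROUNDS are illustrative stand-ins, not the module's real plumbing.

# Sketch only: mirrors the diff's control flow, assuming the google-generativeai SDK.
import google.generativeai as genai

MAX_TOOL_ROUNDS = 5  # assumed value; the diff only references the constant

def run_tool(call):
    # Hypothetical executor; the real module dispatches on call.name / call.args.
    return f"(stub result for {call.name})"

def send_with_tools(chat, first_payload):
    all_text_parts = []
    payload_to_send = first_payload
    for round_idx in range(MAX_TOOL_ROUNDS + 1):
        response = chat.send_message(payload_to_send)
        # Collect text from every round, not just the final one.
        texts = [
            part.text
            for candidate in response.candidates
            for part in candidate.content.parts
            if hasattr(part, "text") and part.text
        ]
        if texts:
            all_text_parts.append("\n".join(texts))
        tool_calls = [
            part.function_call
            for candidate in response.candidates
            for part in candidate.content.parts
            if hasattr(part, "function_call") and part.function_call
        ]
        # The extra round lets the model wrap up after the last tool batch.
        if not tool_calls or round_idx >= MAX_TOOL_ROUNDS:
            break
        # Next payload carries only function_responses; Gemini rejects mixed arrays.
        payload_to_send = [
            genai.protos.Part(
                function_response=genai.protos.FunctionResponse(
                    name=call.name,
                    response={"output": run_tool(call)},
                )
            )
            for call in tool_calls
        ]
    final_text = "\n\n".join(all_text_parts)
    return final_text if final_text.strip() else "(No text returned by the model)"

Joining rounds with "\n\n" keeps intermediate commentary (e.g. "let me check that file") visible in the final answer, and the fallback string makes a tool-only conversation return something other than an empty string.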
@@ -630,7 +630,9 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
             ),
         })
-        for round_idx in range(MAX_TOOL_ROUNDS):
+        all_text_parts = []
+        for round_idx in range(MAX_TOOL_ROUNDS + 1):
             response = _anthropic_client.messages.create(
                 model=_model,
                 max_tokens=8096,
@@ -654,6 +656,9 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
             })
             text_blocks = [b.text for b in response.content if hasattr(b, "text") and b.text]
+            if text_blocks:
+                all_text_parts.append("\n".join(text_blocks))
             tool_use_blocks = [
                 {"id": b.id, "name": b.name, "input": b.input}
                 for b in response.content
@@ -682,6 +687,9 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
             if response.stop_reason != "tool_use":
                 break
+            if round_idx >= MAX_TOOL_ROUNDS:
+                break
             tool_results = []
             for block in response.content:
                 if getattr(block, "type", None) != "tool_use":
@@ -746,12 +754,8 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
                 ],
             })
-        text_parts = [
-            block.text
-            for block in response.content
-            if hasattr(block, "text") and block.text
-        ]
-        return "\n".join(text_parts)
+        final_text = "\n\n".join(all_text_parts)
+        return final_text if final_text.strip() else "(No text returned by the model)"
     except ProviderError:
         raise
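
The _send_anthropic hunks apply the same accumulate-across-rounds change to the Messages API, where the loop pivots on stop_reason == "tool_use" and answers each tool_use block with a tool_result. A minimal sketch under the same caveats, assuming the anthropic SDK; send_with_tools, run_tool, and the MAX_TOOL_ROUNDS value are illustrative stand-ins.

# Sketch only: mirrors the diff's control flow, assuming the anthropic SDK.
import anthropic

MAX_TOOL_ROUNDS = 5  # assumed value; the diff only references the constant

def run_tool(block):
    # Hypothetical executor; the real module dispatches on block.name / block.input.
    return f"(stub result for {block.name})"

def send_with_tools(client, model, messages, tools):
    all_text_parts = []
    for round_idx in range(MAX_TOOL_ROUNDS + 1):
        response = client.messages.create(
            model=model,
            max_tokens=8096,
            messages=messages,
            tools=tools,
        )
        # Keep the text from every round, including rounds that also call tools.
        text_blocks = [b.text for b in response.content if hasattr(b, "text") and b.text]
        if text_blocks:
            all_text_parts.append("\n".join(text_blocks))
        if response.stop_reason != "tool_use" or round_idx >= MAX_TOOL_ROUNDS:
            break
        # Echo the assistant turn, then answer each tool_use with a tool_result.
        messages.append({"role": "assistant", "content": response.content})
        messages.append({
            "role": "user",
            "content": [
                {"type": "tool_result", "tool_use_id": b.id, "content": run_tool(b)}
                for b in response.content
                if getattr(b, "type", None) == "tool_use"
            ],
        })
    final_text = "\n\n".join(all_text_parts)
    return final_text if final_text.strip() else "(No text returned by the model)"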