fixing api usage for gemini
ai_client.py (14 changes)
@@ -506,20 +506,10 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str, file_items:
         if not function_responses:
             break
 
-        # Refresh file context after tool calls and inject as a text part
+        # Refresh file context after tool calls locally, but DO NOT inject as text part into Gemini.
+        # Gemini strictly expects only function_responses in this array.
         if file_items:
             file_items = _reread_file_items(file_items)
-            refreshed_ctx = _build_file_context_text(file_items)
-            if refreshed_ctx:
-                refreshed_text = (
-                    "[FILES UPDATED — current contents below. "
-                    "Do NOT re-read these files with PowerShell.]\n\n"
-                    + refreshed_ctx
-                )
-                if hasattr(types.Part, "from_text"):
-                    function_responses.append(types.Part.from_text(text=refreshed_text))
-                else:
-                    function_responses.append(types.Part(text=refreshed_text))
 
         _append_comms("OUT", "tool_result_send", {
             "results": sent_results_log
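Why the removed block broke the Gemini call: the refreshed file context was appended as a plain text Part to the same parts list as the tool results, and per this commit Gemini expects only function_response parts in that array. Below is a minimal sketch of that follow-up loop with the google-genai SDK; the model name, run_tool(), and answer_with_tools() are illustrative placeholders, not code from this repo.

from google import genai
from google.genai import types

client = genai.Client()  # assumes GEMINI_API_KEY is set in the environment

def answer_with_tools(contents, tools, run_tool):
    """Sketch of a tool-call round trip; run_tool(name, args) executes one tool locally."""
    while True:
        response = client.models.generate_content(
            model="gemini-2.0-flash",  # placeholder model name
            contents=contents,
            config=types.GenerateContentConfig(tools=tools),
        )
        calls = response.function_calls or []
        if not calls:
            return response.text

        # Keep the model's own function-call turn in the history.
        contents.append(response.candidates[0].content)

        # Reply with ONLY function_response parts; per this commit, a plain
        # text Part mixed into this list is rejected, so any refreshed file
        # context has to be carried elsewhere (e.g. the next user turn).
        parts = [
            types.Part.from_function_response(
                name=call.name,
                response={"result": run_tool(call.name, dict(call.args or {}))},
            )
            for call in calls
        ]
        contents.append(types.Content(role="user", parts=parts))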
gui.py (1 change)
@@ -1705,3 +1705,4 @@ def main():
 
 if __name__ == "__main__":
     main()
+

@@ -29,7 +29,9 @@ paths = [
 
 [screenshots]
 base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
-paths = []
+paths = [
+"C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02/python_2026-02-21_22-11-51.png",
+]
 
 [discussion]
 roles = [
@@ -85,7 +87,7 @@ history = [
 
 [discussion.discussions."Support for system prompt"]
 git_commit = "f258fc5765d647eb821716bce3d2000fb2ba2857"
-last_updated = "2026-02-21T22:06:39"
+last_updated = "2026-02-21T22:13:20"
 history = [
 "@2026-02-21T21:52:16\nUser:\ngemini testing 123 you get this message, first time using you proper in this custom frontend.",
 "@2026-02-21T21:52:17\nAI:\nLoud and clear! I've received your message through your **Manual Slop** frontend. It looks like a very capable environment—I can see you've implemented:\n\n1. **Multi-provider support** (Anthropic and Gemini).\n2. **Tool use/MCP-like capabilities** (file reading, directory listing, searching, etc.).\n3. **Project & Discussion management** via TOML files.\n4. **Rich GUI** using `dearpygui`.\n\nSince this is our first \"proper\" session in this interface, I'm ready to help you develop, debug, or refine the project. I have access to your local files through the tools you've provided, so I can see the codebase and even run PowerShell scripts if needed.\n\nIs there a specific feature you'd like to work on next, or any bugs in the Gemini integration you've noticed during your \"123\" test?",
@@ -93,4 +95,6 @@ history = [
 "@2026-02-21T21:59:35\nAI:\n---\n\n</context>\n\nYeah lets add the ability for the user to support system prompt templating from their side, I know the ai_client.py has some baked in so we can keep portion of it, I jsut want the suer to be able to inject project-agnostic and project specific system prompts.\nIf you think thats not a good idea let me know.",
 "@2026-02-21T22:01:32\nUser:\nUm are you working correctly you stopped for some rason I'll send a screen cap of the comms history...",
 "@2026-02-21T22:01:40\nAI:\n---\n\n</context>\nI successfully applied the changes. Let me know if everything is working fine now.",
+"@2026-02-21T22:11:52\nUser:\nYep it looks like the system prompts are now available.\n\nQuestion: For how we're doing things right now, you have to re-read files cause its not saved in discussion history rigt per me sending a gen + send?\nIs there a way you can do a routine to save important files to the discussion or upload them temporarily to a gemini cache so that we don't have to worrya bout re-reading read-only things you already gathered info on for an active discussion?\nI'm not sure the depths of how this custom api works or custom tooling, etc gemini api from google provides...",
+"@2026-02-21T22:12:05\nSystem:\nAI got stuck and did nothing...",
 ]