fixes
ai_client.py
@@ -16,27 +16,22 @@ _anthropic_history: list[dict] = []

# Injected by gui.py - called when AI wants to run a command.
# Signature: (script: str, base_dir: str) -> str | None
# Returns the output string if approved, None if rejected.
confirm_and_run_callback = None

# Injected by gui.py - called whenever a comms entry is appended.
# Signature: (entry: dict) -> None
comms_log_callback = None

-# Injected by gui.py - called whenever a tool call completes (after run).
+# Injected by gui.py - called whenever a tool call completes.
# Signature: (script: str, result: str) -> None
tool_log_callback = None

MAX_TOOL_ROUNDS = 5

# Maximum characters per text chunk sent to Anthropic.
-# Anthropic's limit is ~200k tokens; we use 180k chars as a safe ceiling
-# (1 token ~ 3-4 chars, so 180k chars ~ 45-60k tokens, well within limits
-# even for very large aggregated markdown files).
+# Kept well under the ~200k token API limit.
_ANTHROPIC_CHUNK_SIZE = 180_000

# Anthropic system prompt - sent with cache_control so it is cached after the
# first request and reused on every subsequent call within the TTL window.
_ANTHROPIC_SYSTEM = (
    "You are a helpful coding assistant with access to a PowerShell tool. "
    "When asked to create or edit files, prefer targeted edits over full rewrites. "
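Not part of the diff: the arithmetic behind the comment trimmed above, kept as a quick sanity check of the 180k ceiling.

# At roughly 3-4 characters per token, one 180k-char chunk is ~45k-60k tokens,
# comfortably under the ~200k-token request limit the comments mention.
chunk_chars = 180_000
approx_tokens = (chunk_chars // 4, chunk_chars // 3)  # (45_000, 60_000)
assert max(approx_tokens) < 200_000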
@@ -47,15 +42,10 @@ _ANTHROPIC_SYSTEM = (

_comms_log: list[dict] = []

-MAX_FIELD_CHARS = 400  # beyond this we show a truncated preview in the UI
+COMMS_CLAMP_CHARS = 300


def _append_comms(direction: str, kind: str, payload: dict):
    """
    direction : "OUT" | "IN"
    kind      : "request" | "response" | "tool_call" | "tool_result"
    payload   : raw dict describing the event
    """
    entry = {
        "ts": datetime.datetime.now().strftime("%H:%M:%S"),
        "direction": direction,
@@ -81,13 +71,10 @@ def _load_credentials() -> dict:
    with open("credentials.toml", "rb") as f:
        return tomllib.load(f)


# ------------------------------------------------------------------ provider errors

class ProviderError(Exception):
    """
    Raised when the upstream API returns a hard error we want to surface
    distinctly in the UI (quota, rate-limit, auth, balance, etc.).
    """
    def __init__(self, kind: str, provider: str, original: Exception):
        self.kind = kind
        self.provider = provider
@@ -162,6 +149,7 @@ def _classify_gemini_error(exc: Exception) -> ProviderError:
        return ProviderError("network", "gemini", exc)
    return ProviderError("unknown", "gemini", exc)


# ------------------------------------------------------------------ provider setup

def set_provider(provider: str, model: str):
@@ -169,6 +157,7 @@ def set_provider(provider: str, model: str):
    _provider = provider
    _model = model


def reset_session():
    global _gemini_client, _gemini_chat
    global _anthropic_client, _anthropic_history
@@ -178,6 +167,7 @@ def reset_session():
    _anthropic_history = []
    file_cache.reset_client()


# ------------------------------------------------------------------ model listing

def list_models(provider: str) -> list[str]:
@@ -188,6 +178,7 @@ def list_models(provider: str) -> list[str]:
        return _list_anthropic_models()
    return []


def _list_gemini_models(api_key: str) -> list[str]:
    from google import genai
    try:
@@ -203,6 +194,7 @@ def _list_gemini_models(api_key: str) -> list[str]:
    except Exception as exc:
        raise _classify_gemini_error(exc) from exc


def _list_anthropic_models() -> list[str]:
    import anthropic
    try:
@@ -216,13 +208,10 @@ def _list_anthropic_models() -> list[str]:
        raise _classify_anthropic_error(exc) from exc


-# --------------------------------------------------------- tool definition
+# ------------------------------------------------------------------ tool definition

TOOL_NAME = "run_powershell"

# The tool list for Anthropic. cache_control is placed on the last (only) tool
# so that the system-prompt + tools prefix is cached together after the first
# request and served from cache on every subsequent round.
_ANTHROPIC_TOOLS = [
    {
        "name": TOOL_NAME,
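Not part of the diff: the hunk cuts off after "name", so here is a hedged sketch of how a cache-marked entry in _ANTHROPIC_TOOLS plausibly looks. The description wording and schema fields are assumptions; only the cache_control placement follows the comment above.

_ANTHROPIC_TOOLS = [
    {
        "name": TOOL_NAME,
        # Assumed wording - the real description is elided by the hunk.
        "description": "Run a PowerShell script in the project directory and return its output.",
        "input_schema": {
            "type": "object",
            "properties": {"script": {"type": "string"}},
            "required": ["script"],
        },
        # On the last (only) tool, so the system + tools prefix caches as one unit.
        "cache_control": {"type": "ephemeral"},
    }
]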
@@ -247,6 +236,7 @@ _ANTHROPIC_TOOLS = [
    }
]


def _gemini_tool_declaration():
    from google.genai import types
    return types.Tool(
@@ -273,12 +263,8 @@ def _gemini_tool_declaration():
        ]
    )


def _run_script(script: str, base_dir: str) -> str:
    """
    Delegate to the GUI confirmation callback.
    Returns result string (stdout/stderr) or a rejection message.
    Also fires tool_log_callback if registered.
    """
    if confirm_and_run_callback is None:
        return "ERROR: no confirmation handler registered"
    result = confirm_and_run_callback(script, base_dir)
@@ -290,6 +276,7 @@ def _run_script(script: str, base_dir: str) -> str:
        tool_log_callback(script, output)
    return output


# ------------------------------------------------------------------ gemini

def _ensure_gemini_client():
@@ -299,6 +286,7 @@ def _ensure_gemini_client():
    creds = _load_credentials()
    _gemini_client = genai.Client(api_key=creds["gemini"]["api_key"])


def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
    global _gemini_chat
    from google import genai
@@ -318,7 +306,7 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
        full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"

        _append_comms("OUT", "request", {
-            "summary": f"[Gemini: context {len(md_content)} chars + user message {len(user_message)} chars]",
+            "message": f"[context {len(md_content)} chars + user message {len(user_message)} chars]",
        })

        response = _gemini_chat.send_message(full_message)
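Not part of the diff: the creation of _gemini_chat is elided between hunks. A plausible sketch with the google-genai SDK, assuming the tool declaration from _gemini_tool_declaration() is attached via the config; treat the exact wiring as an assumption.

from google.genai import types

_gemini_chat = _gemini_client.chats.create(
    model=_model,
    config=types.GenerateContentConfig(tools=[_gemini_tool_declaration()]),
)
# Then each turn is a single call, as the hunk above shows:
# response = _gemini_chat.send_message(full_message)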
@@ -384,34 +372,8 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
    except Exception as exc:
        raise _classify_gemini_error(exc) from exc


# ------------------------------------------------------------------ anthropic
-#
-# Sending strategy for Anthropic:
-#
-# PRIMARY PATH (_send_anthropic_files) - used when file_items are provided
-# ============
-# Each file from config is uploaded via the Anthropic Files API
-# (file_cache.get_file_id handles upload + caching by mtime/size).
-# Files are sent as individual document/image content blocks in the first
-# user message. The discussion history section of the markdown (which is
-# small and changes each session) is still sent as a text block.
-# This keeps the per-message payload lean and lets the Files API handle
-# the heavy lifting of large source files.
-#
-# FALLBACK PATH (_send_anthropic_chunked) - used when no file_items, or if
-# ============ the Files API path fails
-# The full aggregated markdown is split into <=_ANTHROPIC_CHUNK_SIZE char
-# chunks and sent as separate text content blocks. cache_control:ephemeral
-# is placed on the LAST chunk so the whole context prefix is cached together.
-#
-# Caching strategy (Anthropic prompt caching):
-# - System prompt: cache_control:ephemeral on the text block
-# - Last tool in _ANTHROPIC_TOOLS: cache_control:ephemeral
-# - Context content blocks: cache_control:ephemeral on the last block
-# These three form a stable cached prefix that survives across turns.
-#
-# Token cost: cache creation ~25% more than normal input; cache reads ~10%
-# of normal input. Steady-state use is much cheaper after the first request.

def _ensure_anthropic_client():
    global _anthropic_client
@@ -422,15 +384,14 @@ def _ensure_anthropic_client():


def _chunk_text(text: str, chunk_size: int) -> list[str]:
    """Split text into chunks of at most chunk_size characters."""
    return [text[i:i + chunk_size] for i in range(0, len(text), chunk_size)]


def _build_chunked_context_blocks(md_content: str) -> list[dict]:
    """
-    Split md_content into <=_ANTHROPIC_CHUNK_SIZE char chunks and return
-    a list of Anthropic text content blocks. cache_control:ephemeral is
-    placed only on the LAST block so the whole prefix is cached as one unit.
+    Split md_content into <=_ANTHROPIC_CHUNK_SIZE char chunks.
+    cache_control:ephemeral is placed only on the LAST block so the whole
+    prefix is cached as one unit.
    """
    chunks = _chunk_text(md_content, _ANTHROPIC_CHUNK_SIZE)
    blocks = []
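Not part of the diff: the loop body that builds the blocks is elided by the hunk boundary, so here is a minimal sketch of what _build_chunked_context_blocks plausibly returns, using only the block shape visible elsewhere in this file.

def _build_chunked_context_blocks_sketch(md_content: str) -> list[dict]:
    chunks = _chunk_text(md_content, _ANTHROPIC_CHUNK_SIZE)
    blocks = [{"type": "text", "text": chunk} for chunk in chunks]
    if blocks:
        # Only the last block carries cache_control, caching the whole prefix.
        blocks[-1]["cache_control"] = {"type": "ephemeral"}
    return blocks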
@@ -442,118 +403,12 @@ def _build_chunked_context_blocks(md_content: str) -> list[dict]:
    return blocks


-def _build_files_context_blocks(
-    md_header: str,
-    file_items: list[dict],
-    screenshot_items: list[dict] | None = None,
-) -> list[dict]:
-    """
-    Build content blocks for the Files API path.
-
-    - md_header   : the Discussion History section text (small, sent as text block)
-    - file_items  : list of dicts from aggregate.build_file_items()
-                    each has: path (Path|None), entry (str), content (str), error (bool)
-    - screenshot_items: list of screenshot paths (Path) to include as image blocks
-
-    Returns a list of Anthropic content blocks.
-    The last block gets cache_control:ephemeral.
-    """
-    blocks: list[dict] = []
-
-    # Discussion history / header as a text block (small, always inline)
-    if md_header.strip():
-        blocks.append({
-            "type": "text",
-            "text": md_header,
-        })
-
-    # One document/image block per file
-    for item in file_items:
-        path: Path | None = item.get("path")
-        entry: str = item.get("entry", "")
-        error: bool = item.get("error", False)
-
-        if error or path is None:
-            # Fall back to inline text for error entries
-            blocks.append({
-                "type": "text",
-                "text": f"### `{entry}`\n\nERROR: {item.get('content', 'unknown error')}",
-            })
-            continue
-
-        block_type = file_cache.content_block_type(path)
-
-        if block_type == "unsupported":
-            # Inline as plain text
-            blocks.append({
-                "type": "text",
-                "text": f"### `{entry}`\n\n```\n{item.get('content', '')}\n```",
-            })
-            continue
-
-        # Try to get/upload via Files API
-        file_id = file_cache.get_file_id(path)
-
-        if file_id is None:
-            # Unsupported or missing - inline fallback
-            blocks.append({
-                "type": "text",
-                "text": f"### `{entry}`\n\n```\n{item.get('content', '')}\n```",
-            })
-            continue
-
-        if block_type == "document":
-            blocks.append({
-                "type": "document",
-                "source": {
-                    "type": "file",
-                    "file_id": file_id,
-                },
-                "title": path.name,
-                "citations": {"enabled": False},
-            })
-        elif block_type == "image":
-            blocks.append({
-                "type": "image",
-                "source": {
-                    "type": "file",
-                    "file_id": file_id,
-                },
-            })
-
-    # Screenshots as image blocks
-    for item in (screenshot_items or []):
-        path = item.get("path")
-        if path is None:
-            continue
-        block_type = file_cache.content_block_type(path)
-        if block_type != "image":
-            continue
-        file_id = file_cache.get_file_id(path)
-        if file_id:
-            blocks.append({
-                "type": "image",
-                "source": {
-                    "type": "file",
-                    "file_id": file_id,
-                },
-            })
-
-    # Put cache_control on the last block
-    if blocks:
-        blocks[-1]["cache_control"] = {"type": "ephemeral"}
-
-    return blocks


def _strip_cache_controls(history: list[dict]):
    """
-    Remove cache_control from all content blocks in the message history.
-    Anthropic allows a maximum of 4 cache_control blocks total across
-    system + tools + messages. We reserve those slots for the stable
-    system/tools prefix and the current turn's context block, so all
-    older history entries must be clean.
+    Remove cache_control from all content blocks in message history.
+    Anthropic allows max 4 cache_control blocks total across system + tools +
+    messages. We reserve those slots for the stable system/tools prefix and
+    the current turn's context block, so all older history entries must be clean.
    """
    for msg in history:
        content = msg.get("content")
@@ -561,24 +416,32 @@ def _strip_cache_controls(history: list[dict]):
        for block in content:
            if isinstance(block, dict):
                block.pop("cache_control", None)


-def _run_anthropic_loop(
-    user_content: list[dict],
-    user_message: str,
-    base_dir: str,
-    log_summary: str,
-) -> str:
+def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
    """
-    Core Anthropic message loop shared by both send paths.
-    Appends the user turn to _anthropic_history, runs the tool loop,
-    and returns the final assistant text.
+    Send via Anthropic using chunked inline text.
+    Context is split into <=_ANTHROPIC_CHUNK_SIZE char blocks with
+    cache_control:ephemeral on the last block, then the user message is appended.
    """
    global _anthropic_history
    try:
+        _ensure_anthropic_client()
+
+        context_blocks = _build_chunked_context_blocks(md_content)
+
+        user_content = context_blocks + [
+            {"type": "text", "text": user_message}
+        ]
+
        _strip_cache_controls(_anthropic_history)
        _anthropic_history.append({"role": "user", "content": user_content})

+        n_chunks = len(context_blocks)
        _append_comms("OUT", "request", {
-            "message": log_summary,
+            "message": (
+                f"[{n_chunks} chunk(s), {len(md_content)} chars context] "
+                f"{user_message[:200]}{'...' if len(user_message) > 200 else ''}"
+            ),
        })

        for round_idx in range(MAX_TOOL_ROUNDS):
@@ -594,12 +457,11 @@ def _run_anthropic_loop(
                ],
                tools=_ANTHROPIC_TOOLS,
                messages=_anthropic_history,
-                extra_headers={"anthropic-beta": "files-api-2025-04-14"},
            )

            _anthropic_history.append({
                "role": "assistant",
-                "content": response.content
+                "content": response.content,
            })

            text_blocks = [b.text for b in response.content if hasattr(b, "text") and b.text]
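Not part of the diff: the full messages.create call is split across hunks, so here is a hedged sketch of its overall shape as the surrounding lines imply, with the system block carrying cache_control per the comment near _ANTHROPIC_SYSTEM. The max_tokens value is a placeholder, not taken from this commit.

response = _anthropic_client.messages.create(
    model=_model,
    max_tokens=4096,  # placeholder value - the real one is elided by the hunk
    system=[{
        "type": "text",
        "text": _ANTHROPIC_SYSTEM,
        "cache_control": {"type": "ephemeral"},  # stable cached prefix
    }],
    tools=_ANTHROPIC_TOOLS,
    messages=_anthropic_history,
)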
@@ -674,126 +536,28 @@ def _run_anthropic_loop(
        ]
        return "\n".join(text_parts)


-def _send_anthropic_files(
-    md_content: str,
-    user_message: str,
-    base_dir: str,
-    file_items: list[dict],
-) -> str:
-    """
-    Files API send path. Uploads each file individually and sends document/image
-    blocks instead of inlining everything as text. Falls back to chunked text
-    on any upload error.
-
-    The discussion history section of md_content is extracted and sent inline
-    as a text block (it's small and changes each session so not worth uploading).
-    """
-    import anthropic
-
-    _ensure_anthropic_client()
-
-    # Extract just the Discussion History section to send inline.
-    # Everything else comes via file blocks.
-    discussion_section = ""
-    files_marker = "\n\n---\n\n## Files\n\n"
-    split_idx = md_content.find(files_marker)
-    if split_idx != -1:
-        discussion_section = md_content[:split_idx]
-    else:
-        # No files section - the whole thing is discussion/screenshots
-        discussion_section = md_content
-
-    try:
-        context_blocks = _build_files_context_blocks(discussion_section, file_items)
-    except Exception as upload_err:
-        _append_comms("OUT", "request", {
-            "message": f"[Files API upload failed: {upload_err}] falling back to chunked text",
-        })
-        return _send_anthropic_chunked(md_content, user_message, base_dir)
-
-    user_content = context_blocks + [
-        {
-            "type": "text",
-            "text": user_message,
-        }
-    ]
-
-    log_summary = (
-        f"[Files API: {len(file_items)} file(s) as document/image blocks, "
-        f"discussion section {len(discussion_section)} chars inline]\n\n{user_message}"
-    )
-
-    return _run_anthropic_loop(user_content, user_message, base_dir, log_summary)
-
-
-def _send_anthropic_chunked(md_content: str, user_message: str, base_dir: str) -> str:
-    """
-    Chunked text fallback path. Splits md_content into <=_ANTHROPIC_CHUNK_SIZE
-    char blocks, sends them all as text content blocks with cache_control on
-    the last one, then appends the user question.
-    """
-    _ensure_anthropic_client()
-
-    context_blocks = _build_chunked_context_blocks(md_content)
-
-    user_content = context_blocks + [
-        {
-            "type": "text",
-            "text": user_message,
-        }
-    ]
-
-    n_chunks = len(context_blocks)
-    log_summary = (
-        f"[Chunked text: {n_chunks} chunk(s), "
-        f"{len(md_content)} chars total]\n\n{user_message}"
-    )
-
-    return _run_anthropic_loop(user_content, user_message, base_dir, log_summary)
-
-
-def _send_anthropic(
-    md_content: str,
-    user_message: str,
-    base_dir: str,
-    file_items: list[dict] | None = None,
-) -> str:
-    """
-    Entry point for Anthropic sends. Routes to the Files API path when
-    file_items are provided, otherwise falls back to chunked text.
-    """
-    try:
-        if file_items:
-            return _send_anthropic_files(md_content, user_message, base_dir, file_items)
-        else:
-            return _send_anthropic_chunked(md_content, user_message, base_dir)
    except ProviderError:
        raise
    except Exception as exc:
        raise _classify_anthropic_error(exc) from exc


# ------------------------------------------------------------------ unified send

def send(
    md_content: str,
    user_message: str,
    base_dir: str = ".",
-    file_items: list[dict] | None = None,
) -> str:
    """
    Send a message to the active provider.

    md_content  : aggregated markdown string from aggregate.run()
-    user_message: the user's question / instruction
+    user_message: the user question / instruction
    base_dir    : project base directory (for PowerShell tool calls)
-    file_items  : optional list of file dicts from aggregate.build_file_items();
-                  when provided and provider is anthropic, files are uploaded
-                  via the Files API rather than inlined as text
    """
    if _provider == "gemini":
        return _send_gemini(md_content, user_message, base_dir)
    elif _provider == "anthropic":
-        return _send_anthropic(md_content, user_message, base_dir, file_items)
+        return _send_anthropic(md_content, user_message, base_dir)
    raise ValueError(f"unknown provider: {_provider}")
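Not part of the diff: a minimal driver sketch for the slimmed-down module after this commit, using only names that appear in the diff; the callback stub and model string are illustrative.

import ai_client

# gui.py normally injects this; stubbed here so _run_script has a handler.
ai_client.confirm_and_run_callback = lambda script, base_dir: "(dry run - not executed)"

ai_client.set_provider("anthropic", "claude-sonnet")  # illustrative model name
reply = ai_client.send(md_content="# aggregated context...",
                       user_message="Summarize the context.",
                       base_dir=".")
print(reply)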
File diff suppressed because one or more lines are too long
@@ -10,13 +10,13 @@ Collapsed=0

[Window][###22]
Pos=0,0
-Size=549,652
+Size=599,652
Collapsed=0
DockId=0x00000005,0

[Window][###30]
Pos=0,654
-Size=549,1342
+Size=599,793
Collapsed=0
DockId=0x00000027,0
@@ -106,14 +106,14 @@ Collapsed=0
DockId=0x00000025,0

[Window][###106]
-Pos=551,0
-Size=972,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,0

[Window][###100]
-Pos=2622,0
-Size=1218,1412
+Pos=2687,0
+Size=1153,1412
Collapsed=0
DockId=0x00000024,0
@@ -173,8 +173,8 @@ Collapsed=0
DockId=0x00000016,0

[Window][###112]
-Pos=551,0
-Size=972,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,2
@@ -209,8 +209,8 @@ Size=700,440
Collapsed=0

[Window][###115]
-Pos=551,0
-Size=847,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,0
@@ -221,26 +221,26 @@ Collapsed=0
DockId=0x00000022,0

[Window][###85]
-Pos=1400,0
-Size=1220,1412
+Pos=1525,0
+Size=1160,1412
Collapsed=0
DockId=0x00000025,0

[Window][###92]
-Pos=551,0
-Size=847,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,2

[Window][###107]
-Pos=1400,1414
-Size=2440,723
+Pos=1525,1414
+Size=2315,723
Collapsed=0
DockId=0x0000001A,0

[Window][###109]
-Pos=551,0
-Size=847,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,1
@@ -291,8 +291,8 @@ Collapsed=0
DockId=0x00000026,0

[Window][###72]
-Pos=0,1998
-Size=549,139
+Pos=0,1449
+Size=599,688
Collapsed=0
DockId=0x00000028,0
@@ -303,8 +303,8 @@ Collapsed=0
DockId=0x00000025,0

[Window][###89]
-Pos=551,0
-Size=972,2137
+Pos=601,0
+Size=922,2137
Collapsed=0
DockId=0x00000012,1
@@ -364,24 +364,29 @@ Pos=1578,868
Size=700,440
Collapsed=0

+[Window][###352]
+Pos=1578,868
+Size=700,440
+Collapsed=0
+
[Docking][Data]
DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,2137 Split=X Selected=0x40484D8F
-DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=549,1161 Split=Y Selected=0xEE087978
+DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=599,1161 Split=Y Selected=0xEE087978
DockNode ID=0x00000005 Parent=0x00000003 SizeRef=235,354 Selected=0xEE087978
DockNode ID=0x00000006 Parent=0x00000003 SizeRef=235,805 Split=Y Selected=0x5F94F9BD
DockNode ID=0x00000009 Parent=0x00000006 SizeRef=235,453 Split=Y Selected=0x5F94F9BD
DockNode ID=0x0000001D Parent=0x00000009 SizeRef=364,766 Split=Y Selected=0x5F94F9BD
DockNode ID=0x00000021 Parent=0x0000001D SizeRef=549,696 Split=Y Selected=0x5F94F9BD
-DockNode ID=0x00000027 Parent=0x00000021 SizeRef=549,1342 Selected=0x5F94F9BD
-DockNode ID=0x00000028 Parent=0x00000021 SizeRef=549,139 Selected=0xBEC5E8CB
+DockNode ID=0x00000027 Parent=0x00000021 SizeRef=549,793 Selected=0x5F94F9BD
+DockNode ID=0x00000028 Parent=0x00000021 SizeRef=549,688 Selected=0xBEC5E8CB
DockNode ID=0x00000022 Parent=0x0000001D SizeRef=549,785 Selected=0x0CE534DB
DockNode ID=0x0000001E Parent=0x00000009 SizeRef=364,715 Selected=0xF475F06A
DockNode ID=0x0000000A Parent=0x00000006 SizeRef=235,350 Selected=0x80199DAE
-DockNode ID=0x00000004 Parent=0x7C6B3D9B SizeRef=3289,1161 Split=X
+DockNode ID=0x00000004 Parent=0x7C6B3D9B SizeRef=3239,1161 Split=X
DockNode ID=0x00000001 Parent=0x00000004 SizeRef=1060,1161 Split=Y Selected=0x40484D8F
DockNode ID=0x00000007 Parent=0x00000001 SizeRef=595,492 Selected=0xBA13FCDE
DockNode ID=0x00000008 Parent=0x00000001 SizeRef=595,1643 Split=X Selected=0x40484D8F
-DockNode ID=0x0000000F Parent=0x00000008 SizeRef=972,2137 Split=Y Selected=0x07E8375F
+DockNode ID=0x0000000F Parent=0x00000008 SizeRef=922,2137 Split=Y Selected=0x07E8375F
DockNode ID=0x00000011 Parent=0x0000000F SizeRef=835,425 Selected=0x72F373AE
DockNode ID=0x00000012 Parent=0x0000000F SizeRef=835,1710 Selected=0x3934423A
DockNode ID=0x00000010 Parent=0x00000008 SizeRef=2315,2137 Split=Y Selected=0xCE7F911A
@@ -389,12 +394,12 @@ DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,
DockNode ID=0x00000017 Parent=0x00000013 SizeRef=1314,1749 Selected=0x4B454E0B
DockNode ID=0x00000018 Parent=0x00000013 SizeRef=1309,1749 Split=Y Selected=0x88A8C2FF
DockNode ID=0x00000019 Parent=0x00000018 SizeRef=2440,1412 Split=X Selected=0x88A8C2FF
-DockNode ID=0x00000023 Parent=0x00000019 SizeRef=1220,737 Split=Y Selected=0x4F935A1E
+DockNode ID=0x00000023 Parent=0x00000019 SizeRef=1160,737 Split=Y Selected=0x4F935A1E
DockNode ID=0x0000001F Parent=0x00000023 SizeRef=2315,1853 Split=Y Selected=0x4F935A1E
DockNode ID=0x00000025 Parent=0x0000001F SizeRef=2315,1244 CentralNode=1 Selected=0x4F935A1E
DockNode ID=0x00000026 Parent=0x0000001F SizeRef=2315,607 Selected=0x7D28643F
DockNode ID=0x00000020 Parent=0x00000023 SizeRef=2315,282 Selected=0x4C2F06CB
-DockNode ID=0x00000024 Parent=0x00000019 SizeRef=1218,737 Selected=0x88A8C2FF
+DockNode ID=0x00000024 Parent=0x00000019 SizeRef=1153,737 Selected=0x88A8C2FF
DockNode ID=0x0000001A Parent=0x00000018 SizeRef=2440,723 Selected=0x3A881EEF
DockNode ID=0x00000014 Parent=0x00000010 SizeRef=1967,445 Selected=0xC36FF36B
DockNode ID=0x00000002 Parent=0x00000004 SizeRef=2227,1161 Split=X Selected=0x714F2F7B
file_cache.py
@@ -1,193 +1,29 @@
# file_cache.py
"""
-Persistent cache of Anthropic Files API uploads.
-
-Maps (absolute_path, mtime_ns, size_bytes) -> file_id so we only upload each
-file once and reuse the file_id on subsequent sends. If the file has changed
-on disk the old file_id is deleted from the API and a new one is uploaded.
-
-Cache is stored as JSON at ./logs/file_cache.json so it survives restarts.
+Stub — the Anthropic Files API path has been removed.
+All context is now sent as inline chunked text via _send_anthropic_chunked.
+This file is kept so that any stale imports do not break.
"""

-import json
-import os
-import datetime
from pathlib import Path
-from typing import Optional

-_CACHE_PATH = Path("./logs/file_cache.json")
-
-# in-memory dict: abs_path_str -> {"file_id": str, "mtime_ns": int, "size": int}
-_cache: dict[str, dict] = {}
-_cache_loaded = False
-
_anthropic_client = None  # set by _ensure_client()


-# ------------------------------------------------------------------ persistence
-
-def _load_cache():
-    global _cache, _cache_loaded
-    if _cache_loaded:
-        return
-    _cache_loaded = True
-    if _CACHE_PATH.exists():
-        try:
-            _cache = json.loads(_CACHE_PATH.read_text(encoding="utf-8"))
-        except Exception:
-            _cache = {}
-    else:
-        _cache = {}
-
-
-def _save_cache():
-    _CACHE_PATH.parent.mkdir(parents=True, exist_ok=True)
-    _CACHE_PATH.write_text(json.dumps(_cache, indent=2, ensure_ascii=False), encoding="utf-8")
-
-
# ------------------------------------------------------------------ anthropic client

-def _ensure_client():
-    global _anthropic_client
-    if _anthropic_client is not None:
-        return _anthropic_client
-    import tomllib
-    import anthropic
-    with open("credentials.toml", "rb") as f:
-        creds = tomllib.load(f)
-    _anthropic_client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"])
-    return _anthropic_client
-
-
def reset_client():
    """Called when the main ai_client resets its session."""
    global _anthropic_client
    _anthropic_client = None


# ------------------------------------------------------------------ MIME helpers

-_MIME_BY_EXT = {
-    ".pdf": "application/pdf",
-    ".txt": "text/plain",
-    ".md": "text/plain",
-    ".py": "text/plain",
-    ".toml": "text/plain",
-    ".json": "text/plain",
-    ".yaml": "text/plain",
-    ".yml": "text/plain",
-    ".ini": "text/plain",
-    ".ps1": "text/plain",
-    ".csv": "text/plain",
-    ".log": "text/plain",
-    ".png": "image/png",
-    ".jpg": "image/jpeg",
-    ".jpeg": "image/jpeg",
-    ".gif": "image/gif",
-    ".webp": "image/webp",
-}
-
-# File types that can be sent as 'document' blocks
-_DOCUMENT_MIMES = {"application/pdf", "text/plain"}
-# File types that can be sent as 'image' blocks
-_IMAGE_MIMES = {"image/png", "image/jpeg", "image/gif", "image/webp"}


def _mime_for(path: Path) -> str:
-    return _MIME_BY_EXT.get(path.suffix.lower(), "text/plain")
+    pass


def content_block_type(path: Path) -> str:
    """Returns 'document', 'image', or 'unsupported'."""
-    mime = _mime_for(path)
-    if mime in _DOCUMENT_MIMES:
-        return "document"
-    if mime in _IMAGE_MIMES:
-        return "image"
    return "unsupported"
-
-
-# ------------------------------------------------------------------ upload / cache logic
-
-def _stat(path: Path) -> tuple[int, int]:
-    """Returns (mtime_ns, size_bytes). Raises if file missing."""
-    st = path.stat()
-    return int(st.st_mtime_ns), int(st.st_size)
-
-
-def _delete_remote(file_id: str):
-    """Best-effort delete of a stale file_id from the API."""
-    try:
-        client = _ensure_client()
-        client.beta.files.delete(file_id, extra_headers={"anthropic-beta": "files-api-2025-04-14"})
-    except Exception:
-        pass  # stale deletes are non-fatal
-
-
-def _upload(path: Path) -> str:
-    """Upload the file and return its new file_id."""
-    client = _ensure_client()
-    mime = _mime_for(path)
-    filename = path.name
-    with open(path, "rb") as fh:
-        result = client.beta.files.upload(
-            file=(filename, fh, mime),
-            extra_headers={"anthropic-beta": "files-api-2025-04-14"},
-        )
-    return result.id
-
-
-def get_file_id(path: Path) -> Optional[str]:
-    """
-    Return a valid Anthropic file_id for the given path, uploading if needed.
-    Returns None if the file type is unsupported.
-    Raises on network / API errors.
-    """
-    _load_cache()
-
-    if content_block_type(path) == "unsupported":
-        return None
-
-    abs_str = str(path.resolve())
-    try:
-        mtime_ns, size = _stat(path)
-    except FileNotFoundError:
-        # File gone - evict from cache
-        if abs_str in _cache:
-            _delete_remote(_cache[abs_str]["file_id"])
-            del _cache[abs_str]
-            _save_cache()
-        return None
-
-    entry = _cache.get(abs_str)
-    if entry and entry.get("mtime_ns") == mtime_ns and entry.get("size") == size:
-        # Cache hit - file unchanged
-        return entry["file_id"]
-
-    # Cache miss or stale - delete old remote if we had one
-    if entry:
-        _delete_remote(entry["file_id"])
-
-    file_id = _upload(path)
-    _cache[abs_str] = {"file_id": file_id, "mtime_ns": mtime_ns, "size": size}
-    _save_cache()
-    return file_id


def evict(path: Path):
    """Manually evict a path from the cache (e.g. after a tool-call write)."""
-    _load_cache()
-    abs_str = str(path.resolve())
-    entry = _cache.pop(abs_str, None)
-    if entry:
-        _delete_remote(entry["file_id"])
-        _save_cache()
+    pass


def list_cached() -> list[dict]:
    """Return a snapshot of the current cache for display."""
-    _load_cache()
-    return [
-        {"path": k, **v}
-        for k, v in _cache.items()
-    ]
+    return []
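Not part of the diff: the removed cache keyed uploads by (absolute_path, mtime_ns, size_bytes), per its old docstring. A minimal sketch of that staleness test, independent of the Anthropic client:

from pathlib import Path

def _entry_is_fresh(path: Path, entry: dict) -> bool:
    """True while the on-disk file still matches what was uploaded."""
    st = path.stat()
    return (entry.get("mtime_ns") == st.st_mtime_ns
            and entry.get("size") == st.st_size)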
gui.py
@@ -329,6 +329,8 @@ class App:
            self.config.get("discussion", {}).get("history", [])
        )

+        self.disc_entries: list[dict] = _parse_history_entries(self.history)
+
        ai_cfg = self.config.get("ai", {})
        self.current_provider: str = ai_cfg.get("provider", "gemini")
        self.current_model: str = ai_cfg.get("model", "gemini-2.0-flash")
@@ -653,7 +655,7 @@ class App:

        def do_send():
            try:
-                response = ai_client.send(self.last_md, user_msg, base_dir, file_items_snap)
+                response = ai_client.send(self.last_md, user_msg, base_dir)
                self._update_response(response)
                self._update_status("done")
            except Exception as e:
@@ -940,3 +942,5 @@ def main():
if __name__ == "__main__":
    main()
+
+
@@ -124,3 +124,4 @@ def log_tool_call(script: str, result: str, script_path: str | None):

    return str(ps1_path) if ps1_path else None
+