Merge branch 'fixes'

This commit is contained in:
2026-02-21 15:13:59 -05:00
3 changed files with 213 additions and 104 deletions

1
.gitignore vendored
View File

@@ -3,3 +3,4 @@ __pycache__
uv.lock uv.lock
colorforth_bootslop_002.md colorforth_bootslop_002.md
md_gen md_gen
scripts/generated

View File

@@ -1,4 +1,4 @@
# ai_client.py # ai_client.py
import tomllib import tomllib
from pathlib import Path from pathlib import Path
@@ -22,6 +22,103 @@ def _load_credentials() -> dict:
with open("credentials.toml", "rb") as f: with open("credentials.toml", "rb") as f:
return tomllib.load(f) return tomllib.load(f)
# ------------------------------------------------------------------ provider errors
class ProviderError(Exception):
    """Hard upstream-API failure that the UI should surface distinctly.

    Wraps the underlying SDK exception together with a coarse
    classification (quota, rate-limit, auth, balance, network, unknown)
    so the caller can render a meaningful banner.

    Attributes
    ----------
    kind : str
        One of: "quota", "rate_limit", "auth", "balance", "network", "unknown"
    provider : str
        "gemini" or "anthropic"
    original : Exception
        The underlying SDK exception.
    """

    def __init__(self, kind: str, provider: str, original: Exception):
        self.kind = kind
        self.provider = provider
        self.original = original
        # Message of the wrapped exception becomes this exception's message.
        super().__init__(str(original))

    def ui_message(self) -> str:
        """Return the human-readable banner shown in the Response panel."""
        kind_to_label = {
            "quota": "QUOTA EXHAUSTED",
            "rate_limit": "RATE LIMITED",
            "auth": "AUTH / API KEY ERROR",
            "balance": "BALANCE / BILLING ERROR",
            "network": "NETWORK / CONNECTION ERROR",
            "unknown": "API ERROR",
        }
        # Unrecognized kinds fall back to the generic label.
        banner = kind_to_label.get(self.kind, "API ERROR")
        return f"[{self.provider.upper()} {banner}]\n\n{self.original}"
def _classify_anthropic_error(exc: Exception) -> ProviderError:
    """Translate an anthropic SDK exception into a ProviderError.

    Falls back to kind "unknown" when the SDK is not importable or the
    exception does not match any known category.
    """
    def wrap(kind: str) -> ProviderError:
        return ProviderError(kind, "anthropic", exc)

    try:
        import anthropic
    except ImportError:
        return wrap("unknown")

    # Specific exception types are checked before the generic status handler.
    by_type = (
        (anthropic.RateLimitError, "rate_limit"),
        (anthropic.AuthenticationError, "auth"),
        (anthropic.PermissionDeniedError, "auth"),
        (anthropic.APIConnectionError, "network"),
    )
    for exc_type, kind in by_type:
        if isinstance(exc, exc_type):
            return wrap(kind)

    if isinstance(exc, anthropic.APIStatusError):
        status = getattr(exc, "status_code", 0)
        body = str(exc).lower()
        if status == 429:
            return wrap("rate_limit")
        if status in (401, 403):
            return wrap("auth")
        if status == 402:
            return wrap("balance")
        # Anthropic puts credit-balance errors in the body at 400
        if any(word in body for word in ("credit", "balance", "billing")):
            return wrap("balance")
        if any(word in body for word in ("quota", "limit", "exceeded")):
            return wrap("quota")

    return wrap("unknown")
def _classify_gemini_error(exc: Exception) -> ProviderError:
    """Translate a google-genai SDK exception into a ProviderError.

    Prefers exact google.api_core exception types when that package is
    importable; anything unmatched is classified by scanning the
    stringified exception for status codes and keywords.
    """
    def wrap(kind: str) -> ProviderError:
        return ProviderError(kind, "gemini", exc)

    text = str(exc).lower()

    # google-genai surfaces HTTP errors as google.api_core exceptions or
    # google.genai exceptions; inspect the message text as a reliable fallback.
    try:
        from google.api_core import exceptions as gac
    except ImportError:
        gac = None

    if gac is not None:
        if isinstance(exc, gac.ResourceExhausted):
            return wrap("quota")
        if isinstance(exc, gac.TooManyRequests):
            return wrap("rate_limit")
        if isinstance(exc, (gac.Unauthenticated, gac.PermissionDenied)):
            return wrap("auth")
        if isinstance(exc, gac.ServiceUnavailable):
            return wrap("network")

    # Fallback: parse status code / message string
    if "429" in text or "quota" in text or "resource exhausted" in text:
        return wrap("quota")
    if "rate" in text and "limit" in text:
        return wrap("rate_limit")
    if any(tok in text for tok in ("401", "403", "api key", "unauthenticated")):
        return wrap("auth")
    if any(tok in text for tok in ("402", "billing", "balance", "payment")):
        return wrap("balance")
    if any(tok in text for tok in ("connection", "timeout", "unreachable")):
        return wrap("network")
    return wrap("unknown")
# ------------------------------------------------------------------ provider setup # ------------------------------------------------------------------ provider setup
def set_provider(provider: str, model: str): def set_provider(provider: str, model: str):
@@ -49,6 +146,7 @@ def list_models(provider: str) -> list[str]:
def _list_gemini_models(api_key: str) -> list[str]: def _list_gemini_models(api_key: str) -> list[str]:
from google import genai from google import genai
try:
client = genai.Client(api_key=api_key) client = genai.Client(api_key=api_key)
models = [] models = []
for m in client.models.list(): for m in client.models.list():
@@ -58,15 +156,20 @@ def _list_gemini_models(api_key: str) -> list[str]:
if "gemini" in name.lower(): if "gemini" in name.lower():
models.append(name) models.append(name)
return sorted(models) return sorted(models)
except Exception as exc:
raise _classify_gemini_error(exc) from exc
def _list_anthropic_models() -> list[str]: def _list_anthropic_models() -> list[str]:
import anthropic import anthropic
try:
creds = _load_credentials() creds = _load_credentials()
client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"]) client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"])
models = [] models = []
for m in client.models.list(): for m in client.models.list():
models.append(m.id) models.append(m.id)
return sorted(models) return sorted(models)
except Exception as exc:
raise _classify_anthropic_error(exc) from exc
# --------------------------------------------------------- tool definition # --------------------------------------------------------- tool definition
@@ -148,10 +251,9 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
from google import genai from google import genai
from google.genai import types from google.genai import types
try:
_ensure_gemini_client() _ensure_gemini_client()
# Gemini chats don't support mutating tools after creation,
# so we recreate if None (reset_session clears it).
if _gemini_chat is None: if _gemini_chat is None:
_gemini_chat = _gemini_client.chats.create( _gemini_chat = _gemini_client.chats.create(
model=_model, model=_model,
@@ -165,7 +267,6 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
response = _gemini_chat.send_message(full_message) response = _gemini_chat.send_message(full_message)
for _ in range(MAX_TOOL_ROUNDS): for _ in range(MAX_TOOL_ROUNDS):
# Collect all function calls in this response
tool_calls = [ tool_calls = [
part.function_call part.function_call
for candidate in response.candidates for candidate in response.candidates
@@ -175,7 +276,6 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
if not tool_calls: if not tool_calls:
break break
# Execute each tool call and collect results
function_responses = [] function_responses = []
for fc in tool_calls: for fc in tool_calls:
if fc.name == TOOL_NAME: if fc.name == TOOL_NAME:
@@ -193,7 +293,6 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
response = _gemini_chat.send_message(function_responses) response = _gemini_chat.send_message(function_responses)
# Extract text from final response
text_parts = [ text_parts = [
part.text part.text
for candidate in response.candidates for candidate in response.candidates
@@ -202,6 +301,11 @@ def _send_gemini(md_content: str, user_message: str, base_dir: str) -> str:
] ]
return "\n".join(text_parts) return "\n".join(text_parts)
except ProviderError:
raise
except Exception as exc:
raise _classify_gemini_error(exc) from exc
# ------------------------------------------------------------------ anthropic # ------------------------------------------------------------------ anthropic
def _ensure_anthropic_client(): def _ensure_anthropic_client():
@@ -215,6 +319,7 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
global _anthropic_history global _anthropic_history
import anthropic import anthropic
try:
_ensure_anthropic_client() _ensure_anthropic_client()
full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}" full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
@@ -228,7 +333,6 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
messages=_anthropic_history messages=_anthropic_history
) )
# Always record the assistant turn
_anthropic_history.append({ _anthropic_history.append({
"role": "assistant", "role": "assistant",
"content": response.content "content": response.content
@@ -237,7 +341,6 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
if response.stop_reason != "tool_use": if response.stop_reason != "tool_use":
break break
# Process tool calls
tool_results = [] tool_results = []
for block in response.content: for block in response.content:
if block.type == "tool_use" and block.name == TOOL_NAME: if block.type == "tool_use" and block.name == TOOL_NAME:
@@ -257,7 +360,6 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
"content": tool_results "content": tool_results
}) })
# Extract final text
text_parts = [ text_parts = [
block.text block.text
for block in response.content for block in response.content
@@ -265,6 +367,11 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str) -> str:
] ]
return "\n".join(text_parts) return "\n".join(text_parts)
except ProviderError:
raise
except Exception as exc:
raise _classify_anthropic_error(exc) from exc
# ------------------------------------------------------------------ unified send # ------------------------------------------------------------------ unified send
def send(md_content: str, user_message: str, base_dir: str = ".") -> str: def send(md_content: str, user_message: str, base_dir: str = ".") -> str:

3
gui.py
View File

@@ -1,4 +1,4 @@
import dearpygui.dearpygui as dpg import dearpygui.dearpygui as dpg
import tomllib import tomllib
import tomli_w import tomli_w
import threading import threading
@@ -6,6 +6,7 @@ from pathlib import Path
from tkinter import filedialog, Tk from tkinter import filedialog, Tk
import aggregate import aggregate
import ai_client import ai_client
from ai_client import ProviderError
import shell_runner import shell_runner
CONFIG_PATH = Path("config.toml") CONFIG_PATH = Path("config.toml")