feat(anthropic): Align Anthropic integration with latest SDK and enable prompt caching beta
This commit is contained in:
12
ai_client.py
12
ai_client.py
@@ -18,6 +18,7 @@ import datetime
|
||||
from pathlib import Path
|
||||
import file_cache
|
||||
import mcp_client
|
||||
import anthropic
|
||||
from google import genai
|
||||
from google.genai import types
|
||||
from events import EventEmitter
|
||||
@@ -155,7 +156,7 @@ class ProviderError(Exception):
|
||||
|
||||
def _classify_anthropic_error(exc: Exception) -> ProviderError:
|
||||
try:
|
||||
import anthropic
|
||||
|
||||
if isinstance(exc, anthropic.RateLimitError):
|
||||
return ProviderError("rate_limit", "anthropic", exc)
|
||||
if isinstance(exc, anthropic.AuthenticationError):
|
||||
@@ -292,7 +293,7 @@ def _list_gemini_models(api_key: str) -> list[str]:
|
||||
|
||||
|
||||
def _list_anthropic_models() -> list[str]:
|
||||
import anthropic
|
||||
|
||||
try:
|
||||
creds = _load_credentials()
|
||||
client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"])
|
||||
@@ -858,9 +859,12 @@ def _trim_anthropic_history(system_blocks: list[dict], history: list[dict]):
|
||||
def _ensure_anthropic_client():
    """Lazily create and cache the module-level Anthropic client.

    On first call, loads credentials and constructs a single
    ``anthropic.Anthropic`` client with the prompt-caching beta enabled
    via a default request header. Subsequent calls reuse the cached
    instance.

    Side effects:
        Sets the module-level ``_anthropic_client`` global.
    """
    global _anthropic_client
    if _anthropic_client is None:
        import anthropic

        creds = _load_credentials()
        # Build the client exactly once, with the prompt-caching beta
        # header applied to every request. (Previously a plain client was
        # constructed and immediately discarded before this one.)
        _anthropic_client = anthropic.Anthropic(
            api_key=creds["anthropic"]["api_key"],
            default_headers={"anthropic-beta": "prompt-caching-2024-07-31"},
        )
|
||||
|
||||
|
||||
def _chunk_text(text: str, chunk_size: int) -> list[str]:
|
||||
|
||||
Reference in New Issue
Block a user