feat(deepseek): Implement Phase 1 infrastructure and provider interface

This commit is contained in:
2026-02-25 22:33:20 -05:00
parent f0c1af986d
commit 1b3ff232c4
4 changed files with 137 additions and 5 deletions

View File

@@ -65,6 +65,11 @@ _GEMINI_CACHE_TTL = 3600
_anthropic_client = None _anthropic_client = None
_anthropic_history: list[dict] = [] _anthropic_history: list[dict] = []
_anthropic_history_lock = threading.Lock() _anthropic_history_lock = threading.Lock()
_deepseek_client = None
_deepseek_history: list[dict] = []
_deepseek_history_lock = threading.Lock()
_send_lock = threading.Lock() _send_lock = threading.Lock()
_gemini_cli_adapter = None _gemini_cli_adapter = None
@@ -159,10 +164,10 @@ def _load_credentials() -> dict:
f"Create a credentials.toml with:\n" f"Create a credentials.toml with:\n"
f" [gemini]\n api_key = \"your-key\"\n" f" [gemini]\n api_key = \"your-key\"\n"
f" [anthropic]\n api_key = \"your-key\"\n" f" [anthropic]\n api_key = \"your-key\"\n"
f" [deepseek]\n api_key = \"your-key\"\n"
f"Or set SLOP_CREDENTIALS env var to a custom path." f"Or set SLOP_CREDENTIALS env var to a custom path."
) )
# ------------------------------------------------------------------ provider errors # ------------------------------------------------------------------ provider errors
class ProviderError(Exception): class ProviderError(Exception):
@@ -241,6 +246,21 @@ def _classify_gemini_error(exc: Exception) -> ProviderError:
return ProviderError("unknown", "gemini", exc) return ProviderError("unknown", "gemini", exc)
def _classify_deepseek_error(exc: Exception) -> ProviderError:
    """Map a raw DeepSeek exception onto a categorized ProviderError.

    Inspects the lowercased exception text for status codes / keywords and
    returns the first matching category, falling back to "unknown".
    """
    text = str(exc).lower()
    # Ordered rules: the first match wins (mirrors the other providers'
    # classifiers elsewhere in this module).
    rules = (
        (("429", "rate"), "rate_limit"),
        (("401", "403", "auth", "api key"), "auth"),
        (("402", "balance", "billing"), "balance"),
        (("quota", "limit exceeded"), "quota"),
        (("connection", "timeout", "network"), "network"),
    )
    for needles, category in rules:
        if any(needle in text for needle in needles):
            return ProviderError(category, "deepseek", exc)
    return ProviderError("unknown", "deepseek", exc)
# ------------------------------------------------------------------ provider setup # ------------------------------------------------------------------ provider setup
def set_provider(provider: str, model: str): def set_provider(provider: str, model: str):
@@ -280,6 +300,11 @@ def reset_session():
_anthropic_client = None _anthropic_client = None
with _anthropic_history_lock: with _anthropic_history_lock:
_anthropic_history = [] _anthropic_history = []
_deepseek_client = None
with _deepseek_history_lock:
_deepseek_history = []
_CACHED_ANTHROPIC_TOOLS = None _CACHED_ANTHROPIC_TOOLS = None
file_cache.reset_client() file_cache.reset_client()
@@ -308,6 +333,8 @@ def list_models(provider: str) -> list[str]:
return _list_gemini_models(creds["gemini"]["api_key"]) return _list_gemini_models(creds["gemini"]["api_key"])
elif provider == "anthropic": elif provider == "anthropic":
return _list_anthropic_models() return _list_anthropic_models()
elif provider == "deepseek":
return _list_deepseek_models(creds["deepseek"]["api_key"])
return [] return []
@@ -340,6 +367,14 @@ def _list_anthropic_models() -> list[str]:
raise _classify_anthropic_error(exc) from exc raise _classify_anthropic_error(exc) from exc
def _list_deepseek_models(api_key: str) -> list[str]:
"""
List available DeepSeek models.
"""
# For now, return the models specified in the requirements
return ["deepseek-chat", "deepseek-reasoner", "deepseek-v3", "deepseek-r1"]
# ------------------------------------------------------------------ tool definition # ------------------------------------------------------------------ tool definition
TOOL_NAME = "run_powershell" TOOL_NAME = "run_powershell"
@@ -1385,6 +1420,41 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item
raise _classify_anthropic_error(exc) from exc raise _classify_anthropic_error(exc) from exc
# ------------------------------------------------------------------ deepseek
def _ensure_deepseek_client():
    """Lazily initialise the module-level DeepSeek client.

    In the current infrastructure phase this only loads the credentials
    (which surfaces a missing credentials.toml early — presumably as
    FileNotFoundError, per _load_credentials); the real SDK client is not
    instantiated yet.
    """
    global _deepseek_client
    if _deepseek_client is not None:
        return
    creds = _load_credentials()
    # TODO: instantiate the dedicated DeepSeek SDK client here, e.g.:
    #   import deepseek
    #   _deepseek_client = deepseek.DeepSeek(api_key=creds["deepseek"]["api_key"])
def _send_deepseek(md_content: str, user_message: str, base_dir: str,
                   file_items: list[dict] | None = None,
                   discussion_history: str = "") -> str:
    """Placeholder implementation for the DeepSeek provider.

    Mirrors the Gemini/Anthropic send flow (client setup, then MCP tool
    configuration), but the request/response loop itself is not built yet,
    so this always raises.

    Raises:
        ProviderError: always — category "unknown" wrapping the
            not-implemented placeholder, or a classified category if
            setup itself fails.
    """
    try:
        _ensure_deepseek_client()
        # Expose the project's files/tools for the (future) tool-calling loop.
        mcp_client.configure(file_items or [], [base_dir])
        # TODO: Implement full DeepSeek logic in Phase 2
        # 1. Build system prompt with context
        # 2. Manage _deepseek_history
        # 3. Handle reasoning traces for R1
        # 4. Handle tool calling loop
        # NotImplementedError (not ValueError) is the idiomatic signal for an
        # unfinished feature; it is still classified into a ProviderError below.
        raise NotImplementedError(
            "DeepSeek provider is currently in the infrastructure phase and not yet fully implemented."
        )
    except ProviderError:
        # Already classified (e.g. by _ensure_deepseek_client's credential
        # loading) — re-raise as-is instead of re-wrapping as "unknown".
        raise
    except Exception as e:
        raise _classify_deepseek_error(e) from e
# ------------------------------------------------------------------ unified send # ------------------------------------------------------------------ unified send
def send( def send(
@@ -1413,6 +1483,8 @@ def send(
return _send_gemini_cli(md_content, user_message, base_dir, file_items, discussion_history) return _send_gemini_cli(md_content, user_message, base_dir, file_items, discussion_history)
elif _provider == "anthropic": elif _provider == "anthropic":
return _send_anthropic(md_content, user_message, base_dir, file_items, discussion_history) return _send_anthropic(md_content, user_message, base_dir, file_items, discussion_history)
elif _provider == "deepseek":
return _send_deepseek(md_content, user_message, base_dir, file_items, discussion_history)
raise ValueError(f"unknown provider: {_provider}") raise ValueError(f"unknown provider: {_provider}")
def get_history_bleed_stats(md_content: str | None = None) -> dict: def get_history_bleed_stats(md_content: str | None = None) -> dict:
@@ -1524,6 +1596,14 @@ def get_history_bleed_stats(md_content: str | None = None) -> dict:
"current": current_tokens, "current": current_tokens,
"percentage": percentage, "percentage": percentage,
} }
elif _provider == "deepseek":
# Placeholder for DeepSeek token estimation
return {
"provider": "deepseek",
"limit": 64000, # Common limit for deepseek
"current": 0,
"percentage": 0,
}
# Default empty state # Default empty state
return { return {

View File

@@ -29,7 +29,7 @@ from pydantic import BaseModel
from imgui_bundle import imgui, hello_imgui, immapp from imgui_bundle import imgui, hello_imgui, immapp
CONFIG_PATH = Path("config.toml") CONFIG_PATH = Path("config.toml")
PROVIDERS = ["gemini", "anthropic", "gemini_cli"] PROVIDERS = ["gemini", "anthropic", "gemini_cli", "deepseek"]
COMMS_CLAMP_CHARS = 300 COMMS_CLAMP_CHARS = 300
def load_config() -> dict: def load_config() -> dict:

View File

@@ -101,6 +101,7 @@ def default_project(name: str = "unnamed") -> dict:
"files": {"base_dir": ".", "paths": []}, "files": {"base_dir": ".", "paths": []},
"screenshots": {"base_dir": ".", "paths": []}, "screenshots": {"base_dir": ".", "paths": []},
"gemini_cli": {"binary_path": "gemini"}, "gemini_cli": {"binary_path": "gemini"},
"deepseek": {"reasoning_effort": "medium"},
"agent": { "agent": {
"tools": { "tools": {
"run_powershell": True, "run_powershell": True,
@@ -113,7 +114,7 @@ def default_project(name: str = "unnamed") -> dict:
} }
}, },
"discussion": { "discussion": {
"roles": ["User", "AI", "Vendor API", "System"], "roles": ["User", "AI", "Vendor API", "System", "Reasoning"],
"active": "main", "active": "main",
"discussions": {"main": default_discussion()}, "discussions": {"main": default_discussion()},
}, },

View File

@@ -0,0 +1,51 @@
import pytest
import os
import tomllib
import tomli_w
from pathlib import Path
import sys
# Ensure project root is in path
sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), "..")))
import ai_client
import project_manager
def test_credentials_error_mentions_deepseek(monkeypatch):
    """The missing-credentials error must document the [deepseek] section."""
    # Point the loader at a path that cannot exist so it fails immediately.
    monkeypatch.setenv("SLOP_CREDENTIALS", "non_existent_credentials_file.toml")
    with pytest.raises(FileNotFoundError) as excinfo:
        ai_client._load_credentials()
    message = str(excinfo.value)
    assert "[deepseek]" in message
    assert "api_key" in message
def test_default_project_includes_reasoning_role():
    """Default projects expose a 'Reasoning' role (for DeepSeek-R1 traces)."""
    project = project_manager.default_project("test")
    assert "Reasoning" in project["discussion"]["roles"]
def test_gui_providers_list():
    """'deepseek' must be selectable among the GUI's providers."""
    import gui_2

    assert "deepseek" in gui_2.PROVIDERS
def test_deepseek_model_listing():
    """list_models('deepseek') advertises the chat and reasoner models."""
    available = ai_client.list_models("deepseek")
    for expected in ("deepseek-chat", "deepseek-reasoner"):
        assert expected in available