From 1b3ff232c49b1701a479f6f2e12438a8fa8f5f75 Mon Sep 17 00:00:00 2001 From: Ed_ Date: Wed, 25 Feb 2026 22:33:20 -0500 Subject: [PATCH] feat(deepseek): Implement Phase 1 infrastructure and provider interface --- ai_client.py | 86 ++++++++++++++++++++++++++++++++++-- gui_2.py | 2 +- project_manager.py | 3 +- tests/test_deepseek_infra.py | 51 +++++++++++++++++++++ 4 files changed, 137 insertions(+), 5 deletions(-) create mode 100644 tests/test_deepseek_infra.py diff --git a/ai_client.py b/ai_client.py index a2f9191..608b615 100644 --- a/ai_client.py +++ b/ai_client.py @@ -65,6 +65,11 @@ _GEMINI_CACHE_TTL = 3600 _anthropic_client = None _anthropic_history: list[dict] = [] _anthropic_history_lock = threading.Lock() + +_deepseek_client = None +_deepseek_history: list[dict] = [] +_deepseek_history_lock = threading.Lock() + _send_lock = threading.Lock() _gemini_cli_adapter = None @@ -159,10 +164,10 @@ def _load_credentials() -> dict: f"Create a credentials.toml with:\n" f" [gemini]\n api_key = \"your-key\"\n" f" [anthropic]\n api_key = \"your-key\"\n" + f" [deepseek]\n api_key = \"your-key\"\n" f"Or set SLOP_CREDENTIALS env var to a custom path." 
) - # ------------------------------------------------------------------ provider errors class ProviderError(Exception): @@ -241,6 +246,21 @@ def _classify_gemini_error(exc: Exception) -> ProviderError: return ProviderError("unknown", "gemini", exc) +def _classify_deepseek_error(exc: Exception) -> ProviderError: + body = str(exc).lower() + if "429" in body or "rate" in body: + return ProviderError("rate_limit", "deepseek", exc) + if "401" in body or "403" in body or "auth" in body or "api key" in body: + return ProviderError("auth", "deepseek", exc) + if "402" in body or "balance" in body or "billing" in body: + return ProviderError("balance", "deepseek", exc) + if "quota" in body or "limit exceeded" in body: + return ProviderError("quota", "deepseek", exc) + if "connection" in body or "timeout" in body or "network" in body: + return ProviderError("network", "deepseek", exc) + return ProviderError("unknown", "deepseek", exc) + + # ------------------------------------------------------------------ provider setup def set_provider(provider: str, model: str): @@ -280,6 +300,12 @@ def reset_session(): _anthropic_client = None with _anthropic_history_lock: _anthropic_history = [] + + global _deepseek_client, _deepseek_history + _deepseek_client = None + with _deepseek_history_lock: + _deepseek_history = [] + _CACHED_ANTHROPIC_TOOLS = None file_cache.reset_client() @@ -308,6 +334,8 @@ def list_models(provider: str) -> list[str]: return _list_gemini_models(creds["gemini"]["api_key"]) elif provider == "anthropic": return _list_anthropic_models() + elif provider == "deepseek": + return _list_deepseek_models(creds["deepseek"]["api_key"]) return [] @@ -340,6 +368,14 @@ def _list_anthropic_models() -> list[str]: raise _classify_anthropic_error(exc) from exc +def _list_deepseek_models(api_key: str) -> list[str]: + """ + List available DeepSeek models.
+ """ + # For now, return the models specified in the requirements + return ["deepseek-chat", "deepseek-reasoner", "deepseek-v3", "deepseek-r1"] + + # ------------------------------------------------------------------ tool definition TOOL_NAME = "run_powershell" @@ -1385,6 +1420,41 @@ def _send_anthropic(md_content: str, user_message: str, base_dir: str, file_item raise _classify_anthropic_error(exc) from exc +# ------------------------------------------------------------------ deepseek + +def _ensure_deepseek_client(): + global _deepseek_client + if _deepseek_client is None: + creds = _load_credentials() + # Placeholder for Dedicated DeepSeek SDK instantiation + # import deepseek + # _deepseek_client = deepseek.DeepSeek(api_key=creds["deepseek"]["api_key"]) + pass + + +def _send_deepseek(md_content: str, user_message: str, base_dir: str, + file_items: list[dict] | None = None, + discussion_history: str = "") -> str: + """ + Placeholder implementation for DeepSeek provider. + Aligns with Gemini/Anthropic patterns for history and tool calling. + """ + try: + _ensure_deepseek_client() + mcp_client.configure(file_items or [], [base_dir]) + + # TODO: Implement full DeepSeek logic in Phase 2 + # 1. Build system prompt with context + # 2. Manage _deepseek_history + # 3. Handle reasoning traces for R1 + # 4. 
Handle tool calling loop + + raise ValueError("DeepSeek provider is currently in the infrastructure phase and not yet fully implemented.") + + except Exception as e: + raise _classify_deepseek_error(e) from e + + # ------------------------------------------------------------------ unified send def send( @@ -1413,6 +1483,8 @@ def send( return _send_gemini_cli(md_content, user_message, base_dir, file_items, discussion_history) elif _provider == "anthropic": return _send_anthropic(md_content, user_message, base_dir, file_items, discussion_history) + elif _provider == "deepseek": + return _send_deepseek(md_content, user_message, base_dir, file_items, discussion_history) raise ValueError(f"unknown provider: {_provider}") def get_history_bleed_stats(md_content: str | None = None) -> dict: @@ -1516,7 +1588,7 @@ def get_history_bleed_stats(md_content: str | None = None) -> dict: # Stats from CLI use 'input_tokens' or 'input' u = _gemini_cli_adapter.last_usage current_tokens = u.get("input_tokens") or u.get("input", 0) - + percentage = (current_tokens / limit_tokens) * 100 if limit_tokens > 0 else 0 return { "provider": "gemini_cli", @@ -1524,7 +1596,15 @@ def get_history_bleed_stats(md_content: str | None = None) -> dict: "current": current_tokens, "percentage": percentage, } - + elif _provider == "deepseek": + # Placeholder for DeepSeek token estimation + return { + "provider": "deepseek", + "limit": 64000, # Common limit for deepseek + "current": 0, + "percentage": 0, + } + # Default empty state return { "provider": _provider, diff --git a/gui_2.py b/gui_2.py index 74a662e..e42d406 100644 --- a/gui_2.py +++ b/gui_2.py @@ -29,7 +29,7 @@ from pydantic import BaseModel from imgui_bundle import imgui, hello_imgui, immapp CONFIG_PATH = Path("config.toml") -PROVIDERS = ["gemini", "anthropic", "gemini_cli"] +PROVIDERS = ["gemini", "anthropic", "gemini_cli", "deepseek"] COMMS_CLAMP_CHARS = 300 def load_config() -> dict: diff --git a/project_manager.py b/project_manager.py index 
8966fea..4c69b9c 100644 --- a/project_manager.py +++ b/project_manager.py @@ -101,6 +101,7 @@ def default_project(name: str = "unnamed") -> dict: "files": {"base_dir": ".", "paths": []}, "screenshots": {"base_dir": ".", "paths": []}, "gemini_cli": {"binary_path": "gemini"}, + "deepseek": {"reasoning_effort": "medium"}, "agent": { "tools": { "run_powershell": True, @@ -113,7 +114,7 @@ def default_project(name: str = "unnamed") -> dict: } }, "discussion": { - "roles": ["User", "AI", "Vendor API", "System"], + "roles": ["User", "AI", "Vendor API", "System", "Reasoning"], "active": "main", "discussions": {"main": default_discussion()}, }, diff --git a/tests/test_deepseek_infra.py b/tests/test_deepseek_infra.py new file mode 100644 index 0000000..71bd695 --- /dev/null +++ b/tests/test_deepseek_infra.py @@ -0,0 +1,51 @@ +import pytest +import os +import tomllib +import tomli_w +from pathlib import Path +import sys + +# Ensure project root is in path +sys.path.append(os.path.abspath(os.path.join(os.path.dirname(__file__), ".."))) + +import ai_client +import project_manager + +def test_credentials_error_mentions_deepseek(monkeypatch): + """ + Verify that the error message shown when credentials.toml is missing + includes deepseek instructions. + """ + # Monkeypatch SLOP_CREDENTIALS to a non-existent file + monkeypatch.setenv("SLOP_CREDENTIALS", "non_existent_credentials_file.toml") + + with pytest.raises(FileNotFoundError) as excinfo: + ai_client._load_credentials() + + err_msg = str(excinfo.value) + assert "[deepseek]" in err_msg + assert "api_key" in err_msg + +def test_default_project_includes_reasoning_role(): + """ + Verify that 'Reasoning' is included in the default discussion roles + to support DeepSeek-R1 reasoning traces. + """ + proj = project_manager.default_project("test") + roles = proj["discussion"]["roles"] + assert "Reasoning" in roles + +def test_gui_providers_list(): + """ + Check if 'deepseek' is in the GUI's provider list. 
+ """ + import gui_2 + assert "deepseek" in gui_2.PROVIDERS + +def test_deepseek_model_listing(): + """ + Verify DeepSeek model listing directly (no credentials.toml required). + """ + models = ai_client._list_deepseek_models("dummy-key") + assert "deepseek-chat" in models + assert "deepseek-reasoner" in models