feat(provider): Add MiniMax AI provider integration

- Add minimax to PROVIDERS lists in gui_2.py and app_controller.py
- Add minimax credentials template in ai_client.py
- Implement _list_minimax_models, _classify_minimax_error, _ensure_minimax_client
- Implement _send_minimax with streaming and reasoning support
- Add minimax to send(), list_models(), reset_session(), get_history_bleed_stats()
- Add unit tests in tests/test_minimax_provider.py
This commit is contained in:
2026-03-06 23:36:30 -05:00
parent f25e6e0b34
commit b79c1fce3c
7 changed files with 516 additions and 5 deletions

View File

@@ -1,6 +1,6 @@
[ai] [ai]
provider = "deepseek" provider = "gemini"
model = "deepseek-v3" model = "gemini-2.5-flash-lite"
temperature = 0.0 temperature = 0.0
max_tokens = 8192 max_tokens = 8192
history_trunc_limit = 8000 history_trunc_limit = 8000

View File

@@ -700,3 +700,164 @@ System:
testing gemini cli testing gemini cli
------------------ ------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
PATH: Epic Initialization — please produce tracks
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please generate the implementation tickets for this track.
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
Please read test.txt
You are assigned to Ticket T1.
Task Description: do something
------------------
--- MOCK INVOKED ---
ARGS: ['tests/mock_gemini_cli.py']
PROMPT:
role: tool
Here are the results: {"content": "done"}
------------------

View File

@@ -8,5 +8,5 @@ active = "main"
[discussions.main] [discussions.main]
git_commit = "" git_commit = ""
last_updated = "2026-03-06T16:40:04" last_updated = "2026-03-06T23:22:48"
history = [] history = []

View File

@@ -75,6 +75,10 @@ _deepseek_client: Any = None
_deepseek_history: list[dict[str, Any]] = [] _deepseek_history: list[dict[str, Any]] = []
_deepseek_history_lock: threading.Lock = threading.Lock() _deepseek_history_lock: threading.Lock = threading.Lock()
_minimax_client: Any = None
_minimax_history: list[dict[str, Any]] = []
_minimax_history_lock: threading.Lock = threading.Lock()
_send_lock: threading.Lock = threading.Lock() _send_lock: threading.Lock = threading.Lock()
_gemini_cli_adapter: Optional[GeminiCliAdapter] = None _gemini_cli_adapter: Optional[GeminiCliAdapter] = None
@@ -176,6 +180,7 @@ def _load_credentials() -> dict[str, Any]:
f" [gemini]\n api_key = \"your-key\"\n" f" [gemini]\n api_key = \"your-key\"\n"
f" [anthropic]\n api_key = \"your-key\"\n" f" [anthropic]\n api_key = \"your-key\"\n"
f" [deepseek]\n api_key = \"your-key\"\n" f" [deepseek]\n api_key = \"your-key\"\n"
f" [minimax]\n api_key = \"your-key\"\n"
f"Or set SLOP_CREDENTIALS env var to a custom path." f"Or set SLOP_CREDENTIALS env var to a custom path."
) )
@@ -284,6 +289,37 @@ def _classify_deepseek_error(exc: Exception) -> ProviderError:
return ProviderError("unknown", "deepseek", Exception(body)) return ProviderError("unknown", "deepseek", Exception(body))
def _classify_minimax_error(exc: Exception) -> ProviderError:
    """Map an exception from a MiniMax API call onto a typed ProviderError.

    Extracts a human-readable body from the HTTP response (when *exc* is a
    requests HTTPError) or from the exception text, then keyword-matches the
    lowercased body to pick a category: rate_limit, auth, balance, quota,
    network, or unknown.

    Args:
        exc: The exception raised while talking to the MiniMax endpoint.

    Returns:
        A ProviderError tagged with the detected category and provider
        "minimax", wrapping the extracted message body.
    """
    body = ""
    if isinstance(exc, requests.exceptions.HTTPError) and exc.response is not None:
        try:
            err_data = exc.response.json()
            # MiniMax error payloads nest the readable text under
            # "error.message". Guard the shape: the decoded JSON may be a
            # string/list, or "error" may not be a dict — the previous
            # `"error" in err_data` check would then crash on subscripting.
            if isinstance(err_data, dict) and isinstance(err_data.get("error"), dict):
                body = str(err_data["error"].get("message", exc.response.text))
            else:
                body = exc.response.text
        except ValueError:
            # response.json() raises ValueError (JSONDecodeError) on a
            # non-JSON body; the previous bare `except:` also swallowed
            # KeyboardInterrupt/SystemExit.
            body = exc.response.text
    else:
        body = str(exc)
    body_l = body.lower()
    # Order matters: the first matching keyword set wins.
    if "429" in body_l or "rate" in body_l:
        return ProviderError("rate_limit", "minimax", Exception(body))
    if "401" in body_l or "403" in body_l or "auth" in body_l or "api key" in body_l:
        return ProviderError("auth", "minimax", Exception(body))
    if "402" in body_l or "balance" in body_l or "billing" in body_l:
        return ProviderError("balance", "minimax", Exception(body))
    if "quota" in body_l or "limit exceeded" in body_l:
        return ProviderError("quota", "minimax", Exception(body))
    if "connection" in body_l or "timeout" in body_l or "network" in body_l:
        return ProviderError("network", "minimax", Exception(body))
    if "400" in body_l or "bad request" in body_l:
        return ProviderError("unknown", "minimax", Exception(f"MiniMax Bad Request: {body}"))
    return ProviderError("unknown", "minimax", Exception(body))
def set_provider(provider: str, model: str) -> None: def set_provider(provider: str, model: str) -> None:
global _provider, _model global _provider, _model
_provider = provider _provider = provider
@@ -293,6 +329,12 @@ def set_provider(provider: str, model: str) -> None:
_model = "gemini-3-flash-preview" _model = "gemini-3-flash-preview"
else: else:
_model = model _model = model
elif provider == "minimax":
valid_models = _list_minimax_models("")
if model not in valid_models:
_model = "MiniMax-M2.5"
else:
_model = model
else: else:
_model = model _model = model
@@ -312,6 +354,7 @@ def reset_session() -> None:
global _gemini_cache_md_hash, _gemini_cache_created_at global _gemini_cache_md_hash, _gemini_cache_created_at
global _anthropic_client, _anthropic_history global _anthropic_client, _anthropic_history
global _deepseek_client, _deepseek_history global _deepseek_client, _deepseek_history
global _minimax_client, _minimax_history
global _CACHED_ANTHROPIC_TOOLS global _CACHED_ANTHROPIC_TOOLS
global _gemini_cli_adapter global _gemini_cli_adapter
if _gemini_client and _gemini_cache: if _gemini_client and _gemini_cache:
@@ -336,6 +379,9 @@ def reset_session() -> None:
_deepseek_client = None _deepseek_client = None
with _deepseek_history_lock: with _deepseek_history_lock:
_deepseek_history = [] _deepseek_history = []
_minimax_client = None
with _minimax_history_lock:
_minimax_history = []
_CACHED_ANTHROPIC_TOOLS = None _CACHED_ANTHROPIC_TOOLS = None
file_cache.reset_client() file_cache.reset_client()
@@ -362,6 +408,8 @@ def list_models(provider: str) -> list[str]:
return _list_deepseek_models(creds["deepseek"]["api_key"]) return _list_deepseek_models(creds["deepseek"]["api_key"])
elif provider == "gemini_cli": elif provider == "gemini_cli":
return _list_gemini_cli_models() return _list_gemini_cli_models()
elif provider == "minimax":
return _list_minimax_models(creds["minimax"]["api_key"])
return [] return []
def _list_gemini_cli_models() -> list[str]: def _list_gemini_cli_models() -> list[str]:
@@ -402,6 +450,9 @@ def _list_anthropic_models() -> list[str]:
def _list_deepseek_models(api_key: str) -> list[str]: def _list_deepseek_models(api_key: str) -> list[str]:
return ["deepseek-chat", "deepseek-reasoner"] return ["deepseek-chat", "deepseek-reasoner"]
def _list_minimax_models(api_key: str) -> list[str]:
return ["MiniMax-M2.5", "MiniMax-M2.5-highspeed", "MiniMax-M2.1", "MiniMax-M2.1-highspeed", "MiniMax-M2"]
TOOL_NAME: str = "run_powershell" TOOL_NAME: str = "run_powershell"
_agent_tools: dict[str, bool] = {} _agent_tools: dict[str, bool] = {}
@@ -1405,6 +1456,16 @@ def _ensure_deepseek_client() -> None:
_load_credentials() _load_credentials()
pass pass
def _ensure_minimax_client() -> None:
    """Lazily construct the module-level MiniMax client.

    No-op when the client already exists. MiniMax speaks an
    OpenAI-compatible protocol, so the OpenAI SDK is pointed at the
    MiniMax base URL. Raises ValueError when no API key is configured.
    """
    global _minimax_client
    if _minimax_client is not None:
        return
    from openai import OpenAI
    key = _load_credentials().get("minimax", {}).get("api_key")
    if not key:
        raise ValueError("MiniMax API key not found in credentials.toml")
    _minimax_client = OpenAI(api_key=key, base_url="https://api.minimax.chat/v1")
def _send_deepseek(md_content: str, user_message: str, base_dir: str, def _send_deepseek(md_content: str, user_message: str, base_dir: str,
file_items: list[dict[str, Any]] | None = None, file_items: list[dict[str, Any]] | None = None,
discussion_history: str = "", discussion_history: str = "",
@@ -1648,6 +1709,222 @@ def _send_deepseek(md_content: str, user_message: str, base_dir: str,
except Exception as e: except Exception as e:
raise _classify_deepseek_error(e) from e raise _classify_deepseek_error(e) from e
def _send_minimax(md_content: str, user_message: str, base_dir: str,
                  file_items: list[dict[str, Any]] | None = None,
                  discussion_history: str = "",
                  stream: bool = False,
                  pre_tool_callback: Optional[Callable[[str, str, Optional[Callable[[str], str]]], Optional[str]]] = None,
                  qa_callback: Optional[Callable[[str], str]] = None,
                  stream_callback: Optional[Callable[[str], None]] = None) -> str:
    """Send one user turn to MiniMax's OpenAI-compatible chat API.

    Runs a multi-round agent loop: the model may request tool calls, which
    are executed via _execute_tool_calls_concurrently and fed back as
    role="tool" messages until the model stops requesting tools or
    MAX_TOOL_ROUNDS is exceeded.

    Args:
        md_content: Markdown context embedded in the system prompt.
        user_message: The user's message for this turn.
        base_dir: Workspace directory handed to mcp_client and tool runs.
        file_items: Optional attached files; re-read after tool rounds so
            on-disk changes are surfaced back to the model.
        discussion_history: Prior-discussion transcript; prepended only to
            the first user message of a fresh session.
        stream: When True, stream deltas and aggregate them client-side.
        pre_tool_callback: Forwarded to tool execution (pre-approval hook).
        qa_callback: Forwarded to tool execution (question/answer hook).
        stream_callback: Invoked with each streamed content chunk.

    Returns:
        All assistant text parts joined by blank lines, or
        "(No text returned)" when the model produced no text.

    Raises:
        ProviderError: any failure is classified by _classify_minimax_error.
    """
    try:
        # Point the MCP tool layer at this conversation's files/workspace.
        mcp_client.configure(file_items or [], [base_dir])
        creds = _load_credentials()
        api_key = creds.get("minimax", {}).get("api_key")
        if not api_key:
            raise ValueError("MiniMax API key not found in credentials.toml")
        # MiniMax exposes an OpenAI-compatible endpoint, so the OpenAI SDK
        # is reused with a custom base_url.
        # NOTE(review): a fresh client is built here instead of reusing
        # _ensure_minimax_client()/_minimax_client — confirm intentional.
        from openai import OpenAI
        client = OpenAI(api_key=api_key, base_url="https://api.minimax.chat/v1")
        with _minimax_history_lock:
            # Only the first turn of an empty session carries the prior
            # discussion transcript; later turns rely on _minimax_history.
            if discussion_history and not _minimax_history:
                user_content = f"[DISCUSSION HISTORY]\n\n{discussion_history}\n\n---\n\n{user_message}"
            else:
                user_content = user_message
            _minimax_history.append({"role": "user", "content": user_content})
        all_text_parts: list[str] = []  # assistant text collected across rounds
        _cumulative_tool_bytes = 0  # running total of truncated tool output
        # +2 leaves room for a final wrap-up round after MAX_TOOL_ROUNDS.
        for round_idx in range(MAX_TOOL_ROUNDS + 2):
            # Rebuild the full API message list from history every round.
            current_api_messages: list[dict[str, Any]] = []
            sys_msg = {"role": "system", "content": f"{_get_combined_system_prompt()}\n\n<context>\n{md_content}\n</context>"}
            current_api_messages.append(sys_msg)
            with _minimax_history_lock:
                for i, msg in enumerate(_minimax_history):
                    role = msg.get("role")
                    api_msg = {"role": role}
                    content = msg.get("content")
                    if role == "assistant":
                        if msg.get("tool_calls"):
                            # Tool-calling assistant turns may carry None content.
                            api_msg["content"] = content or None
                            api_msg["tool_calls"] = msg["tool_calls"]
                        else:
                            api_msg["content"] = content or ""
                    elif role == "tool":
                        api_msg["content"] = content or ""
                        api_msg["tool_call_id"] = msg.get("tool_call_id")
                    else:
                        api_msg["content"] = content or ""
                    current_api_messages.append(api_msg)
            request_payload: dict[str, Any] = {
                "model": _model,
                "messages": current_api_messages,
                "stream": stream,
                # reasoning_split asks MiniMax to return reasoning separately
                # (it surfaces as reasoning_details on the response).
                "extra_body": {"reasoning_split": True},
            }
            if stream:
                request_payload["stream_options"] = {"include_usage": True}
            request_payload["temperature"] = _temperature
            # Cap max_tokens at 8192 regardless of the configured value.
            request_payload["max_tokens"] = min(_max_tokens, 8192)
            # Tool schema format is shared with the DeepSeek provider.
            tools = _get_deepseek_tools()
            if tools:
                request_payload["tools"] = tools
            events.emit("request_start", payload={"provider": "minimax", "model": _model, "round": round_idx, "streaming": stream})
            try:
                response = client.chat.completions.create(**request_payload, timeout=120)
            except Exception as e:
                raise _classify_minimax_error(e) from e
            assistant_text = ""
            tool_calls_raw = []
            reasoning_content = ""
            finish_reason = "stop"
            usage = {}
            if stream:
                # Aggregate the streamed deltas into one assistant message.
                aggregated_content = ""
                aggregated_tool_calls: list[dict[str, Any]] = []
                aggregated_reasoning = ""
                current_usage: dict[str, Any] = {}
                final_finish_reason = "stop"
                for chunk in response:
                    if not chunk.choices:
                        # Usage-only chunk (produced by include_usage).
                        if chunk.usage:
                            current_usage = chunk.usage.model_dump()
                        continue
                    delta = chunk.choices[0].delta
                    if delta.content:
                        content_chunk = delta.content
                        aggregated_content += content_chunk
                        if stream_callback:
                            stream_callback(content_chunk)
                    # reasoning_details is MiniMax-specific; guard with hasattr.
                    if hasattr(delta, "reasoning_details") and delta.reasoning_details:
                        for detail in delta.reasoning_details:
                            if "text" in detail:
                                aggregated_reasoning += detail["text"]
                    if delta.tool_calls:
                        # Tool calls stream as indexed fragments: grow the
                        # list on demand and concatenate name/arguments.
                        for tc_delta in delta.tool_calls:
                            idx = tc_delta.index
                            while len(aggregated_tool_calls) <= idx:
                                aggregated_tool_calls.append({"id": "", "type": "function", "function": {"name": "", "arguments": ""}})
                            target = aggregated_tool_calls[idx]
                            if tc_delta.id:
                                target["id"] = tc_delta.id
                            if tc_delta.function and tc_delta.function.name:
                                target["function"]["name"] += tc_delta.function.name
                            if tc_delta.function and tc_delta.function.arguments:
                                target["function"]["arguments"] += tc_delta.function.arguments
                    if chunk.choices[0].finish_reason:
                        final_finish_reason = chunk.choices[0].finish_reason
                    if chunk.usage:
                        current_usage = chunk.usage.model_dump()
                assistant_text = aggregated_content
                tool_calls_raw = aggregated_tool_calls
                reasoning_content = aggregated_reasoning
                finish_reason = final_finish_reason
                usage = current_usage
            else:
                choice = response.choices[0]
                message = choice.message
                assistant_text = message.content or ""
                tool_calls_raw = message.tool_calls or []
                # Non-streaming path keeps only the first reasoning detail.
                if hasattr(message, "reasoning_details") and message.reasoning_details:
                    reasoning_content = message.reasoning_details[0].get("text", "") if message.reasoning_details else ""
                finish_reason = choice.finish_reason or "stop"
                usage = response.usage.model_dump() if response.usage else {}
            # Reasoning goes to the caller wrapped in <thinking> tags, but is
            # stored separately in history under "reasoning_content".
            thinking_tags = ""
            if reasoning_content:
                thinking_tags = f"<thinking>\n{reasoning_content}\n</thinking>\n"
            full_assistant_text = thinking_tags + assistant_text
            with _minimax_history_lock:
                msg_to_store: dict[str, Any] = {"role": "assistant", "content": assistant_text or None}
                if reasoning_content:
                    msg_to_store["reasoning_content"] = reasoning_content
                if tool_calls_raw:
                    msg_to_store["tool_calls"] = tool_calls_raw
                _minimax_history.append(msg_to_store)
            if full_assistant_text:
                all_text_parts.append(full_assistant_text)
            _append_comms("IN", "response", {
                "round": round_idx,
                "stop_reason": finish_reason,
                "text": full_assistant_text,
                "tool_calls": tool_calls_raw,
                "usage": usage,
                "streaming": stream
            })
            # Done when the model neither signalled tool_calls nor emitted any.
            if finish_reason != "tool_calls" and not tool_calls_raw:
                break
            if round_idx > MAX_TOOL_ROUNDS:
                break
            # Run the requested tools on the live event loop when one exists
            # (we may be on a worker thread); otherwise start a new loop.
            try:
                loop = asyncio.get_running_loop()
                results = asyncio.run_coroutine_threadsafe(
                    _execute_tool_calls_concurrently(tool_calls_raw, base_dir, pre_tool_callback, qa_callback, round_idx, "minimax"),
                    loop
                ).result()
            except RuntimeError:
                results = asyncio.run(_execute_tool_calls_concurrently(tool_calls_raw, base_dir, pre_tool_callback, qa_callback, round_idx, "minimax"))
            tool_results_for_history: list[dict[str, Any]] = []
            for i, (name, call_id, out, _) in enumerate(results):
                if i == len(results) - 1:
                    # Attach file-change diffs and the max-rounds warning to
                    # the last tool result of the round only.
                    if file_items:
                        file_items, changed = _reread_file_items(file_items)
                        ctx = _build_file_diff_text(changed)
                        if ctx:
                            out += f"\n\n[SYSTEM: FILES UPDATED]\n\n{ctx}"
                    if round_idx == MAX_TOOL_ROUNDS:
                        out += "\n\n[SYSTEM: MAX ROUNDS. PROVIDE FINAL ANSWER.]"
                truncated = _truncate_tool_output(out)
                _cumulative_tool_bytes += len(truncated)
                tool_results_for_history.append({
                    "role": "tool",
                    "tool_call_id": call_id,
                    "content": truncated,
                })
                _append_comms("IN", "tool_result", {"name": name, "id": call_id, "output": out})
                events.emit("tool_execution", payload={"status": "completed", "tool": name, "result": out, "round": round_idx})
            if _cumulative_tool_bytes > _MAX_TOOL_OUTPUT_BYTES:
                # Too much tool output accumulated: tell the model to wrap up.
                tool_results_for_history.append({
                    "role": "user",
                    "content": f"SYSTEM WARNING: Cumulative tool output exceeded {_MAX_TOOL_OUTPUT_BYTES // 1000}KB budget. Provide your final answer now."
                })
                _append_comms("OUT", "request", {"message": f"[TOOL OUTPUT BUDGET EXCEEDED: {_cumulative_tool_bytes} bytes]"})
            with _minimax_history_lock:
                for tr in tool_results_for_history:
                    _minimax_history.append(tr)
        return "\n\n".join(all_text_parts) if all_text_parts else "(No text returned)"
    except Exception as e:
        raise _classify_minimax_error(e) from e
def run_tier4_analysis(stderr: str) -> str: def run_tier4_analysis(stderr: str) -> str:
if not stderr or not stderr.strip(): if not stderr or not stderr.strip():
@@ -1742,6 +2019,11 @@ def send(
md_content, user_message, base_dir, file_items, discussion_history, md_content, user_message, base_dir, file_items, discussion_history,
stream, pre_tool_callback, qa_callback, stream_callback stream, pre_tool_callback, qa_callback, stream_callback
) )
elif _provider == "minimax":
return _send_minimax(
md_content, user_message, base_dir, file_items, discussion_history,
stream, pre_tool_callback, qa_callback, stream_callback
)
else: else:
raise ValueError(f"Unknown provider: {_provider}") raise ValueError(f"Unknown provider: {_provider}")
@@ -1888,6 +2170,33 @@ def get_history_bleed_stats(md_content: Optional[str] = None) -> dict[str, Any]:
"current": current_tokens, "current": current_tokens,
"percentage": percentage, "percentage": percentage,
}) })
elif _provider == "minimax":
limit_tokens = 204800
current_tokens = 0
with _minimax_history_lock:
for msg in _minimax_history:
content = msg.get("content", "")
if isinstance(content, str):
current_tokens += len(content)
elif isinstance(content, list):
for block in content:
if isinstance(block, dict):
text = block.get("text", "")
if isinstance(text, str):
current_tokens += len(text)
inp = block.get("input")
if isinstance(inp, dict):
import json as _json
current_tokens += len(_json.dumps(inp, ensure_ascii=False))
if md_content: current_tokens += len(md_content)
current_tokens = max(1, int(current_tokens / _CHARS_PER_TOKEN))
percentage = (current_tokens / limit_tokens) * 100 if limit_tokens > 0 else 0
return _add_bleed_derived({
"provider": "minimax",
"limit": limit_tokens,
"current": current_tokens,
"percentage": percentage,
})
return _add_bleed_derived({ return _add_bleed_derived({
"provider": _provider, "provider": _provider,
"limit": 0, "limit": 0,

View File

@@ -110,7 +110,7 @@ class AppController:
The headless controller for the Manual Slop application. The headless controller for the Manual Slop application.
Owns the application state and manages background services. Owns the application state and manages background services.
""" """
PROVIDERS: list[str] = ["gemini", "anthropic", "gemini_cli", "deepseek"] PROVIDERS: list[str] = ["gemini", "anthropic", "gemini_cli", "deepseek", "minimax"]
def __init__(self): def __init__(self):
# Initialize locks first to avoid initialization order issues # Initialize locks first to avoid initialization order issues

View File

@@ -25,7 +25,7 @@ from src import app_controller
from pydantic import BaseModel from pydantic import BaseModel
from imgui_bundle import imgui, hello_imgui, immapp, imgui_node_editor as ed from imgui_bundle import imgui, hello_imgui, immapp, imgui_node_editor as ed
PROVIDERS: list[str] = ["gemini", "anthropic", "gemini_cli", "deepseek"] PROVIDERS: list[str] = ["gemini", "anthropic", "gemini_cli", "deepseek", "minimax"]
COMMS_CLAMP_CHARS: int = 300 COMMS_CLAMP_CHARS: int = 300
def hide_tk_root() -> Tk: def hide_tk_root() -> Tk:

View File

@@ -0,0 +1,41 @@
import os
import tempfile
import unittest.mock
from unittest.mock import patch, MagicMock

from src import ai_client
def test_minimax_model_selection() -> None:
    """A valid model name is stored verbatim alongside the provider."""
    chosen = "MiniMax-M2.5"
    ai_client.set_provider("minimax", chosen)
    assert ai_client._provider == "minimax"
    assert ai_client._model == chosen
def test_minimax_default_model() -> None:
    """An unrecognised model name falls back to the MiniMax-M2.5 default."""
    bogus = "invalid-model"
    ai_client.set_provider("minimax", bogus)
    assert ai_client._model == "MiniMax-M2.5"
def test_minimax_list_models() -> None:
    """list_models("minimax") exposes the expected catalogue entries."""
    models = ai_client.list_models("minimax")
    for expected in ("MiniMax-M2.5", "MiniMax-M2.5-highspeed", "MiniMax-M2.1", "MiniMax-M2"):
        assert expected in models
def test_minimax_history_bleed_stats() -> None:
    """A fresh minimax session reports provider name and its token limit."""
    ai_client.set_provider("minimax", "MiniMax-M2.5")
    ai_client.reset_session()
    stats = ai_client.get_history_bleed_stats(md_content="Test context")
    assert (stats["provider"], stats["limit"]) == ("minimax", 204800)
def test_minimax_in_providers_list() -> None:
    """The GUI module's provider list must offer minimax."""
    from src import gui_2
    assert "minimax" in gui_2.PROVIDERS
def test_minimax_in_app_controller_providers() -> None:
    """AppController's class-level provider list must offer minimax."""
    from src import app_controller
    assert "minimax" in app_controller.AppController.PROVIDERS
def test_minimax_credentials_template() -> None:
    """The missing-credentials error message must mention the minimax section.

    The original version silently passed (asserting nothing) whenever a real
    credentials file existed. Point SLOP_CREDENTIALS — the override path the
    loader documents in its error message — at a path that cannot exist so
    _load_credentials deterministically raises FileNotFoundError, and fail
    explicitly if it does not.
    """
    missing = os.path.join(tempfile.gettempdir(), "no_such_credentials_file.toml")
    with patch.dict(os.environ, {"SLOP_CREDENTIALS": missing}):
        try:
            ai_client._load_credentials()
        except FileNotFoundError as e:
            assert "minimax" in str(e)
        else:
            raise AssertionError("expected FileNotFoundError for a missing credentials file")