feat(headless): Implement Phase 2 - Core API Routes & Authentication

This commit is contained in:
2026-02-25 13:09:22 -05:00
parent d5f056c3d1
commit 4e0bcd5188
6 changed files with 152 additions and 28 deletions

View File

@@ -1,27 +1,27 @@
# Implementation Plan: Manual Slop Headless Backend
## Phase 1: Project Setup & Headless Scaffold
## Phase 1: Project Setup & Headless Scaffold [checkpoint: d5f056c]
- [x] Task: Update dependencies (02fc847)
- [x] Add `fastapi` and `uvicorn` to `pyproject.toml` (and sync `requirements.txt` via `uv`).
- [x] Task: Implement headless startup
- [x] Modify `gui_2.py` (or create `headless.py`) to parse a `--headless` CLI flag.
- [x] Update config parsing in `config.toml` to support headless configuration sections.
- [x] Bypass Dear PyGui initialization if headless mode is active.
- [~] Task: Create foundational API application
- [ ] Set up the core FastAPI application instance.
- [ ] Implement `/health` and `/status` endpoints for Docker lifecycle checks.
- [ ] Task: Conductor - User Manual Verification 'Project Setup & Headless Scaffold' (Protocol in workflow.md)
- [x] Task: Create foundational API application
- [x] Set up the core FastAPI application instance.
- [x] Implement `/health` and `/status` endpoints for Docker lifecycle checks.
- [x] Task: Conductor - User Manual Verification 'Project Setup & Headless Scaffold' (Protocol in workflow.md) d5f056c
## Phase 2: Core API Routes & Authentication
- [ ] Task: Implement API Key Security
- [ ] Create a dependency/middleware in FastAPI to validate `X-API-KEY`.
- [ ] Configure the API key validator to read from environment variables or `manual_slop.toml` (supporting Unraid template secrets).
- [ ] Add tests for authorized and unauthorized API access.
- [ ] Task: Implement AI Generation Endpoint
- [ ] Create a `/api/v1/generate` POST endpoint.
- [ ] Map request payloads to `ai_client.py` unified wrappers.
- [ ] Return standard JSON responses with the generated text and token metrics.
- [ ] Task: Conductor - User Manual Verification 'Core API Routes & Authentication' (Protocol in workflow.md)
- [x] Task: Implement API Key Security
- [x] Create a dependency/middleware in FastAPI to validate `X-API-KEY`.
- [x] Configure the API key validator to read from environment variables or `manual_slop.toml` (supporting Unraid template secrets).
- [x] Add tests for authorized and unauthorized API access.
- [x] Task: Implement AI Generation Endpoint
- [x] Create a `/api/v1/generate` POST endpoint.
- [x] Map request payloads to `ai_client.py` unified wrappers.
- [x] Return standard JSON responses with the generated text and token metrics.
- [~] Task: Conductor - User Manual Verification 'Core API Routes & Authentication' (Protocol in workflow.md)
## Phase 3: Remote Tool Confirmation Mechanism
- [ ] Task: Refactor Execution Engine for Async Wait

View File

@@ -7,9 +7,9 @@ history_trunc_limit = 8000
system_prompt = ""
[theme]
palette = "Gold"
font_size = 14.0
scale = 1.2000000476837158
palette = "ImGui Dark"
font_size = 16.0
scale = 1.0
font_path = ""
[projects]

101
gui_2.py
View File

@@ -22,7 +22,9 @@ import api_hooks
import mcp_client
from performance_monitor import PerformanceMonitor
from fastapi import FastAPI
from fastapi import FastAPI, Depends, HTTPException, Security
from fastapi.security.api_key import APIKeyHeader
from pydantic import BaseModel
from imgui_bundle import imgui, hello_imgui, immapp
CONFIG_PATH = Path("config.toml")
@@ -306,12 +308,32 @@ class App:
def create_api(self) -> FastAPI:
api = FastAPI(title="Manual Slop Headless API")
class GenerateRequest(BaseModel):
    """Request payload for the /api/v1/generate (and /api/v1/stream) endpoints."""
    # Prompt text forwarded to the AI client; rejected with 400 if blank.
    prompt: str
    # When True, the user prompt and the AI reply are queued into discussion history.
    auto_add_history: bool = True
    # Optional sampling overrides; fall back to the GUI-configured defaults when None.
    temperature: float | None = None
    max_tokens: int | None = None
API_KEY_NAME = "X-API-KEY"
# auto_error=False so a missing header reaches get_api_key as None instead of an
# automatic 403 — this lets us treat "no key configured" as auth-disabled.
api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)

async def get_api_key(header_key: str = Depends(api_key_header)):
    """Validate the X-API-KEY header against the configured secret.

    Resolution order: the SLOP_API_KEY environment variable, then the
    [headless].api_key entry in the TOML config. When neither is set,
    authentication is disabled and None is returned.

    Raises:
        HTTPException: 403 when a key is configured and the header is
            absent or does not match.
    """
    import secrets  # local import: keeps this security helper self-contained

    headless_cfg = self.config.get("headless", {})
    config_key = headless_cfg.get("api_key", "").strip()
    env_key = os.environ.get("SLOP_API_KEY", "").strip()
    target_key = env_key or config_key
    if not target_key:
        # No secret configured anywhere -> endpoints are intentionally open.
        return None
    # compare_digest gives a constant-time comparison, preventing timing
    # attacks on the key; guard against a missing header (None) first.
    if header_key is not None and secrets.compare_digest(header_key, target_key):
        return header_key
    raise HTTPException(status_code=403, detail="Could not validate API Key")
@api.get("/health")
def health():
    # Liveness probe for Docker lifecycle checks; deliberately unauthenticated
    # (no get_api_key dependency) so orchestrators can always poll it.
    return {"status": "ok"}
@api.get("/status")
@api.get("/status", dependencies=[Depends(get_api_key)])
def status():
return {
"provider": self.current_provider,
@@ -321,6 +343,81 @@ class App:
"session_usage": self.session_usage
}
@api.post("/api/v1/generate", dependencies=[Depends(get_api_key)])
def generate(req: GenerateRequest):
    """Run one synchronous AI generation round-trip.

    Aggregates the current project context, forwards the prompt to the
    active provider via ai_client, optionally queues the exchange into
    pending discussion history, and returns the reply plus usage metadata.

    Raises:
        HTTPException: 400 for an empty prompt, 500 for context or
            internal failures, 502 for upstream provider errors.
    """
    if not req.prompt.strip():
        raise HTTPException(status_code=400, detail="Prompt cannot be empty")
    # Serialize with GUI-triggered sends: only one generation in flight at a time.
    with self._send_thread_lock:
        start_time = time.time()
        try:
            # Refresh context before sending
            md, path, file_items, stable_md, disc_text = self._do_generate()
            self.last_md = md
            self.last_md_path = path
            self.last_file_items = file_items
        except Exception as e:
            # Chain the cause so the original traceback survives into logs.
            raise HTTPException(status_code=500, detail=f"Context aggregation failure: {e}") from e
        user_msg = req.prompt
        base_dir = self.ui_files_base_dir
        # Combine global + project system prompts, dropping whichever is blank.
        csp = filter(bool, [self.ui_global_system_prompt.strip(), self.ui_project_system_prompt.strip()])
        ai_client.set_custom_system_prompt("\n\n".join(csp))
        # Override parameters if provided in request, otherwise use GUI defaults
        temp = req.temperature if req.temperature is not None else self.temperature
        tokens = req.max_tokens if req.max_tokens is not None else self.max_tokens
        ai_client.set_model_params(temp, tokens, self.history_trunc_limit)
        ai_client.set_agent_tools(self.ui_agent_tools)
        if req.auto_add_history:
            with self._pending_history_adds_lock:
                self._pending_history_adds.append({
                    "role": "User",
                    "content": user_msg,
                    "collapsed": False,
                    "ts": project_manager.now_ts()
                })
        try:
            resp = ai_client.send(stable_md, user_msg, base_dir, self.last_file_items, disc_text)
            if req.auto_add_history:
                with self._pending_history_adds_lock:
                    self._pending_history_adds.append({
                        "role": "AI",
                        "content": resp,
                        "collapsed": False,
                        "ts": project_manager.now_ts()
                    })
            # Ensure metrics are updated for the response
            self._recalculate_session_usage()
            duration = time.time() - start_time
            return {
                "text": resp,
                "metadata": {
                    "provider": self.current_provider,
                    "model": self.current_model,
                    "duration_sec": round(duration, 3),
                    "timestamp": project_manager.now_ts()
                },
                "usage": self.session_usage
            }
        except ProviderError as e:
            # Specific error handling for vendor issues (4xx/5xx from Gemini/Anthropic)
            raise HTTPException(status_code=502, detail=f"AI Provider Error: {e.ui_message()}") from e
        except Exception as e:
            # Generic internal error
            raise HTTPException(status_code=500, detail=f"In-flight AI request failure: {e}") from e
@api.post("/api/v1/stream", dependencies=[Depends(get_api_key)])
async def stream(req: GenerateRequest):
    # Streaming implementation would require ai_client to support yield-based responses.
    # Currently added as a placeholder to satisfy spec requirements.
    # NOTE(review): unconditionally returns 501 until ai_client grows a streaming API.
    raise HTTPException(status_code=501, detail="Streaming endpoint (/api/v1/stream) is not yet supported in this version.")
return api
# ---------------------------------------------------------------- project loading

View File

@@ -8,5 +8,5 @@ active = "main"
[discussions.main]
git_commit = ""
last_updated = "2026-02-25T13:02:29"
last_updated = "2026-02-25T13:08:45"
history = []

View File

@@ -9,7 +9,7 @@ auto_add = true
[discussions.main]
git_commit = ""
last_updated = "2026-02-25T13:02:29"
last_updated = "2026-02-25T13:08:45"
history = [
"@1772042443.1159382\nUser:\nStress test entry 0 Stress test entry 0 Stress test entry 0 Stress test entry 0 Stress test entry 0",
"@1772042443.1159382\nUser:\nStress test entry 1 Stress test entry 1 Stress test entry 1 Stress test entry 1 Stress test entry 1",

View File

@@ -23,13 +23,40 @@ class TestHeadlessAPI(unittest.TestCase):
self.assertEqual(response.status_code, 200)
self.assertEqual(response.json(), {"status": "ok"})
def test_status_endpoint(self):
    """The /status endpoint reports provider/model/project information."""
    resp = self.client.get("/status")
    self.assertEqual(resp.status_code, 200)
    body = resp.json()
    for key in ("provider", "model", "active_project"):
        self.assertIn(key, body)
def test_status_endpoint_unauthorized(self):
    """With an api_key configured, a keyless /status request is rejected."""
    cfg_override = {"headless": {"api_key": "some-required-key"}}
    with patch.dict(self.app_instance.config, cfg_override):
        resp = self.client.get("/status")
    self.assertEqual(resp.status_code, 403)
def test_status_endpoint_authorized(self):
    """A request presenting the configured X-API-KEY is accepted."""
    key = "test-secret-key"
    with patch.dict(self.app_instance.config, {"headless": {"api_key": key}}):
        resp = self.client.get("/status", headers={"X-API-KEY": key})
    self.assertEqual(resp.status_code, 200)
def test_generate_endpoint(self):
    """A mocked generation round-trip returns text, metadata, and usage."""
    fake_log_entry = {
        "kind": "response",
        "payload": {
            "usage": {"input_tokens": 10, "output_tokens": 5}
        },
    }
    # Mock ai_client.send and get_comms_log
    with patch('gui_2.ai_client.send') as mock_send, \
            patch('gui_2.ai_client.get_comms_log') as mock_log:
        mock_send.return_value = "Hello from Mock AI"
        mock_log.return_value = [fake_log_entry]
        resp = self.client.post("/api/v1/generate", json={"prompt": "Hello AI"})
    self.assertEqual(resp.status_code, 200)
    body = resp.json()
    self.assertEqual(body["text"], "Hello from Mock AI")
    self.assertIn("metadata", body)
    self.assertEqual(body["usage"]["input_tokens"], 10)
# Allow running this test module directly (discovers and runs all TestCase methods).
if __name__ == "__main__":
    unittest.main()