This commit is contained in:
2026-02-22 12:03:07 -05:00
parent 8bf95866dc
commit 13ad7aea17
4 changed files with 5 additions and 40 deletions

View File

@@ -141,7 +141,7 @@ def build_markdown(base_dir: Path, files: list[str], screenshot_base_dir: Path,
parts.append("## Discussion History\n\n" + build_discussion_section(history))
return "\n\n---\n\n".join(parts)
def run(config: dict) -> tuple[str, Path]:
def run(config: dict) -> tuple[str, Path, list]:
namespace = config.get("project", {}).get("name")
if not namespace:
namespace = config.get("output", {}).get("namespace", "project")

View File

@@ -1,6 +1,6 @@
[ai]
provider = "anthropic"
model = "claude-sonnet-4-6"
provider = "gemini"
model = "gemini-2.5-pro"
temperature = 0.6000000238418579
max_tokens = 12000
history_trunc_limit = 8000
@@ -17,4 +17,4 @@ paths = [
"manual_slop.toml",
"C:/projects/forth/bootslop/bootslop.toml",
]
active = "C:/projects/forth/bootslop/bootslop.toml"
active = "manual_slop.toml"

View File

@@ -1,35 +0,0 @@
# gemini.py
import tomllib
from pathlib import Path
from google import genai
from google.genai import types
# Lazily-initialized module singletons. Both start as None and are created on
# first use (_ensure_client / _ensure_chat); reset_session() returns them to None.
_client = None
_chat = None
def _load_key() -> str:
with open("credentials.toml", "rb") as f:
return tomllib.load(f)["gemini"]["api_key"]
def _ensure_client():
    """Create the module-level genai client on first call; no-op afterwards."""
    global _client
    if _client is not None:
        return
    _client = genai.Client(api_key=_load_key())
def _ensure_chat():
    """Lazily create the chat session (and the client it needs) on first use."""
    global _chat
    if _chat is not None:
        return
    _ensure_client()
    _chat = _client.chats.create(model="gemini-2.0-flash")
def send(md_content: str, user_message: str) -> str:
    """Send ``user_message`` to the chat with ``md_content`` prepended as context.

    Returns the model's text reply.
    """
    _ensure_chat()
    prompt = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    return _chat.send_message(prompt).text
def reset_session():
    """Discard the cached client and chat so the next send() starts fresh."""
    global _client, _chat
    _client, _chat = None, None

View File

@@ -147,7 +147,7 @@ history = [
[discussion.discussions."docs writeup"]
git_commit = "bf2d09f3fd817d64fbf6b4aa667e2b635b6fbc0e"
last_updated = "2026-02-22T11:08:58"
last_updated = "2026-02-22T12:01:06"
history = [
"@2026-02-22T08:56:39\nUser:\nLets write extensive documentation in the same style that I used for my VEFontCache-Oodin project.\nI added it's directories to your context.",
"@2026-02-22T08:56:58\nAI:\n(No text returned)",