slop continues

2026-02-21 14:26:41 -05:00
parent 15b2bd622f
commit 672b184c86
6 changed files with 328 additions and 4469 deletions

.gitignore (vendored, 1 changed line)

@@ -1,3 +1,4 @@
 credentials.toml
 __pycache__
 uv.lock
+colorforth_bootslop_002.md

ai_client.py (new file, 110 lines)

@@ -0,0 +1,110 @@
# ai_client.py
import tomllib
from pathlib import Path

_provider: str = "gemini"
_model: str = "gemini-2.0-flash"

_gemini_client = None
_gemini_chat = None

_anthropic_client = None
_anthropic_history: list[dict] = []


def _load_credentials() -> dict:
    with open("credentials.toml", "rb") as f:
        return tomllib.load(f)


# ------------------------------------------------------------------ provider setup

def set_provider(provider: str, model: str):
    global _provider, _model
    _provider = provider
    _model = model


def reset_session():
    global _gemini_client, _gemini_chat
    global _anthropic_client, _anthropic_history
    _gemini_client = None
    _gemini_chat = None
    _anthropic_client = None
    _anthropic_history = []


# ------------------------------------------------------------------ model listing

def list_models(provider: str) -> list[str]:
    creds = _load_credentials()
    if provider == "gemini":
        return _list_gemini_models(creds["gemini"]["api_key"])
    elif provider == "anthropic":
        return _list_anthropic_models()
    return []


def _list_gemini_models(api_key: str) -> list[str]:
    from google import genai
    client = genai.Client(api_key=api_key)
    models = []
    for m in client.models.list():
        name = m.name
        if name.startswith("models/"):
            name = name[len("models/"):]
        if "gemini" in name.lower():
            models.append(name)
    return sorted(models)


def _list_anthropic_models() -> list[str]:
    import anthropic
    creds = _load_credentials()
    client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"])
    models = []
    for m in client.models.list():
        models.append(m.id)
    return sorted(models)


# ------------------------------------------------------------------ gemini

def _ensure_gemini_chat():
    global _gemini_client, _gemini_chat
    if _gemini_chat is None:
        from google import genai
        creds = _load_credentials()
        _gemini_client = genai.Client(api_key=creds["gemini"]["api_key"])
        _gemini_chat = _gemini_client.chats.create(model=_model)


def _send_gemini(md_content: str, user_message: str) -> str:
    _ensure_gemini_chat()
    full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    response = _gemini_chat.send_message(full_message)
    return response.text


# ------------------------------------------------------------------ anthropic

def _ensure_anthropic_client():
    global _anthropic_client
    if _anthropic_client is None:
        import anthropic
        creds = _load_credentials()
        _anthropic_client = anthropic.Anthropic(api_key=creds["anthropic"]["api_key"])


def _send_anthropic(md_content: str, user_message: str) -> str:
    global _anthropic_history
    _ensure_anthropic_client()
    full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    _anthropic_history.append({"role": "user", "content": full_message})
    response = _anthropic_client.messages.create(
        model=_model,
        max_tokens=8096,
        messages=_anthropic_history
    )
    reply = response.content[0].text
    _anthropic_history.append({"role": "assistant", "content": reply})
    return reply


# ------------------------------------------------------------------ unified send

def send(md_content: str, user_message: str) -> str:
    if _provider == "gemini":
        return _send_gemini(md_content, user_message)
    elif _provider == "anthropic":
        return _send_anthropic(md_content, user_message)
    raise ValueError(f"unknown provider: {_provider}")

File diff suppressed because it is too large

config.toml

@@ -1,4 +1,3 @@
-# config.toml
 [output]
 namespace = "colorforth_bootslop"
 output_dir = "."
@@ -14,16 +13,17 @@ paths = [
     "./references/kyra_in-depth.md",
     ".editorconfig",
     "GEMINI.md",
-    "CONVENTIONS.md"
+    "CONVENTIONS.md",
 ]

 [screenshots]
 base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
-paths = [
-]
+paths = []

 [discussion]
-history = [
-]
+history = []
+
+[ai]
+provider = "gemini"
+model = "gemini-2.0-flash"

gui.py (300 changed lines)

@@ -6,9 +6,10 @@ import threading
 from pathlib import Path
 from tkinter import filedialog, Tk
 import aggregate
-import gemini
+import ai_client
 CONFIG_PATH = Path("config.toml")
+PROVIDERS = ["gemini", "anthropic"]

 def load_config() -> dict:
     with open(CONFIG_PATH, "rb") as f:
@@ -31,14 +32,19 @@ class App:
         self.screenshots: list[str] = list(self.config.get("screenshots", {}).get("paths", []))
         self.history: list[str] = list(self.config.get("discussion", {}).get("history", []))
+        ai_cfg = self.config.get("ai", {})
+        self.current_provider: str = ai_cfg.get("provider", "gemini")
+        self.current_model: str = ai_cfg.get("model", "gemini-2.0-flash")
+        self.available_models: list[str] = []
         self.gemini_status = "idle"
         self.gemini_response = ""
         self.last_md = ""
         self.last_md_path: Path | None = None
         self.send_thread: threading.Thread | None = None
-        self.file_rows: list[str] = []
-        self.shot_rows: list[str] = []
+        self.models_thread: threading.Thread | None = None
+        ai_client.set_provider(self.current_provider, self.current_model)

    # ------------------------------------------------------------------ helpers
@@ -54,6 +60,10 @@ class App:
         raw = dpg.get_value("discussion_box")
         self.history = [s.strip() for s in raw.split("---") if s.strip()]
         self.config["discussion"] = {"history": self.history}
+        self.config["ai"] = {
+            "provider": self.current_provider,
+            "model": self.current_model
+        }

     def _do_generate(self) -> tuple[str, Path]:
         self._flush_to_config()
@@ -62,48 +72,76 @@ class App:
     def _update_status(self, status: str):
         self.gemini_status = status
-        dpg.set_value("gemini_status", f"Status: {status}")
+        if dpg.does_item_exist("ai_status"):
+            dpg.set_value("ai_status", f"Status: {status}")

     def _update_response(self, text: str):
         self.gemini_response = text
-        dpg.set_value("gemini_response", text)
+        if dpg.does_item_exist("gemini_response"):
+            dpg.set_value("gemini_response", text)

-    def _rebuild_files_table(self):
-        dpg.delete_item("files_table", children_only=True)
+    def _rebuild_files_list(self):
+        if dpg.does_item_exist("files_scroll"):
+            dpg.delete_item("files_scroll", children_only=True)
         for i, f in enumerate(self.files):
-            with dpg.table_row(parent="files_table"):
-                dpg.add_text(f)
-                dpg.add_button(
-                    label="x",
-                    callback=self._make_remove_file_cb(i),
-                    width=24
-                )
+            with dpg.group(horizontal=True, parent="files_scroll"):
+                dpg.add_button(
+                    label="x",
+                    width=24,
+                    callback=self._make_remove_file_cb(i)
+                )
+                dpg.add_text(f)

-    def _rebuild_shots_table(self):
-        dpg.delete_item("shots_table", children_only=True)
+    def _rebuild_shots_list(self):
+        if dpg.does_item_exist("shots_scroll"):
+            dpg.delete_item("shots_scroll", children_only=True)
         for i, s in enumerate(self.screenshots):
-            with dpg.table_row(parent="shots_table"):
-                dpg.add_text(s)
-                dpg.add_button(
-                    label="x",
-                    callback=self._make_remove_shot_cb(i),
-                    width=24
-                )
+            with dpg.group(horizontal=True, parent="shots_scroll"):
+                dpg.add_button(
+                    label="x",
+                    width=24,
+                    callback=self._make_remove_shot_cb(i)
+                )
+                dpg.add_text(s)
+
+    def _rebuild_models_list(self):
+        if not dpg.does_item_exist("model_listbox"):
+            return
+        dpg.configure_item("model_listbox", items=self.available_models)
+        if self.current_model in self.available_models:
+            dpg.set_value("model_listbox", self.current_model)
+        elif self.available_models:
+            dpg.set_value("model_listbox", self.available_models[0])
+            self.current_model = self.available_models[0]
+            ai_client.set_provider(self.current_provider, self.current_model)

     def _make_remove_file_cb(self, idx: int):
         def cb():
             if idx < len(self.files):
                 self.files.pop(idx)
-                self._rebuild_files_table()
+                self._rebuild_files_list()
         return cb

     def _make_remove_shot_cb(self, idx: int):
         def cb():
             if idx < len(self.screenshots):
                 self.screenshots.pop(idx)
-                self._rebuild_shots_table()
+                self._rebuild_shots_list()
         return cb

+    def _fetch_models(self, provider: str):
+        self._update_status("fetching models...")
+        def do_fetch():
+            try:
+                models = ai_client.list_models(provider)
+                self.available_models = models
+                self._rebuild_models_list()
+                self._update_status(f"models loaded: {len(models)}")
+            except Exception as e:
+                self._update_status(f"model fetch error: {e}")
+        self.models_thread = threading.Thread(target=do_fetch, daemon=True)
+        self.models_thread.start()

    # ---------------------------------------------------------------- callbacks

     def cb_browse_output(self):
@@ -132,7 +170,7 @@ class App:
         for p in paths:
             if p not in self.files:
                 self.files.append(p)
-        self._rebuild_files_table()
+        self._rebuild_files_list()

     def cb_add_wildcard(self):
         root = hide_tk_root()
@@ -140,7 +178,7 @@ class App:
root.destroy() root.destroy()
if d: if d:
self.files.append(str(Path(d) / "**" / "*")) self.files.append(str(Path(d) / "**" / "*"))
self._rebuild_files_table() self._rebuild_files_list()
def cb_browse_shots_base(self): def cb_browse_shots_base(self):
root = hide_tk_root() root = hide_tk_root()
@@ -159,7 +197,7 @@ class App:
         for p in paths:
             if p not in self.screenshots:
                 self.screenshots.append(p)
-        self._rebuild_shots_table()
+        self._rebuild_shots_list()

     def cb_add_excerpt(self):
         current = dpg.get_value("discussion_box")
@@ -183,7 +221,7 @@ class App:
             self._update_status(f"error: {e}")

     def cb_reset_session(self):
-        gemini.reset_session()
+        ai_client.reset_session()
         self._update_status("session reset")
         self._update_response("")
@@ -199,11 +237,11 @@ class App:
             return
         self._update_status("sending...")
-        user_msg = dpg.get_value("gemini_input")
+        user_msg = dpg.get_value("ai_input")

         def do_send():
             try:
-                response = gemini.send(self.last_md, user_msg)
+                response = ai_client.send(self.last_md, user_msg)
                 self._update_response(response)
                 self._update_status("done")
             except Exception as e:
@@ -213,117 +251,116 @@ class App:
         self.send_thread = threading.Thread(target=do_send, daemon=True)
         self.send_thread.start()

+    def cb_provider_changed(self, sender, app_data):
+        self.current_provider = app_data
+        ai_client.reset_session()
+        ai_client.set_provider(self.current_provider, self.current_model)
+        self.available_models = []
+        self._rebuild_models_list()
+        self._fetch_models(self.current_provider)
+
+    def cb_model_changed(self, sender, app_data):
+        if app_data:
+            self.current_model = app_data
+            ai_client.reset_session()
+            ai_client.set_provider(self.current_provider, self.current_model)
+            self._update_status(f"model set: {self.current_model}")
+
+    def cb_fetch_models(self):
+        self._fetch_models(self.current_provider)

    # ---------------------------------------------------------------- build ui

     def _build_ui(self):
         with dpg.window(
             label="Config",
             tag="win_config",
             pos=(8, 8),
-            width=340,
-            height=220,
+            width=400,
+            height=200,
             no_close=True
         ):
+            dpg.add_text("Namespace")
             dpg.add_input_text(
-                label="Namespace",
                 tag="namespace",
                 default_value=self.config["output"]["namespace"],
                 width=-1
             )
+            dpg.add_text("Output Dir")
             dpg.add_input_text(
-                label="Output Dir",
                 tag="output_dir",
                 default_value=self.config["output"]["output_dir"],
                 width=-1
             )
-            dpg.add_button(label="Browse Output Dir", callback=self.cb_browse_output, width=-1)
-            dpg.add_separator()
-            dpg.add_button(label="Save Config", callback=self.cb_save_config, width=-1)
+            with dpg.group(horizontal=True):
+                dpg.add_button(label="Browse Output Dir", callback=self.cb_browse_output)
+                dpg.add_button(label="Save Config", callback=self.cb_save_config)

         with dpg.window(
             label="Files",
             tag="win_files",
-            pos=(8, 236),
-            width=340,
-            height=460,
+            pos=(8, 216),
+            width=400,
+            height=500,
             no_close=True
         ):
-            dpg.add_input_text(
-                label="Base Dir",
-                tag="files_base_dir",
-                default_value=self.config["files"]["base_dir"],
-                width=-1
-            )
-            dpg.add_button(label="Browse Base Dir##files", callback=self.cb_browse_files_base, width=-1)
+            dpg.add_text("Base Dir")
+            with dpg.group(horizontal=True):
+                dpg.add_input_text(
+                    tag="files_base_dir",
+                    default_value=self.config["files"]["base_dir"],
+                    width=-220
+                )
+                dpg.add_button(label="Browse##filesbase", callback=self.cb_browse_files_base)
             dpg.add_separator()
-            dpg.add_text("Paths")
-            with dpg.table(
-                tag="files_table",
-                header_row=False,
-                resizable=True,
-                borders_innerV=True,
-                scrollY=True,
-                height=280
-            ):
-                dpg.add_table_column(label="Path", width_stretch=True)
-                dpg.add_table_column(label="", width_fixed=True, init_width_or_weight=28)
-                self._rebuild_files_table()
+            with dpg.child_window(tag="files_scroll", height=-64, border=True):
+                pass
+            self._rebuild_files_list()
             dpg.add_separator()
             with dpg.group(horizontal=True):
-                dpg.add_button(label="Add File(s)", callback=self.cb_add_files, width=-1)
-            with dpg.group(horizontal=True):
-                dpg.add_button(label="Add Wildcard", callback=self.cb_add_wildcard, width=-1)
+                dpg.add_button(label="Add File(s)", callback=self.cb_add_files)
+                dpg.add_button(label="Add Wildcard", callback=self.cb_add_wildcard)

         with dpg.window(
             label="Screenshots",
             tag="win_screenshots",
-            pos=(356, 8),
-            width=340,
-            height=460,
+            pos=(416, 8),
+            width=400,
+            height=500,
             no_close=True
         ):
-            dpg.add_input_text(
-                label="Base Dir",
-                tag="shots_base_dir",
-                default_value=self.config.get("screenshots", {}).get("base_dir", "."),
-                width=-1
-            )
-            dpg.add_button(label="Browse Base Dir##shots", callback=self.cb_browse_shots_base, width=-1)
+            dpg.add_text("Base Dir")
+            with dpg.group(horizontal=True):
+                dpg.add_input_text(
+                    tag="shots_base_dir",
+                    default_value=self.config.get("screenshots", {}).get("base_dir", "."),
+                    width=-220
+                )
+                dpg.add_button(label="Browse##shotsbase", callback=self.cb_browse_shots_base)
             dpg.add_separator()
-            dpg.add_text("Paths")
-            with dpg.table(
-                tag="shots_table",
-                header_row=False,
-                resizable=True,
-                borders_innerV=True,
-                scrollY=True,
-                height=280
-            ):
-                dpg.add_table_column(label="Path", width_stretch=True)
-                dpg.add_table_column(label="", width_fixed=True, init_width_or_weight=28)
-                self._rebuild_shots_table()
+            with dpg.child_window(tag="shots_scroll", height=-48, border=True):
+                pass
+            self._rebuild_shots_list()
             dpg.add_separator()
-            dpg.add_button(label="Add Screenshot(s)", callback=self.cb_add_shots, width=-1)
+            dpg.add_button(label="Add Screenshot(s)", callback=self.cb_add_shots)

         with dpg.window(
             label="Discussion History",
             tag="win_discussion",
-            pos=(704, 8),
-            width=340,
-            height=460,
+            pos=(824, 8),
+            width=400,
+            height=500,
             no_close=True
         ):
             dpg.add_input_text(
-                label="##discussion_box",
                 tag="discussion_box",
                 default_value="\n---\n".join(self.history),
                 multiline=True,
                 width=-1,
-                height=340
+                height=-64
             )
             dpg.add_separator()
             with dpg.group(horizontal=True):
@@ -332,31 +369,66 @@ class App:
                 dpg.add_button(label="Save", callback=self.cb_save_discussion)

         with dpg.window(
-            label="Gemini",
-            tag="win_gemini",
-            pos=(1052, 8),
-            width=340,
-            height=700,
+            label="AI Client",
+            tag="win_ai",
+            pos=(1232, 8),
+            width=420,
+            height=900,
             no_close=True
         ):
-            dpg.add_text("Status: idle", tag="gemini_status")
+            # provider
+            dpg.add_text("Provider")
+            dpg.add_combo(
+                tag="provider_combo",
+                items=PROVIDERS,
+                default_value=self.current_provider,
+                width=-1,
+                callback=self.cb_provider_changed
+            )
             dpg.add_separator()
+
+            # model
+            with dpg.group(horizontal=True):
+                dpg.add_text("Model")
+                dpg.add_button(
+                    label="Fetch Models",
+                    callback=self.cb_fetch_models
+                )
+            dpg.add_listbox(
+                tag="model_listbox",
+                items=self.available_models,
+                default_value=self.current_model,
+                width=-1,
+                num_items=6,
+                callback=self.cb_model_changed
+            )
+            dpg.add_separator()
+
+            dpg.add_text("Status: idle", tag="ai_status")
+            dpg.add_separator()
+
+            dpg.add_text("Message")
             dpg.add_input_text(
-                label="##gemini_input",
-                tag="gemini_input",
+                tag="ai_input",
                 multiline=True,
                 width=-1,
-                height=120
+                height=140
             )
             dpg.add_separator()
             with dpg.group(horizontal=True):
                 dpg.add_button(label="Gen + Send", callback=self.cb_generate_send)
                 dpg.add_button(label="MD Only", callback=self.cb_md_only)
                 dpg.add_button(label="Reset", callback=self.cb_reset_session)
             dpg.add_separator()
-            dpg.add_text("Response:")
+            dpg.add_text("Response")
             dpg.add_input_text(
-                label="##gemini_response",
                 tag="gemini_response",
                 multiline=True,
                 readonly=True,
@@ -364,31 +436,29 @@ class App:
                 height=-1
             )

-    def run(self):
-        dpg.create_context()
-        dpg.configure_app(docking=True, docking_space=True)
-        dpg.create_viewport(
-            title="manual slop",
-            width=1600,
-            height=900
-        )
-        dpg.setup_dearpygui()
-        dpg.show_viewport()
-        dpg.maximize_viewport()
-        self._build_ui()
-        while dpg.is_dearpygui_running():
-            dpg.render_dearpygui_frame()
-        dpg.destroy_context()
+def run(config: dict) -> tuple[str, Path]:
+    namespace = config["output"]["namespace"]
+    output_dir = Path(config["output"]["output_dir"]) / "md_gen"
+    base_dir = Path(config["files"]["base_dir"])
+    files = config["files"].get("paths", [])
+    screenshot_base_dir = Path(config.get("screenshots", {}).get("base_dir", "."))
+    screenshots = config.get("screenshots", {}).get("paths", [])
+    history = config.get("discussion", {}).get("history", [])
+    output_dir.mkdir(parents=True, exist_ok=True)
+    increment = find_next_increment(output_dir, namespace)
+    output_file = output_dir / f"{namespace}_{increment:03d}.md"
+    markdown = build_markdown(base_dir, files, screenshot_base_dir, screenshots, history)
+    output_file.write_text(markdown, encoding="utf-8")
+    return markdown, output_file

 def main():
-    app = App()
-    app.run()
+    with open("config.toml", "rb") as f:
+        import tomllib
+        config = tomllib.load(f)
+    markdown, output_file = run(config)
+    print(f"Written: {output_file}")

 if __name__ == "__main__":
     main()
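
A note on the _make_remove_file_cb / _make_remove_shot_cb factories kept above: building each row's "x" callback through a factory sidesteps Python's late-binding pitfall, where callbacks created directly in the loop would all see the final loop index. A minimal illustration (names are made up):

callbacks = []
for i in range(3):
    callbacks.append(lambda: i)          # every lambda closes over the same i
print([cb() for cb in callbacks])        # [2, 2, 2]

def make_cb(idx):                        # factory, as _make_remove_file_cb does
    def cb():
        return idx                       # idx is bound per call to make_cb
    return cb

callbacks = [make_cb(i) for i in range(3)]
print([cb() for cb in callbacks])        # [0, 1, 2]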

pyproject.toml

@@ -1,3 +1,4 @@
+# pyproject.toml
 [project]
 name = "manual_slop"
 version = "0.1.0"
@@ -5,6 +6,6 @@ requires-python = ">=3.11"
 dependencies = [
     "dearpygui",
     "google-genai",
+    "anthropic",
     "tomli-w"
 ]