slop continues
This commit is contained in:
1
.gitignore
vendored
1
.gitignore
vendored
@@ -1,3 +1,4 @@
|
||||
credentials.toml
|
||||
__pycache__
|
||||
uv.lock
|
||||
colorforth_bootslop_002.md
|
||||
|
||||
110
ai_client.py
Normal file
110
ai_client.py
Normal file
@@ -0,0 +1,110 @@
|
||||
# ai_client.py
|
||||
import tomllib
|
||||
from pathlib import Path
|
||||
|
||||
# Currently selected backend and model; changed via set_provider().
_provider: str = "gemini"
_model: str = "gemini-2.0-flash"

# Lazily created Gemini client/chat session (see _ensure_gemini_chat()).
_gemini_client = None
_gemini_chat = None

# Lazily created Anthropic client; the Messages API is stateless, so the
# conversation is tracked manually in _anthropic_history.
_anthropic_client = None
_anthropic_history: list[dict] = []
|
||||
|
||||
def _load_credentials(path: str = "credentials.toml") -> dict:
    """Read API credentials from *path* (a TOML file in the working dir).

    Returns the parsed document, e.g. {"gemini": {"api_key": ...},
    "anthropic": {"api_key": ...}}.

    Raises FileNotFoundError if the file is missing and
    tomllib.TOMLDecodeError if it is malformed.
    """
    with open(path, "rb") as f:
        return tomllib.load(f)
|
||||
|
||||
# ------------------------------------------------------------------ provider setup
|
||||
|
||||
def set_provider(provider: str, model: str):
    """Select which backend and model subsequent send() calls will use."""
    global _provider, _model
    _provider, _model = provider, model
|
||||
|
||||
def reset_session():
    """Drop all cached clients and conversation state for both providers."""
    global _gemini_client, _gemini_chat
    global _anthropic_client, _anthropic_history
    _gemini_client = _gemini_chat = _anthropic_client = None
    _anthropic_history = []
|
||||
|
||||
# ------------------------------------------------------------------ model listing
|
||||
|
||||
def list_models(provider: str) -> list[str]:
    """Return the sorted model names/ids available for *provider*.

    Known providers are "gemini" and "anthropic"; any other value yields
    an empty list.  Credentials are only read when actually needed:
    _list_anthropic_models() loads its own, so loading them up front here
    was wasted work (and a needless failure mode) for the non-gemini paths.
    """
    if provider == "gemini":
        creds = _load_credentials()
        return _list_gemini_models(creds["gemini"]["api_key"])
    if provider == "anthropic":
        return _list_anthropic_models()
    return []
|
||||
|
||||
def _list_gemini_models(api_key: str) -> list[str]:
    """Return the sorted names of Gemini models visible to *api_key*.

    The API reports names as "models/<name>"; the prefix is stripped and
    only names containing "gemini" are kept.
    """
    from google import genai

    client = genai.Client(api_key=api_key)
    stripped = (m.name.removeprefix("models/") for m in client.models.list())
    return sorted(name for name in stripped if "gemini" in name.lower())
|
||||
|
||||
def _list_anthropic_models() -> list[str]:
    """Return the sorted ids of the Anthropic models the account can use."""
    import anthropic

    api_key = _load_credentials()["anthropic"]["api_key"]
    client = anthropic.Anthropic(api_key=api_key)
    return sorted(m.id for m in client.models.list())
|
||||
|
||||
# ------------------------------------------------------------------ gemini
|
||||
|
||||
def _ensure_gemini_chat():
    """Lazily create the Gemini client and chat session on first use.

    The chat object carries the conversation state, so it is created once
    and reused until reset_session() clears it.
    """
    global _gemini_client, _gemini_chat
    if _gemini_chat is not None:
        return
    from google import genai

    api_key = _load_credentials()["gemini"]["api_key"]
    _gemini_client = genai.Client(api_key=api_key)
    _gemini_chat = _gemini_client.chats.create(model=_model)
|
||||
|
||||
def _send_gemini(md_content: str, user_message: str) -> str:
    """Send one turn to the Gemini chat session and return its reply text.

    *md_content* is wrapped in a <context> tag and prepended to the user
    message; the chat object itself keeps the multi-turn history.
    """
    _ensure_gemini_chat()
    payload = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    return _gemini_chat.send_message(payload).text
|
||||
|
||||
# ------------------------------------------------------------------ anthropic
|
||||
|
||||
def _ensure_anthropic_client():
    """Lazily create the Anthropic client on first use."""
    global _anthropic_client
    if _anthropic_client is not None:
        return
    import anthropic

    api_key = _load_credentials()["anthropic"]["api_key"]
    _anthropic_client = anthropic.Anthropic(api_key=api_key)
|
||||
|
||||
def _send_anthropic(md_content: str, user_message: str) -> str:
    """Send one turn to the Anthropic Messages API and return the reply text.

    *md_content* is wrapped in a <context> tag and prepended to the user
    message.  The Messages API is stateless, so the full conversation is
    passed on every call via _anthropic_history.

    Fix: the history is only committed AFTER the API call succeeds.  The
    previous version appended the user message first, so a failed request
    left a dangling "user" turn in the history and every subsequent call
    then sent two consecutive user messages, which the API rejects.
    """
    _ensure_anthropic_client()
    full_message = f"<context>\n{md_content}\n</context>\n\n{user_message}"
    # Build the request transcript without mutating shared state yet.
    messages = _anthropic_history + [{"role": "user", "content": full_message}]
    response = _anthropic_client.messages.create(
        model=_model,
        max_tokens=8096,  # NOTE(review): 8096 looks like a typo for 8192 — left as-is
        messages=messages
    )
    reply = response.content[0].text
    # Request succeeded: commit both turns to the shared history.
    _anthropic_history.append({"role": "user", "content": full_message})
    _anthropic_history.append({"role": "assistant", "content": reply})
    return reply
|
||||
|
||||
# ------------------------------------------------------------------ unified send
|
||||
|
||||
def send(md_content: str, user_message: str) -> str:
    """Route one message to the currently selected provider and return the reply.

    Raises ValueError if the configured provider is unknown.
    """
    dispatch = {
        "gemini": _send_gemini,
        "anthropic": _send_anthropic,
    }
    try:
        handler = dispatch[_provider]
    except KeyError:
        raise ValueError(f"unknown provider: {_provider}") from None
    return handler(md_content, user_message)
|
||||
File diff suppressed because it is too large
Load Diff
30
config.toml
30
config.toml
@@ -1,4 +1,3 @@
|
||||
# config.toml
|
||||
[output]
|
||||
namespace = "colorforth_bootslop"
|
||||
output_dir = "."
|
||||
@@ -6,24 +5,25 @@ output_dir = "."
|
||||
[files]
|
||||
base_dir = "C:/projects/forth/bootslop"
|
||||
paths = [
|
||||
"./attempt_1/*",
|
||||
"./scripts/*",
|
||||
"./references/Architectural_Consolidation.md",
|
||||
"./references/neokineogfx_in-depth.md",
|
||||
"./references/blog_in-depth.md",
|
||||
"./references/kyra_in-depth.md",
|
||||
".editorconfig",
|
||||
"GEMINI.md",
|
||||
"CONVENTIONS.md"
|
||||
"./attempt_1/*",
|
||||
"./scripts/*",
|
||||
"./references/Architectural_Consolidation.md",
|
||||
"./references/neokineogfx_in-depth.md",
|
||||
"./references/blog_in-depth.md",
|
||||
"./references/kyra_in-depth.md",
|
||||
".editorconfig",
|
||||
"GEMINI.md",
|
||||
"CONVENTIONS.md",
|
||||
]
|
||||
|
||||
[screenshots]
|
||||
base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
|
||||
paths = [
|
||||
]
|
||||
|
||||
paths = []
|
||||
|
||||
[discussion]
|
||||
history = [
|
||||
]
|
||||
history = []
|
||||
|
||||
[ai]
|
||||
provider = "gemini"
|
||||
model = "gemini-2.0-flash"
|
||||
|
||||
|
||||
330
gui.py
330
gui.py
@@ -6,9 +6,10 @@ import threading
|
||||
from pathlib import Path
|
||||
from tkinter import filedialog, Tk
|
||||
import aggregate
|
||||
import gemini
|
||||
import ai_client
|
||||
|
||||
CONFIG_PATH = Path("config.toml")
|
||||
PROVIDERS = ["gemini", "anthropic"]
|
||||
|
||||
def load_config() -> dict:
|
||||
with open(CONFIG_PATH, "rb") as f:
|
||||
@@ -31,14 +32,19 @@ class App:
|
||||
self.screenshots: list[str] = list(self.config.get("screenshots", {}).get("paths", []))
|
||||
self.history: list[str] = list(self.config.get("discussion", {}).get("history", []))
|
||||
|
||||
ai_cfg = self.config.get("ai", {})
|
||||
self.current_provider: str = ai_cfg.get("provider", "gemini")
|
||||
self.current_model: str = ai_cfg.get("model", "gemini-2.0-flash")
|
||||
self.available_models: list[str] = []
|
||||
|
||||
self.gemini_status = "idle"
|
||||
self.gemini_response = ""
|
||||
self.last_md = ""
|
||||
self.last_md_path: Path | None = None
|
||||
self.send_thread: threading.Thread | None = None
|
||||
self.models_thread: threading.Thread | None = None
|
||||
|
||||
self.file_rows: list[str] = []
|
||||
self.shot_rows: list[str] = []
|
||||
ai_client.set_provider(self.current_provider, self.current_model)
|
||||
|
||||
# ------------------------------------------------------------------ helpers
|
||||
|
||||
@@ -54,6 +60,10 @@ class App:
|
||||
raw = dpg.get_value("discussion_box")
|
||||
self.history = [s.strip() for s in raw.split("---") if s.strip()]
|
||||
self.config["discussion"] = {"history": self.history}
|
||||
self.config["ai"] = {
|
||||
"provider": self.current_provider,
|
||||
"model": self.current_model
|
||||
}
|
||||
|
||||
def _do_generate(self) -> tuple[str, Path]:
|
||||
self._flush_to_config()
|
||||
@@ -62,48 +72,76 @@ class App:
|
||||
|
||||
def _update_status(self, status: str):
|
||||
self.gemini_status = status
|
||||
dpg.set_value("gemini_status", f"Status: {status}")
|
||||
if dpg.does_item_exist("ai_status"):
|
||||
dpg.set_value("ai_status", f"Status: {status}")
|
||||
|
||||
def _update_response(self, text: str):
|
||||
self.gemini_response = text
|
||||
dpg.set_value("gemini_response", text)
|
||||
if dpg.does_item_exist("gemini_response"):
|
||||
dpg.set_value("gemini_response", text)
|
||||
|
||||
def _rebuild_files_table(self):
|
||||
dpg.delete_item("files_table", children_only=True)
|
||||
for i, f in enumerate(self.files):
|
||||
with dpg.table_row(parent="files_table"):
|
||||
dpg.add_text(f)
|
||||
dpg.add_button(
|
||||
label="x",
|
||||
callback=self._make_remove_file_cb(i),
|
||||
width=24
|
||||
)
|
||||
def _rebuild_files_list(self):
|
||||
if dpg.does_item_exist("files_scroll"):
|
||||
dpg.delete_item("files_scroll", children_only=True)
|
||||
for i, f in enumerate(self.files):
|
||||
with dpg.group(horizontal=True, parent="files_scroll"):
|
||||
dpg.add_button(
|
||||
label="x",
|
||||
width=24,
|
||||
callback=self._make_remove_file_cb(i)
|
||||
)
|
||||
dpg.add_text(f)
|
||||
|
||||
def _rebuild_shots_table(self):
|
||||
dpg.delete_item("shots_table", children_only=True)
|
||||
for i, s in enumerate(self.screenshots):
|
||||
with dpg.table_row(parent="shots_table"):
|
||||
dpg.add_text(s)
|
||||
dpg.add_button(
|
||||
label="x",
|
||||
callback=self._make_remove_shot_cb(i),
|
||||
width=24
|
||||
)
|
||||
def _rebuild_shots_list(self):
|
||||
if dpg.does_item_exist("shots_scroll"):
|
||||
dpg.delete_item("shots_scroll", children_only=True)
|
||||
for i, s in enumerate(self.screenshots):
|
||||
with dpg.group(horizontal=True, parent="shots_scroll"):
|
||||
dpg.add_button(
|
||||
label="x",
|
||||
width=24,
|
||||
callback=self._make_remove_shot_cb(i)
|
||||
)
|
||||
dpg.add_text(s)
|
||||
|
||||
def _rebuild_models_list(self):
|
||||
if not dpg.does_item_exist("model_listbox"):
|
||||
return
|
||||
dpg.configure_item("model_listbox", items=self.available_models)
|
||||
if self.current_model in self.available_models:
|
||||
dpg.set_value("model_listbox", self.current_model)
|
||||
elif self.available_models:
|
||||
dpg.set_value("model_listbox", self.available_models[0])
|
||||
self.current_model = self.available_models[0]
|
||||
ai_client.set_provider(self.current_provider, self.current_model)
|
||||
|
||||
def _make_remove_file_cb(self, idx: int):
|
||||
def cb():
|
||||
if idx < len(self.files):
|
||||
self.files.pop(idx)
|
||||
self._rebuild_files_table()
|
||||
self._rebuild_files_list()
|
||||
return cb
|
||||
|
||||
def _make_remove_shot_cb(self, idx: int):
|
||||
def cb():
|
||||
if idx < len(self.screenshots):
|
||||
self.screenshots.pop(idx)
|
||||
self._rebuild_shots_table()
|
||||
self._rebuild_shots_list()
|
||||
return cb
|
||||
|
||||
def _fetch_models(self, provider: str):
|
||||
self._update_status("fetching models...")
|
||||
def do_fetch():
|
||||
try:
|
||||
models = ai_client.list_models(provider)
|
||||
self.available_models = models
|
||||
self._rebuild_models_list()
|
||||
self._update_status(f"models loaded: {len(models)}")
|
||||
except Exception as e:
|
||||
self._update_status(f"model fetch error: {e}")
|
||||
self.models_thread = threading.Thread(target=do_fetch, daemon=True)
|
||||
self.models_thread.start()
|
||||
|
||||
# ---------------------------------------------------------------- callbacks
|
||||
|
||||
def cb_browse_output(self):
|
||||
@@ -132,7 +170,7 @@ class App:
|
||||
for p in paths:
|
||||
if p not in self.files:
|
||||
self.files.append(p)
|
||||
self._rebuild_files_table()
|
||||
self._rebuild_files_list()
|
||||
|
||||
def cb_add_wildcard(self):
|
||||
root = hide_tk_root()
|
||||
@@ -140,7 +178,7 @@ class App:
|
||||
root.destroy()
|
||||
if d:
|
||||
self.files.append(str(Path(d) / "**" / "*"))
|
||||
self._rebuild_files_table()
|
||||
self._rebuild_files_list()
|
||||
|
||||
def cb_browse_shots_base(self):
|
||||
root = hide_tk_root()
|
||||
@@ -159,7 +197,7 @@ class App:
|
||||
for p in paths:
|
||||
if p not in self.screenshots:
|
||||
self.screenshots.append(p)
|
||||
self._rebuild_shots_table()
|
||||
self._rebuild_shots_list()
|
||||
|
||||
def cb_add_excerpt(self):
|
||||
current = dpg.get_value("discussion_box")
|
||||
@@ -183,7 +221,7 @@ class App:
|
||||
self._update_status(f"error: {e}")
|
||||
|
||||
def cb_reset_session(self):
|
||||
gemini.reset_session()
|
||||
ai_client.reset_session()
|
||||
self._update_status("session reset")
|
||||
self._update_response("")
|
||||
|
||||
@@ -199,11 +237,11 @@ class App:
|
||||
return
|
||||
|
||||
self._update_status("sending...")
|
||||
user_msg = dpg.get_value("gemini_input")
|
||||
user_msg = dpg.get_value("ai_input")
|
||||
|
||||
def do_send():
|
||||
try:
|
||||
response = gemini.send(self.last_md, user_msg)
|
||||
response = ai_client.send(self.last_md, user_msg)
|
||||
self._update_response(response)
|
||||
self._update_status("done")
|
||||
except Exception as e:
|
||||
@@ -213,117 +251,116 @@ class App:
|
||||
self.send_thread = threading.Thread(target=do_send, daemon=True)
|
||||
self.send_thread.start()
|
||||
|
||||
def cb_provider_changed(self, sender, app_data):
|
||||
self.current_provider = app_data
|
||||
ai_client.reset_session()
|
||||
ai_client.set_provider(self.current_provider, self.current_model)
|
||||
self.available_models = []
|
||||
self._rebuild_models_list()
|
||||
self._fetch_models(self.current_provider)
|
||||
|
||||
def cb_model_changed(self, sender, app_data):
|
||||
if app_data:
|
||||
self.current_model = app_data
|
||||
ai_client.reset_session()
|
||||
ai_client.set_provider(self.current_provider, self.current_model)
|
||||
self._update_status(f"model set: {self.current_model}")
|
||||
|
||||
def cb_fetch_models(self):
|
||||
self._fetch_models(self.current_provider)
|
||||
|
||||
# ---------------------------------------------------------------- build ui
|
||||
|
||||
def _build_ui(self):
|
||||
|
||||
with dpg.window(
|
||||
label="Config",
|
||||
tag="win_config",
|
||||
pos=(8, 8),
|
||||
width=340,
|
||||
height=220,
|
||||
width=400,
|
||||
height=200,
|
||||
no_close=True
|
||||
):
|
||||
dpg.add_text("Namespace")
|
||||
dpg.add_input_text(
|
||||
label="Namespace",
|
||||
tag="namespace",
|
||||
default_value=self.config["output"]["namespace"],
|
||||
width=-1
|
||||
)
|
||||
dpg.add_text("Output Dir")
|
||||
dpg.add_input_text(
|
||||
label="Output Dir",
|
||||
tag="output_dir",
|
||||
default_value=self.config["output"]["output_dir"],
|
||||
width=-1
|
||||
)
|
||||
dpg.add_button(label="Browse Output Dir", callback=self.cb_browse_output, width=-1)
|
||||
dpg.add_separator()
|
||||
dpg.add_button(label="Save Config", callback=self.cb_save_config, width=-1)
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_button(label="Browse Output Dir", callback=self.cb_browse_output)
|
||||
dpg.add_button(label="Save Config", callback=self.cb_save_config)
|
||||
|
||||
with dpg.window(
|
||||
label="Files",
|
||||
tag="win_files",
|
||||
pos=(8, 236),
|
||||
width=340,
|
||||
height=460,
|
||||
pos=(8, 216),
|
||||
width=400,
|
||||
height=500,
|
||||
no_close=True
|
||||
):
|
||||
dpg.add_input_text(
|
||||
label="Base Dir",
|
||||
tag="files_base_dir",
|
||||
default_value=self.config["files"]["base_dir"],
|
||||
width=-1
|
||||
)
|
||||
dpg.add_button(label="Browse Base Dir##files", callback=self.cb_browse_files_base, width=-1)
|
||||
dpg.add_text("Base Dir")
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_input_text(
|
||||
tag="files_base_dir",
|
||||
default_value=self.config["files"]["base_dir"],
|
||||
width=-220
|
||||
)
|
||||
dpg.add_button(label="Browse##filesbase", callback=self.cb_browse_files_base)
|
||||
dpg.add_separator()
|
||||
|
||||
with dpg.table(
|
||||
tag="files_table",
|
||||
header_row=False,
|
||||
resizable=True,
|
||||
borders_innerV=True,
|
||||
scrollY=True,
|
||||
height=280
|
||||
):
|
||||
dpg.add_table_column(label="Path", width_stretch=True)
|
||||
dpg.add_table_column(label="", width_fixed=True, init_width_or_weight=28)
|
||||
|
||||
self._rebuild_files_table()
|
||||
|
||||
dpg.add_text("Paths")
|
||||
with dpg.child_window(tag="files_scroll", height=-64, border=True):
|
||||
pass
|
||||
self._rebuild_files_list()
|
||||
dpg.add_separator()
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_button(label="Add File(s)", callback=self.cb_add_files, width=-1)
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_button(label="Add Wildcard", callback=self.cb_add_wildcard, width=-1)
|
||||
dpg.add_button(label="Add File(s)", callback=self.cb_add_files)
|
||||
dpg.add_button(label="Add Wildcard", callback=self.cb_add_wildcard)
|
||||
|
||||
with dpg.window(
|
||||
label="Screenshots",
|
||||
tag="win_screenshots",
|
||||
pos=(356, 8),
|
||||
width=340,
|
||||
height=460,
|
||||
pos=(416, 8),
|
||||
width=400,
|
||||
height=500,
|
||||
no_close=True
|
||||
):
|
||||
dpg.add_input_text(
|
||||
label="Base Dir",
|
||||
tag="shots_base_dir",
|
||||
default_value=self.config.get("screenshots", {}).get("base_dir", "."),
|
||||
width=-1
|
||||
)
|
||||
dpg.add_button(label="Browse Base Dir##shots", callback=self.cb_browse_shots_base, width=-1)
|
||||
dpg.add_text("Base Dir")
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_input_text(
|
||||
tag="shots_base_dir",
|
||||
default_value=self.config.get("screenshots", {}).get("base_dir", "."),
|
||||
width=-220
|
||||
)
|
||||
dpg.add_button(label="Browse##shotsbase", callback=self.cb_browse_shots_base)
|
||||
dpg.add_separator()
|
||||
|
||||
with dpg.table(
|
||||
tag="shots_table",
|
||||
header_row=False,
|
||||
resizable=True,
|
||||
borders_innerV=True,
|
||||
scrollY=True,
|
||||
height=280
|
||||
):
|
||||
dpg.add_table_column(label="Path", width_stretch=True)
|
||||
dpg.add_table_column(label="", width_fixed=True, init_width_or_weight=28)
|
||||
|
||||
self._rebuild_shots_table()
|
||||
|
||||
dpg.add_text("Paths")
|
||||
with dpg.child_window(tag="shots_scroll", height=-48, border=True):
|
||||
pass
|
||||
self._rebuild_shots_list()
|
||||
dpg.add_separator()
|
||||
dpg.add_button(label="Add Screenshot(s)", callback=self.cb_add_shots, width=-1)
|
||||
dpg.add_button(label="Add Screenshot(s)", callback=self.cb_add_shots)
|
||||
|
||||
with dpg.window(
|
||||
label="Discussion History",
|
||||
tag="win_discussion",
|
||||
pos=(704, 8),
|
||||
width=340,
|
||||
height=460,
|
||||
pos=(824, 8),
|
||||
width=400,
|
||||
height=500,
|
||||
no_close=True
|
||||
):
|
||||
dpg.add_input_text(
|
||||
label="##discussion_box",
|
||||
tag="discussion_box",
|
||||
default_value="\n---\n".join(self.history),
|
||||
multiline=True,
|
||||
width=-1,
|
||||
height=340
|
||||
height=-64
|
||||
)
|
||||
dpg.add_separator()
|
||||
with dpg.group(horizontal=True):
|
||||
@@ -332,31 +369,66 @@ class App:
|
||||
dpg.add_button(label="Save", callback=self.cb_save_discussion)
|
||||
|
||||
with dpg.window(
|
||||
label="Gemini",
|
||||
tag="win_gemini",
|
||||
pos=(1052, 8),
|
||||
width=340,
|
||||
height=700,
|
||||
label="AI Client",
|
||||
tag="win_ai",
|
||||
pos=(1232, 8),
|
||||
width=420,
|
||||
height=900,
|
||||
no_close=True
|
||||
):
|
||||
dpg.add_text("Status: idle", tag="gemini_status")
|
||||
# provider
|
||||
dpg.add_text("Provider")
|
||||
dpg.add_combo(
|
||||
tag="provider_combo",
|
||||
items=PROVIDERS,
|
||||
default_value=self.current_provider,
|
||||
width=-1,
|
||||
callback=self.cb_provider_changed
|
||||
)
|
||||
|
||||
dpg.add_separator()
|
||||
|
||||
# model
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_text("Model")
|
||||
dpg.add_button(
|
||||
label="Fetch Models",
|
||||
callback=self.cb_fetch_models
|
||||
)
|
||||
dpg.add_listbox(
|
||||
tag="model_listbox",
|
||||
items=self.available_models,
|
||||
default_value=self.current_model,
|
||||
width=-1,
|
||||
num_items=6,
|
||||
callback=self.cb_model_changed
|
||||
)
|
||||
|
||||
dpg.add_separator()
|
||||
|
||||
dpg.add_text("Status: idle", tag="ai_status")
|
||||
|
||||
dpg.add_separator()
|
||||
|
||||
dpg.add_text("Message")
|
||||
dpg.add_input_text(
|
||||
label="##gemini_input",
|
||||
tag="gemini_input",
|
||||
tag="ai_input",
|
||||
multiline=True,
|
||||
width=-1,
|
||||
height=120
|
||||
height=140
|
||||
)
|
||||
|
||||
dpg.add_separator()
|
||||
|
||||
with dpg.group(horizontal=True):
|
||||
dpg.add_button(label="Gen + Send", callback=self.cb_generate_send)
|
||||
dpg.add_button(label="MD Only", callback=self.cb_md_only)
|
||||
dpg.add_button(label="Reset", callback=self.cb_reset_session)
|
||||
|
||||
dpg.add_separator()
|
||||
dpg.add_text("Response:")
|
||||
|
||||
dpg.add_text("Response")
|
||||
dpg.add_input_text(
|
||||
label="##gemini_response",
|
||||
tag="gemini_response",
|
||||
multiline=True,
|
||||
readonly=True,
|
||||
@@ -364,31 +436,29 @@ class App:
|
||||
height=-1
|
||||
)
|
||||
|
||||
def run(self):
|
||||
dpg.create_context()
|
||||
dpg.configure_app(docking=True, docking_space=True)
|
||||
|
||||
dpg.create_viewport(
|
||||
title="manual slop",
|
||||
width=1600,
|
||||
height=900
|
||||
)
|
||||
|
||||
dpg.setup_dearpygui()
|
||||
dpg.show_viewport()
|
||||
dpg.maximize_viewport()
|
||||
|
||||
self._build_ui()
|
||||
|
||||
while dpg.is_dearpygui_running():
|
||||
dpg.render_dearpygui_frame()
|
||||
|
||||
dpg.destroy_context()
|
||||
def run(config: dict) -> tuple[str, Path]:
|
||||
namespace = config["output"]["namespace"]
|
||||
output_dir = Path(config["output"]["output_dir"]) / "md_gen"
|
||||
base_dir = Path(config["files"]["base_dir"])
|
||||
files = config["files"].get("paths", [])
|
||||
screenshot_base_dir = Path(config.get("screenshots", {}).get("base_dir", "."))
|
||||
screenshots = config.get("screenshots", {}).get("paths", [])
|
||||
history = config.get("discussion", {}).get("history", [])
|
||||
|
||||
output_dir.mkdir(parents=True, exist_ok=True)
|
||||
increment = find_next_increment(output_dir, namespace)
|
||||
output_file = output_dir / f"{namespace}_{increment:03d}.md"
|
||||
markdown = build_markdown(base_dir, files, screenshot_base_dir, screenshots, history)
|
||||
output_file.write_text(markdown, encoding="utf-8")
|
||||
return markdown, output_file
|
||||
|
||||
def main():
|
||||
app = App()
|
||||
app.run()
|
||||
with open("config.toml", "rb") as f:
|
||||
import tomllib
|
||||
config = tomllib.load(f)
|
||||
markdown, output_file = run(config)
|
||||
print(f"Written: {output_file}")
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
||||
|
||||
@@ -1,3 +1,4 @@
|
||||
# pyproject.toml
|
||||
[project]
|
||||
name = "manual_slop"
|
||||
version = "0.1.0"
|
||||
@@ -5,6 +6,6 @@ requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
"dearpygui",
|
||||
"google-genai",
|
||||
"anthropic",
|
||||
"tomli-w"
|
||||
]
|
||||
|
||||
|
||||
Reference in New Issue
Block a user