adjustments
.editorconfig (new file, 54 lines)
@@ -0,0 +1,54 @@
+root = true
+
+[*.py]
+indent_style = space
+indent_size = 2
+
+[*.s]
+indent_style = tab
+indent_size = 2
+
+[*.asm]
+indent_style = tab
+indent_size = 2
+
+[*.refactor]
+indent_style = space
+indent_size = 4
+
+[*.md]
+indent_style = space
+indent_size = 4
+
+[*.c]
+indent_style = tab
+indent_size = 2
+charset = utf-8
+
+[*.cpp]
+indent_style = tab
+indent_size = 2
+charset = utf-8
+
+[*.h]
+indent_style = tab
+indent_size = 2
+charset = utf-8
+
+[*.hpp]
+indent_style = tab
+indent_size = 2
+charset = utf-8
+
+[*.{ps1, psm1}]
+indent_style = tab
+indent_size = 4
+
+[*.odin]
+indent_style = tab
+indent_size = 2
+charset = utf-8
+
+[*.{natvis, natstepfilter}]
+indent_style = tab
+indent_size = 4
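A caveat on the new brace globs: most EditorConfig implementations match the comma-separated alternatives literally, so the space inside [*.{ps1, psm1}] and [*.{natvis, natstepfilter}] can leave the second extension unmatched; this depends on the editors in use, so treat it as an assumption. The space-free spelling avoids the question:

    [*.{ps1,psm1}]
    indent_style = tab
    indent_size = 4

    [*.{natvis,natstepfilter}]
    indent_style = tab
    indent_size = 4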
config.toml (16 changes)
@@ -3,17 +3,8 @@ namespace = "colorforth_bootslop"
 output_dir = "."

 [files]
-base_dir = "C:/projects/forth/bootslop"
+base_dir = "C:/projects/manual_slop"
 paths = [
-"./attempt_1/*",
-"./scripts/*",
-"./references/Architectural_Consolidation.md",
-"./references/neokineogfx_in-depth.md",
-"./references/blog_in-depth.md",
-"./references/kyra_in-depth.md",
-".editorconfig",
-"GEMINI.md",
-"CONVENTIONS.md",
 ]

 [screenshots]
@@ -24,6 +15,5 @@ paths = []
 history = []

 [ai]
-provider = "gemini"
-model = "gemini-2.0-flash"
-
+provider = "anthropic"
+model = "claude-sonnet-4-6"
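For reference, the [ai] block above is what gui.py reads at startup. A minimal sketch of that wiring, assuming the same tomllib load the old main() used and the ai_client.set_provider(provider, model) call visible later in this diff; the standalone-script layout here is illustrative, not part of the commit:

    import tomllib

    import ai_client  # project module used by gui.py; assumed importable on its own

    # Load the config the way the old main() did.
    with open("config.toml", "rb") as f:
        config = tomllib.load(f)

    ai_cfg = config.get("ai", {})
    provider = ai_cfg.get("provider", "gemini")        # "anthropic" after this commit
    model = ai_cfg.get("model", "gemini-2.0-flash")    # "claude-sonnet-4-6" after this commit

    # gui.py routes both values through a single setter.
    ai_client.set_provider(provider, model)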
config_bootslop.toml (new file, 28 lines)
@@ -0,0 +1,28 @@
+[output]
+namespace = "colorforth_bootslop"
+output_dir = "."
+
+[files]
+base_dir = "C:/projects/forth/bootslop"
+paths = [
+"./attempt_1/*",
+"./scripts/*",
+"./references/Architectural_Consolidation.md",
+"./references/neokineogfx_in-depth.md",
+"./references/blog_in-depth.md",
+"./references/kyra_in-depth.md",
+".editorconfig",
+"GEMINI.md",
+"CONVENTIONS.md",
+]
+
+[screenshots]
+base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
+paths = []
+
+[discussion]
+history = []
+
+[ai]
+provider = "anthropic"
+model = "claude-sonnet-4-6"
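config_bootslop.toml preserves the old bootslop file list and base_dir as a separate profile while config.toml moves to the manual_slop tree. The old main() loaded config.toml directly, and nothing in this diff wires up the alternate file, so using this profile presumably means loading it by hand. A minimal sketch, assuming the aggregate.run(config) entry point that gui.py delegates to accepts the same dict and still returns (markdown, output_file) like the run() removed from gui.py below:

    import tomllib

    import aggregate  # module gui.py calls via aggregate.run(self.config); assumed importable

    # Load the alternate profile instead of the default config.toml.
    with open("config_bootslop.toml", "rb") as f:
        config = tomllib.load(f)

    # Assumed to mirror the old gui.run(config): build the markdown, write the numbered file.
    markdown, output_file = aggregate.run(config)
    print(f"Written: {output_file}")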
gui.py (101 changes)
@@ -37,8 +37,8 @@ class App:
         self.current_model: str = ai_cfg.get("model", "gemini-2.0-flash")
         self.available_models: list[str] = []

-        self.gemini_status = "idle"
-        self.gemini_response = ""
+        self.ai_status = "idle"
+        self.ai_response = ""
         self.last_md = ""
         self.last_md_path: Path | None = None
         self.send_thread: threading.Thread | None = None
@@ -71,17 +71,18 @@ class App:
         return aggregate.run(self.config)

     def _update_status(self, status: str):
-        self.gemini_status = status
+        self.ai_status = status
         if dpg.does_item_exist("ai_status"):
             dpg.set_value("ai_status", f"Status: {status}")

     def _update_response(self, text: str):
-        self.gemini_response = text
-        if dpg.does_item_exist("gemini_response"):
-            dpg.set_value("gemini_response", text)
+        self.ai_response = text
+        if dpg.does_item_exist("ai_response"):
+            dpg.set_value("ai_response", text)

     def _rebuild_files_list(self):
-        if dpg.does_item_exist("files_scroll"):
+        if not dpg.does_item_exist("files_scroll"):
+            return
         dpg.delete_item("files_scroll", children_only=True)
         for i, f in enumerate(self.files):
             with dpg.group(horizontal=True, parent="files_scroll"):
@@ -93,7 +94,8 @@ class App:
                 dpg.add_text(f)

     def _rebuild_shots_list(self):
-        if dpg.does_item_exist("shots_scroll"):
+        if not dpg.does_item_exist("shots_scroll"):
+            return
         dpg.delete_item("shots_scroll", children_only=True)
         for i, s in enumerate(self.screenshots):
             with dpg.group(horizontal=True, parent="shots_scroll"):
@@ -111,8 +113,8 @@ class App:
         if self.current_model in self.available_models:
             dpg.set_value("model_listbox", self.current_model)
         elif self.available_models:
-            dpg.set_value("model_listbox", self.available_models[0])
             self.current_model = self.available_models[0]
+            dpg.set_value("model_listbox", self.current_model)
         ai_client.set_provider(self.current_provider, self.current_model)

     def _make_remove_file_cb(self, idx: int):
@@ -235,7 +237,6 @@
         except Exception as e:
             self._update_status(f"generate error: {e}")
             return
-
         self._update_status("sending...")
         user_msg = dpg.get_value("ai_input")

@@ -369,14 +370,13 @@
             dpg.add_button(label="Save", callback=self.cb_save_discussion)

         with dpg.window(
-            label="AI Client",
-            tag="win_ai",
+            label="Provider",
+            tag="win_provider",
             pos=(1232, 8),
             width=420,
-            height=900,
+            height=280,
             no_close=True
         ):
-            # provider
             dpg.add_text("Provider")
             dpg.add_combo(
                 tag="provider_combo",
@@ -385,16 +385,10 @@
                 width=-1,
                 callback=self.cb_provider_changed
             )
-
             dpg.add_separator()
-
-            # model
             with dpg.group(horizontal=True):
                 dpg.add_text("Model")
-                dpg.add_button(
-                    label="Fetch Models",
-                    callback=self.cb_fetch_models
-                )
+                dpg.add_button(label="Fetch Models", callback=self.cb_fetch_models)
             dpg.add_listbox(
                 tag="model_listbox",
                 items=self.available_models,
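cb_fetch_models and _fetch_models are referenced in this diff but their bodies are not part of it. As a rough sketch of what the one-line Fetch Models button now triggers, assuming an ai_client.list_models(provider) helper that may not exist under that name, and reusing the dpg and ai_client module imports already present in gui.py:

    def cb_fetch_models(self):
        # Ask the current provider for its models; list_models is an assumed helper name.
        self.available_models = ai_client.list_models(self.current_provider)
        if not dpg.does_item_exist("model_listbox"):
            return
        # Replace the listbox contents with whatever came back.
        dpg.configure_item("model_listbox", items=self.available_models)
        if self.available_models and self.current_model not in self.available_models:
            self.current_model = self.available_models[0]
        if self.available_models:
            dpg.set_value("model_listbox", self.current_model)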
@@ -403,61 +397,64 @@
                 num_items=6,
                 callback=self.cb_model_changed
             )

             dpg.add_separator()

             dpg.add_text("Status: idle", tag="ai_status")

-            dpg.add_separator()
-
-            dpg.add_text("Message")
+        with dpg.window(
+            label="Message",
+            tag="win_message",
+            pos=(1232, 296),
+            width=420,
+            height=280,
+            no_close=True
+        ):
             dpg.add_input_text(
                 tag="ai_input",
                 multiline=True,
                 width=-1,
-                height=140
+                height=-64
             )

             dpg.add_separator()

             with dpg.group(horizontal=True):
                 dpg.add_button(label="Gen + Send", callback=self.cb_generate_send)
                 dpg.add_button(label="MD Only", callback=self.cb_md_only)
                 dpg.add_button(label="Reset", callback=self.cb_reset_session)

-            dpg.add_separator()
-
-            dpg.add_text("Response")
+        with dpg.window(
+            label="Response",
+            tag="win_response",
+            pos=(1232, 584),
+            width=420,
+            height=400,
+            no_close=True
+        ):
             dpg.add_input_text(
-                tag="gemini_response",
+                tag="ai_response",
                 multiline=True,
                 readonly=True,
                 width=-1,
                 height=-1
             )

-def run(config: dict) -> tuple[str, Path]:
-    namespace = config["output"]["namespace"]
-    output_dir = Path(config["output"]["output_dir"]) / "md_gen"
-    base_dir = Path(config["files"]["base_dir"])
-    files = config["files"].get("paths", [])
-    screenshot_base_dir = Path(config.get("screenshots", {}).get("base_dir", "."))
-    screenshots = config.get("screenshots", {}).get("paths", [])
-    history = config.get("discussion", {}).get("history", [])
-
-    output_dir.mkdir(parents=True, exist_ok=True)
-    increment = find_next_increment(output_dir, namespace)
-    output_file = output_dir / f"{namespace}_{increment:03d}.md"
-    markdown = build_markdown(base_dir, files, screenshot_base_dir, screenshots, history)
-    output_file.write_text(markdown, encoding="utf-8")
-    return markdown, output_file
+    def run(self):
+        dpg.create_context()
+        dpg.configure_app(docking=True, docking_space=True)
+        dpg.create_viewport(title="manual slop", width=1600, height=900)
+        dpg.setup_dearpygui()
+        dpg.show_viewport()
+        dpg.maximize_viewport()
+        self._build_ui()
+        self._fetch_models(self.current_provider)
+
+        while dpg.is_dearpygui_running():
+            dpg.render_dearpygui_frame()
+
+        dpg.destroy_context()

 def main():
-    with open("config.toml", "rb") as f:
-        import tomllib
-        config = tomllib.load(f)
-    markdown, output_file = run(config)
-    print(f"Written: {output_file}")
+    app = App()
+    app.run()


 if __name__ == "__main__":
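The markdown-generation pipeline that used to live in the module-level run(config) (increment discovery, build_markdown, writing the numbered .md file) no longer appears in gui.py; the App now calls aggregate.run(self.config) for that instead. A minimal sketch of that entry point, assuming the old body moved into an aggregate module mostly intact; find_next_increment and build_markdown are the helpers the removed code relied on and are not defined here:

    # aggregate.py -- sketch only; the real module is not shown in this commit
    from pathlib import Path

    def run(config: dict) -> tuple[str, Path]:
        namespace = config["output"]["namespace"]
        output_dir = Path(config["output"]["output_dir"]) / "md_gen"
        base_dir = Path(config["files"]["base_dir"])
        files = config["files"].get("paths", [])
        screenshot_base_dir = Path(config.get("screenshots", {}).get("base_dir", "."))
        screenshots = config.get("screenshots", {}).get("paths", [])
        history = config.get("discussion", {}).get("history", [])

        # Write the next numbered markdown file for this namespace.
        output_dir.mkdir(parents=True, exist_ok=True)
        increment = find_next_increment(output_dir, namespace)
        output_file = output_dir / f"{namespace}_{increment:03d}.md"
        markdown = build_markdown(base_dir, files, screenshot_base_dir, screenshots, history)
        output_file.write_text(markdown, encoding="utf-8")
        return markdown, output_file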