wip multiple projects

This commit is contained in:
2026-02-21 19:26:18 -05:00
parent c0535bc9a6
commit e9e5e29065
3 changed files with 252 additions and 57 deletions

View File

@@ -16,54 +16,18 @@ paths = [
"C:/projects/manual_slop/session_logger.py", "C:/projects/manual_slop/session_logger.py",
"C:/projects/manual_slop/file_cache.py", "C:/projects/manual_slop/file_cache.py",
"C:/projects/manual_slop/theme.py", "C:/projects/manual_slop/theme.py",
"C:/projects/manual_slop/project_manager.py",
] ]
[screenshots] [screenshots]
base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02" base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
paths = [] paths = [
"C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02/2026-02-21_19-17-10.png",
"C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02/Code_-_Insiders_2026-02-21_19-17-23.png",
]
[discussion] [discussion]
history = [ history = [
"User:\nAdd support for properly handling files that we have in config with anthropics api (your context will have documentation reguarding it). This will allow us to free up the main markdown file were sending as a message.\nWe also want to make sure the messaging of that giant markdown is chunked as anthropic does have an upper limit per messages.",
"AI:\nGood. Now the big rewrite of `ai_client.py`. The strategy:\n\n- Add a new `_send_anthropic_files` path that uses `file_cache.get_file_id()` to upload files individually as `document`/`image` content blocks, keeping the context message lean\n- The fallback (and also for oversized content) uses chunked text blocks — split the markdown into ≤180k-char chunks (well under the ~200k token limit with headroom for overhead), each sent as a separate `text` content block with `cache_control` on the last one\n- `send()` now accepts an optional `file_items` parameter from aggregate\n- `reset_session()` also resets the file_cache client",
"User:\ncontinue. Also got this error when trying to run latest version of this program. However I can use an old version for now.",
"AI:\nNow fix `gui.py` — patch the three 2-tuple unpack sites and thread `file_items` through to `send()`:",
"User:\ncontinue. My bad the error in comms was shown int he screenshot I linked. in the md context. Make sure to update MainContext.md when done.",
"AI:\nfinished without sending a response...",
"User:\nSeems to be working...! \nMake a richer discussioon history editor, with structual support for the user/ai message nodes and buttons in the reponse and message panels for adding their entries to it on click.",
"AI:\nThe existing format already uses `[User]\\n...` and `[AI]\\n...` prefixes. The new structured editor will parse those naturally and write them back in the same format. Now let me implement the full changes:",
"User:\nYou had failres in your last few called scripts. Git diff shows now changes to python files.\nI'll add the comms and tool logs to your context. \nWe should remove the script content from the tool log and just save the script name so that it doesn't bloat it. \nSame with omitting full message sent in the comms log, very expensive to send to you or read manually.",
"Vendor API:\nERROR: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'A maximum of 4 blocks with cache_control may be provided. Found 5.'}, 'request_id': 'req_011CYMxgMXvcZUVFNyFz1Lx8'}",
"User:\nI'm not able to send a message for a while now...\n(Message finally sent)",
"AI:\nCompleted without a response message showing up.",
"User:\nRestarts custom prompting program.",
"User:\nOk lets try this again...\nLets do the discussion history improvmeents. I want a sort of visual editor where each entry is its own block. \nThere should be a role field and a content filed and I should have buttons for appending the current response or current message to it\n. I should be able to insert or remove anywhere as well.",
"Vendor API:\nERROR: Error code: 500 - {'type': 'error', 'error': {'type': 'api_error', 'message': 'Internal server error'}, 'request_id': 'req_011CYMyjgyfDn2b6JnPD6rvJ'}",
"User:\ncontinue...",
"Vendor API:\nERROR: Error code: 404 - {'type': 'error', 'error': {'type': 'not_found_error', 'message': 'File not found: file_011CYMyjXYsf951nBgTvQmkE'}, 'request_id': 'req_011CYMysoSaH2xPxVtZurjSc'}",
"User:\nreverting to older editor...\nOk I'm back on an older prompting editor. Lets get this working again... with latest..",
"AI:\nGood. Now I'll rewrite the Discussion History panel in `gui.py`. The changes are:\n\n1. Add `_parse_history_entries()` — converts the raw TOML string array into `[{role, content}]` dicts\n2. Add `self.disc_entries` list of dicts to `App.__init__`\n3. Replace `_build_ui`'s Discussion window with a structured block editor inside a scrollable child window\n4. Add `_rebuild_disc_list()` — full redraw of the structured editor\n5. Add `_make_disc_*` callbacks for insert/remove/role-change/content-change\n6. Add `cb_append_message_to_history()` and `cb_append_response_to_history()`\n7. Add **→ History** buttons to Message and Response panels\n8. Update `_flush_to_config()` to serialise `disc_entries` back to the string array format\n9. Remove the old `cb_add_excerpt` / `cb_clear_discussion` / `cb_save_discussion` callbacks (replaced)",
"User:\nsounds good, continue!",
"Vendor API:\nERROR: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'messages.10: `tool_use` ids were found without `tool_result` blocks immediately after: toolu_01BjaPJPVFoNnNoRCLvPA8qF. Each `tool_use` block must have a corresponding `tool_result` block in the next message.'}, 'request_id': 'req_011CYMzHG47TKTPp7iLcE5NP'}",
"User:\nreverting to an even older version..",
"User:\nOk lets see if we can get it to work with this version and get everything fixed on latest...",
"AI:\nThe cache_control logic looks correct already (system=1, tools=1, context_last=1 = 3 total, under the limit of 4). The error seen previously was likely from an older version. Good.\n\nNow let me fix the **comms log** (omit full message from OUT/request) and **tool log** (omit script body from `session_logger`):",
"AI:\nNo response",
"User:\nOnly see changes done to gui.py...\nrestarting...",
"User:\nIs everything fixed? not sure?",
"AI:\nNow I have a clear picture. The files on disk match the context documents closely. The outstanding tasks from the discussion history are:\n\n1. **`session_logger.py`** — omit the script body from the tool-call log (just save script name/path, not contents)\n2. **`ai_client.py`** — omit full message from OUT/request comms log (already mostly done — Gemini still uses `summary` key; Anthropic uses `message` with a summary. The Gemini one sends `summary` but the renderer looks for `message`. Fix that too.)\n3. **`gui.py`** — Discussion History structured block editor (the big one that kept failing)\n\nLet me tackle all three cleanly now.\n\n**First: `session_logger.py`** — remove script body from toolcalls log, just reference the `.ps1` file:",
"Vendor API:\nERROR: Error code: 404 - {'type': 'error', 'error': {'type': 'not_found_error', 'message': 'File not found: file_011CYN4ecyVxDg7ujLUYWeD3'}, 'request_id': 'req_011CYN4kpavNrA5KYXnMqKLq'}\nUser (Background):\nConstraining the discussion history to one entry...",
"Vendor API:\nERROR: Error code: 404 - {'type': 'error', 'error': {'type': 'not_found_error', 'message': 'File not found: file_011CYN4ecyVxDg7ujLUYWeD3'}, 'request_id': 'req_011CYN4rxjETKA5B9ACMkm2T'}",
"User:\nSigh, here we go again.\nAI - Observed by user:\nDoes many actions (recorded in screenshot but no direct response from them).",
"Vendor API:\nERROR: Error code: 404 - {'type': 'error', 'error': {'type': 'not_found_error', 'message': 'File not found: file_011CYN4wQpEzM4W4KFA48HDz'}, 'request_id': 'req_011CYN59c31BxJsgDZ6Rzqby'}\nUser (Background):\nOk definitely not...",
"User:\nAttempt to fix these error with files were getting... maybe this file usage thing with anthropic was a bad idea...",
"Vendor API:\nERROR: Error code: 500 - {'type': 'error', 'error': {'type': 'api_error', 'message': 'Internal server error'}, 'request_id': 'req_011CYN5PqDQLs5d1B9QS8inZ'}\nUser (Background):\nWeird it got pretty far and gutted the file_cache.py file. Not sure why that error popped up for the service...\nrestarting prompting editor to latest instance...",
"Vendor API:\nERROR: send() takes from 2 to 3 positional arguments but 4 were given",
"User:\nok... maybe I cant use latest...\nI applied a fix to gui.py line 658 was using the files arg with the send call thats no longer used.",
"AI:\nThe file has `\\r\\n` line endings mixed in. I'll write the replacement using a Python one-liner to avoid PowerShell line-ending issues:",
"User:\nSure, if you want also enforce LF if you want on all files. I manually attempted so in vscode.",
"AI:\nDoes work... (Noted by user)\n- Reponse:\nNow clean up the temp patch scripts and do a quick sanity check:",
"User:\nLets add some theming to this. Add options to change the ui dpi scale and the font used, also add the ability to change the palette I guess. We can reference my 10x editor settings for inspiration:\n```\n[Omitted as its already been utilized]\n\n```",
"AI:\nGood. Now I'll patch `gui.py` — adding the Theme window and wiring it up. I'll make targeted insertions rather than a full rewrite:\n\n1. Add `import theme` at the top\n2. Add `_build_theme_window()` and its callbacks to `App`\n3. Call `theme.load_from_config()` at startup (in `run()`)\n4. Call `theme.save_to_config()` in `_flush_to_config()`\n5. Add `win_theme` to `_build_ui()`", "AI:\nGood. Now I'll patch `gui.py` — adding the Theme window and wiring it up. I'll make targeted insertions rather than a full rewrite:\n\n1. Add `import theme` at the top\n2. Add `_build_theme_window()` and its callbacks to `App`\n3. Call `theme.load_from_config()` at startup (in `run()`)\n4. Call `theme.save_to_config()` in `_flush_to_config()`\n5. Add `win_theme` to `_build_ui()`",
"User:\nSure lets patch gui.py.", "User:\nSure lets patch gui.py.",
"AI:\nGood. Now I have a clear picture. I'll do the full patch in one Python script to avoid PowerShell string escaping issues:\n(Finished the job)", "AI:\nGood. Now I have a clear picture. I'll do the full patch in one Python script to avoid PowerShell string escaping issues:\n(Finished the job)",
@@ -73,6 +37,16 @@ history = [
"AI:\nGood, I have a clear picture. Here's the plan:\n\n**Changes needed:**\n1. **`config.toml`** — add `[discussion] roles = [...]` so roles are configurable\n2. **`gui.py`** — 4 targeted edits:\n - `DISC_ROLES` becomes a module-level fallback; the real list comes from config at load time and lives on `App`\n - `_parse_history_entries()` takes `roles` as a parameter\n - `_rebuild_disc_list()` — add collapse/expand per entry (using a `bool` `collapsed` flag in each dict), Collapse All / Expand All buttons in the toolbar\n - Add a \"Roles\" sub-section in the Discussion window for adding/removing role names from the config list, and wire it into `_flush_to_config()`\n\nI'll do this as a Python patch script to avoid PowerShell string escaping issues:", "AI:\nGood, I have a clear picture. Here's the plan:\n\n**Changes needed:**\n1. **`config.toml`** — add `[discussion] roles = [...]` so roles are configurable\n2. **`gui.py`** — 4 targeted edits:\n - `DISC_ROLES` becomes a module-level fallback; the real list comes from config at load time and lives on `App`\n - `_parse_history_entries()` takes `roles` as a parameter\n - `_rebuild_disc_list()` — add collapse/expand per entry (using a `bool` `collapsed` flag in each dict), Collapse All / Expand All buttons in the toolbar\n - Add a \"Roles\" sub-section in the Discussion window for adding/removing role names from the config list, and wire it into `_flush_to_config()`\n\nI'll do this as a Python patch script to avoid PowerShell string escaping issues:",
"User:\nSure, go ahead and make to those change patches and apply them.", "User:\nSure, go ahead and make to those change patches and apply them.",
"AI:\nAll 16 checks pass. Now update `MainContext.md` and clean up the patch file:\n(Note from user: the AI did so!)", "AI:\nAll 16 checks pass. Now update `MainContext.md` and clean up the patch file:\n(Note from user: the AI did so!)",
"User:\nLets add multiple project support. currently this project would be the \"manual_slop\" project. \nI need to make a separate \"manual_slop.toml\" for this and offload things that would be within a per project to it.\nI also want to have several discussion histories that I can label. \nThis would allow me to stash and continue separate effort's histories..\nIt would be important if these discussions saved the last git commit they were on. (I'll need a buton to manually update it).\nAnd timestamps on each discussion entry to give an idea of the duration involved and \nhow the project has evolved since the discussion stream's effort was last done.\nBasically preserve context so ai can use that as valuable information in its working memory.\n\nTo load a project I'll need to hoist it into the config by providing the path.\nMaybe allow multiple project tomls to be specified path wise in the array.",
"AI:\nAnnotated by user:\nNo response from AI, taking sceenshot of scripts run... and directory's state",
"User:\nHey it seems like you were not done want to continue?\n",
"AI:\nDid work, No response from AI (this is an annotated entry by the user), made project_manager.py, adding it to md context.",
]
roles = [
"User",
"AI",
"Vendor API",
"System",
] ]
[ai] [ai]
@@ -83,4 +57,4 @@ model = "claude-sonnet-4-6"
palette = "10x Dark" palette = "10x Dark"
font_path = "C:/Users/Ed/AppData/Local/uv/cache/archive-v0/WSthkYsQ82b_ywV6DkiaJ/pygame_gui/data/FiraCode-Regular.ttf" font_path = "C:/Users/Ed/AppData/Local/uv/cache/archive-v0/WSthkYsQ82b_ywV6DkiaJ/pygame_gui/data/FiraCode-Regular.ttf"
font_size = 18.0 font_size = 18.0
scale = 1.25 scale = 1.0

View File

@@ -63,7 +63,7 @@ Collapsed=0
[Window][###126] [Window][###126]
Pos=611,0 Pos=611,0
Size=699,2137 Size=1152,2137
Collapsed=0 Collapsed=0
DockId=0x00000031,0 DockId=0x00000031,0
@@ -114,7 +114,7 @@ DockId=0x00000012,0
[Window][###100] [Window][###100]
Pos=611,0 Pos=611,0
Size=699,2137 Size=1152,2137
Collapsed=0 Collapsed=0
DockId=0x00000031,1 DockId=0x00000031,1
@@ -222,8 +222,8 @@ Collapsed=0
DockId=0x00000022,0 DockId=0x00000022,0
[Window][###85] [Window][###85]
Pos=1312,0 Pos=1765,0
Size=1187,2137 Size=808,2137
Collapsed=0 Collapsed=0
DockId=0x00000032,0 DockId=0x00000032,0
@@ -472,8 +472,8 @@ Size=700,440
Collapsed=0 Collapsed=0
[Window][###108] [Window][###108]
Pos=2501,0 Pos=2575,0
Size=1339,1331 Size=1265,966
Collapsed=0 Collapsed=0
DockId=0x0000002B,0 DockId=0x0000002B,0
@@ -559,17 +559,92 @@ Size=700,440
Collapsed=0 Collapsed=0
[Window][###116] [Window][###116]
Pos=2501,1333 Pos=2575,968
Size=1339,804 Size=1265,1169
Collapsed=0 Collapsed=0
DockId=0x0000002C,0 DockId=0x0000002C,0
[Window][###120] [Window][###120]
Pos=611,0 Pos=611,0
Size=699,2137 Size=1152,2137
Collapsed=0 Collapsed=0
DockId=0x00000031,2 DockId=0x00000031,2
[Window][###12467]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###12567]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###12668]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###12773]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###12885]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###13228]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###13348]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###13469]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###13597]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###13729]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###14084]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###14224]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###14368]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###14513]
Pos=1578,868
Size=700,440
Collapsed=0
[Window][###14662]
Pos=1578,868
Size=700,440
Collapsed=0
[Docking][Data] [Docking][Data]
DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,2137 Split=X Selected=0x40484D8F DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,2137 Split=X Selected=0x40484D8F
DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=609,1161 Split=Y Selected=0xEE087978 DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=609,1161 Split=Y Selected=0xEE087978
@@ -599,18 +674,18 @@ DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3
DockNode ID=0x00000017 Parent=0x00000013 SizeRef=1314,1749 Selected=0x4B454E0B DockNode ID=0x00000017 Parent=0x00000013 SizeRef=1314,1749 Selected=0x4B454E0B
DockNode ID=0x00000018 Parent=0x00000013 SizeRef=1309,1749 Split=Y Selected=0x88A8C2FF DockNode ID=0x00000018 Parent=0x00000013 SizeRef=1309,1749 Split=Y Selected=0x88A8C2FF
DockNode ID=0x00000019 Parent=0x00000018 SizeRef=2440,1412 Split=X Selected=0x88A8C2FF DockNode ID=0x00000019 Parent=0x00000018 SizeRef=2440,1412 Split=X Selected=0x88A8C2FF
DockNode ID=0x00000023 Parent=0x00000019 SizeRef=1888,737 Split=Y Selected=0x4F935A1E DockNode ID=0x00000023 Parent=0x00000019 SizeRef=1962,737 Split=Y Selected=0x4F935A1E
DockNode ID=0x0000001F Parent=0x00000023 SizeRef=2315,1191 Split=Y Selected=0x4F935A1E DockNode ID=0x0000001F Parent=0x00000023 SizeRef=2315,1191 Split=Y Selected=0x4F935A1E
DockNode ID=0x00000025 Parent=0x0000001F SizeRef=2315,1244 Split=X Selected=0x4F935A1E DockNode ID=0x00000025 Parent=0x0000001F SizeRef=2315,1244 Split=X Selected=0x4F935A1E
DockNode ID=0x00000029 Parent=0x00000025 SizeRef=853,1867 Split=X Selected=0xFDB3860E DockNode ID=0x00000029 Parent=0x00000025 SizeRef=853,1867 Split=X Selected=0xFDB3860E
DockNode ID=0x00000031 Parent=0x00000029 SizeRef=699,2137 Selected=0xC56063F4 DockNode ID=0x00000031 Parent=0x00000029 SizeRef=1152,2137 Selected=0x4A209654
DockNode ID=0x00000032 Parent=0x00000029 SizeRef=1187,2137 CentralNode=1 Selected=0xFDB3860E DockNode ID=0x00000032 Parent=0x00000029 SizeRef=808,2137 CentralNode=1 Selected=0xFDB3860E
DockNode ID=0x0000002A Parent=0x00000025 SizeRef=890,1867 Selected=0x40484D8F DockNode ID=0x0000002A Parent=0x00000025 SizeRef=890,1867 Selected=0x40484D8F
DockNode ID=0x00000026 Parent=0x0000001F SizeRef=2315,621 Selected=0x7D28643F DockNode ID=0x00000026 Parent=0x0000001F SizeRef=2315,621 Selected=0x7D28643F
DockNode ID=0x00000020 Parent=0x00000023 SizeRef=2315,944 Selected=0x4C2F06CB DockNode ID=0x00000020 Parent=0x00000023 SizeRef=2315,944 Selected=0x4C2F06CB
DockNode ID=0x00000024 Parent=0x00000019 SizeRef=1339,737 Split=Y Selected=0xB8D8893E DockNode ID=0x00000024 Parent=0x00000019 SizeRef=1265,737 Split=Y Selected=0xB8D8893E
DockNode ID=0x0000002B Parent=0x00000024 SizeRef=1153,1331 Selected=0xB8D8893E DockNode ID=0x0000002B Parent=0x00000024 SizeRef=1153,966 Selected=0xB8D8893E
DockNode ID=0x0000002C Parent=0x00000024 SizeRef=1153,804 Selected=0xCCB4E4FA DockNode ID=0x0000002C Parent=0x00000024 SizeRef=1153,1169 Selected=0xCCB4E4FA
DockNode ID=0x0000001A Parent=0x00000018 SizeRef=2440,723 Selected=0x3A881EEF DockNode ID=0x0000001A Parent=0x00000018 SizeRef=2440,723 Selected=0x3A881EEF
DockNode ID=0x00000014 Parent=0x00000010 SizeRef=1967,445 Selected=0xC36FF36B DockNode ID=0x00000014 Parent=0x00000010 SizeRef=1967,445 Selected=0xC36FF36B
DockNode ID=0x00000002 Parent=0x00000004 SizeRef=2227,1161 Split=X Selected=0x714F2F7B DockNode ID=0x00000002 Parent=0x00000004 SizeRef=2227,1161 Split=X Selected=0x714F2F7B

146
project_manager.py Normal file
View File

@@ -0,0 +1,146 @@
# project_manager.py
"""
Project-level config management for manual_slop.
A project file (e.g. manual_slop.toml) stores everything project-specific:
[project] name, git_dir
[output] namespace, output_dir
[files] base_dir, paths
[screenshots] base_dir, paths
[discussion]
roles = [...]
active = "main"
[discussion.discussions.main]
git_commit = ""
last_updated = ""
history = [...] # each entry: "@ISO\\nRole:\\ncontent"
The global config.toml keeps:
[projects] paths = [...], active = "..."
[ai] provider, model
[theme] palette, font_path, font_size, scale
"""
import subprocess
import datetime
import tomllib
import tomli_w
import re
from pathlib import Path
TS_FMT = "%Y-%m-%dT%H:%M:%S"
def now_ts() -> str:
return datetime.datetime.now().strftime(TS_FMT)
def parse_ts(s: str) -> datetime.datetime | None:
try:
return datetime.datetime.strptime(s, TS_FMT)
except Exception:
return None
def entry_to_str(entry: dict) -> str:
    """Serialise a history-entry dict to its string form.

    With a timestamp: "@<ts>\\n<role>:\\n<content>"; without: "<role>:\\n<content>".
    Missing keys fall back to role "User" and empty strings.
    """
    body = f"{entry.get('role', 'User')}:\n{entry.get('content', '')}"
    stamp = entry.get("ts", "")
    return f"@{stamp}\n{body}" if stamp else body
def str_to_entry(raw: str, roles: list[str]) -> dict:
    """Parse a serialised history entry back into a dict.

    Accepts "@<ts>\\n<Role>:\\n<content>" or "<Role>:\\n<content>"; the role
    line may be bracketed ("[AI]") and the trailing colon is optional.
    Returns {"role", "content", "collapsed", "ts"}.  When no known role line
    is found, falls back to role "User" with the whole remainder as content.
    """
    ts = ""
    rest = raw
    # Optional "@<timestamp>" first line.
    if rest.startswith("@"):
        nl = rest.find("\n")
        if nl != -1:
            ts = rest[1:nl]
            rest = rest[nl + 1:]
    known = roles if roles else ["User", "AI", "Vendor API", "System"]
    # BUG FIX: backslashes were doubled inside raw strings (r"\\[", r"\\s"),
    # so the compiled pattern required a literal backslash in the input and
    # never matched a plain "User:" line (it also swallowed the capture
    # group into a character class).  Raw strings need single backslashes.
    role_pat = re.compile(
        r"^(?:\[)?(" + "|".join(re.escape(r) for r in known) + r")(?:\])?:?\s*$",
        re.IGNORECASE,
    )
    lines = rest.split("\n", 1)  # split() always yields >= 1 element
    matched_role = "User"
    content = rest.strip()
    m = role_pat.match(lines[0].strip())
    if m:
        raw_role = m.group(1)
        # Canonicalise the casing against the known-role list.
        matched_role = next((r for r in known if r.lower() == raw_role.lower()), raw_role)
        content = lines[1].strip() if len(lines) > 1 else ""
    return {"role": matched_role, "content": content, "collapsed": False, "ts": ts}
def get_git_commit(git_dir: str) -> str:
    """Return the full HEAD commit hash for *git_dir*, or "" on any failure
    (missing git, bad directory, non-repo, timeout)."""
    try:
        proc = subprocess.run(
            ["git", "rev-parse", "HEAD"],
            capture_output=True, text=True, cwd=git_dir, timeout=5,
        )
    except Exception:
        return ""
    return proc.stdout.strip() if proc.returncode == 0 else ""
def get_git_log(git_dir: str, n: int = 5) -> str:
    """Return the last *n* commits of *git_dir* as `git log --oneline` text,
    or "" on any failure."""
    cmd = ["git", "log", "--oneline", f"-{n}"]
    try:
        proc = subprocess.run(cmd, capture_output=True, text=True, cwd=git_dir, timeout=5)
    except Exception:
        return ""
    return proc.stdout.strip() if proc.returncode == 0 else ""
def default_discussion() -> dict:
    """Fresh, empty discussion record stamped with the current time."""
    return {
        "git_commit": "",
        "last_updated": now_ts(),
        "history": [],
    }
def default_project(name: str = "unnamed") -> dict:
    """Skeleton project config carrying a single empty "main" discussion."""
    default_roles = ["User", "AI", "Vendor API", "System"]
    proj = {
        "project": {"name": name, "git_dir": ""},
        "output": {"namespace": name, "output_dir": "./md_gen"},
        "files": {"base_dir": ".", "paths": []},
        "screenshots": {"base_dir": ".", "paths": []},
    }
    proj["discussion"] = {
        "roles": default_roles,
        "active": "main",
        "discussions": {"main": default_discussion()},
    }
    return proj
def load_project(path: str | Path) -> dict:
with open(path, "rb") as f:
return tomllib.load(f)
def save_project(proj: dict, path: str | Path):
    """Serialise *proj* as TOML to *path* (overwrites)."""
    with Path(path).open("wb") as fh:
        tomli_w.dump(proj, fh)
def migrate_from_legacy_config(cfg: dict) -> dict:
    """Build a project dict from a legacy config.toml. Does NOT save it."""
    proj = default_project(cfg.get("output", {}).get("namespace", "project"))
    # Carry over whole sections when the legacy config has them.
    for section in ("output", "files", "screenshots"):
        if section in cfg:
            proj[section] = dict(cfg[section])
    disc_cfg = cfg.get("discussion", {})
    proj["discussion"]["roles"] = disc_cfg.get("roles", ["User", "AI", "Vendor API", "System"])
    # Legacy history becomes the "main" discussion, freshly timestamped.
    main = proj["discussion"]["discussions"]["main"]
    main["history"] = disc_cfg.get("history", [])
    main["last_updated"] = now_ts()
    return proj
def flat_config(proj: dict, disc_name: str | None = None) -> dict:
"""Return a flat config dict compatible with aggregate.run()."""
disc_section = proj.get("discussion", {})
name = disc_name or disc_section.get("active", "main")
disc_data = disc_section.get("discussions", {}).get(name, {})
return {
"output": proj.get("output", {}),
"files": proj.get("files", {}),
"screenshots": proj.get("screenshots", {}),
"discussion": {
"roles": disc_section.get("roles", []),
"history": disc_data.get("history", []),
},
}