# Configuration for the manual_slop context-aggregation GUI.
# NOTE(review): section meanings inferred from key names and referenced
# scripts (gui.py, ai_client.py, aggregate.py) — confirm against the loader.

[output]
# Namespace used to label generated artifacts; output_dir receives the
# aggregated markdown (relative to the working directory at runtime).
namespace = "manual_slop"
output_dir = "./md_gen"

[files]
# base_dir resolves the relative entries in `paths`; absolute entries are
# used verbatim. Forward slashes are valid Windows paths and avoid escaping.
base_dir = "C:/projects/manual_slop"
paths = [
    "config.toml",
    "ai_client.py",
    "aggregate.py",
    "gemini.py",
    "gui.py",
    "pyproject.toml",
    "MainContext.md",
    "C:/projects/manual_slop/shell_runner.py",
    "C:/projects/manual_slop/session_logger.py",
    "C:/projects/manual_slop/docs/anthropic_api_ref_create_message.md",
    "C:/projects/manual_slop/docs/anthropic_api_ref_create_message_beta.md",
    "C:/projects/manual_slop/docs/anthropic_prompt_caching.md",
]

[screenshots]
# Screenshot attachments; empty `paths` means none are currently included.
base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
paths = []

[discussion]
# Conversation history replayed into the model context. Entries are verbatim
# transcript data — do not edit their content (including original typos).
history = [
    "[User]\nMake sure we are optimially using the anthropic api for this. \nI want to fully utilize caching if possible and just reduce overall loss of limits. \nAdd a log for comms history thats saved in ./logs and a the same for tool calls (scripts in ./scripts/generated, and their call equence in ./logs) these logs are closed in the next runtime of this gui program. \nOn open they amke new file buffers, each file buffer has a timestamp of when it was first made.",
    "[Claude]\nNow finish the gui portion: in gui.py or anything left (last made seesion_logger.py it seems). Caching strategy also looks to be updated in ai_client.py but not complete had to be reverted.",
    "[User]\ncontinue. I had to revert ai_client.py becuase it was causing reponse code failures from the vendor. I added docs for anthropic into your context as well. For lasts version before revert of ai_client.py see:\n\n```py\n[Omitted already updated in your context as you fixed it last]\n```",
    "[Claude]\nNow update `gui.py` to wire up `session_logger` and `tool_log_callback`:",
    "[Claude]\nThe PowerShell `-replace` with a multi-line string replacement needs to be done differently. I'll write the complete updated `gui.py` directly with all the changes applied. The changes are targeted and well-understood:",
    "[Claude]\nGood. Now let's verify all hooks are correctly in place and clean up the temp script:\n```",
]

[ai]
# Model vendor/name consumed by ai_client.py.
provider = "anthropic"
model = "claude-sonnet-4-6"