manual_slop/config.toml

[output]
namespace = "manual_slop"
output_dir = "./md_gen"

[files]
base_dir = "C:/projects/manual_slop"
paths = [
    "config.toml",
    "ai_client.py",
    "aggregate.py",
    "gemini.py",
    "gui.py",
    "pyproject.toml",
    "MainContext.md",
    "C:/projects/manual_slop/shell_runner.py",
    "C:/projects/manual_slop/session_logger.py",
    "C:/projects/manual_slop/docs/anthropic_api_ref_create_message.md",
    "C:/projects/manual_slop/docs/anthropic_api_ref_create_message_beta.md",
    "C:/projects/manual_slop/docs/anthropic_prompt_caching.md",
]

[screenshots]
base_dir = "C:/Users/Ed/scoop/apps/sharex/current/ShareX/Screenshots/2026-02"
paths = []

[discussion]
history = [
    "Make sure we are optimally using the Anthropic API for this. \nI want to fully utilize caching if possible and reduce overall consumption of rate limits. \nAdd a log for comms history that's saved in ./logs, and the same for tool calls (scripts in ./scripts/generated, and their call sequence in ./logs); these logs are closed on the next runtime of this GUI program. \nOn open they make new file buffers; each file buffer has a timestamp of when it was first made.",
    "Now finish the GUI portion: gui.py or anything left (the last file made seems to be session_logger.py). The caching strategy also looks to be updated in ai_client.py but is not complete.",
]

[ai]
provider = "anthropic"
model = "claude-sonnet-4-6"
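
For reference, here is a minimal sketch of how this file could be loaded from Python with the standard-library tomllib (Python 3.11+). The section and key names come from the config above; the load_config helper and the assumption that relative entries in [files].paths resolve against [files].base_dir are illustrative, not taken from the project's actual code.

# Minimal sketch of loading config.toml with the standard-library tomllib
# (Python 3.11+). Assumes relative [files].paths entries resolve against
# [files].base_dir; the project's real loader (aggregate.py / gui.py) may differ.
import tomllib
from pathlib import PureWindowsPath


def load_config(path: str = "config.toml") -> dict:
    with open(path, "rb") as f:
        cfg = tomllib.load(f)

    base = PureWindowsPath(cfg["files"]["base_dir"])
    resolved = []
    for raw in cfg["files"]["paths"]:
        p = PureWindowsPath(raw)  # paths in the config use Windows drive letters
        resolved.append(p.as_posix() if p.is_absolute() else (base / p).as_posix())
    cfg["files"]["resolved_paths"] = resolved
    return cfg


if __name__ == "__main__":
    cfg = load_config()
    print(cfg["ai"]["provider"], cfg["ai"]["model"])
    for p in cfg["files"]["resolved_paths"]:
        print(p)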
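
The [discussion] history also outlines a logging scheme: on each GUI launch, new timestamped log buffers are created under ./logs (one for comms history, one for the call sequence of generated scripts), and a buffer is only considered closed by the next runtime. The project's session_logger.py presumably covers this; the class below is a hypothetical sketch of that scheme under those assumptions, not the actual module.

# Hypothetical sketch of the logging scheme described in [discussion].history.
# The real session_logger.py in the project may be structured differently.
from datetime import datetime
from pathlib import Path


class SessionLogger:
    def __init__(self, log_dir: str = "./logs") -> None:
        self.log_dir = Path(log_dir)
        self.log_dir.mkdir(parents=True, exist_ok=True)
        # Each buffer's filename carries the timestamp of when it was first made.
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        self._comms = open(self.log_dir / f"comms_{stamp}.log", "a", encoding="utf-8")
        self._tools = open(self.log_dir / f"tool_calls_{stamp}.log", "a", encoding="utf-8")

    def log_comms(self, role: str, text: str) -> None:
        # One line per message exchanged with the API.
        self._comms.write(f"[{datetime.now().isoformat()}] {role}: {text}\n")
        self._comms.flush()

    def log_tool_call(self, script_path: str) -> None:
        # Records the call sequence of scripts generated under ./scripts/generated.
        self._tools.write(f"[{datetime.now().isoformat()}] ran {script_path}\n")
        self._tools.flush()

    def close(self) -> None:
        self._comms.close()
        self._tools.close()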