From 9272fd42d6a20161f1ddf18d3c9eda350cfb6ea9 Mon Sep 17 00:00:00 2001 From: Ed_ Date: Sat, 21 Feb 2026 16:22:33 -0500 Subject: [PATCH] progress --- .gitignore | 1 + MainContext.md | 64 +++++++++++++--- config.toml | 15 ++-- dpg_layout.ini | 197 +++++++++++++++++++++++++++++++++++-------------- gui.py | 139 ++++++++++++++++++++++++++++++---- 5 files changed, 325 insertions(+), 91 deletions(-) diff --git a/.gitignore b/.gitignore index 8a3e2d6..0c8007d 100644 --- a/.gitignore +++ b/.gitignore @@ -4,3 +4,4 @@ uv.lock colorforth_bootslop_002.md md_gen scripts/generated +logs diff --git a/MainContext.md b/MainContext.md index 1e710d8..66c124b 100644 --- a/MainContext.md +++ b/MainContext.md @@ -10,10 +10,12 @@ - `uv` - package/env management **Files:** -- `gui.py` - main GUI, `App` class, all panels, all callbacks, confirmation dialog, layout persistence -- `ai_client.py` - unified provider wrapper, model listing, session management, send, tool/function-call loop, comms log +- `gui.py` - main GUI, `App` class, all panels, all callbacks, confirmation dialog, layout persistence, rich comms rendering +- `ai_client.py` - unified provider wrapper, model listing, session management, send, tool/function-call loop, comms log, provider error classification - `aggregate.py` - reads config, collects files/screenshots/discussion, writes numbered `.md` files to `output_dir` - `shell_runner.py` - subprocess wrapper that runs PowerShell scripts sandboxed to `base_dir`, returns stdout/stderr/exit code as a string +- `session_logger.py` - opens timestamped log files at session start; writes comms entries as JSON-L and tool calls as markdown; saves each AI-generated script as a `.ps1` file +- `gemini.py` - legacy standalone Gemini wrapper (not used by the main GUI; superseded by `ai_client.py`) - `config.toml` - namespace, output_dir, files paths+base_dir, screenshots paths+base_dir, discussion history array, ai provider+model - `credentials.toml` - gemini api_key, anthropic api_key - `dpg_layout.ini` - Dear PyGui window layout file (auto-saved on exit, auto-loaded on startup); gitignore this per-user @@ -27,7 +29,7 @@ - **Message** - multiline input, Gen+Send button, MD Only button, Reset session button - **Response** - readonly multiline displaying last AI response - **Tool Calls** - scrollable log of every PowerShell tool call the AI made, showing script and result; Clear button -- **Comms History** - live log of every raw request/response/tool_call/tool_result exchanged with the vendor API; status line lives here; Clear button; heavy fields (message, text, script, output) clamped to an 80px scrollable box when they exceed `COMMS_CLAMP_CHARS` (300) characters +- **Comms History** - rich structured live log of every API interaction; status line at top; colour legend; Clear button; each entry rendered with kind-specific layout rather than raw JSON **Layout persistence:** - `dpg.configure_app(..., init_file="dpg_layout.ini")` loads the ini at startup if it exists; DPG silently ignores a missing file @@ -51,17 +53,50 @@ - `_comms_log: list[dict]` accumulates every API interaction during a session - `_append_comms(direction, kind, payload)` called at each boundary: OUT/request before sending, IN/response after each model reply, OUT/tool_call before executing, IN/tool_result after executing, OUT/tool_result_send when returning results to the model - Entry fields: `ts` (HH:MM:SS), `direction` (OUT/IN), `kind`, `provider`, `model`, `payload` (dict) -- Anthropic responses also include `usage` 
(input_tokens/output_tokens) and `stop_reason` in payload +- Anthropic responses also include `usage` (input_tokens, output_tokens, cache_creation_input_tokens, cache_read_input_tokens) and `stop_reason` in payload - `get_comms_log()` returns a snapshot; `clear_comms_log()` empties it - `comms_log_callback` (injected by gui.py) is called from the background thread with each new entry; gui queues entries in `_pending_comms` (lock-protected) and flushes them to the DPG panel each render frame -- `MAX_FIELD_CHARS = 400` in ai_client is the threshold used for the clamp decision in the UI (`COMMS_CLAMP_CHARS = 300` in gui.py governs the display cutoff) +- `MAX_FIELD_CHARS = 400` in ai_client (unused in display logic; kept as reference); `COMMS_CLAMP_CHARS = 300` in gui.py governs the display cutoff for heavy text fields -**Comms History panel rendering:** -- Each entry shows: index, timestamp, direction (colour-coded blue=OUT / green=IN), kind (colour-coded), provider/model -- Payload fields rendered below the header; fields in `_HEAVY_KEYS` (`message`, `text`, `script`, `output`, `content`) that exceed `COMMS_CLAMP_CHARS` are shown in an 80px tall readonly scrollable `input_text` box instead of a plain `add_text` -- Colour legend row at the top of the panel -- Status line (formerly in Provider panel) moved to top of Comms History panel -- Reset session also clears the comms log and panel; Clear button in Comms History clears only the comms log +**Comms History panel — rich structured rendering (gui.py):** + +Rather than showing raw JSON, each comms entry is rendered using a kind-specific renderer function. Unknown kinds fall back to a generic key/value layout. + +Colour maps: +- Direction: OUT = blue-ish `(100,200,255)`, IN = green-ish `(140,255,160)` +- Kind: request=gold, response=light-green, tool_call=orange, tool_result=light-blue, tool_result_send=lavender +- Labels: grey `(180,180,180)`; values: near-white `(220,220,220)`; dict keys/indices: `(140,200,255)`; numbers/token counts: `(180,255,180)`; sub-headers: `(220,200,120)` + +Helper functions: +- `_add_text_field(parent, label, value)` — labelled text; strings longer than `COMMS_CLAMP_CHARS` render as an 80px readonly scrollable `input_text`; shorter strings render as `add_text` +- `_add_kv_row(parent, key, val)` — single horizontal key: value row +- `_render_usage(parent, usage)` — renders Anthropic token usage dict in a fixed display order (input → cache_read → cache_creation → output) +- `_render_tool_calls_list(parent, tool_calls)` — iterates tool call list, showing name, id, and all args via `_add_text_field` + +Kind-specific renderers (in `_KIND_RENDERERS` dict, dispatched by `_render_comms_entry`): +- `_render_payload_request` — shows `message` field via `_add_text_field` +- `_render_payload_response` — shows round, stop_reason (orange), text, tool_calls list, usage block +- `_render_payload_tool_call` — shows name, optional id, script via `_add_text_field` +- `_render_payload_tool_result` — shows name, optional id, output via `_add_text_field` +- `_render_payload_tool_result_send` — iterates results list, shows tool_use_id and content per result +- `_render_payload_generic` — fallback for unknown kinds; renders all keys, using `_add_text_field` for keys in `_HEAVY_KEYS`, `_add_kv_row` for others; dicts/lists are JSON-serialised + +Entry layout: index + timestamp + direction + kind + provider/model header row, then payload rendered by the appropriate function, then a separator line. 
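+
+As a reference for the shape these renderers consume, a minimal sketch (field values below are illustrative only, not taken from a real session; the dispatch lines mirror `_render_comms_entry` in gui.py, and the parent tag is assumed to be the `comms_scroll` child window):
+
+```python
+# Illustrative comms entry of kind "response", as queued via comms_log_callback.
+entry = {
+    "ts": "16:22:33",
+    "direction": "IN",
+    "kind": "response",
+    "provider": "anthropic",
+    "model": "<model id>",          # placeholder
+    "payload": {
+        "round": 1,
+        "stop_reason": "tool_use",
+        "text": "",
+        "tool_calls": [{"name": "<tool name>", "id": "<tool_use id>",   # placeholders
+                        "args": {"script": "Get-ChildItem"}}],
+        "usage": {"input_tokens": 1200, "output_tokens": 80,
+                  "cache_read_input_tokens": 0, "cache_creation_input_tokens": 0},
+    },
+}
+
+# _render_comms_entry picks the renderer by kind, falling back to the generic one.
+renderer = _KIND_RENDERERS.get(entry["kind"], _render_payload_generic)
+renderer("comms_scroll", entry["payload"])
+```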
+ +Status line and colour legend live at the top of the Comms History window (above the scrollable child window `comms_scroll`). + +**Session Logger (session_logger.py):** +- `open_session()` called once at GUI startup; creates `logs/` and `scripts/generated/` directories; opens `logs/comms_.log` and `logs/toolcalls_.log` (line-buffered) +- `log_comms(entry)` appends each comms entry as a JSON-L line to the comms log; called from `App._on_comms_entry` (background thread); thread-safe via GIL + line buffering +- `log_tool_call(script, result, script_path)` appends a markdown-formatted tool-call record to the toolcalls log and writes the script to `scripts/generated/_.ps1`; uses a `threading.Lock` for the sequence counter +- `close_session()` flushes and closes both file handles; called just before `dpg.destroy_context()` +- `_on_tool_log` in `App` is wired to `ai_client.tool_log_callback` and calls `session_logger.log_tool_call` + +**Anthropic prompt caching:** +- System prompt sent as an array with `cache_control: ephemeral` on the text block +- Last tool in `_ANTHROPIC_TOOLS` has `cache_control: ephemeral`; system + tools prefix is cached together after the first request +- First user message content[0] is the `` block with `cache_control: ephemeral`; content[1] is the user question without cache control +- Cache stats (creation tokens, read tokens) are surfaced in the comms log usage dict and displayed in the Comms History panel **Data flow:** 1. GUI edits are held in `App` state lists (`self.files`, `self.screenshots`, `self.history`) and dpg widget values @@ -84,9 +119,14 @@ - `dialog.wait()` blocks the background thread on a `threading.Event` until the user acts - `_pending_comms` (guarded by a separate `threading.Lock`) is populated by `_on_comms_entry` (background thread) and drained by `_flush_pending_comms()` each render frame (main thread) +**Provider error handling:** +- `ProviderError(kind, provider, original)` wraps upstream API exceptions with a classified `kind`: quota, rate_limit, auth, balance, network, unknown +- `_classify_anthropic_error` and `_classify_gemini_error` inspect exception types and status codes/message bodies to assign the kind +- `ui_message()` returns a human-readable label for display in the Response panel + **Known extension points:** - Add more providers by adding a section to `credentials.toml`, a `_list_*` and `_send_*` function in `ai_client.py`, and the provider name to the `PROVIDERS` list in `gui.py` - System prompt support could be added as a field in `config.toml` and passed in `ai_client.send()` - Discussion history excerpts could be individually toggleable for inclusion in the generated md - `MAX_TOOL_ROUNDS` in `ai_client.py` caps agentic loops at 5 rounds; adjustable -- `COMMS_CLAMP_CHARS` in `gui.py` controls the character threshold for clamping heavy payload fields +- `COMMS_CLAMP_CHARS` in `gui.py` controls the character threshold for clamping heavy payload fields in the Comms History panel diff --git a/config.toml b/config.toml index 34b7019..f884e79 100644 --- a/config.toml +++ b/config.toml @@ -14,9 +14,6 @@ paths = [ "MainContext.md", "C:/projects/manual_slop/shell_runner.py", "C:/projects/manual_slop/session_logger.py", - "C:/projects/manual_slop/docs/anthropic_api_ref_create_message.md", - "C:/projects/manual_slop/docs/anthropic_api_ref_create_message_beta.md", - "C:/projects/manual_slop/docs/anthropic_prompt_caching.md", ] [screenshots] @@ -25,12 +22,12 @@ paths = [] [discussion] history = [ - "[User]\nMake sure we are optimially 
using the anthropic api for this. \nI want to fully utilize caching if possible and just reduce overall loss of limits. \nAdd a log for comms history thats saved in ./logs and a the same for tool calls (scripts in ./scripts/generated, and their call equence in ./logs) these logs are closed in the next runtime of this gui program. \nOn open they amke new file buffers, each file buffer has a timestamp of when it was first made.", - "[Claude]\nNow finish the gui portion: in gui.py or anything left (last made seesion_logger.py it seems). Caching strategy also looks to be updated in ai_client.py but not complete had to be reverted.", - "[User]\ncontinue. I had to revert ai_client.py becuase it was causing reponse code failures from the vendor. I added docs for anthropic into your context as well. For lasts version before revert of ai_client.py see:\n\n```py\n[Omitted already updated in your context as you fixed it last]\n```", - "[Claude]\nNow update `gui.py` to wire up `session_logger` and `tool_log_callback`:", - "[Claude]\nThe PowerShell `-replace` with a multi-line string replacement needs to be done differently. I'll write the complete updated `gui.py` directly with all the changes applied. The changes are targeted and well-understood:", - "[Claude]\nGood. Now let's verify all hooks are correctly in place and clean up the temp script:\n```", + "[User]\nAdd rich visualization for the comms history's json, we shouldn't see the raw text unelss we don't have the json object type detected. Also update MainContext.md with latest state of this codebase.", + "[Claude]\nThe file uses CRLF. I'll use a Python script to do the targeted replacement reliably:", + "[API]\nERROR: Error code: 400 - {'type': 'error', 'error': {'type': 'invalid_request_error', 'message': 'prompt is too long: 201675 tokens > 200000 maximum'}, 'request_id': 'req_011CYMuPtmZC7akLfcaT7b7r'}", + "[User]\nA _patch_gui.py was left over as an artifact. You were in the middle of making edits before a error reponse occured.\nTo save space on the message I removed the prompt caching documentation from the context.\nWe may have to change how messages are sent or discussion so that its chunked appropriately.", + "[Claude]\n`gui.py` is complete with all the rich rendering code. Now let me delete the artifact and update `MainContext.md`:", + "[User]\nartifact deleted MainContext.md has not been updated though. 
(Removed anthropic documetantion from context, if you want any docs back you can look up docs in ./docs)", ] [ai] diff --git a/dpg_layout.ini b/dpg_layout.ini index c943ed2..92a0726 100644 --- a/dpg_layout.ini +++ b/dpg_layout.ini @@ -10,15 +10,15 @@ Collapsed=0 [Window][###22] Pos=0,0 -Size=364,652 +Size=549,652 Collapsed=0 DockId=0x00000005,0 [Window][###30] Pos=0,654 -Size=364,766 +Size=549,696 Collapsed=0 -DockId=0x0000001D,0 +DockId=0x00000021,0 [Window][###66] Pos=0,1491 @@ -42,11 +42,11 @@ DockId=0x0000000D,0 Pos=378,494 Size=829,1643 Collapsed=0 -DockId=0x00000018,0 +DockId=0x00000023,0 [Window][###103] -Pos=2004,1330 -Size=1836,807 +Pos=1613,1330 +Size=2227,807 Collapsed=0 DockId=0x0000001C,0 @@ -103,7 +103,7 @@ DockId=0x00000014,0 Pos=2531,0 Size=1309,1690 Collapsed=0 -DockId=0x00000018,0 +DockId=0x00000023,0 [Window][###106] Pos=366,427 @@ -112,10 +112,10 @@ Collapsed=0 DockId=0x00000012,0 [Window][###100] -Pos=366,427 -Size=847,1710 +Pos=2622,0 +Size=1218,1412 Collapsed=0 -DockId=0x00000012,1 +DockId=0x00000024,0 [Window][###133] Pos=1306,785 @@ -143,40 +143,40 @@ Size=700,440 Collapsed=0 [Window][###118] -Pos=366,0 -Size=650,2137 +Pos=551,0 +Size=1060,2137 Collapsed=0 -DockId=0x00000018,0 +DockId=0x00000023,0 [Window][###78] Pos=0,1422 -Size=364,715 +Size=549,715 Collapsed=0 DockId=0x0000001E,0 [Window][###88] -Pos=1018,0 -Size=984,2137 +Pos=1613,0 +Size=823,1328 Collapsed=0 -DockId=0x00000019,0 +DockId=0x00000015,0 [Window][###95] -Pos=366,0 -Size=650,2137 +Pos=551,0 +Size=1060,2137 Collapsed=0 -DockId=0x00000018,1 +DockId=0x00000023,1 [Window][###110] -Pos=2004,0 -Size=1836,1328 +Pos=2438,0 +Size=1402,1328 Collapsed=0 -DockId=0x0000001B,0 +DockId=0x00000016,0 [Window][###112] -Pos=366,0 -Size=650,2137 +Pos=551,0 +Size=1060,2137 Collapsed=0 -DockId=0x00000018,2 +DockId=0x00000023,2 [Window][###145] Pos=1578,868 @@ -199,38 +199,125 @@ Size=700,440 Collapsed=0 [Window][###513] +Pos=1310,780 +Size=1166,1126 +Collapsed=0 + +[Window][###631] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###115] +Pos=551,0 +Size=847,2137 +Collapsed=0 +DockId=0x00000012,0 + +[Window][###75] +Pos=0,1352 +Size=549,785 +Collapsed=0 +DockId=0x00000022,0 + +[Window][###85] +Pos=1400,0 +Size=1220,1412 +Collapsed=0 +DockId=0x00000023,0 + +[Window][###92] +Pos=551,0 +Size=847,2137 +Collapsed=0 +DockId=0x00000012,2 + +[Window][###107] +Pos=1400,1414 +Size=2440,723 +Collapsed=0 +DockId=0x0000001A,0 + +[Window][###109] +Pos=551,0 +Size=847,2137 +Collapsed=0 +DockId=0x00000012,1 + +[Window][###142] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###244] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###349] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###463] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###580] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###199] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###301] +Pos=1578,868 +Size=700,440 +Collapsed=0 + +[Window][###409] Pos=1578,868 Size=700,440 Collapsed=0 [Docking][Data] -DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,2137 Split=X Selected=0x40484D8F - DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=364,1161 Split=Y Selected=0xEE087978 - DockNode ID=0x00000005 Parent=0x00000003 SizeRef=235,354 Selected=0xEE087978 - DockNode ID=0x00000006 Parent=0x00000003 SizeRef=235,805 Split=Y Selected=0x5F94F9BD - DockNode ID=0x00000009 Parent=0x00000006 SizeRef=235,453 Split=Y Selected=0x5F94F9BD - DockNode ID=0x0000001D Parent=0x00000009 SizeRef=364,766 Selected=0x5F94F9BD - DockNode ID=0x0000001E 
Parent=0x00000009 SizeRef=364,715 Selected=0xF475F06A - DockNode ID=0x0000000A Parent=0x00000006 SizeRef=235,350 Selected=0x80199DAE - DockNode ID=0x00000004 Parent=0x7C6B3D9B SizeRef=3474,1161 Split=X - DockNode ID=0x00000001 Parent=0x00000004 SizeRef=650,1161 Split=Y Selected=0x40484D8F - DockNode ID=0x00000007 Parent=0x00000001 SizeRef=595,492 Selected=0xBA13FCDE - DockNode ID=0x00000008 Parent=0x00000001 SizeRef=595,1643 Split=X Selected=0x40484D8F - DockNode ID=0x0000000F Parent=0x00000008 SizeRef=847,2137 Split=Y Selected=0x07E8375F - DockNode ID=0x00000011 Parent=0x0000000F SizeRef=835,425 Selected=0x72F373AE - DockNode ID=0x00000012 Parent=0x0000000F SizeRef=835,1710 Selected=0x07E8375F - DockNode ID=0x00000010 Parent=0x00000008 SizeRef=2625,2137 Split=Y Selected=0xCE7F911A - DockNode ID=0x00000013 Parent=0x00000010 SizeRef=1967,1690 Split=X Selected=0xCE7F911A - DockNode ID=0x00000017 Parent=0x00000013 SizeRef=1314,1749 Selected=0x4B454E0B - DockNode ID=0x00000018 Parent=0x00000013 SizeRef=1309,1749 CentralNode=1 Selected=0x73845A9B - DockNode ID=0x00000014 Parent=0x00000010 SizeRef=1967,445 Selected=0xC36FF36B - DockNode ID=0x00000002 Parent=0x00000004 SizeRef=2822,1161 Split=X Selected=0x714F2F7B - DockNode ID=0x0000000B Parent=0x00000002 SizeRef=968,1161 Selected=0xC915D9DA - DockNode ID=0x0000000C Parent=0x00000002 SizeRef=1661,1161 Split=Y Selected=0x714F2F7B - DockNode ID=0x0000000D Parent=0x0000000C SizeRef=396,342 Selected=0x714F2F7B - DockNode ID=0x0000000E Parent=0x0000000C SizeRef=396,817 Split=X Selected=0xCF08B82F - DockNode ID=0x00000019 Parent=0x0000000E SizeRef=984,2137 Selected=0x052342BF - DockNode ID=0x0000001A Parent=0x0000000E SizeRef=1836,2137 Split=Y Selected=0xCF08B82F - DockNode ID=0x0000001B Parent=0x0000001A SizeRef=2104,1328 Selected=0x43F4115A - DockNode ID=0x0000001C Parent=0x0000001A SizeRef=2104,807 Selected=0xCF08B82F +DockSpace ID=0x7C6B3D9B Window=0xA87D555D Pos=0,0 Size=3840,2137 Split=X Selected=0x40484D8F + DockNode ID=0x00000003 Parent=0x7C6B3D9B SizeRef=549,1161 Split=Y Selected=0xEE087978 + DockNode ID=0x00000005 Parent=0x00000003 SizeRef=235,354 Selected=0xEE087978 + DockNode ID=0x00000006 Parent=0x00000003 SizeRef=235,805 Split=Y Selected=0x5F94F9BD + DockNode ID=0x00000009 Parent=0x00000006 SizeRef=235,453 Split=Y Selected=0x5F94F9BD + DockNode ID=0x0000001D Parent=0x00000009 SizeRef=364,766 Split=Y Selected=0x5F94F9BD + DockNode ID=0x00000021 Parent=0x0000001D SizeRef=549,696 Selected=0x5F94F9BD + DockNode ID=0x00000022 Parent=0x0000001D SizeRef=549,785 Selected=0x0CE534DB + DockNode ID=0x0000001E Parent=0x00000009 SizeRef=364,715 Selected=0xF475F06A + DockNode ID=0x0000000A Parent=0x00000006 SizeRef=235,350 Selected=0x80199DAE + DockNode ID=0x00000004 Parent=0x7C6B3D9B SizeRef=3289,1161 Split=X + DockNode ID=0x00000001 Parent=0x00000004 SizeRef=1060,1161 Split=Y Selected=0x40484D8F + DockNode ID=0x00000007 Parent=0x00000001 SizeRef=595,492 Selected=0xBA13FCDE + DockNode ID=0x00000008 Parent=0x00000001 SizeRef=595,1643 Split=X Selected=0x40484D8F + DockNode ID=0x0000000F Parent=0x00000008 SizeRef=847,2137 Split=Y Selected=0x07E8375F + DockNode ID=0x00000011 Parent=0x0000000F SizeRef=835,425 Selected=0x72F373AE + DockNode ID=0x00000012 Parent=0x0000000F SizeRef=835,1710 Selected=0x8B149E2A + DockNode ID=0x00000010 Parent=0x00000008 SizeRef=2625,2137 Split=Y Selected=0xCE7F911A + DockNode ID=0x00000013 Parent=0x00000010 SizeRef=1967,1690 Split=X Selected=0xCE7F911A + DockNode ID=0x00000017 Parent=0x00000013 SizeRef=1314,1749 
Selected=0x4B454E0B + DockNode ID=0x00000018 Parent=0x00000013 SizeRef=1309,1749 Split=Y Selected=0x88A8C2FF + DockNode ID=0x00000019 Parent=0x00000018 SizeRef=2440,1412 Split=X Selected=0x88A8C2FF + DockNode ID=0x00000023 Parent=0x00000019 SizeRef=1220,737 CentralNode=1 Selected=0xFDB3860E + DockNode ID=0x00000024 Parent=0x00000019 SizeRef=1218,737 Selected=0x88A8C2FF + DockNode ID=0x0000001A Parent=0x00000018 SizeRef=2440,723 Selected=0x3A881EEF + DockNode ID=0x00000014 Parent=0x00000010 SizeRef=1967,445 Selected=0xC36FF36B + DockNode ID=0x00000002 Parent=0x00000004 SizeRef=2227,1161 Split=X Selected=0x714F2F7B + DockNode ID=0x0000000B Parent=0x00000002 SizeRef=968,1161 Selected=0xC915D9DA + DockNode ID=0x0000000C Parent=0x00000002 SizeRef=1661,1161 Split=Y Selected=0x714F2F7B + DockNode ID=0x0000000D Parent=0x0000000C SizeRef=396,342 Selected=0x714F2F7B + DockNode ID=0x0000000E Parent=0x0000000C SizeRef=396,817 Split=Y Selected=0xCF08B82F + DockNode ID=0x0000001B Parent=0x0000000E SizeRef=2104,1328 Split=X Selected=0x43F4115A + DockNode ID=0x00000015 Parent=0x0000001B SizeRef=823,1328 Selected=0x052342BF + DockNode ID=0x00000016 Parent=0x0000001B SizeRef=1402,1328 Selected=0x43F4115A + DockNode ID=0x0000001C Parent=0x0000000E SizeRef=2104,807 Selected=0xCF08B82F diff --git a/gui.py b/gui.py index 35333ad..a739deb 100644 --- a/gui.py +++ b/gui.py @@ -54,13 +54,19 @@ _KIND_COLORS = { _HEAVY_KEYS = {"message", "text", "script", "output", "content"} +# Label colours used in rich rendering +_LABEL_COLOR = (180, 180, 180) +_VALUE_COLOR = (220, 220, 220) +_KEY_COLOR = (140, 200, 255) # dict key / call index +_NUM_COLOR = (180, 255, 180) # numbers / token counts +_SUBHDR_COLOR = (220, 200, 120) # sub-section header -def _add_comms_field(parent: str, label: str, value: str, heavy: bool): - """Add a labelled field inside parent. 
Heavy fields get a clamped input_text box.""" + +def _add_text_field(parent: str, label: str, value: str): + """Render a labelled text value; long values get a scrollable box.""" with dpg.group(horizontal=False, parent=parent): - dpg.add_text(f"{label}:", color=(200, 200, 200)) - if heavy and len(value) > COMMS_CLAMP_CHARS: - # Show clamped scrollable box + dpg.add_text(f"{label}:", color=_LABEL_COLOR) + if len(value) > COMMS_CLAMP_CHARS: dpg.add_input_text( default_value=value, multiline=True, @@ -69,7 +75,116 @@ def _add_comms_field(parent: str, label: str, value: str, heavy: bool): height=80, ) else: - dpg.add_text(value if value else "(empty)", wrap=460) + dpg.add_text(value if value else "(empty)", wrap=460, color=_VALUE_COLOR) + + +def _add_kv_row(parent: str, key: str, val, val_color=None): + """Single key: value row, horizontally laid out.""" + vc = val_color or _VALUE_COLOR + with dpg.group(horizontal=True, parent=parent): + dpg.add_text(f"{key}:", color=_LABEL_COLOR) + dpg.add_text(str(val), color=vc) + + +def _render_usage(parent: str, usage: dict): + """Render Anthropic usage dict as a compact token table.""" + if not usage: + return + dpg.add_text("usage:", color=_SUBHDR_COLOR, parent=parent) + order = [ + "input_tokens", + "cache_read_input_tokens", + "cache_creation_input_tokens", + "output_tokens", + ] + shown: set = set() + for key in order: + if key in usage: + shown.add(key) + _add_kv_row(parent, f" {key.replace('_', ' ')}", usage[key], _NUM_COLOR) + for key, val in usage.items(): + if key not in shown: + _add_kv_row(parent, f" {key}", val, _NUM_COLOR) + + +def _render_tool_calls_list(parent: str, tool_calls: list): + """Render a list of tool_call dicts inline.""" + if not tool_calls: + dpg.add_text(" (none)", color=_VALUE_COLOR, parent=parent) + return + for i, tc in enumerate(tool_calls): + dpg.add_text(f" call[{i}] {tc.get('name', '?')}", color=_KEY_COLOR, parent=parent) + if "id" in tc: + _add_kv_row(parent, " id", tc["id"]) + args = tc.get("args") or tc.get("input") or {} + if isinstance(args, dict): + for ak, av in args.items(): + _add_text_field(parent, f" {ak}", str(av)) + elif args: + _add_text_field(parent, " args", str(args)) + + +# ---- kind-specific renderers ------------------------------------------------ + +def _render_payload_request(parent: str, payload: dict): + _add_text_field(parent, "message", payload.get("message", "")) + + +def _render_payload_response(parent: str, payload: dict): + _add_kv_row(parent, "round", payload.get("round", "")) + _add_kv_row(parent, "stop_reason", payload.get("stop_reason", ""), (255, 200, 120)) + text = payload.get("text", "") + if text: + _add_text_field(parent, "text", text) + dpg.add_text("tool_calls:", color=_LABEL_COLOR, parent=parent) + _render_tool_calls_list(parent, payload.get("tool_calls", [])) + usage = payload.get("usage") + if usage: + _render_usage(parent, usage) + + +def _render_payload_tool_call(parent: str, payload: dict): + _add_kv_row(parent, "name", payload.get("name", "")) + if "id" in payload: + _add_kv_row(parent, "id", payload["id"]) + _add_text_field(parent, "script", payload.get("script", "")) + + +def _render_payload_tool_result(parent: str, payload: dict): + _add_kv_row(parent, "name", payload.get("name", "")) + if "id" in payload: + _add_kv_row(parent, "id", payload["id"]) + _add_text_field(parent, "output", payload.get("output", "")) + + +def _render_payload_tool_result_send(parent: str, payload: dict): + for i, r in enumerate(payload.get("results", [])): + dpg.add_text(f"result[{i}]", 
color=_KEY_COLOR, parent=parent) + _add_kv_row(parent, " tool_use_id", r.get("tool_use_id", "")) + _add_text_field(parent, " content", str(r.get("content", ""))) + + +def _render_payload_generic(parent: str, payload: dict): + """Fallback: render any unknown payload kind as labelled fields.""" + import json + for key, val in payload.items(): + if isinstance(val, (dict, list)): + val_str = json.dumps(val, ensure_ascii=False, indent=2) + else: + val_str = str(val) + if key in _HEAVY_KEYS: + _add_text_field(parent, key, val_str) + else: + _add_kv_row(parent, key, val_str) + + +_KIND_RENDERERS = { + "request": _render_payload_request, + "response": _render_payload_response, + "tool_call": _render_payload_tool_call, + "tool_result": _render_payload_tool_result, + "tool_result_send": _render_payload_tool_result_send, +} def _render_comms_entry(parent: str, entry: dict, idx: int): @@ -92,15 +207,9 @@ def _render_comms_entry(parent: str, entry: dict, idx: int): dpg.add_text(kind, color=kind_color) dpg.add_text(f"{provider}/{model}", color=(180, 180, 180)) - # Payload fields - for key, val in payload.items(): - is_heavy = key in _HEAVY_KEYS - if isinstance(val, (dict, list)): - import json - val_str = json.dumps(val, ensure_ascii=False, indent=2) - else: - val_str = str(val) - _add_comms_field(parent, key, val_str, is_heavy) + # Payload - use rich renderer if available, else generic fallback + renderer = _KIND_RENDERERS.get(kind, _render_payload_generic) + renderer(parent, payload) dpg.add_separator()
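
For orientation, a minimal sketch of how these renderers are expected to be driven from the App side, per the threading notes in MainContext.md: the background thread only queues entries, and the render loop drains them into the Comms History panel. The lock name `_pending_lock`, the `_comms_index` counter, and the `comms_scroll` parent tag are assumptions for illustration; both functions are App methods in gui.py.

    # Called from ai_client's worker thread via comms_log_callback: queue only, no DPG calls.
    def _on_comms_entry(self, entry):
        with self._pending_lock:
            self._pending_comms.append(entry)

    # Called once per render frame on the main thread: drain the queue and build widgets.
    def _flush_pending_comms(self):
        with self._pending_lock:
            pending, self._pending_comms = self._pending_comms, []
        for entry in pending:
            self._comms_index += 1
            _render_comms_entry("comms_scroll", entry, self._comms_index)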