From 3d0c40de45731e44541d6fde5b84929aa63dc94c Mon Sep 17 00:00:00 2001
From: Ed_
Date: Sat, 14 Mar 2026 09:19:47 -0400
Subject: [PATCH] fix(gui): parse thinking traces out of response text before
 rendering in history and comms panels

---
 src/app_controller.py | 17 +++++++++++------
 src/gui_2.py          | 17 +++++++++++++++--
 2 files changed, 26 insertions(+), 8 deletions(-)

diff --git a/src/app_controller.py b/src/app_controller.py
index 9a48ccf..5153993 100644
--- a/src/app_controller.py
+++ b/src/app_controller.py
@@ -25,6 +25,7 @@ from src import project_manager
 from src import performance_monitor
 from src import models
 from src import presets
+from src import thinking_parser
 from src.file_cache import ASTParser
 from src import ai_client
 from src import shell_runner
@@ -613,13 +614,17 @@ class AppController:
             # ONLY add to history when turn is complete
             if self.ui_auto_add_history and not stream_id and not is_streaming:
                 role = payload.get("role", "AI")
+                segments, parsed_response = thinking_parser.parse_thinking_trace(self.ai_response)
+                entry = {
+                    "role": role,
+                    "content": parsed_response,
+                    "collapsed": True,
+                    "ts": project_manager.now_ts()
+                }
+                if segments:
+                    entry["thinking_segments"] = [{"content": s.content, "marker": s.marker} for s in segments]
                 with self._pending_history_adds_lock:
-                    self._pending_history_adds.append({
-                        "role": role,
-                        "content": self.ai_response,
-                        "collapsed": True,
-                        "ts": project_manager.now_ts()
-                    })
+                    self._pending_history_adds.append(entry)
         elif action in ("mma_stream", "mma_stream_append"):
             # Some events might have these at top level, some in a 'payload' dict
             stream_id = task.get("stream_id") or task.get("payload", {}).get("stream_id")
diff --git a/src/gui_2.py b/src/gui_2.py
index 7261c7a..c01e964 100644
--- a/src/gui_2.py
+++ b/src/gui_2.py
@@ -2757,7 +2757,13 @@ def hello():
         imgui.begin_child("response_scroll_area", imgui.ImVec2(0, -40), True)
         is_nerv = theme.is_nerv_active()
         if is_nerv: imgui.push_style_color(imgui.Col_.text, vec4(80, 255, 80))
-        markdown_helper.render(self.ai_response, context_id="response")
+
+        segments, parsed_response = thinking_parser.parse_thinking_trace(self.ai_response)
+        if segments:
+            self._render_thinking_trace([{"content": s.content, "marker": s.marker} for s in segments], 9999)
+
+        markdown_helper.render(parsed_response, context_id="response")
+
         if is_nerv: imgui.pop_style_color()
         imgui.end_child()
 
@@ -2909,7 +2915,14 @@
                 r = payload.get("round", 0)
                 sr = payload.get("stop_reason", "STOP")
                 imgui.text_colored(C_LBL, f"round: {r} stop_reason: {sr}")
-                self._render_heavy_text("text", payload.get("text", ""), idx_str)
+
+                text_content = payload.get("text", "")
+                segments, parsed_response = thinking_parser.parse_thinking_trace(text_content)
+                if segments:
+                    self._render_thinking_trace([{"content": s.content, "marker": s.marker} for s in segments], i)
+                if parsed_response:
+                    self._render_heavy_text("text", parsed_response, idx_str)
+
                 tcs = payload.get("tool_calls", [])
                 if tcs:
                     self._render_heavy_text("tool_calls", json.dumps(tcs, indent=1), idx_str)