Compare commits

..

7 Commits

47 changed files with 817 additions and 256 deletions

View File

@@ -10,7 +10,7 @@ A high-density GUI orchestrator for local LLM-driven coding sessions. Manual Slo
**Providers**: Gemini API, Anthropic API, DeepSeek, Gemini CLI (headless), MiniMax **Providers**: Gemini API, Anthropic API, DeepSeek, Gemini CLI (headless), MiniMax
**Platform**: Windows (PowerShell) — single developer, local use **Platform**: Windows (PowerShell) — single developer, local use
![img](./gallery/python_2026-03-07_14-32-50.png) ![img](./gallery/python_2026-03-11_00-37-21.png)
--- ---

View File

@@ -1,11 +1,11 @@
[ai] [ai]
provider = "gemini_cli" provider = "minimax"
model = "gemini-2.5-flash-lite" model = "MiniMax-M2.5"
temperature = 0.30000001192092896 temperature = 0.0
max_tokens = 32000 max_tokens = 32000
history_trunc_limit = 900000 history_trunc_limit = 900000
active_preset = "TestGlobal" active_preset = "Default"
system_prompt = "Overridden Prompt" system_prompt = ""
[projects] [projects]
paths = [ paths = [
@@ -25,10 +25,10 @@ separate_tool_calls_panel = false
bg_shader_enabled = true bg_shader_enabled = true
crt_filter_enabled = false crt_filter_enabled = false
separate_task_dag = false separate_task_dag = false
separate_usage_analytics = true separate_usage_analytics = false
separate_tier1 = false separate_tier1 = false
separate_tier2 = false separate_tier2 = false
separate_tier3 = true separate_tier3 = false
separate_tier4 = false separate_tier4 = false
[gui.show_windows] [gui.show_windows]
@@ -44,7 +44,7 @@ separate_tier4 = false
"Tier 4" = false "Tier 4" = false
"Tier 1: Strategy" = false "Tier 1: Strategy" = false
"Tier 2: Tech Lead" = false "Tier 2: Tech Lead" = false
"Tier 3: Workers" = true "Tier 3: Workers" = false
"Tier 4: QA" = false "Tier 4: QA" = false
"Discussion Hub" = true "Discussion Hub" = true
"Operations Hub" = true "Operations Hub" = true
@@ -59,9 +59,9 @@ Diagnostics = false
palette = "Nord Dark" palette = "Nord Dark"
font_path = "C:/projects/manual_slop/assets/fonts/Inter-Regular.ttf" font_path = "C:/projects/manual_slop/assets/fonts/Inter-Regular.ttf"
font_size = 14.0 font_size = 14.0
scale = 1.0 scale = 1.0099999904632568
transparency = 0.5099999904632568 transparency = 1.0
child_transparency = 0.699999988079071 child_transparency = 1.0
[mma] [mma]
max_workers = 4 max_workers = 4

Binary file not shown.

After

Width:  |  Height:  |  Size: 607 KiB

View File

@@ -73,8 +73,8 @@ Collapsed=0
DockId=0xAFC85805,2 DockId=0xAFC85805,2
[Window][Theme] [Window][Theme]
Pos=0,1016 Pos=0,1670
Size=623,401 Size=840,467
Collapsed=0 Collapsed=0
DockId=0x00000002,2 DockId=0x00000002,2
@@ -84,14 +84,14 @@ Size=900,700
Collapsed=0 Collapsed=0
[Window][Diagnostics] [Window][Diagnostics]
Pos=2833,28 Pos=2641,22
Size=1007,2109 Size=1199,2115
Collapsed=0 Collapsed=0
DockId=0x0000000C,2 DockId=0x0000000C,2
[Window][Context Hub] [Window][Context Hub]
Pos=0,1016 Pos=0,1670
Size=623,401 Size=840,467
Collapsed=0 Collapsed=0
DockId=0x00000002,1 DockId=0x00000002,1
@@ -102,26 +102,26 @@ Collapsed=0
DockId=0x0000000D,0 DockId=0x0000000D,0
[Window][Discussion Hub] [Window][Discussion Hub]
Pos=1296,22 Pos=1668,22
Size=659,1395 Size=971,2115
Collapsed=0 Collapsed=0
DockId=0x00000013,0 DockId=0x00000013,0
[Window][Operations Hub] [Window][Operations Hub]
Pos=625,22 Pos=842,22
Size=669,1395 Size=824,2115
Collapsed=0 Collapsed=0
DockId=0x00000012,0 DockId=0x00000012,0
[Window][Files & Media] [Window][Files & Media]
Pos=0,1016 Pos=0,1670
Size=623,401 Size=840,467
Collapsed=0 Collapsed=0
DockId=0x00000002,0 DockId=0x00000002,0
[Window][AI Settings] [Window][AI Settings]
Pos=0,22 Pos=0,22
Size=623,992 Size=840,1646
Collapsed=0 Collapsed=0
DockId=0x00000001,0 DockId=0x00000001,0
@@ -131,14 +131,14 @@ Size=416,325
Collapsed=0 Collapsed=0
[Window][MMA Dashboard] [Window][MMA Dashboard]
Pos=1957,22 Pos=2641,22
Size=603,1395 Size=1199,2115
Collapsed=0 Collapsed=0
DockId=0x0000000C,0 DockId=0x0000000C,0
[Window][Log Management] [Window][Log Management]
Pos=1957,22 Pos=2641,22
Size=603,1395 Size=1199,2115
Collapsed=0 Collapsed=0
DockId=0x0000000C,1 DockId=0x0000000C,1
@@ -166,8 +166,8 @@ Collapsed=0
DockId=0x0000000F,0 DockId=0x0000000F,0
[Window][Tier 3: Workers] [Window][Tier 3: Workers]
Pos=2905,1238 Pos=2641,1235
Size=935,899 Size=1199,902
Collapsed=0 Collapsed=0
DockId=0x0000000F,0 DockId=0x0000000F,0
@@ -322,12 +322,12 @@ Size=420,966
Collapsed=0 Collapsed=0
[Window][Preset Manager] [Window][Preset Manager]
Pos=403,396 Pos=1693,826
Size=956,958 Size=933,839
Collapsed=0 Collapsed=0
[Window][Task DAG] [Window][Task DAG]
Pos=1700,1199 Pos=1661,1181
Size=1079,662 Size=1079,662
Collapsed=0 Collapsed=0
@@ -337,13 +337,13 @@ Size=275,375
Collapsed=0 Collapsed=0
[Window][Tool Preset Manager] [Window][Tool Preset Manager]
Pos=192,90 Pos=1014,522
Size=1066,1324 Size=1155,1011
Collapsed=0 Collapsed=0
[Window][Persona Editor] [Window][Persona Editor]
Pos=956,447 Pos=995,597
Size=549,447 Size=1868,1434
Collapsed=0 Collapsed=0
[Table][0xFB6E3870,4] [Table][0xFB6E3870,4]
@@ -377,11 +377,11 @@ Column 3 Width=20
Column 4 Weight=1.0000 Column 4 Weight=1.0000
[Table][0x2A6000B6,4] [Table][0x2A6000B6,4]
RefScale=17 RefScale=14
Column 0 Width=51 Column 0 Width=42
Column 1 Width=76 Column 1 Width=62
Column 2 Weight=1.0000 Column 2 Weight=1.0000
Column 3 Width=128 Column 3 Width=105
[Table][0x8BCC69C7,6] [Table][0x8BCC69C7,6]
RefScale=13 RefScale=13
@@ -393,18 +393,18 @@ Column 4 Weight=1.0000
Column 5 Width=50 Column 5 Width=50
[Table][0x3751446B,4] [Table][0x3751446B,4]
RefScale=20 RefScale=17
Column 0 Width=60 Column 0 Width=51
Column 1 Width=91 Column 1 Width=77
Column 2 Weight=1.0000 Column 2 Weight=1.0000
Column 3 Width=151 Column 3 Width=128
[Table][0x2C515046,4] [Table][0x2C515046,4]
RefScale=20 RefScale=14
Column 0 Width=63 Column 0 Width=43
Column 1 Weight=1.0000 Column 1 Weight=1.0000
Column 2 Width=152 Column 2 Width=106
Column 3 Width=60 Column 3 Width=42
[Table][0xD99F45C5,4] [Table][0xD99F45C5,4]
Column 0 Sort=0v Column 0 Sort=0v
@@ -425,28 +425,28 @@ Column 1 Width=100
Column 2 Weight=1.0000 Column 2 Weight=1.0000
[Table][0xA02D8C87,3] [Table][0xA02D8C87,3]
RefScale=20 RefScale=14
Column 0 Width=227 Column 0 Width=158
Column 1 Width=150 Column 1 Width=105
Column 2 Weight=1.0000 Column 2 Weight=1.0000
[Docking][Data] [Docking][Data]
DockNode ID=0x00000008 Pos=3125,170 Size=593,1157 Split=Y DockNode ID=0x00000008 Pos=3125,170 Size=593,1157 Split=Y
DockNode ID=0x00000009 Parent=0x00000008 SizeRef=1029,147 Selected=0x0469CA7A DockNode ID=0x00000009 Parent=0x00000008 SizeRef=1029,147 Selected=0x0469CA7A
DockNode ID=0x0000000A Parent=0x00000008 SizeRef=1029,145 Selected=0xDF822E02 DockNode ID=0x0000000A Parent=0x00000008 SizeRef=1029,145 Selected=0xDF822E02
DockSpace ID=0xAFC85805 Window=0x079D3A04 Pos=0,22 Size=2560,1395 Split=X DockSpace ID=0xAFC85805 Window=0x079D3A04 Pos=0,22 Size=3840,2115 Split=X
DockNode ID=0x00000003 Parent=0xAFC85805 SizeRef=1955,1183 Split=X DockNode ID=0x00000003 Parent=0xAFC85805 SizeRef=2639,1183 Split=X
DockNode ID=0x0000000B Parent=0x00000003 SizeRef=404,1186 Split=X Selected=0xF4139CA2 DockNode ID=0x0000000B Parent=0x00000003 SizeRef=404,1186 Split=X Selected=0xF4139CA2
DockNode ID=0x00000007 Parent=0x0000000B SizeRef=623,858 Split=Y Selected=0x8CA2375C DockNode ID=0x00000007 Parent=0x0000000B SizeRef=840,858 Split=Y Selected=0x8CA2375C
DockNode ID=0x00000001 Parent=0x00000007 SizeRef=824,989 CentralNode=1 Selected=0x7BD57D6A DockNode ID=0x00000001 Parent=0x00000007 SizeRef=824,1646 CentralNode=1 Selected=0x7BD57D6A
DockNode ID=0x00000002 Parent=0x00000007 SizeRef=824,401 Selected=0x1DCB2623 DockNode ID=0x00000002 Parent=0x00000007 SizeRef=824,467 Selected=0x1DCB2623
DockNode ID=0x0000000E Parent=0x0000000B SizeRef=1330,858 Split=X Selected=0x418C7449 DockNode ID=0x0000000E Parent=0x0000000B SizeRef=1797,858 Split=X Selected=0x418C7449
DockNode ID=0x00000012 Parent=0x0000000E SizeRef=669,402 Selected=0x418C7449 DockNode ID=0x00000012 Parent=0x0000000E SizeRef=824,402 Selected=0x418C7449
DockNode ID=0x00000013 Parent=0x0000000E SizeRef=659,402 Selected=0x6F2B5B04 DockNode ID=0x00000013 Parent=0x0000000E SizeRef=971,402 Selected=0x6F2B5B04
DockNode ID=0x0000000D Parent=0x00000003 SizeRef=435,1186 Selected=0x363E93D6 DockNode ID=0x0000000D Parent=0x00000003 SizeRef=435,1186 Selected=0x363E93D6
DockNode ID=0x00000004 Parent=0xAFC85805 SizeRef=603,1183 Split=Y Selected=0x3AEC3498 DockNode ID=0x00000004 Parent=0xAFC85805 SizeRef=1199,1183 Split=Y Selected=0x3AEC3498
DockNode ID=0x0000000C Parent=0x00000004 SizeRef=1074,1208 Selected=0x2C0206CE DockNode ID=0x0000000C Parent=0x00000004 SizeRef=1074,1208 Selected=0x3AEC3498
DockNode ID=0x0000000F Parent=0x00000004 SizeRef=1074,899 Selected=0x5CDB7A4B DockNode ID=0x0000000F Parent=0x00000004 SizeRef=1074,899 Selected=0x655BC6E9
;;;<<<Layout_655921752_Default>>>;;; ;;;<<<Layout_655921752_Default>>>;;;
;;;<<<HelloImGui_Misc>>>;;; ;;;<<<HelloImGui_Misc>>>;;;

View File

@@ -1,10 +1,19 @@
[personas.Default] [personas.Default]
system_prompt = "" system_prompt = ""
provider = "minimax" tool_preset = "Default"
bias_profile = "Balanced"
[[personas.Default.preferred_models]]
model = "MiniMax-M2.5" model = "MiniMax-M2.5"
preferred_models = [ provider = "minimax"
"MiniMax-M2.5",
]
temperature = 0.0 temperature = 0.0
top_p = 1.0 top_p = 1.0
max_output_tokens = 32000 max_output_tokens = 32000
history_trunc_limit = 900000
[[personas.Default.preferred_models]]
provider = "gemini_cli"
model = "gemini-3-flash-preview"
temperature = -1.4901161193847656e-08
max_output_tokens = 32000
history_trunc_limit = 900000

View File

@@ -34,13 +34,8 @@ def migrate():
preset = models.Preset.from_dict(name, data) preset = models.Preset.from_dict(name, data)
persona = models.Persona( persona = models.Persona(
name=name, name=name,
provider=provider, preferred_models=[{"provider": provider, "model": model}],
model=model, system_prompt=preset.system_prompt
preferred_models=[model] if model else [],
system_prompt=preset.system_prompt,
temperature=preset.temperature,
top_p=preset.top_p,
max_output_tokens=preset.max_output_tokens
) )
persona_manager.save_persona(persona, scope="global") persona_manager.save_persona(persona, scope="global")
print(f"Migrated global preset to persona: {name}") print(f"Migrated global preset to persona: {name}")
@@ -50,12 +45,13 @@ def migrate():
if active_preset and active_preset not in persona_manager.load_all(): if active_preset and active_preset not in persona_manager.load_all():
persona = models.Persona( persona = models.Persona(
name=active_preset, name=active_preset,
provider=provider, preferred_models=[{
model=model, "provider": provider,
preferred_models=[model] if model else [], "model": model,
system_prompt=ai_cfg.get("system_prompt", ""), "temperature": ai_cfg.get("temperature"),
temperature=ai_cfg.get("temperature"), "max_output_tokens": ai_cfg.get("max_tokens")
max_output_tokens=ai_cfg.get("max_tokens") }],
system_prompt=ai_cfg.get("system_prompt", "")
) )
persona_manager.save_persona(persona, scope="global") persona_manager.save_persona(persona, scope="global")
print(f"Created Initial Legacy persona from active_preset: {active_preset}") print(f"Created Initial Legacy persona from active_preset: {active_preset}")

View File

@@ -0,0 +1,252 @@
# One-shot refactoring script: rewrites src/gui_2.py in place via exact string
# replacement.  Every old_*/new_* pair below must match the target file
# byte-for-byte, otherwise str.replace() silently does nothing.
import sys
with open("src/gui_2.py", "r", encoding="utf-8") as f:
    content = f.read()
# 1. In _render_provider_panel, remove Fetch Models
old_fetch = """        imgui.text("Model")
        imgui.same_line()
        if imgui.button("Fetch Models"):
            self._fetch_models(self.current_provider)
        if imgui.begin_list_box("##models", imgui.ImVec2(-1, 120)):"""
new_fetch = """        imgui.text("Model")
        if imgui.begin_list_box("##models", imgui.ImVec2(-1, 120)):"""
# Drops the "Fetch Models" button (and its same_line spacer) ahead of the
# model list box; the list box line itself is kept as the match anchor.
content = content.replace(old_fetch, new_fetch)
# 2. Extract Persona block
# We need to find the start of 'imgui.text("Persona")' and end of 'self._editing_persona_is_new = True'
# Let's be very careful.
old_persona_block = """ imgui.text("Persona")
if not hasattr(self, 'ui_active_persona'):
self.ui_active_persona = ""
personas = getattr(self.controller, 'personas', {})
if imgui.begin_combo("##persona", self.ui_active_persona or "None"):
if imgui.selectable("None", not self.ui_active_persona)[0]:
self.ui_active_persona = ""
for pname in sorted(personas.keys()):
if imgui.selectable(pname, pname == self.ui_active_persona)[0]:
self.ui_active_persona = pname
if pname in personas:
persona = personas[pname]
self._editing_persona_name = persona.name
self._editing_persona_provider = persona.provider or ""
self._editing_persona_model = persona.model or ""
self._editing_persona_system_prompt = persona.system_prompt or ""
self._editing_persona_temperature = persona.temperature or 0.7
self._editing_persona_max_tokens = persona.max_output_tokens or 4096
self._editing_persona_tool_preset_id = persona.tool_preset or ""
self._editing_persona_bias_profile_id = persona.bias_profile or ""
import json
self._editing_persona_preferred_models = json.dumps(persona.preferred_models) if persona.preferred_models else "[]"
self._editing_persona_is_new = False
if persona.provider and persona.provider in self.controller.PROVIDERS:
self.current_provider = persona.provider
if persona.model:
self.current_model = persona.model
if persona.temperature is not None:
ai_client.temperature = persona.temperature
if persona.max_output_tokens:
ai_client.max_output_tokens = persona.max_output_tokens
if persona.system_prompt:
ai_client.system_instruction = persona.system_prompt
if persona.tool_preset:
self.ui_active_tool_preset = persona.tool_preset
ai_client.set_tool_preset(persona.tool_preset)
if persona.bias_profile:
self.ui_active_bias_profile = persona.bias_profile
ai_client.set_bias_profile(persona.bias_profile)
imgui.end_combo()
imgui.same_line()
if imgui.button("Manage Personas"):
self.show_persona_editor_window = True
if self.ui_active_persona and self.ui_active_persona in personas:
persona = personas[self.ui_active_persona]
self._editing_persona_name = persona.name
self._editing_persona_provider = persona.provider or ""
self._editing_persona_model = persona.model or ""
self._editing_persona_system_prompt = persona.system_prompt or ""
self._editing_persona_temperature = persona.temperature if persona.temperature is not None else 0.7
self._editing_persona_max_tokens = persona.max_output_tokens if persona.max_output_tokens is not None else 4096
self._editing_persona_tool_preset_id = persona.tool_preset or ""
self._editing_persona_bias_profile_id = persona.bias_profile or ""
self._editing_persona_preferred_models_list = list(persona.preferred_models) if persona.preferred_models else []
self._editing_persona_scope = self.controller.persona_manager.get_persona_scope(persona.name)
self._editing_persona_is_new = False
else:
self._editing_persona_name = ""
self._editing_persona_provider = self.current_provider
self._editing_persona_model = self.current_model
self._editing_persona_system_prompt = ""
self._editing_persona_temperature = 0.7
self._editing_persona_max_tokens = 4096
self._editing_persona_tool_preset_id = ""
self._editing_persona_bias_profile_id = ""
self._editing_persona_preferred_models_list = []
self._editing_persona_scope = "project"
self._editing_persona_is_new = True"""
# We need to extract the bias profile block as well
old_bias_block = """ imgui.text("Bias Profile")
if imgui.begin_combo("##bias", self.ui_active_bias_profile or "None"):
if imgui.selectable("None", not self.ui_active_bias_profile)[0]:
self.ui_active_bias_profile = ""
ai_client.set_bias_profile(None)
for bname in sorted(self.bias_profiles.keys()):
if imgui.selectable(bname, bname == self.ui_active_bias_profile)[0]:
self.ui_active_bias_profile = bname
ai_client.set_bias_profile(bname)
imgui.end_combo()"""
# Remove them from their original spots
content = content.replace(old_bias_block, "")
content = content.replace(old_persona_block, "")
# Insert Persona block at the top of _render_provider_panel
old_provider_start = """ def _render_provider_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel")
imgui.text("Provider")"""
new_provider_start = f""" def _render_provider_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel")
{old_persona_block}
imgui.separator()
imgui.text("Provider")"""
content = content.replace(old_provider_start, new_provider_start)
# Update _render_agent_tools_panel.
# BUG FIX: an earlier revision first replaced just the function header here
# (old_agent_tools_start -> new_agent_tools_start) and then attempted the
# whole-function replacement below.  Because old_agent_tools_func begins with
# the very same header text, the header-only replace destroyed the match for
# the full-function pattern, turning the intended rewrite into a silent no-op
# and leaving the panel half-converted (collapsing header inserted, but the
# bias block and "Manage Tools" rename never applied).  The partial replace is
# removed; the complete-function rewrite below already covers the header
# change, which is what the original in-line comments concluded as well.
# Exact copy of the current _render_agent_tools_panel, used as the match
# target so the whole function can be swapped in a single replace call.
old_agent_tools_func = """    def _render_agent_tools_panel(self) -> None:
        imgui.text_colored(C_LBL, 'Active Tool Preset')
        presets = self.controller.tool_presets
        preset_names = [""] + sorted(list(presets.keys()))
        # Gracefully handle None or missing preset
        active = getattr(self, "ui_active_tool_preset", "")
        if active is None: active = ""
        try:
            idx = preset_names.index(active)
        except ValueError:
            idx = 0
        ch, new_idx = imgui.combo("##tool_preset_select", idx, preset_names)
        if ch:
            self.ui_active_tool_preset = preset_names[new_idx]
        imgui.same_line()
        if imgui.button("Manage Presets##tools"):
            self.show_tool_preset_manager_window = True
        if imgui.is_item_hovered():
            imgui.set_tooltip("Configure tool availability and default modes.")
        imgui.dummy(imgui.ImVec2(0, 8))
        active_name = self.ui_active_tool_preset
        if active_name and active_name in presets:
            preset = presets[active_name]
            for cat_name, tools in preset.categories.items():
                if imgui.tree_node(cat_name):
                    for tool in tools:
                        if tool.weight >= 5:
                            imgui.text_colored(vec4(255, 100, 100), "[HIGH]")
                            imgui.same_line()
                        elif tool.weight == 4:
                            imgui.text_colored(vec4(255, 255, 100), "[PREF]")
                            imgui.same_line()
                        elif tool.weight == 2:
                            imgui.text_colored(vec4(255, 150, 50), "[REJECT]")
                            imgui.same_line()
                        elif tool.weight <= 1:
                            imgui.text_colored(vec4(180, 180, 180), "[LOW]")
                            imgui.same_line()
                        imgui.text(tool.name)
                        imgui.same_line(180)
                        mode = tool.approval
                        if imgui.radio_button(f"Auto##{cat_name}_{tool.name}", mode == "auto"):
                            tool.approval = "auto"
                        imgui.same_line()
                        if imgui.radio_button(f"Ask##{cat_name}_{tool.name}", mode == "ask"):
                            tool.approval = "ask"
                    imgui.tree_pop()"""
# Rewritten panel: wraps everything in a collapsing header, renames the button
# to "Manage Tools", and splices the previously removed bias block back in.
# NOTE(review): the "\n    ".join(...) below re-indents every bias-block line
# by 4 extra spaces EXCEPT the first, which instead inherits this template's
# trailing spaces plus its own original leading whitespace — presumably the
# intent was a uniform indent under the collapsing header; verify that the
# generated gui_2.py still parses after this script runs.
new_agent_tools_func = """    def _render_agent_tools_panel(self) -> None:
        if imgui.collapsing_header("Active Tool Presets & Biases", imgui.TreeNodeFlags_.default_open):
            imgui.text("Tool Preset")
            presets = self.controller.tool_presets
            preset_names = [""] + sorted(list(presets.keys()))
            # Gracefully handle None or missing preset
            active = getattr(self, "ui_active_tool_preset", "")
            if active is None: active = ""
            try:
                idx = preset_names.index(active)
            except ValueError:
                idx = 0
            ch, new_idx = imgui.combo("##tool_preset_select", idx, preset_names)
            if ch:
                self.ui_active_tool_preset = preset_names[new_idx]
            imgui.same_line()
            if imgui.button("Manage Tools##tools"):
                self.show_tool_preset_manager_window = True
            if imgui.is_item_hovered():
                imgui.set_tooltip("Configure tool availability and default modes.")
            imgui.dummy(imgui.ImVec2(0, 4))
            """ + "\n    ".join(old_bias_block.split("\n")) + """
            imgui.dummy(imgui.ImVec2(0, 8))
            active_name = self.ui_active_tool_preset
            if active_name and active_name in presets:
                preset = presets[active_name]
                for cat_name, tools in preset.categories.items():
                    if imgui.tree_node(cat_name):
                        for tool in tools:
                            if tool.weight >= 5:
                                imgui.text_colored(vec4(255, 100, 100), "[HIGH]")
                                imgui.same_line()
                            elif tool.weight == 4:
                                imgui.text_colored(vec4(255, 255, 100), "[PREF]")
                                imgui.same_line()
                            elif tool.weight == 2:
                                imgui.text_colored(vec4(255, 150, 50), "[REJECT]")
                                imgui.same_line()
                            elif tool.weight <= 1:
                                imgui.text_colored(vec4(180, 180, 180), "[LOW]")
                                imgui.same_line()
                            imgui.text(tool.name)
                            imgui.same_line(180)
                            mode = tool.approval
                            if imgui.radio_button(f"Auto##{cat_name}_{tool.name}", mode == "auto"):
                                tool.approval = "auto"
                            imgui.same_line()
                            if imgui.radio_button(f"Ask##{cat_name}_{tool.name}", mode == "ask"):
                                tool.approval = "ask"
                        imgui.tree_pop()"""
content = content.replace(old_agent_tools_func, new_agent_tools_func)
# Fix cache text display in Usage Analytics
# Three one-line relabels: "Gemini Cache(s)" -> "Cache Usage" in the cached
# stats text, the ACTIVE status line, and the INACTIVE placeholder.
content = content.replace('self._gemini_cache_text = f"Gemini Caches: {count} ({size_bytes / 1024:.1f} KB)"', 'self._gemini_cache_text = f"Cache Usage: {count} ({size_bytes / 1024:.1f} KB)"')
content = content.replace('imgui.text_colored(C_LBL, f"Gemini Cache: ACTIVE | Age: {age:.0f}s / {ttl}s | Renews at: {ttl * 0.9:.0f}s")', 'imgui.text_colored(C_LBL, f"Cache Usage: ACTIVE | Age: {age:.0f}s / {ttl}s | Renews at: {ttl * 0.9:.0f}s")')
content = content.replace('imgui.text_disabled("Gemini Cache: INACTIVE")', 'imgui.text_disabled("Cache Usage: INACTIVE")')
# Also, user requested: "The persona should problably just mess with the project system prompt for now."
# Currently in persona selection: `ai_client.system_instruction = persona.system_prompt`
# Let's change that to `self.ui_project_system_prompt = persona.system_prompt` and remove ai_client direct injection
content = content.replace('ai_client.system_instruction = persona.system_prompt', 'self.ui_project_system_prompt = persona.system_prompt')
# Persist all accumulated edits back to the source file.
with open("src/gui_2.py", "w", encoding="utf-8") as f:
    f.write(content)
print("done")

View File

@@ -0,0 +1,228 @@
# One-shot refactoring script (pass 2): extracts the persona selector out of
# the provider panel into its own _render_persona_selector_panel method and
# moves the cache panel into the Usage Analytics window.  Works by exact
# string replacement on src/gui_2.py — each pattern must match byte-for-byte.
import sys
with open("src/gui_2.py", "r", encoding="utf-8") as f:
    content = f.read()
# 1. Update _gui_func:
# Extract Persona out of Provider panel. I will create a new method _render_persona_selector_panel
old_gui_settings = """        if self.show_windows.get("AI Settings", False):
            exp, opened = imgui.begin("AI Settings", self.show_windows["AI Settings"])
            self.show_windows["AI Settings"] = bool(opened)
            if exp:
                if imgui.collapsing_header("Provider & Model"):
                    self._render_provider_panel()
                if imgui.collapsing_header("System Prompts"):
                    self._render_system_prompts_panel()
                self._render_agent_tools_panel()
                self._render_cache_panel()
            imgui.end()
        if self.ui_separate_usage_analytics and self.show_windows.get("Usage Analytics", False):
            exp, opened = imgui.begin("Usage Analytics", self.show_windows["Usage Analytics"])
            self.show_windows["Usage Analytics"] = bool(opened)
            if exp:
                self._render_usage_analytics_panel()
            imgui.end()"""
# New version: persona selector is rendered unconditionally at the top of the
# AI Settings window, and the cache panel call is dropped here (re-added to
# Usage Analytics by the next replacement).
new_gui_settings = """        if self.show_windows.get("AI Settings", False):
            exp, opened = imgui.begin("AI Settings", self.show_windows["AI Settings"])
            self.show_windows["AI Settings"] = bool(opened)
            if exp:
                self._render_persona_selector_panel()
                if imgui.collapsing_header("Provider & Model"):
                    self._render_provider_panel()
                if imgui.collapsing_header("System Prompts"):
                    self._render_system_prompts_panel()
                self._render_agent_tools_panel()
            imgui.end()
        if self.ui_separate_usage_analytics and self.show_windows.get("Usage Analytics", False):
            exp, opened = imgui.begin("Usage Analytics", self.show_windows["Usage Analytics"])
            self.show_windows["Usage Analytics"] = bool(opened)
            if exp:
                self._render_usage_analytics_panel()
            imgui.end()"""
content = content.replace(old_gui_settings, new_gui_settings)
# Update _render_usage_analytics_panel
old_usage = """ def _render_usage_analytics_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_usage_analytics_panel")
self._render_token_budget_panel()
imgui.separator()
self._render_tool_analytics_panel()
imgui.separator()
self._render_session_insights_panel()
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_usage_analytics_panel")"""
new_usage = """ def _render_usage_analytics_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_usage_analytics_panel")
self._render_token_budget_panel()
imgui.separator()
self._render_cache_panel()
imgui.separator()
self._render_tool_analytics_panel()
imgui.separator()
self._render_session_insights_panel()
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_usage_analytics_panel")"""
content = content.replace(old_usage, new_usage)
# Remove the persona block from _render_provider_panel and put it in _render_persona_selector_panel
# The match target reflects the state left by the previous refactoring pass
# (persona block inlined at the top of _render_provider_panel, writing to
# self.ui_project_system_prompt rather than ai_client.system_instruction).
old_persona_block = """    def _render_provider_panel(self) -> None:
        if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel")
        imgui.text("Persona")
        if not hasattr(self, 'ui_active_persona'):
            self.ui_active_persona = ""
        personas = getattr(self.controller, 'personas', {})
        if imgui.begin_combo("##persona", self.ui_active_persona or "None"):
            if imgui.selectable("None", not self.ui_active_persona)[0]:
                self.ui_active_persona = ""
            for pname in sorted(personas.keys()):
                if imgui.selectable(pname, pname == self.ui_active_persona)[0]:
                    self.ui_active_persona = pname
                    if pname in personas:
                        persona = personas[pname]
                        self._editing_persona_name = persona.name
                        self._editing_persona_provider = persona.provider or ""
                        self._editing_persona_model = persona.model or ""
                        self._editing_persona_system_prompt = persona.system_prompt or ""
                        self._editing_persona_temperature = persona.temperature or 0.7
                        self._editing_persona_max_tokens = persona.max_output_tokens or 4096
                        self._editing_persona_tool_preset_id = persona.tool_preset or ""
                        self._editing_persona_bias_profile_id = persona.bias_profile or ""
                        import json
                        self._editing_persona_preferred_models = json.dumps(persona.preferred_models) if persona.preferred_models else "[]"
                        self._editing_persona_is_new = False
                    if persona.provider and persona.provider in self.controller.PROVIDERS:
                        self.current_provider = persona.provider
                    if persona.model:
                        self.current_model = persona.model
                    if persona.temperature is not None:
                        ai_client.temperature = persona.temperature
                    if persona.max_output_tokens:
                        ai_client.max_output_tokens = persona.max_output_tokens
                    if persona.system_prompt:
                        self.ui_project_system_prompt = persona.system_prompt
                    if persona.tool_preset:
                        self.ui_active_tool_preset = persona.tool_preset
                        ai_client.set_tool_preset(persona.tool_preset)
                    if persona.bias_profile:
                        self.ui_active_bias_profile = persona.bias_profile
                        ai_client.set_bias_profile(persona.bias_profile)
            imgui.end_combo()
        imgui.same_line()
        if imgui.button("Manage Personas"):
            self.show_persona_editor_window = True
        if self.ui_active_persona and self.ui_active_persona in personas:
            persona = personas[self.ui_active_persona]
            self._editing_persona_name = persona.name
            self._editing_persona_provider = persona.provider or ""
            self._editing_persona_model = persona.model or ""
            self._editing_persona_system_prompt = persona.system_prompt or ""
            self._editing_persona_temperature = persona.temperature if persona.temperature is not None else 0.7
            self._editing_persona_max_tokens = persona.max_output_tokens if persona.max_output_tokens is not None else 4096
            self._editing_persona_tool_preset_id = persona.tool_preset or ""
            self._editing_persona_bias_profile_id = persona.bias_profile or ""
            self._editing_persona_preferred_models_list = list(persona.preferred_models) if persona.preferred_models else []
            self._editing_persona_scope = self.controller.persona_manager.get_persona_scope(persona.name)
            self._editing_persona_is_new = False
        else:
            self._editing_persona_name = ""
            self._editing_persona_provider = self.current_provider
            self._editing_persona_model = self.current_model
            self._editing_persona_system_prompt = ""
            self._editing_persona_temperature = 0.7
            self._editing_persona_max_tokens = 4096
            self._editing_persona_tool_preset_id = ""
            self._editing_persona_bias_profile_id = ""
            self._editing_persona_preferred_models_list = []
            self._editing_persona_scope = "project"
            self._editing_persona_is_new = True
        imgui.separator()
        imgui.text("Provider")"""
# Replacement: a standalone _render_persona_selector_panel (preferred_models
# is now a list of dicts rather than flat provider/model fields) followed by
# a freshly trimmed _render_provider_panel header.
new_persona_block = """    def _render_persona_selector_panel(self) -> None:
        if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_persona_selector_panel")
        imgui.text("Persona")
        if not hasattr(self, 'ui_active_persona'):
            self.ui_active_persona = ""
        personas = getattr(self.controller, 'personas', {})
        if imgui.begin_combo("##persona", self.ui_active_persona or "None"):
            if imgui.selectable("None", not self.ui_active_persona)[0]:
                self.ui_active_persona = ""
            for pname in sorted(personas.keys()):
                if imgui.selectable(pname, pname == self.ui_active_persona)[0]:
                    self.ui_active_persona = pname
                    if pname in personas:
                        persona = personas[pname]
                        self._editing_persona_name = persona.name
                        self._editing_persona_system_prompt = persona.system_prompt or ""
                        self._editing_persona_tool_preset_id = persona.tool_preset or ""
                        self._editing_persona_bias_profile_id = persona.bias_profile or ""
                        import copy
                        self._editing_persona_preferred_models_list = copy.deepcopy(persona.preferred_models) if persona.preferred_models else []
                        self._editing_persona_is_new = False
                    # Apply persona to current state immediately
                    if persona.preferred_models and len(persona.preferred_models) > 0:
                        first_model = persona.preferred_models[0]
                        if first_model.get("provider"):
                            self.current_provider = first_model.get("provider")
                        if first_model.get("model"):
                            self.current_model = first_model.get("model")
                        if first_model.get("temperature") is not None:
                            ai_client.temperature = first_model.get("temperature")
                            self.temperature = first_model.get("temperature")
                        if first_model.get("max_output_tokens"):
                            ai_client.max_output_tokens = first_model.get("max_output_tokens")
                            self.max_tokens = first_model.get("max_output_tokens")
                        if first_model.get("history_trunc_limit"):
                            self.history_trunc_limit = first_model.get("history_trunc_limit")
                    if persona.system_prompt:
                        self.ui_project_system_prompt = persona.system_prompt
                    if persona.tool_preset:
                        self.ui_active_tool_preset = persona.tool_preset
                        ai_client.set_tool_preset(persona.tool_preset)
                    if persona.bias_profile:
                        self.ui_active_bias_profile = persona.bias_profile
                        ai_client.set_bias_profile(persona.bias_profile)
            imgui.end_combo()
        imgui.same_line()
        if imgui.button("Manage Personas"):
            self.show_persona_editor_window = True
        if self.ui_active_persona and self.ui_active_persona in personas:
            persona = personas[self.ui_active_persona]
            self._editing_persona_name = persona.name
            self._editing_persona_system_prompt = persona.system_prompt or ""
            self._editing_persona_tool_preset_id = persona.tool_preset or ""
            self._editing_persona_bias_profile_id = persona.bias_profile or ""
            import copy
            self._editing_persona_preferred_models_list = copy.deepcopy(persona.preferred_models) if persona.preferred_models else []
            self._editing_persona_scope = self.controller.persona_manager.get_persona_scope(persona.name)
            self._editing_persona_is_new = False
        else:
            self._editing_persona_name = ""
            self._editing_persona_system_prompt = ""
            self._editing_persona_tool_preset_id = ""
            self._editing_persona_bias_profile_id = ""
            self._editing_persona_preferred_models_list = [{
                "provider": self.current_provider,
                "model": self.current_model,
                "temperature": getattr(self, "temperature", 0.7),
                "max_output_tokens": getattr(self, "max_tokens", 4096),
                "history_trunc_limit": getattr(self, "history_trunc_limit", 900000)
            }]
            self._editing_persona_scope = "project"
            self._editing_persona_is_new = True
        imgui.separator()
        if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_persona_selector_panel")
    def _render_provider_panel(self) -> None:
        if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel")
        imgui.text("Provider")"""
content = content.replace(old_persona_block, new_persona_block)
# Persist all accumulated edits back to the source file.
with open("src/gui_2.py", "w", encoding="utf-8") as f:
    f.write(content)
print("done gui updates")

View File

@@ -363,3 +363,4 @@ def main() -> None:
if __name__ == "__main__": if __name__ == "__main__":
main() main()

View File

@@ -2400,3 +2400,4 @@ def get_history_bleed_stats(md_content: Optional[str] = None) -> dict[str, Any]:
"current": 0, "current": 0,
"percentage": 0, "percentage": 0,
}) })

View File

@@ -223,3 +223,4 @@ class ApiHookClient:
def get_patch_status(self) -> dict[str, Any]: def get_patch_status(self) -> dict[str, Any]:
"""Gets the current patch modal status.""" """Gets the current patch modal status."""
return self._make_request('GET', '/api/patch/status') or {} return self._make_request('GET', '/api/patch/status') or {}

View File

@@ -523,3 +523,4 @@ class HookServer:
if self.thread: if self.thread:
self.thread.join() self.thread.join()
logging.info("Hook server stopped") logging.info("Hook server stopped")

View File

@@ -883,6 +883,8 @@ class AppController:
self.persona_manager = PersonaManager(Path(self.active_project_path).parent if self.active_project_path else None) self.persona_manager = PersonaManager(Path(self.active_project_path).parent if self.active_project_path else None)
self.personas = self.persona_manager.load_all() self.personas = self.persona_manager.load_all()
self._fetch_models(self.current_provider)
self.ui_active_tool_preset = os.environ.get('SLOP_TOOL_PRESET') or ai_cfg.get("active_tool_preset") self.ui_active_tool_preset = os.environ.get('SLOP_TOOL_PRESET') or ai_cfg.get("active_tool_preset")
self.ui_active_bias_profile = ai_cfg.get("active_bias_profile") self.ui_active_bias_profile = ai_cfg.get("active_bias_profile")
ai_client.set_tool_preset(self.ui_active_tool_preset) ai_client.set_tool_preset(self.ui_active_tool_preset)
@@ -1496,6 +1498,9 @@ class AppController:
self._current_provider = value self._current_provider = value
ai_client.reset_session() ai_client.reset_session()
ai_client.set_provider(value, self.current_model) ai_client.set_provider(value, self.current_model)
self.available_models = self.all_available_models.get(value, [])
if not self.available_models:
self._fetch_models(value)
self._token_stats = {} self._token_stats = {}
self._token_stats_dirty = True self._token_stats_dirty = True

View File

@@ -63,3 +63,4 @@ def get_bg():
if _bg is None: if _bg is None:
_bg = BackgroundShader() _bg = BackgroundShader()
return _bg return _bg

View File

@@ -118,3 +118,4 @@ if __name__ == "__main__":
test_skeletons = "class NewFeature: pass" test_skeletons = "class NewFeature: pass"
tickets = generate_tickets(test_brief, test_skeletons) tickets = generate_tickets(test_brief, test_skeletons)
print(json.dumps(tickets, indent=2)) print(json.dumps(tickets, indent=2))

View File

@@ -59,3 +59,4 @@ def estimate_cost(model: str, input_tokens: int, output_tokens: int) -> float:
return input_cost + output_cost return input_cost + output_cost
return 0.0 return 0.0

View File

@@ -193,3 +193,4 @@ class ExecutionEngine:
ticket = self.dag.ticket_map.get(task_id) ticket = self.dag.ticket_map.get(task_id)
if ticket: if ticket:
ticket.status = status ticket.status = status

View File

@@ -1,4 +1,4 @@
from typing import List, Dict, Optional, Tuple from typing import List, Dict, Optional, Tuple
from dataclasses import dataclass from dataclasses import dataclass
import shutil import shutil
import os import os

View File

@@ -126,3 +126,4 @@ class UserRequestEvent:
"disc_text": self.disc_text, "disc_text": self.disc_text,
"base_dir": self.base_dir "base_dir": self.base_dir
} }

View File

@@ -376,3 +376,4 @@ def evict(path: Path) -> None:
def list_cached() -> List[Dict[str, Any]]: def list_cached() -> List[Dict[str, Any]]:
return [] return []

View File

@@ -189,3 +189,4 @@ class GeminiCliAdapter:
""" """
total_chars = len("\n".join(contents)) total_chars = len("\n".join(contents))
return total_chars // 4 return total_chars // 4

View File

@@ -451,12 +451,12 @@ class App:
exp, opened = imgui.begin("AI Settings", self.show_windows["AI Settings"]) exp, opened = imgui.begin("AI Settings", self.show_windows["AI Settings"])
self.show_windows["AI Settings"] = bool(opened) self.show_windows["AI Settings"] = bool(opened)
if exp: if exp:
self._render_persona_selector_panel()
if imgui.collapsing_header("Provider & Model"): if imgui.collapsing_header("Provider & Model"):
self._render_provider_panel() self._render_provider_panel()
if imgui.collapsing_header("System Prompts"): if imgui.collapsing_header("System Prompts"):
self._render_system_prompts_panel() self._render_system_prompts_panel()
self._render_agent_tools_panel() self._render_agent_tools_panel()
self._render_cache_panel()
imgui.end() imgui.end()
if self.ui_separate_usage_analytics and self.show_windows.get("Usage Analytics", False): if self.ui_separate_usage_analytics and self.show_windows.get("Usage Analytics", False):
@@ -1271,14 +1271,15 @@ class App:
"provider": self.current_provider, "provider": self.current_provider,
"model": self.current_model, "model": self.current_model,
"temperature": 0.7, "temperature": 0.7,
"max_output_tokens": 4096 "max_output_tokens": 4096,
"history_trunc_limit": 900000
}] }]
self._editing_persona_scope = "project" self._editing_persona_scope = "project"
self._editing_persona_is_new = True self._editing_persona_is_new = True
imgui.separator() imgui.separator()
personas = getattr(self.controller, 'personas', {}) personas = getattr(self.controller, 'personas', {})
for name in sorted(personas.keys()): for name in sorted(personas.keys()):
is_sel = (name == self._editing_persona_name and not self._editing_persona_is_new) is_sel = (name == self._editing_persona_name and not getattr(self, '_editing_persona_is_new', False))
if imgui.selectable(name, is_sel)[0]: if imgui.selectable(name, is_sel)[0]:
p = personas[name] p = personas[name]
self._editing_persona_name = p.name self._editing_persona_name = p.name
@@ -1306,73 +1307,99 @@ class App:
_, self._editing_persona_name = imgui.input_text("##pname", self._editing_persona_name, 128) _, self._editing_persona_name = imgui.input_text("##pname", self._editing_persona_name, 128)
imgui.text("Scope:") imgui.text("Scope:")
if imgui.radio_button("Global##pscope", self._editing_persona_scope == "global"): if imgui.radio_button("Global##pscope", getattr(self, '_editing_persona_scope', 'project') == "global"):
self._editing_persona_scope = "global" self._editing_persona_scope = "global"
imgui.same_line() imgui.same_line()
if imgui.radio_button("Project##pscope", self._editing_persona_scope == "project"): if imgui.radio_button("Project##pscope", getattr(self, '_editing_persona_scope', 'project') == "project"):
self._editing_persona_scope = "project" self._editing_persona_scope = "project"
imgui.separator() imgui.separator()
imgui.text("Preferred Models:") imgui.text("Preferred Models:")
providers = self.controller.PROVIDERS providers = self.controller.PROVIDERS
imgui.begin_child("pref_models_list", imgui.ImVec2(0, 150), True) if not hasattr(self, '_persona_pref_models_expanded'):
self._persona_pref_models_expanded = {}
imgui.begin_child("pref_models_list", imgui.ImVec2(0, 200), True)
to_remove = [] to_remove = []
for i, entry in enumerate(self._editing_persona_preferred_models_list): for i, entry in enumerate(self._editing_persona_preferred_models_list):
imgui.push_id(f"pref_model_{i}") imgui.push_id(f"pref_model_{i}")
imgui.text(f"{i+1}.")
prov = entry.get("provider", "Unknown")
mod = entry.get("model", "Unknown")
is_expanded = self._persona_pref_models_expanded.get(i, False)
if imgui.button("-" if is_expanded else "+"):
self._persona_pref_models_expanded[i] = not is_expanded
imgui.same_line() imgui.same_line()
imgui.set_next_item_width(120) imgui.text(f"{i+1}. {prov} - {mod}")
prov = entry.get("provider", "") imgui.same_line(imgui.get_content_region_avail().x - 30)
if imgui.button("x"):
to_remove.append(i)
if is_expanded:
imgui.indent(20)
imgui.text("Provider:")
imgui.same_line()
imgui.set_next_item_width(150)
p_idx = providers.index(prov) + 1 if prov in providers else 0 p_idx = providers.index(prov) + 1 if prov in providers else 0
changed_p, p_idx = imgui.combo("##prov", p_idx, ["None"] + providers) changed_p, p_idx = imgui.combo("##prov", p_idx, ["None"] + providers)
if changed_p: if changed_p:
entry["provider"] = providers[p_idx-1] if p_idx > 0 else "" entry["provider"] = providers[p_idx-1] if p_idx > 0 else ""
imgui.same_line() imgui.same_line()
imgui.set_next_item_width(200) imgui.text("Model:")
imgui.same_line()
imgui.set_next_item_width(250)
curr_prov = entry.get("provider", "") curr_prov = entry.get("provider", "")
m_list = self.controller.all_available_models.get(curr_prov, []) m_list = self.controller.all_available_models.get(curr_prov, [])
model = entry.get("model", "") m_idx = m_list.index(mod) + 1 if mod in m_list else 0
m_idx = m_list.index(model) + 1 if model in m_list else 0
changed_m, m_idx = imgui.combo("##model", m_idx, ["None"] + m_list) changed_m, m_idx = imgui.combo("##model", m_idx, ["None"] + m_list)
if changed_m: if changed_m:
entry["model"] = m_list[m_idx-1] if m_idx > 0 else "" entry["model"] = m_list[m_idx-1] if m_idx > 0 else ""
imgui.text("Temperature:")
imgui.same_line() imgui.same_line()
imgui.text("T:") imgui.set_next_item_width(100)
imgui.same_line()
imgui.set_next_item_width(60)
_, entry["temperature"] = imgui.input_float("##temp", entry.get("temperature", 0.7), 0.1, 0.1, "%.1f") _, entry["temperature"] = imgui.input_float("##temp", entry.get("temperature", 0.7), 0.1, 0.1, "%.1f")
imgui.same_line() imgui.same_line()
imgui.text("Max:") imgui.text("Max Output Tokens:")
imgui.same_line() imgui.same_line()
imgui.set_next_item_width(80) imgui.set_next_item_width(100)
_, entry["max_output_tokens"] = imgui.input_int("##maxt", entry.get("max_output_tokens", 4096)) _, entry["max_output_tokens"] = imgui.input_int("##maxt", entry.get("max_output_tokens", 4096))
imgui.text("History Truncation Limit:")
imgui.same_line() imgui.same_line()
if imgui.button("x"): imgui.set_next_item_width(100)
to_remove.append(i) _, entry["history_trunc_limit"] = imgui.input_int("##hist", entry.get("history_trunc_limit", 900000))
imgui.unindent(20)
imgui.dummy(imgui.ImVec2(0, 4))
imgui.pop_id() imgui.pop_id()
for i in reversed(to_remove): for i in reversed(to_remove):
self._editing_persona_preferred_models_list.pop(i) self._editing_persona_preferred_models_list.pop(i)
if i in self._persona_pref_models_expanded:
del self._persona_pref_models_expanded[i]
imgui.end_child() imgui.end_child()
if imgui.button("Add Preferred Model"): if imgui.button("Add Preferred Model"):
idx = len(self._editing_persona_preferred_models_list)
self._editing_persona_preferred_models_list.append({ self._editing_persona_preferred_models_list.append({
"provider": self.current_provider, "provider": self.current_provider,
"model": self.current_model, "model": self.current_model,
"temperature": 0.7, "temperature": 0.7,
"max_output_tokens": 4096 "max_output_tokens": 4096,
"history_trunc_limit": 900000
}) })
self._persona_pref_models_expanded[idx] = True
imgui.separator() imgui.separator()
imgui.text("Tool Preset:") imgui.text("Tool Preset:")
imgui.same_line() imgui.same_line()
t_preset_names = ["None"] + sorted(self.controller.tool_presets.keys()) t_preset_names = ["None"] + sorted(self.controller.tool_presets.keys())
t_idx = t_preset_names.index(self._editing_persona_tool_preset_id) if self._editing_persona_tool_preset_id in t_preset_names else 0 t_idx = t_preset_names.index(self._editing_persona_tool_preset_id) if getattr(self, '_editing_persona_tool_preset_id', '') in t_preset_names else 0
imgui.push_item_width(200) imgui.push_item_width(200)
_, t_idx = imgui.combo("##ptoolpreset", t_idx, t_preset_names) _, t_idx = imgui.combo("##ptoolpreset", t_idx, t_preset_names)
self._editing_persona_tool_preset_id = t_preset_names[t_idx] if t_idx > 0 else "" self._editing_persona_tool_preset_id = t_preset_names[t_idx] if t_idx > 0 else ""
@@ -1382,16 +1409,15 @@ class App:
imgui.text("Bias Profile:") imgui.text("Bias Profile:")
imgui.same_line() imgui.same_line()
bias_names = ["None"] + sorted(self.controller.bias_profiles.keys()) bias_names = ["None"] + sorted(self.controller.bias_profiles.keys())
b_idx = bias_names.index(self._editing_persona_bias_profile_id) if self._editing_persona_bias_profile_id in bias_names else 0 b_idx = bias_names.index(self._editing_persona_bias_profile_id) if getattr(self, '_editing_persona_bias_profile_id', '') in bias_names else 0
imgui.push_item_width(200) imgui.push_item_width(200)
_, b_idx = imgui.combo("##pbiasprofile", b_idx, bias_names) _, b_idx = imgui.combo("##pbiasprofile", b_idx, bias_names)
self._editing_persona_bias_profile_id = bias_names[b_idx] if b_idx > 0 else "" self._editing_persona_bias_profile_id = bias_names[b_idx] if b_idx > 0 else ""
imgui.pop_item_width() imgui.pop_item_width()
if imgui.collapsing_header("Manage Tool Presets & Biases"): imgui.same_line()
imgui.begin_child("tool_preset_embedded", imgui.ImVec2(0, 300), True) if imgui.button("Manage Tools##p_tools"):
self._render_tool_preset_manager_content(is_embedded=True) self.show_tool_preset_manager_window = True
imgui.end_child()
imgui.separator() imgui.separator()
imgui.text("System Prompt:") imgui.text("System Prompt:")
@@ -1412,10 +1438,9 @@ class App:
self._editing_persona_system_prompt = p.system_prompt self._editing_persona_system_prompt = p.system_prompt
self._load_preset_idx = 0 self._load_preset_idx = 0
if imgui.collapsing_header("Manage Prompt Presets"): imgui.same_line()
imgui.begin_child("prompt_preset_embedded", imgui.ImVec2(0, 200), True) if imgui.button("Manage Prompts##p_prompts"):
self._render_preset_manager_content(is_embedded=True) self.show_preset_manager_window = True
imgui.end_child()
_, self._editing_persona_system_prompt = imgui.input_text_multiline("##pprompt", self._editing_persona_system_prompt, imgui.ImVec2(-1, 150)) _, self._editing_persona_system_prompt = imgui.input_text_multiline("##pprompt", self._editing_persona_system_prompt, imgui.ImVec2(-1, 150))
@@ -1433,7 +1458,7 @@ class App:
bias_profile=self._editing_persona_bias_profile_id or None, bias_profile=self._editing_persona_bias_profile_id or None,
preferred_models=save_models, preferred_models=save_models,
) )
self.controller._cb_save_persona(persona, self._editing_persona_scope) self.controller._cb_save_persona(persona, getattr(self, '_editing_persona_scope', 'project'))
self.ai_status = f"Saved Persona: {persona.name}" self.ai_status = f"Saved Persona: {persona.name}"
except Exception as e: except Exception as e:
self.ai_status = f"Error saving persona: {e}" self.ai_status = f"Error saving persona: {e}"
@@ -1445,7 +1470,7 @@ class App:
imgui.same_line() imgui.same_line()
if imgui.button("Delete", imgui.ImVec2(100, 0)): if imgui.button("Delete", imgui.ImVec2(100, 0)):
if not getattr(self, '_editing_persona_is_new', True) and self._editing_persona_name: if not getattr(self, '_editing_persona_is_new', True) and self._editing_persona_name:
self.controller._cb_delete_persona(self._editing_persona_name, self._editing_persona_scope) self.controller._cb_delete_persona(self._editing_persona_name, getattr(self, '_editing_persona_scope', 'project'))
self.ai_status = f"Deleted Persona: {self._editing_persona_name}" self.ai_status = f"Deleted Persona: {self._editing_persona_name}"
self._editing_persona_name = "" self._editing_persona_name = ""
self._editing_persona_is_new = True self._editing_persona_is_new = True
@@ -2225,40 +2250,8 @@ def hello():
self._scroll_disc_to_bottom = False self._scroll_disc_to_bottom = False
imgui.end_child() imgui.end_child()
def _render_provider_panel(self) -> None: def _render_persona_selector_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel") if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_persona_selector_panel")
imgui.text("Provider")
if imgui.begin_combo("##prov", self.current_provider):
for p in PROVIDERS:
if imgui.selectable(p, p == self.current_provider)[0]:
self.current_provider = p
imgui.end_combo()
imgui.separator()
imgui.text("Model")
imgui.same_line()
if imgui.button("Fetch Models"):
self._fetch_models(self.current_provider)
if imgui.begin_list_box("##models", imgui.ImVec2(-1, 120)):
for m in self.available_models:
if imgui.selectable(m, m == self.current_model)[0]:
self.current_model = m
imgui.end_list_box()
imgui.separator()
imgui.text("Parameters")
ch, self.temperature = imgui.slider_float("Temperature", self.temperature, 0.0, 2.0, "%.2f")
ch, self.max_tokens = imgui.input_int("Max Tokens (Output)", self.max_tokens, 1024)
ch, self.history_trunc_limit = imgui.input_int("History Truncation Limit", self.history_trunc_limit, 1024)
imgui.text("Bias Profile")
if imgui.begin_combo("##bias", self.ui_active_bias_profile or "None"):
if imgui.selectable("None", not self.ui_active_bias_profile)[0]:
self.ui_active_bias_profile = ""
ai_client.set_bias_profile(None)
for bname in sorted(self.bias_profiles.keys()):
if imgui.selectable(bname, bname == self.ui_active_bias_profile)[0]:
self.ui_active_bias_profile = bname
ai_client.set_bias_profile(bname)
imgui.end_combo()
imgui.text("Persona") imgui.text("Persona")
if not hasattr(self, 'ui_active_persona'): if not hasattr(self, 'ui_active_persona'):
self.ui_active_persona = "" self.ui_active_persona = ""
@@ -2272,26 +2265,31 @@ def hello():
if pname in personas: if pname in personas:
persona = personas[pname] persona = personas[pname]
self._editing_persona_name = persona.name self._editing_persona_name = persona.name
self._editing_persona_provider = persona.provider or ""
self._editing_persona_model = persona.model or ""
self._editing_persona_system_prompt = persona.system_prompt or "" self._editing_persona_system_prompt = persona.system_prompt or ""
self._editing_persona_temperature = persona.temperature or 0.7
self._editing_persona_max_tokens = persona.max_output_tokens or 4096
self._editing_persona_tool_preset_id = persona.tool_preset or "" self._editing_persona_tool_preset_id = persona.tool_preset or ""
self._editing_persona_bias_profile_id = persona.bias_profile or "" self._editing_persona_bias_profile_id = persona.bias_profile or ""
import json import copy
self._editing_persona_preferred_models = json.dumps(persona.preferred_models) if persona.preferred_models else "[]" self._editing_persona_preferred_models_list = copy.deepcopy(persona.preferred_models) if persona.preferred_models else []
self._editing_persona_is_new = False self._editing_persona_is_new = False
if persona.provider and persona.provider in self.controller.PROVIDERS:
self.current_provider = persona.provider # Apply persona to current state immediately
if persona.model: if persona.preferred_models and len(persona.preferred_models) > 0:
self.current_model = persona.model first_model = persona.preferred_models[0]
if persona.temperature is not None: if first_model.get("provider"):
ai_client.temperature = persona.temperature self.current_provider = first_model.get("provider")
if persona.max_output_tokens: if first_model.get("model"):
ai_client.max_output_tokens = persona.max_output_tokens self.current_model = first_model.get("model")
if first_model.get("temperature") is not None:
ai_client.temperature = first_model.get("temperature")
self.temperature = first_model.get("temperature")
if first_model.get("max_output_tokens"):
ai_client.max_output_tokens = first_model.get("max_output_tokens")
self.max_tokens = first_model.get("max_output_tokens")
if first_model.get("history_trunc_limit"):
self.history_trunc_limit = first_model.get("history_trunc_limit")
if persona.system_prompt: if persona.system_prompt:
ai_client.system_instruction = persona.system_prompt self.ui_project_system_prompt = persona.system_prompt
if persona.tool_preset: if persona.tool_preset:
self.ui_active_tool_preset = persona.tool_preset self.ui_active_tool_preset = persona.tool_preset
ai_client.set_tool_preset(persona.tool_preset) ai_client.set_tool_preset(persona.tool_preset)
@@ -2305,28 +2303,53 @@ def hello():
if self.ui_active_persona and self.ui_active_persona in personas: if self.ui_active_persona and self.ui_active_persona in personas:
persona = personas[self.ui_active_persona] persona = personas[self.ui_active_persona]
self._editing_persona_name = persona.name self._editing_persona_name = persona.name
self._editing_persona_provider = persona.provider or ""
self._editing_persona_model = persona.model or ""
self._editing_persona_system_prompt = persona.system_prompt or "" self._editing_persona_system_prompt = persona.system_prompt or ""
self._editing_persona_temperature = persona.temperature if persona.temperature is not None else 0.7
self._editing_persona_max_tokens = persona.max_output_tokens if persona.max_output_tokens is not None else 4096
self._editing_persona_tool_preset_id = persona.tool_preset or "" self._editing_persona_tool_preset_id = persona.tool_preset or ""
self._editing_persona_bias_profile_id = persona.bias_profile or "" self._editing_persona_bias_profile_id = persona.bias_profile or ""
self._editing_persona_preferred_models_list = list(persona.preferred_models) if persona.preferred_models else [] import copy
self._editing_persona_preferred_models_list = copy.deepcopy(persona.preferred_models) if persona.preferred_models else []
self._editing_persona_scope = self.controller.persona_manager.get_persona_scope(persona.name) self._editing_persona_scope = self.controller.persona_manager.get_persona_scope(persona.name)
self._editing_persona_is_new = False self._editing_persona_is_new = False
else: else:
self._editing_persona_name = "" self._editing_persona_name = ""
self._editing_persona_provider = self.current_provider
self._editing_persona_model = self.current_model
self._editing_persona_system_prompt = "" self._editing_persona_system_prompt = ""
self._editing_persona_temperature = 0.7
self._editing_persona_max_tokens = 4096
self._editing_persona_tool_preset_id = "" self._editing_persona_tool_preset_id = ""
self._editing_persona_bias_profile_id = "" self._editing_persona_bias_profile_id = ""
self._editing_persona_preferred_models_list = [] self._editing_persona_preferred_models_list = [{
"provider": self.current_provider,
"model": self.current_model,
"temperature": getattr(self, "temperature", 0.7),
"max_output_tokens": getattr(self, "max_tokens", 4096),
"history_trunc_limit": getattr(self, "history_trunc_limit", 900000)
}]
self._editing_persona_scope = "project" self._editing_persona_scope = "project"
self._editing_persona_is_new = True self._editing_persona_is_new = True
imgui.separator()
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_persona_selector_panel")
def _render_provider_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_provider_panel")
imgui.text("Provider")
if imgui.begin_combo("##prov", self.current_provider):
for p in PROVIDERS:
if imgui.selectable(p, p == self.current_provider)[0]:
self.current_provider = p
imgui.end_combo()
imgui.separator()
imgui.text("Model")
if imgui.begin_list_box("##models", imgui.ImVec2(-1, 120)):
for m in self.available_models:
if imgui.selectable(m, m == self.current_model)[0]:
self.current_model = m
imgui.end_list_box()
imgui.separator()
imgui.text("Parameters")
ch, self.temperature = imgui.slider_float("Temperature", self.temperature, 0.0, 2.0, "%.2f")
ch, self.max_tokens = imgui.input_int("Max Tokens (Output)", self.max_tokens, 1024)
ch, self.history_trunc_limit = imgui.input_int("History Truncation Limit", self.history_trunc_limit, 1024)
if self.current_provider == "gemini_cli": if self.current_provider == "gemini_cli":
imgui.separator() imgui.separator()
imgui.text("Gemini CLI") imgui.text("Gemini CLI")
@@ -2450,9 +2473,9 @@ def hello():
if cache_stats.get("cache_exists"): if cache_stats.get("cache_exists"):
age = cache_stats.get("cache_age_seconds", 0) age = cache_stats.get("cache_age_seconds", 0)
ttl = cache_stats.get("ttl_seconds", 3600) ttl = cache_stats.get("ttl_seconds", 3600)
imgui.text_colored(C_LBL, f"Gemini Cache: ACTIVE | Age: {age:.0f}s / {ttl}s | Renews at: {ttl * 0.9:.0f}s") imgui.text_colored(C_LBL, f"Cache Usage: ACTIVE | Age: {age:.0f}s / {ttl}s | Renews at: {ttl * 0.9:.0f}s")
else: else:
imgui.text_disabled("Gemini Cache: INACTIVE") imgui.text_disabled("Cache Usage: INACTIVE")
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_token_budget_panel") if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_token_budget_panel")
def _render_cache_panel(self) -> None: def _render_cache_panel(self) -> None:
@@ -2460,9 +2483,7 @@ def hello():
if self.current_provider != "gemini": if self.current_provider != "gemini":
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_cache_panel") if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_cache_panel")
return return
if not imgui.collapsing_header("Cache Analytics"): imgui.text_colored(C_LBL, 'Cache Analytics')
if self.perf_profiling_enabled: self.perf_monitor.end_component("_render_cache_panel")
return
stats = getattr(self.controller, '_cached_cache_stats', {}) stats = getattr(self.controller, '_cached_cache_stats', {})
if not stats.get("cache_exists"): if not stats.get("cache_exists"):
imgui.text_disabled("No active cache") imgui.text_disabled("No active cache")
@@ -2550,6 +2571,8 @@ def hello():
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_usage_analytics_panel") if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_usage_analytics_panel")
self._render_token_budget_panel() self._render_token_budget_panel()
imgui.separator() imgui.separator()
self._render_cache_panel()
imgui.separator()
self._render_tool_analytics_panel() self._render_tool_analytics_panel()
imgui.separator() imgui.separator()
self._render_session_insights_panel() self._render_session_insights_panel()
@@ -3637,11 +3660,11 @@ def hello():
imgui.set_item_tooltip("Open preset management modal") imgui.set_item_tooltip("Open preset management modal")
ch, self.ui_project_system_prompt = imgui.input_text_multiline("##psp", self.ui_project_system_prompt, imgui.ImVec2(-1, 100)) ch, self.ui_project_system_prompt = imgui.input_text_multiline("##psp", self.ui_project_system_prompt, imgui.ImVec2(-1, 100))
def _render_agent_tools_panel(self) -> None: def _render_agent_tools_panel(self) -> None:
imgui.text_colored(C_LBL, 'Active Tool Preset') if imgui.collapsing_header("Active Tool Presets & Biases", imgui.TreeNodeFlags_.default_open):
imgui.text("Tool Preset")
presets = self.controller.tool_presets presets = self.controller.tool_presets
preset_names = [""] + sorted(list(presets.keys())) preset_names = [""] + sorted(list(presets.keys()))
# Gracefully handle None or missing preset
active = getattr(self, "ui_active_tool_preset", "") active = getattr(self, "ui_active_tool_preset", "")
if active is None: active = "" if active is None: active = ""
try: try:
@@ -3659,6 +3682,20 @@ def hello():
if imgui.is_item_hovered(): if imgui.is_item_hovered():
imgui.set_tooltip("Configure tool availability and default modes.") imgui.set_tooltip("Configure tool availability and default modes.")
imgui.dummy(imgui.ImVec2(0, 4))
imgui.text("Bias Profile")
if imgui.begin_combo("##bias", getattr(self, 'ui_active_bias_profile', "") or "None"):
if imgui.selectable("None", not getattr(self, 'ui_active_bias_profile', ""))[0]:
self.ui_active_bias_profile = ""
from src import ai_client
ai_client.set_bias_profile(None)
for bname in sorted(self.controller.bias_profiles.keys()):
if imgui.selectable(bname, bname == getattr(self, 'ui_active_bias_profile', ""))[0]:
self.ui_active_bias_profile = bname
from src import ai_client
ai_client.set_bias_profile(bname)
imgui.end_combo()
imgui.dummy(imgui.ImVec2(0, 8)) imgui.dummy(imgui.ImVec2(0, 8))
active_name = self.ui_active_tool_preset active_name = self.ui_active_tool_preset
if active_name and active_name in presets: if active_name and active_name in presets:
@@ -3689,7 +3726,6 @@ def hello():
if imgui.radio_button(f"Ask##{cat_name}_{tool.name}", mode == "ask"): if imgui.radio_button(f"Ask##{cat_name}_{tool.name}", mode == "ask"):
tool.approval = "ask" tool.approval = "ask"
imgui.tree_pop() imgui.tree_pop()
def _render_theme_panel(self) -> None: def _render_theme_panel(self) -> None:
if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_theme_panel") if self.perf_profiling_enabled: self.perf_monitor.start_component("_render_theme_panel")
exp, opened = imgui.begin("Theme", self.show_windows["Theme"]) exp, opened = imgui.begin("Theme", self.show_windows["Theme"])

View File

@@ -115,3 +115,4 @@ class LogPruner:
sys.stderr.write(f"[LogPruner] Error removing {resolved_path}: {e}\n") sys.stderr.write(f"[LogPruner] Error removing {resolved_path}: {e}\n")
self.log_registry.save_registry() self.log_registry.save_registry()

View File

@@ -301,3 +301,4 @@ class LogRegistry:
}) })
return old_sessions return old_sessions

View File

@@ -166,3 +166,4 @@ def render_unindented(text: str) -> None:
def render_code(code: str, lang: str = "", context_id: str = "default", block_idx: int = 0) -> None: def render_code(code: str, lang: str = "", context_id: str = "default", block_idx: int = 0) -> None:
get_renderer().render_code(code, lang, context_id, block_idx) get_renderer().render_code(code, lang, context_id, block_idx)

View File

@@ -1,4 +1,4 @@
# mcp_client.py # mcp_client.py
""" """
MCP Client - Multi-tool filesystem and network operations with sandboxing. MCP Client - Multi-tool filesystem and network operations with sandboxing.
@@ -782,10 +782,10 @@ def get_tree(path: str, max_depth: int = 2) -> str:
entries = [e for e in entries if not e.name.startswith('.') and e.name not in ('__pycache__', 'venv', 'env') and e.name != "history.toml" and not e.name.endswith("_history.toml")] entries = [e for e in entries if not e.name.startswith('.') and e.name not in ('__pycache__', 'venv', 'env') and e.name != "history.toml" and not e.name.endswith("_history.toml")]
for i, entry in enumerate(entries): for i, entry in enumerate(entries):
is_last = (i == len(entries) - 1) is_last = (i == len(entries) - 1)
connector = "└── " if is_last else "├── " connector = "└── " if is_last else "├── "
lines.append(f"{prefix}{connector}{entry.name}") lines.append(f"{prefix}{connector}{entry.name}")
if entry.is_dir(): if entry.is_dir():
extension = " " if is_last else "│ " extension = " " if is_last else " "
lines.extend(_build_tree(entry, current_depth + 1, prefix + extension)) lines.extend(_build_tree(entry, current_depth + 1, prefix + extension))
return lines return lines
tree_lines = [f"{p.name}/"] + _build_tree(p, 1) tree_lines = [f"{p.name}/"] + _build_tree(p, 1)
@@ -1466,3 +1466,4 @@ MCP_TOOL_SPECS: list[dict[str, Any]] = [

View File

@@ -178,3 +178,4 @@ RULES:
Analyze this error and generate the patch: Analyze this error and generate the patch:
""" """

View File

@@ -614,3 +614,4 @@ def run_worker_lifecycle(ticket: Ticket, context: WorkerContext, context_files:
if event_queue: if event_queue:
_queue_put(event_queue, "ticket_completed", {"ticket_id": ticket.id, "timestamp": time.time()}) _queue_put(event_queue, "ticket_completed", {"ticket_id": ticket.id, "timestamp": time.time()})
return response return response

View File

@@ -86,3 +86,4 @@ class NativeOrchestrator:
"""Tier 4: Generate patch for error""" """Tier 4: Generate patch for error"""
from src import ai_client from src import ai_client
return ai_client.run_tier4_patch_generation(error, file_context) return ai_client.run_tier4_patch_generation(error, file_context)

View File

@@ -125,3 +125,4 @@ if __name__ == "__main__":
history = get_track_history_summary() history = get_track_history_summary()
tracks = generate_tracks("Implement a basic unit test for the ai_client.py module.", flat, file_items, history_summary=history) tracks = generate_tracks("Implement a basic unit test for the ai_client.py module.", flat, file_items, history_summary=history)
print(json.dumps(tracks, indent=2)) print(json.dumps(tracks, indent=2))

View File

@@ -87,3 +87,4 @@ def get_outline(path: Path, code: str) -> str:
return outliner.outline(code) return outliner.outline(code)
else: else:
return f"Outlining not supported for {suffix} files yet." return f"Outlining not supported for {suffix} files yet."

View File

@@ -1,4 +1,4 @@
from typing import Optional, Callable, List from typing import Optional, Callable, List
from dataclasses import dataclass, field from dataclasses import dataclass, field
@dataclass @dataclass

View File

@@ -110,3 +110,4 @@ def get_archive_dir() -> Path:
def reset_resolved() -> None: def reset_resolved() -> None:
"""For testing only - clear cached resolutions.""" """For testing only - clear cached resolutions."""
_RESOLVED.clear() _RESOLVED.clear()

View File

@@ -232,3 +232,4 @@ class PerformanceMonitor:
self._stop_event.set() self._stop_event.set()
if self._cpu_thread.is_alive(): if self._cpu_thread.is_alive():
self._cpu_thread.join(timeout=2.0) self._cpu_thread.join(timeout=2.0)

View File

@@ -82,3 +82,4 @@ class PersonaManager:
path.parent.mkdir(parents=True, exist_ok=True) path.parent.mkdir(parents=True, exist_ok=True)
with open(path, "wb") as f: with open(path, "wb") as f:
tomli_w.dump(data, f) tomli_w.dump(data, f)

View File

@@ -89,3 +89,4 @@ class PresetManager:
path.parent.mkdir(parents=True, exist_ok=True) path.parent.mkdir(parents=True, exist_ok=True)
with open(path, "wb") as f: with open(path, "wb") as f:
f.write(tomli_w.dumps(data).encode("utf-8")) f.write(tomli_w.dumps(data).encode("utf-8"))

View File

@@ -391,3 +391,4 @@ def calculate_track_progress(tickets: list) -> dict:
"blocked": blocked, "blocked": blocked,
"todo": todo "todo": todo
} }

View File

@@ -217,3 +217,4 @@ def log_cli_call(command: str, stdin_content: Optional[str], stdout_content: Opt
_cli_fh.flush() _cli_fh.flush()
except Exception: except Exception:
pass pass

View File

@@ -70,3 +70,4 @@ def apply_faux_acrylic_glass(draw_list: imgui.ImDrawList, p_min: imgui.ImVec2, p
flags=imgui.ImDrawFlags_.round_corners_all if rounding > 0 else imgui.ImDrawFlags_.none, flags=imgui.ImDrawFlags_.round_corners_all if rounding > 0 else imgui.ImDrawFlags_.none,
thickness=1.0 thickness=1.0
) )

View File

@@ -90,3 +90,4 @@ def run_powershell(script: str, base_dir: str, qa_callback: Optional[Callable[[s
if 'process' in locals() and process: if 'process' in locals() and process:
subprocess.run(["taskkill", "/F", "/T", "/PID", str(process.pid)], capture_output=True) subprocess.run(["taskkill", "/F", "/T", "/PID", str(process.pid)], capture_output=True)
return f"ERROR: {e}" return f"ERROR: {e}"

View File

@@ -190,3 +190,4 @@ def build_summary_markdown(file_items: list[dict[str, Any]]) -> str:
summary = item.get("summary", "") summary = item.get("summary", "")
parts.append(f"### `{path}`\n\n{summary}") parts.append(f"### `{path}`\n\n{summary}")
return "\n\n---\n\n".join(parts) return "\n\n---\n\n".join(parts)

View File

@@ -388,3 +388,4 @@ def load_from_config(config: dict[str, Any]) -> None:
if font_path: if font_path:
apply_font(font_path, font_size) apply_font(font_path, font_size)
set_scale(scale) set_scale(scale)

View File

@@ -392,3 +392,4 @@ def get_tweaked_theme() -> hello_imgui.ImGuiTweakedTheme:
# Sync tweaks # Sync tweaks
tt.tweaks.rounding = 6.0 tt.tweaks.rounding = 6.0
return tt return tt

View File

@@ -82,3 +82,4 @@ def apply_nerv() -> None:
style.popup_border_size = 1.0 style.popup_border_size = 1.0
style.child_border_size = 1.0 style.child_border_size = 1.0
style.tab_border_size = 1.0 style.tab_border_size = 1.0

View File

@@ -83,3 +83,4 @@ class AlertPulsing:
alpha = 0.05 + 0.15 * ((math.sin(time.time() * 4.0) + 1.0) / 2.0) alpha = 0.05 + 0.15 * ((math.sin(time.time() * 4.0) + 1.0) / 2.0)
color = imgui.get_color_u32((1.0, 0.0, 0.0, alpha)) color = imgui.get_color_u32((1.0, 0.0, 0.0, alpha))
draw_list.add_rect((0.0, 0.0), (width, height), color, 0.0, 0, 10.0) draw_list.add_rect((0.0, 0.0), (width, height), color, 0.0, 0, 10.0)

View File

@@ -63,3 +63,4 @@ class ToolBiasEngine:
lines.append(f"- {cat}: {mult}x") lines.append(f"- {cat}: {mult}x")
return "\n\n".join(lines) return "\n\n".join(lines)

View File

@@ -108,3 +108,4 @@ class ToolPresetManager:
if "bias_profiles" in data and name in data["bias_profiles"]: if "bias_profiles" in data and name in data["bias_profiles"]:
del data["bias_profiles"][name] del data["bias_profiles"][name]
self._write_raw(path, data) self._write_raw(path, data)