refactor(gui): redesign persona editor UI and replace popup modals with standard windows

This commit is contained in:
2026-03-10 23:21:14 -04:00
parent 6ae8737c1a
commit 6da3d95c0e
5 changed files with 654 additions and 537 deletions

View File

@@ -300,7 +300,9 @@ class AppController:
self._inject_mode: str = "skeleton" self._inject_mode: str = "skeleton"
self._inject_preview: str = "" self._inject_preview: str = ""
self._show_inject_modal: bool = False self._show_inject_modal: bool = False
self.show_preset_manager_modal: bool = False self.show_preset_manager_window: bool = False
self.show_tool_preset_manager_window: bool = False
self.show_persona_editor_window: bool = False
self._editing_preset_name: str = "" self._editing_preset_name: str = ""
self._editing_preset_content: str = "" self._editing_preset_content: str = ""
self._editing_preset_temperature: float = 0.0 self._editing_preset_temperature: float = 0.0
@@ -342,7 +344,9 @@ class AppController:
'ui_active_tool_preset': 'ui_active_tool_preset', 'ui_active_tool_preset': 'ui_active_tool_preset',
'temperature': 'temperature', 'temperature': 'temperature',
'max_tokens': 'max_tokens', 'max_tokens': 'max_tokens',
'show_preset_manager_modal': 'show_preset_manager_modal', 'show_preset_manager_window': 'show_preset_manager_window',
'show_tool_preset_manager_window': 'show_tool_preset_manager_window',
'show_persona_editor_window': 'show_persona_editor_window',
'_editing_preset_name': '_editing_preset_name', '_editing_preset_name': '_editing_preset_name',
'_editing_preset_content': '_editing_preset_content', '_editing_preset_content': '_editing_preset_content',
'_editing_preset_temperature': '_editing_preset_temperature', '_editing_preset_temperature': '_editing_preset_temperature',
@@ -390,7 +394,9 @@ class AppController:
'ui_active_tool_preset': 'ui_active_tool_preset', 'ui_active_tool_preset': 'ui_active_tool_preset',
'temperature': 'temperature', 'temperature': 'temperature',
'max_tokens': 'max_tokens', 'max_tokens': 'max_tokens',
'show_preset_manager_modal': 'show_preset_manager_modal', 'show_preset_manager_window': 'show_preset_manager_window',
'show_tool_preset_manager_window': 'show_tool_preset_manager_window',
'show_persona_editor_window': 'show_persona_editor_window',
'_editing_preset_name': '_editing_preset_name', '_editing_preset_name': '_editing_preset_name',
'_editing_preset_content': '_editing_preset_content', '_editing_preset_content': '_editing_preset_content',
'_editing_preset_temperature': '_editing_preset_temperature', '_editing_preset_temperature': '_editing_preset_temperature',
@@ -2567,3 +2573,4 @@ class AppController:
tasks=self.active_track.tickets tasks=self.active_track.tickets
) )
project_manager.save_track_state(self.active_track.id, state, self.ui_files_base_dir) project_manager.save_track_state(self.active_track.id, state, self.ui_files_base_dir)

File diff suppressed because it is too large — use "Load Diff" to view it.

View File

@@ -434,32 +434,48 @@ class BiasProfile:
@dataclass @dataclass
class Persona: class Persona:
name: str name: str
provider: Optional[str] = None preferred_models: List[Dict[str, Any]] = field(default_factory=list)
model: Optional[str] = None
preferred_models: List[str] = field(default_factory=list)
system_prompt: str = '' system_prompt: str = ''
temperature: Optional[float] = None
top_p: Optional[float] = None
max_output_tokens: Optional[int] = None
tool_preset: Optional[str] = None tool_preset: Optional[str] = None
bias_profile: Optional[str] = None bias_profile: Optional[str] = None
@property
def provider(self) -> Optional[str]:
if not self.preferred_models: return None
return self.preferred_models[0].get("provider")
@property
def model(self) -> Optional[str]:
if not self.preferred_models: return None
return self.preferred_models[0].get("model")
@property
def temperature(self) -> Optional[float]:
if not self.preferred_models: return None
return self.preferred_models[0].get("temperature")
@property
def top_p(self) -> Optional[float]:
if not self.preferred_models: return None
return self.preferred_models[0].get("top_p")
@property
def max_output_tokens(self) -> Optional[int]:
if not self.preferred_models: return None
return self.preferred_models[0].get("max_output_tokens")
def to_dict(self) -> Dict[str, Any]: def to_dict(self) -> Dict[str, Any]:
res = { res = {
"system_prompt": self.system_prompt, "system_prompt": self.system_prompt,
} }
if self.provider is not None:
res["provider"] = self.provider
if self.model is not None:
res["model"] = self.model
if self.preferred_models: if self.preferred_models:
res["preferred_models"] = self.preferred_models processed = []
if self.temperature is not None: for m in self.preferred_models:
res["temperature"] = self.temperature if isinstance(m, str):
if self.top_p is not None: processed.append({"model": m})
res["top_p"] = self.top_p else:
if self.max_output_tokens is not None: processed.append(m)
res["max_output_tokens"] = self.max_output_tokens res["preferred_models"] = processed
if self.tool_preset is not None: if self.tool_preset is not None:
res["tool_preset"] = self.tool_preset res["tool_preset"] = self.tool_preset
if self.bias_profile is not None: if self.bias_profile is not None:
@@ -468,15 +484,34 @@ class Persona:
@classmethod @classmethod
def from_dict(cls, name: str, data: Dict[str, Any]) -> "Persona": def from_dict(cls, name: str, data: Dict[str, Any]) -> "Persona":
raw_models = data.get("preferred_models", [])
parsed_models = []
for m in raw_models:
if isinstance(m, str):
parsed_models.append({"model": m})
else:
parsed_models.append(m)
# Migration logic: merge legacy fields if they exist
legacy = {}
for k in ["provider", "model", "temperature", "top_p", "max_output_tokens"]:
if data.get(k) is not None:
legacy[k] = data[k]
if legacy:
if not parsed_models:
parsed_models.append(legacy)
else:
# Merge into first item if it's missing these specific legacy fields
for k, v in legacy.items():
if k not in parsed_models[0] or parsed_models[0][k] is None:
parsed_models[0][k] = v
return cls( return cls(
name=name, name=name,
provider=data.get("provider"), preferred_models=parsed_models,
model=data.get("model"),
preferred_models=data.get("preferred_models", []),
system_prompt=data.get("system_prompt", ""), system_prompt=data.get("system_prompt", ""),
temperature=data.get("temperature"),
top_p=data.get("top_p"),
max_output_tokens=data.get("max_output_tokens"),
tool_preset=data.get("tool_preset"), tool_preset=data.get("tool_preset"),
bias_profile=data.get("bias_profile"), bias_profile=data.get("bias_profile"),
) )

View File

@@ -59,7 +59,7 @@ def test_load_all_merged(temp_paths):
def test_save_persona(temp_paths): def test_save_persona(temp_paths):
manager = PersonaManager(project_root=temp_paths["project_root"]) manager = PersonaManager(project_root=temp_paths["project_root"])
persona = Persona(name="New", provider="gemini", system_prompt="Test") persona = Persona(name="New", preferred_models=[{"provider": "gemini"}], system_prompt="Test")
manager.save_persona(persona, scope="project") manager.save_persona(persona, scope="project")
loaded = manager.load_all() loaded = manager.load_all()

View File

@@ -4,30 +4,38 @@ from src.models import Persona
def test_persona_serialization(): def test_persona_serialization():
persona = Persona( persona = Persona(
name="SecuritySpecialist", name="SecuritySpecialist",
provider="anthropic", preferred_models=[
model="claude-3-7-sonnet-20250219", {"provider": "anthropic", "model": "claude-3-7-sonnet-20250219", "temperature": 0.2, "top_p": 0.9, "max_output_tokens": 4000},
preferred_models=["claude-3-7-sonnet-20250219", "claude-3-5-sonnet-20241022"], "claude-3-5-sonnet-20241022"
],
system_prompt="You are a security expert.", system_prompt="You are a security expert.",
temperature=0.2,
top_p=0.9,
max_output_tokens=4000,
tool_preset="SecurityTools", tool_preset="SecurityTools",
bias_profile="Execution-Focused" bias_profile="Execution-Focused"
) )
assert persona.provider == "anthropic"
assert persona.model == "claude-3-7-sonnet-20250219"
assert persona.temperature == 0.2
assert persona.top_p == 0.9
assert persona.max_output_tokens == 4000
data = persona.to_dict() data = persona.to_dict()
assert data["provider"] == "anthropic" # data should NOT have top-level provider/model anymore, it's in preferred_models
assert data["model"] == "claude-3-7-sonnet-20250219" assert "provider" not in data
assert "claude-3-5-sonnet-20241022" in data["preferred_models"] assert "model" not in data
assert data["preferred_models"][0]["provider"] == "anthropic"
assert data["preferred_models"][0]["model"] == "claude-3-7-sonnet-20250219"
assert data["preferred_models"][1] == {"model": "claude-3-5-sonnet-20241022"}
assert data["system_prompt"] == "You are a security expert." assert data["system_prompt"] == "You are a security expert."
assert data["temperature"] == 0.2 assert data["preferred_models"][0]["temperature"] == 0.2
assert data["top_p"] == 0.9 assert data["preferred_models"][0]["top_p"] == 0.9
assert data["max_output_tokens"] == 4000 assert data["preferred_models"][0]["max_output_tokens"] == 4000
assert data["tool_preset"] == "SecurityTools" assert data["tool_preset"] == "SecurityTools"
assert data["bias_profile"] == "Execution-Focused" assert data["bias_profile"] == "Execution-Focused"
def test_persona_deserialization(): def test_persona_deserialization():
# Old config format (legacy)
data = { data = {
"provider": "gemini", "provider": "gemini",
"model": "gemini-2.5-flash", "model": "gemini-2.5-flash",
@@ -45,7 +53,9 @@ def test_persona_deserialization():
assert persona.name == "Assistant" assert persona.name == "Assistant"
assert persona.provider == "gemini" assert persona.provider == "gemini"
assert persona.model == "gemini-2.5-flash" assert persona.model == "gemini-2.5-flash"
assert persona.preferred_models == ["gemini-2.5-flash"] # Migration logic should have put legacy fields into preferred_models since it only had a string
assert persona.preferred_models[0]["provider"] == "gemini"
assert persona.preferred_models[0]["model"] == "gemini-2.5-flash"
assert persona.system_prompt == "You are a helpful assistant." assert persona.system_prompt == "You are a helpful assistant."
assert persona.temperature == 0.5 assert persona.temperature == 0.5
assert persona.top_p == 1.0 assert persona.top_p == 1.0