refactor(gui): redesign persona editor UI and replace popup modals with standard windows
This commit is contained in:
@@ -300,7 +300,9 @@ class AppController:
|
||||
self._inject_mode: str = "skeleton"
|
||||
self._inject_preview: str = ""
|
||||
self._show_inject_modal: bool = False
|
||||
self.show_preset_manager_modal: bool = False
|
||||
self.show_preset_manager_window: bool = False
|
||||
self.show_tool_preset_manager_window: bool = False
|
||||
self.show_persona_editor_window: bool = False
|
||||
self._editing_preset_name: str = ""
|
||||
self._editing_preset_content: str = ""
|
||||
self._editing_preset_temperature: float = 0.0
|
||||
@@ -342,7 +344,9 @@ class AppController:
|
||||
'ui_active_tool_preset': 'ui_active_tool_preset',
|
||||
'temperature': 'temperature',
|
||||
'max_tokens': 'max_tokens',
|
||||
'show_preset_manager_modal': 'show_preset_manager_modal',
|
||||
'show_preset_manager_window': 'show_preset_manager_window',
|
||||
'show_tool_preset_manager_window': 'show_tool_preset_manager_window',
|
||||
'show_persona_editor_window': 'show_persona_editor_window',
|
||||
'_editing_preset_name': '_editing_preset_name',
|
||||
'_editing_preset_content': '_editing_preset_content',
|
||||
'_editing_preset_temperature': '_editing_preset_temperature',
|
||||
@@ -390,7 +394,9 @@ class AppController:
|
||||
'ui_active_tool_preset': 'ui_active_tool_preset',
|
||||
'temperature': 'temperature',
|
||||
'max_tokens': 'max_tokens',
|
||||
'show_preset_manager_modal': 'show_preset_manager_modal',
|
||||
'show_preset_manager_window': 'show_preset_manager_window',
|
||||
'show_tool_preset_manager_window': 'show_tool_preset_manager_window',
|
||||
'show_persona_editor_window': 'show_persona_editor_window',
|
||||
'_editing_preset_name': '_editing_preset_name',
|
||||
'_editing_preset_content': '_editing_preset_content',
|
||||
'_editing_preset_temperature': '_editing_preset_temperature',
|
||||
@@ -2567,3 +2573,4 @@ class AppController:
|
||||
tasks=self.active_track.tickets
|
||||
)
|
||||
project_manager.save_track_state(self.active_track.id, state, self.ui_files_base_dir)
|
||||
|
||||
|
||||
1059
src/gui_2.py
1059
src/gui_2.py
File diff suppressed because it is too large
Load Diff
@@ -434,32 +434,48 @@ class BiasProfile:
|
||||
@dataclass
class Persona:
    """A named model persona: an ordered list of preferred model configs plus prompt/tooling settings.

    ``preferred_models`` is the single source of truth for provider/model/sampling
    settings; each entry is a dict with optional keys ``provider``, ``model``,
    ``temperature``, ``top_p``, ``max_output_tokens``. Legacy personas that stored
    these as top-level fields (or listed models as bare strings) are migrated in
    :meth:`from_dict`. The read-only properties below expose the first (highest
    priority) entry for backward-compatible attribute access.
    """

    name: str
    # Normalized entries are dicts; bare-string entries are accepted on input
    # and converted to {"model": <str>} (see from_dict / to_dict).
    preferred_models: List[Dict[str, Any]] = field(default_factory=list)
    system_prompt: str = ''
    tool_preset: Optional[str] = None
    bias_profile: Optional[str] = None

    @property
    def provider(self) -> Optional[str]:
        """Provider of the first preferred model, or None if the list is empty."""
        if not self.preferred_models:
            return None
        return self.preferred_models[0].get("provider")

    @property
    def model(self) -> Optional[str]:
        """Model name of the first preferred model, or None if the list is empty."""
        if not self.preferred_models:
            return None
        return self.preferred_models[0].get("model")

    @property
    def temperature(self) -> Optional[float]:
        """Sampling temperature of the first preferred model, if set."""
        if not self.preferred_models:
            return None
        return self.preferred_models[0].get("temperature")

    @property
    def top_p(self) -> Optional[float]:
        """Nucleus-sampling top_p of the first preferred model, if set."""
        if not self.preferred_models:
            return None
        return self.preferred_models[0].get("top_p")

    @property
    def max_output_tokens(self) -> Optional[int]:
        """Output-token cap of the first preferred model, if set."""
        if not self.preferred_models:
            return None
        return self.preferred_models[0].get("max_output_tokens")

    def to_dict(self) -> Dict[str, Any]:
        """Serialize to the on-disk dict form (``name`` is stored as the key, not here).

        Bare-string entries in ``preferred_models`` are normalized to
        ``{"model": <str>}`` so the stored form is always a list of dicts.
        """
        res: Dict[str, Any] = {
            "system_prompt": self.system_prompt,
        }
        # Normalize any legacy string entries before persisting.
        processed = [
            {"model": m} if isinstance(m, str) else m
            for m in self.preferred_models
        ]
        res["preferred_models"] = processed
        if self.tool_preset is not None:
            res["tool_preset"] = self.tool_preset
        if self.bias_profile is not None:
            # NOTE(review): tail of the original to_dict was cut by the diff hunk;
            # assumed to mirror the tool_preset handling — confirm against gui_2.py.
            res["bias_profile"] = self.bias_profile
        return res

    @classmethod
    def from_dict(cls, name: str, data: Dict[str, Any]) -> "Persona":
        """Build a Persona from stored data, migrating legacy flat fields.

        Migration rules:
        - bare-string model entries become ``{"model": <str>}``;
        - legacy top-level ``provider``/``model``/``temperature``/``top_p``/
          ``max_output_tokens`` become (or merge into) the first entry of
          ``preferred_models``, never overwriting values already present there.
        """
        raw_models = data.get("preferred_models", [])
        parsed_models: List[Dict[str, Any]] = [
            {"model": m} if isinstance(m, str) else m
            for m in raw_models
        ]

        # Migration logic: merge legacy flat fields if they exist.
        legacy = {}
        for k in ["provider", "model", "temperature", "top_p", "max_output_tokens"]:
            if data.get(k) is not None:
                legacy[k] = data[k]

        if legacy:
            if not parsed_models:
                parsed_models.append(legacy)
            else:
                # Merge into the first entry only where it lacks the legacy field;
                # explicit per-entry values win over legacy top-level ones.
                for k, v in legacy.items():
                    if k not in parsed_models[0] or parsed_models[0][k] is None:
                        parsed_models[0][k] = v

        return cls(
            name=name,
            preferred_models=parsed_models,
            system_prompt=data.get("system_prompt", ""),
            tool_preset=data.get("tool_preset"),
            bias_profile=data.get("bias_profile"),
        )
|
||||
|
||||
|
||||
Reference in New Issue
Block a user