import pytest
from src.models import Persona


def test_persona_serialization():
    """A fully-specified Persona exposes its primary model's settings and
    serializes them under preferred_models, not as top-level keys."""
    spec = Persona(
        name="SecuritySpecialist",
        preferred_models=[
            {
                "provider": "anthropic",
                "model": "claude-3-7-sonnet-20250219",
                "temperature": 0.2,
                "top_p": 0.9,
                "max_output_tokens": 4000,
            },
            "claude-3-5-sonnet-20241022",
        ],
        system_prompt="You are a security expert.",
        tool_preset="SecurityTools",
        bias_profile="Execution-Focused",
    )

    # Convenience accessors surface the first preferred model's settings.
    assert spec.provider == "anthropic"
    assert spec.model == "claude-3-7-sonnet-20250219"
    assert spec.temperature == 0.2
    assert spec.top_p == 0.9
    assert spec.max_output_tokens == 4000

    serialized = spec.to_dict()

    # Legacy top-level provider/model keys must be absent; that information
    # now lives exclusively inside preferred_models.
    assert "provider" not in serialized
    assert "model" not in serialized

    primary = serialized["preferred_models"][0]
    assert primary["provider"] == "anthropic"
    assert primary["model"] == "claude-3-7-sonnet-20250219"
    assert primary["temperature"] == 0.2
    assert primary["top_p"] == 0.9
    assert primary["max_output_tokens"] == 4000
    # A bare-string entry serializes as a one-key dict.
    assert serialized["preferred_models"][1] == {"model": "claude-3-5-sonnet-20241022"}

    assert serialized["system_prompt"] == "You are a security expert."
    assert serialized["tool_preset"] == "SecurityTools"
    assert serialized["bias_profile"] == "Execution-Focused"


def test_persona_deserialization():
    """from_dict accepts the legacy flat config layout and migrates the
    top-level provider/model fields into preferred_models."""
    legacy_config = {
        "provider": "gemini",
        "model": "gemini-2.5-flash",
        "preferred_models": ["gemini-2.5-flash"],
        "system_prompt": "You are a helpful assistant.",
        "temperature": 0.5,
        "top_p": 1.0,
        "max_output_tokens": 8192,
        "tool_preset": "Default",
        "bias_profile": "Balanced",
    }

    loaded = Persona.from_dict("Assistant", legacy_config)

    assert loaded.name == "Assistant"
    assert loaded.provider == "gemini"
    assert loaded.model == "gemini-2.5-flash"

    # The migration folds the flat legacy fields into the first
    # preferred_models entry, which started life as a bare string.
    head = loaded.preferred_models[0]
    assert head["provider"] == "gemini"
    assert head["model"] == "gemini-2.5-flash"

    assert loaded.system_prompt == "You are a helpful assistant."
    assert loaded.temperature == 0.5
    assert loaded.top_p == 1.0
    assert loaded.max_output_tokens == 8192
    assert loaded.tool_preset == "Default"
    assert loaded.bias_profile == "Balanced"


def test_persona_defaults():
    """A Persona built with only name + prompt leaves every optional field
    unset, and those unset fields are omitted from the serialized form."""
    bare = Persona(name="Minimal", system_prompt="Just the basics")

    # Every optional scalar defaults to None; the model list defaults empty.
    for attr in ("provider", "model", "temperature", "tool_preset"):
        assert getattr(bare, attr) is None
    assert bare.preferred_models == []

    payload = bare.to_dict()
    # Unset fields must not appear in the dict at all.
    for key in ("provider", "preferred_models", "temperature"):
        assert key not in payload

    # Round-tripping the minimal payload keeps the model list empty.
    restored = Persona.from_dict("Minimal", payload)
    assert restored.preferred_models == []
|