import pytest
from unittest.mock import patch, MagicMock

from gui_2 import App
import ai_client
from events import EventEmitter
@pytest.fixture
def app_instance():
    """Yield an ``App`` with all external dependencies patched out.

    Config load/save, project/session managers, the immapp run loop, and
    the App's own heavyweight init hooks are all replaced with mocks so the
    instance can be constructed headlessly in a test.
    """
    # ai_client publishes through a module-level emitter; make sure one
    # exists before App wires itself up.
    if getattr(ai_client, 'events', None) is None:
        ai_client.events = EventEmitter()

    patchers = [
        patch('gui_2.load_config', return_value={'ai': {}, 'projects': {}}),
        patch('gui_2.save_config'),
        patch('gui_2.project_manager'),
        patch('gui_2.session_logger'),
        patch('gui_2.immapp.run'),
        patch.object(App, '_load_active_project'),
        patch.object(App, '_fetch_models'),
        patch.object(App, '_load_fonts'),
        patch.object(App, '_post_init'),
    ]
    for p in patchers:
        p.start()
    try:
        yield App()
    finally:
        # Undo in reverse order, mirroring nested `with` semantics.
        for p in reversed(patchers):
            p.stop()
def test_mcp_tool_call_is_dispatched(app_instance):
    """
    Verify that when the AI returns a tool call for an MCP function,
    ai_client dispatches it to mcp_client.

    This will fail until mcp_client is properly integrated.
    """

    class _Usage:
        # Fixed token counts so ai_client's usage accounting has data to read.
        prompt_token_count = 100
        candidates_token_count = 10
        cached_content_token_count = 0

    # The tool invocation the mocked model will emit.  ``.name`` must be
    # assigned after construction: MagicMock(name=...) names the mock itself.
    tool_call = MagicMock()
    tool_call.name = "read_file"
    tool_call.args = {"file_path": "test.txt"}

    # First reply: Gemini-format response carrying the function call.
    part = MagicMock(text="", function_call=tool_call)
    candidate = MagicMock()
    candidate.content.parts = [part]
    candidate.finish_reason.name = "TOOL_CALLING"
    reply_with_tool = MagicMock(candidates=[candidate], usage_metadata=_Usage())

    # Second reply: plain-text answer produced after the tool result is fed back.
    final_reply = MagicMock(text="Final answer", candidates=[],
                            usage_metadata=_Usage())

    # Patch the Gemini client plumbing and the MCP dispatcher.
    with patch("ai_client._ensure_gemini_client"), \
         patch("ai_client._gemini_client") as client, \
         patch('mcp_client.dispatch', return_value="file content") as dispatch:

        chat = client.chats.create.return_value
        chat.send_message.side_effect = [reply_with_tool, final_reply]

        ai_client.set_provider("gemini", "mock-model")

        # Drive the send path that should notice the tool call.
        ai_client.send(
            md_content="some context",
            user_message="read the file",
            base_dir=".",
            file_items=[],
            discussion_history=""
        )

        # The MCP dispatcher must have received exactly the model's request.
        dispatch.assert_called_once_with("read_file", {"file_path": "test.txt"})