From 91e18558ebe1b2af24ae149c044fcbe8d2d8709c Mon Sep 17 00:00:00 2001
From: Ed_
Date: Wed, 23 Apr 2025 19:54:38 -0400
Subject: [PATCH] Add test files

---
 test_claude.py |  84 ++++++++++++++++++++++++++++
 test_client.py | 147 +++++++++++++++++++++++++++++++++++++++++++++++++
 test_mcp.py    |  31 +++++++++++
 test_task.toml |  64 +++++++++++++++++++++
 4 files changed, 326 insertions(+)
 create mode 100644 test_claude.py
 create mode 100644 test_client.py
 create mode 100644 test_mcp.py
 create mode 100644 test_task.toml

diff --git a/test_claude.py b/test_claude.py
new file mode 100644
index 0000000..3f538be
--- /dev/null
+++ b/test_claude.py
@@ -0,0 +1,84 @@
+import anthropic
+import tomli
+import time
+from anthropic import APIError, APIConnectionError, RateLimitError
+
+# Exponential backoff settings
+MAX_RETRIES = 5
+INITIAL_DELAY = 1  # seconds
+BACKOFF_FACTOR = 2
+
+# def add_agent_entry():
+def add_user_entry(role: str, message: str):
+    return
+
+def add_system_entry():
+    return
+
+def test():
+    # Load API credentials and the task prompt from TOML config files.
+    with open('anthropic.toml', 'rb') as f:
+        toml_anthropic = tomli.load(f)
+    with open('test_task.toml', 'rb') as f:
+        toml_task = tomli.load(f)
+
+    sonnet_3_7_latest = "claude-3-7-sonnet-20250219"
+
+    retry_count = 0
+    delay = INITIAL_DELAY
+
+    client = anthropic.Anthropic(api_key=toml_anthropic['API']['API_KEY'])
+
+    # Retry with exponential backoff on transient API failures.
+    while retry_count < MAX_RETRIES:
+        try:
+            response = client.messages.create(
+                model=sonnet_3_7_latest,
+                max_tokens=20000,
+                temperature=0.8,
+                system=[
+                    {
+                        "type": "text",
+                        "text": toml_task['System']['content'],
+                        "cache_control": {"type": "ephemeral"}
+                    }
+                ],
+                messages=[
+                    {
+                        "role": "user",
+                        "content": [
+                            {
+                                "type": "text",
+                                "text": toml_task['User']['background'],
+                                "cache_control": {"type": "ephemeral"}
+                            },
+                            {
+                                "type": "text",
+                                "text": toml_task['User']['request'],
+                                "cache_control": {"type": "ephemeral"}
+                            }
+                        ],
+                    }
+                ]
+            )
+
+            print(f"Usage: {response.usage}")
+            with open('test_response.md', 'w', encoding='utf-8') as f:
+                for content_block in response.content:
+                    f.write(content_block.text + '\n')
+            return
+
+        except (APIError, APIConnectionError, RateLimitError) as e:
+            print(f"Attempt {retry_count + 1} failed: {str(e)}")
+            if retry_count == MAX_RETRIES - 1:
+                raise
+
+            time.sleep(delay)
+            delay *= BACKOFF_FACTOR
+            retry_count += 1
+
+        except Exception as e:
+            print(f"Unexpected error: {str(e)}")
+            raise
+
+if __name__ == "__main__":
+    test()
diff --git a/test_client.py b/test_client.py
new file mode 100644
index 0000000..d3642ed
--- /dev/null
+++ b/test_client.py
@@ -0,0 +1,147 @@
+import asyncio
+import sys
+from typing import Optional
+from contextlib import AsyncExitStack
+
+from mcp import ClientSession, StdioServerParameters
+from mcp.client.stdio import stdio_client
+
+from anthropic import Anthropic
+from dotenv import load_dotenv
+
+load_dotenv()  # load environment variables from .env
+
+class MCPClient:
+    def __init__(self):
+        # Initialize session and client objects
+        self.session: Optional[ClientSession] = None
+        self.exit_stack = AsyncExitStack()
+        self.anthropic = Anthropic()
+
+    async def connect_to_server(self, server_script_path: str):
+        """Connect to an MCP server
+
+        Args:
+            server_script_path: Path to the server script (.py or .js)
+        """
+        is_python = server_script_path.endswith('.py')
+        is_js = server_script_path.endswith('.js')
+        if not (is_python or is_js):
+            raise ValueError("Server script must be a .py or .js file")
+
+        command = "python" if is_python else "node"
+        server_params = StdioServerParameters(
+            command=command,
+            args=[server_script_path],
+            env=None
+        )
+
+        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
+        self.stdio, self.write = stdio_transport
+        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
+
+        await self.session.initialize()
+
+        # List available tools
+        response = await self.session.list_tools()
+        tools = response.tools
+        print("\nConnected to server with tools:", [tool.name for tool in tools])
+
+    async def process_query(self, query: str) -> str:
+        """Process a query using Claude and available tools"""
+        messages = [
+            {
+                "role": "user",
+                "content": query
+            }
+        ]
+
+        response = await self.session.list_tools()
+        available_tools = [{
+            "name": tool.name,
+            "description": tool.description,
+            "input_schema": tool.inputSchema
+        } for tool in response.tools]
+
+        # Initial Claude API call
+        response = self.anthropic.messages.create(
+            model="claude-3-5-sonnet-20241022",
+            max_tokens=1000,
+            messages=messages,
+            tools=available_tools
+        )
+
+        # Process response and handle tool calls
+        tool_results = []
+        final_text = []
+
+        for content in response.content:
+            if content.type == 'text':
+                final_text.append(content.text)
+            elif content.type == 'tool_use':
+                tool_name = content.name
+                tool_args = content.input
+
+                # Execute tool call
+                result = await self.session.call_tool(tool_name, tool_args)
+                tool_results.append({"call": tool_name, "result": result})
+                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
+
+                # Continue conversation with tool results
+                if hasattr(content, 'text') and content.text:
+                    messages.append({
+                        "role": "assistant",
+                        "content": content.text
+                    })
+                messages.append({
+                    "role": "user",
+                    "content": result.content
+                })
+
+                # Get next response from Claude
+                response = self.anthropic.messages.create(
+                    model="claude-3-5-sonnet-20241022",
+                    max_tokens=1000,
+                    messages=messages,
+                )
+
+                final_text.append(response.content[0].text)
+
+        return "\n".join(final_text)
+
+    async def chat_loop(self):
+        """Run an interactive chat loop"""
+        print("\nMCP Client Started!")
+        print("Type your queries or 'quit' to exit.")
+
+        while True:
+            try:
+                query = input("\nQuery: ").strip()
+
+                if query.lower() == 'quit':
+                    break
+
+                response = await self.process_query(query)
+                print("\n" + response)
+
+            except Exception as e:
+                print(f"\nError: {str(e)}")
+
+    async def cleanup(self):
+        """Clean up resources"""
+        await self.exit_stack.aclose()
+
+async def main():
+    if len(sys.argv) < 2:
+        print("Usage: python client.py <path_to_server_script>")
+        sys.exit(1)
+
+    client = MCPClient()
+    try:
+        await client.connect_to_server(sys.argv[1])
+        await client.chat_loop()
+    finally:
+        await client.cleanup()
+
+if __name__ == "__main__":
+    asyncio.run(main())
diff --git a/test_mcp.py b/test_mcp.py
new file mode 100644
index 0000000..1412da4
--- /dev/null
+++ b/test_mcp.py
@@ -0,0 +1,31 @@
+# import anthropic
+# import configparser as ini_parser
+
+# ini = ini_parser.ConfigParser()
+# ini.read('anthropic.ini')
+
+# client = anthropic.Anthropic(
+#     api_key = ini['API']['API_KEY']
+# )
+
+# Raw string: in a normal literal "\b" is a backspace and "\p"/"\g" are
+# invalid escape sequences, so the path would be silently corrupted.
+path_project = r"C:\projects\gencpp\base"
+
+from mcp.server.fastmcp import FastMCP
+
+mcp = FastMCP("Demo")
+
+@mcp.resource("echo://{message}")
+def echo_resource(message: str) -> str:
+    """Echo a message"""
+    return f"Resource echo: {message}!"
+
+@mcp.tool()
+def echo_tool(message: str) -> str:
+    """Echo a message as a tool"""
+    return f"Tool echo: {message}"
+
+
+@mcp.prompt()
+def echo_prompt(message: str) -> str:
+    """Create an echo prompt"""
+    return f"Please process this message: {message}"
diff --git a/test_task.toml b/test_task.toml
new file mode 100644
index 0000000..e2272f9
--- /dev/null
+++ b/test_task.toml
@@ -0,0 +1,64 @@
+
+[System]
+content="""
+Role: Systems engineer aiding with advising modifications to a codebase.
+Codebase Properties:
+- C++
+- staged metaprogramming
+- library
+- handmade community philosophy
+- minimal dependencies
+- written from scratch
+- do not use std
+- do not use templates
+- Stick to codebase's convention.
+
+Response format:
+- Output to file specified by user prompt.
+- Each entry is a "Modification Slice".
+
+Modification Slice, a record made up of the following: