Skip to content

Instantly share code, notes, and snippets.

@twinsant
Created April 1, 2025 13:12
Show Gist options
  • Select an option

  • Save twinsant/1869cf0d9287fdd4e9598507be8569ba to your computer and use it in GitHub Desktop.

Select an option

Save twinsant/1869cf0d9287fdd4e9598507be8569ba to your computer and use it in GitHub Desktop.
MCP client that supports loading MCP servers from a Windsurf-like config file.
# --- Environment & observability bootstrap (runs at import time) ---
import os
import json
import sys
import asyncio
from typing import Optional
from contextlib import AsyncExitStack
from colored import Fore, Style
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from anthropic import Anthropic
from anthropic import DefaultHttpxClient
from loguru import logger
from dotenv import load_dotenv
from phoenix.otel import register
# Load .env so the os.getenv calls below can see locally-defined values.
load_dotenv()
# NOTE: the env var read is CLAUDE_API_KEY, not ANTHROPIC_API_KEY.
ANTHROPIC_API_KEY = os.getenv('CLAUDE_API_KEY')
# Optional proxy URL for the Anthropic HTTP client; may be None if unset.
PROXY = os.getenv('PROXY')
PHOENIX_ENDPOINT = os.getenv("PHOENIX_ENDPOINT")
# Register an OpenTelemetry tracer provider with Arize Phoenix so the
# Anthropic calls below are traced under project "agents3".
tracer_provider = register(
project_name="agents3",
endpoint=PHOENIX_ENDPOINT,
)
logger.info('Register AnthropicInstrumentor')
# Imported after register() so instrumentation binds to this tracer provider.
from openinference.instrumentation.anthropic import AnthropicInstrumentor
AnthropicInstrumentor().instrument(tracer_provider=tracer_provider)
class MCPClient:
    """Interactive MCP client.

    Loads server definitions from a Windsurf-style JSON config file,
    connects to each server over stdio, and runs a Claude chat loop in
    which the model may invoke the servers' tools.
    """

    def __init__(self):
        # One dict per connected server: {'name': str, 'session': ClientSession,
        # 'tools': list}; 'tools' is filled in by connect_to_server().
        self.sessions = []
        # Owns every async resource (stdio transports, client sessions) so
        # cleanup() can close them all in reverse order.
        self.exit_stack = AsyncExitStack()
        self.anthropic = Anthropic(
            api_key=ANTHROPIC_API_KEY,
            http_client=DefaultHttpxClient(
                # Downgrade socks5h -> socks5 for the httpx client.
                # Fix: tolerate an unset PROXY env var — the original
                # crashed with AttributeError on None.replace(...).
                proxy=PROXY.replace('socks5h', 'socks5') if PROXY else None
            ),
        )
        self.load_config()

    def load_config(self):
        """Read the MCP server config file into ``self.config``.

        The path comes from the MCP_CONFIG env var, defaulting to
        'mcp_config.json'. Propagates FileNotFoundError /
        json.JSONDecodeError for a missing or malformed file.
        """
        config_path = os.getenv('MCP_CONFIG', 'mcp_config.json')
        with open(config_path, 'r') as f:
            self.config = json.load(f)
        logger.info(f"Loaded config from {config_path}")

    async def connect_to_server(self):
        """Connect to an MCP server.

        Loops over every entry in ``config['mcpServers']``, opens a stdio
        transport and client session for each, then initializes each
        session and caches its tool list on the session record.
        """
        # Loop all available servers in config
        for s_name, s_params in self.config['mcpServers'].items():
            _server_params = StdioServerParameters(
                command=s_params['command'],
                args=s_params['args'],
                env=s_params.get('env', None)
            )
            # Route server stderr to our stdout so diagnostics are visible.
            _stdio_transport = await self.exit_stack.enter_async_context(
                stdio_client(_server_params, errlog=sys.stdout))
            _stdio, _write = _stdio_transport
            s = {
                'name': s_name,
                'session': await self.exit_stack.enter_async_context(
                    ClientSession(_stdio, _write)),
            }
            self.sessions.append(s)
        # List available tools
        for session in self.sessions:
            await session['session'].initialize()
            response = await session['session'].list_tools()
            tools = response.tools
            session['tools'] = tools
            print(f"\nConnected to {Fore.white}{Style.BOLD}{session['name']}{Style.reset} with tools:", [tool.name for tool in tools])

    async def process_query(self, query: str) -> str:
        """Process a query using Claude and available tools.

        Returns the concatenated text of Claude's responses plus a marker
        line for each tool invocation.
        """
        messages = [
            {
                "role": "user",
                "content": query
            }
        ]
        # Advertise every tool from every connected server.
        available_tools = []
        for session in self.sessions:
            available_tools.extend(
                [{"name": tool.name, "description": tool.description, "input_schema": tool.inputSchema}
                 for tool in session['tools']])
        # Initial Claude API call
        response = self.anthropic.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1000,
            messages=messages,
            tools=available_tools
        )
        # Process response and handle tool calls
        final_text = []
        assistant_message_content = []
        for content in response.content:
            if content.type == 'text':
                final_text.append(content.text)
                assistant_message_content.append(content)
            elif content.type == 'tool_use':
                tool_name = content.name
                tool_args = content.input
                # Execute the tool call by finding the server that owns it.
                # Fix: the original left `result` unbound (NameError) — or
                # silently reused a stale result from an earlier iteration —
                # when no connected server offered the requested tool.
                result = None
                for session in self.sessions:
                    if tool_name in [tool.name for tool in session['tools']]:
                        result = await session['session'].call_tool(tool_name, tool_args)
                        final_text.append(f"[Calling tool {Fore.green}{Style.BOLD}{tool_name}{Style.reset} with args {tool_args}]")
                        break
                if result is None:
                    final_text.append(f"[Tool {tool_name} not found on any connected server]")
                    continue
                assistant_message_content.append(content)
                messages.append({
                    "role": "assistant",
                    "content": assistant_message_content
                })
                messages.append({
                    "role": "user",
                    "content": [
                        {
                            "type": "tool_result",
                            "tool_use_id": content.id,
                            "content": result.content
                        }
                    ]
                })
                # Get next response from Claude
                response = self.anthropic.messages.create(
                    model="claude-3-5-sonnet-20241022",
                    max_tokens=1000,
                    messages=messages,
                    tools=available_tools
                )
                final_text.append(response.content[0].text)
        return "\n".join(final_text)

    async def chat_loop(self):
        """Run an interactive chat loop until the user types 'quit'."""
        print(f"\n{Fore.white}{Style.BOLD}MCP Client Started!{Style.reset}")
        print("Type your queries or 'quit' to exit.")
        while True:
            try:
                query = input("\nQuery: ").strip()
                if query.lower() == 'quit':
                    break
                if query == '':
                    continue
                response = await self.process_query(query)
                print("\n" + response)
            except Exception as e:
                # Broad catch is deliberate: keep the REPL alive and show
                # the error to the user instead of crashing.
                print(f"\n{Fore.red}{Style.BOLD}Error: {str(e)}{Style.reset}")

    async def cleanup(self):
        """Clean up resources: close all sessions and stdio transports."""
        await self.exit_stack.aclose()
async def main():
    """Entry point: connect the client, run the REPL, always clean up."""
    mcp_client = MCPClient()
    try:
        await mcp_client.connect_to_server()
        await mcp_client.chat_loop()
    finally:
        # Release stdio transports and sessions even if the loop raised.
        await mcp_client.cleanup()
if __name__ == "__main__":
    # Fix: removed the redundant `import sys` here — sys is already
    # imported at module top and is unused by this guard.
    asyncio.run(main())
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment