What if you could teach an AI to search the internet or solve math problems on its own? This guide shows you how to use the Model Context Protocol (MCP) to give AI models new abilities. We'll walk step-by-step through building connections between an AI model and custom tools over standard input/output (STDIO).
You'll learn how to make AI models like Claude use tools you create, all in real-time! Whether you want AI to look things up online or solve problems, this guide has everything you need to know. By the end, you'll be able to make AI do way more than it normally can!
Let's dive in and start building!
Create a virtual environment and run the following command to install the required dependencies:
python -m venv venv
source venv/bin/activate
pip install "mcp[cli]" anthropic python-dotenv requests
Create a .env file and add your API keys:
SERPER_API_KEY=your_serper_api_key_here
ANTHROPIC_API_KEY=your_anthropic_api_key_here
This ensures sensitive credentials remain secure.
Let's begin by creating an MCP server that provides two tools: a web search (backed by the Serper API) and a simple addition function:
# FastMCP provides a decorator-based API for defining MCP tools.
from mcp.server.fastmcp import FastMCP
import requests
import os
from dotenv import load_dotenv

# Pull SERPER_API_KEY (and any other secrets) from the local .env file
# into the process environment.
load_dotenv()

# The MCP server instance; tools are registered on it via @mcp.tool().
mcp = FastMCP()
Configuring Tools in MCP
Tools are registered with the @mcp.tool() decorator, which exposes a plain Python function to any connected MCP client.
For example:
# Serper credentials and endpoint.
# NOTE(review): API_KEY is None when SERPER_API_KEY is unset — confirm the
# .env file is configured before starting the server.
API_KEY = os.getenv("SERPER_API_KEY")
API_URL = "https://google.serper.dev/search"
@mcp.tool()
def serper_search(query: str) -> dict:
    """Search the web using Serper API for user queries.

    Args:
        query: Free-text search query, forwarded as Serper's ``q`` parameter.

    Returns:
        The parsed JSON response from Serper on success, or a dict of the
        form ``{"error": "<message>"}`` when the HTTP request fails.
    """
    headers = {"X-API-KEY": API_KEY, "Content-Type": "application/json"}
    data = {"q": query}
    try:
        # requests has no default timeout; without one, an unreachable
        # endpoint would hang this tool (and the MCP server) indefinitely.
        response = requests.post(API_URL, json=data, headers=headers, timeout=10)
        response.raise_for_status()
        result = response.json()
        print(f"Search result for '{query}': {result}")
        return result
    except requests.exceptions.RequestException as e:
        # Timeouts and HTTP errors both derive from RequestException, so the
        # caller always gets a structured error instead of a crash.
        print(f"Error: {e}")
        return {"error": str(e)}
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    # The docstring above doubles as the tool description sent to the model,
    # so it is kept verbatim.
    print(f"Adding {a} and {b}")
    total = a + b
    return total
if __name__ == "__main__":
    # Serve over stdio: an MCP client spawns this script as a subprocess and
    # exchanges messages with it via stdin/stdout.
    mcp.run(transport="stdio")
import asyncio
import sys
from contextlib import AsyncExitStack
from typing import Optional

from anthropic import Anthropic
from dotenv import load_dotenv
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
# Load ANTHROPIC_API_KEY from .env so the Anthropic() client can find it.
load_dotenv()
class MCPClient:
    """Minimal MCP client: spawns a stdio MCP server and lets Claude call its tools."""

    def __init__(self):
        # Created lazily in connect_to_server(); None until then.
        self.session: Optional[ClientSession] = None
        # Owns the stdio transport and session lifetimes; closed in cleanup().
        self.exit_stack = AsyncExitStack()
        # Reads ANTHROPIC_API_KEY from the environment (loaded via dotenv).
        self.anthropic = Anthropic()

    async def connect_to_server(self, server_script_path: str):
        """Connect to an MCP server and print its available tools.

        Args:
            server_script_path: Path to the server script (.py or .js)

        Raises:
            ValueError: If the path is neither a .py nor a .js file.
        """
        # Determine script type from the file extension only.
        is_python = server_script_path.endswith('.py')
        is_js = server_script_path.endswith('.js')
        if not (is_python or is_js):
            raise ValueError("Server script must be a .py or .js file")
        # Choose the interpreter used to spawn the server subprocess.
        command = "python" if is_python else "node"
        # Set up stdio transport parameters (env=None inherits this process's env).
        server_params = StdioServerParameters(
            command=command,
            args=[server_script_path],
            env=None
        )
        # Spawn the server and open the stdio transport; the exit stack will
        # tear both down in cleanup().
        stdio_transport = await self.exit_stack.enter_async_context(stdio_client(server_params))
        self.stdio, self.write = stdio_transport
        self.session = await self.exit_stack.enter_async_context(ClientSession(self.stdio, self.write))
        await self.session.initialize()
        # List the tools the server exposes, as a connection sanity check.
        response = await self.session.list_tools()
        tools = response.tools
        print("\nConnected to server with tools:", [tool.name for tool in tools])

    async def process_query(self, query: str) -> str:
        """Process a query using Claude and the server's available tools.

        Returns the concatenated text of Claude's replies plus a marker line
        for each tool invocation.
        """
        messages = [{"role": "user", "content": query}]
        # Fetch the tool list fresh from the server and convert it to the
        # shape the Anthropic Messages API expects.
        response = await self.session.list_tools()
        available_tools = [
            {
                "name": tool.name,
                "description": tool.description,
                "input_schema": tool.inputSchema
            }
            for tool in response.tools
        ]
        # First model call: Claude may answer directly or request a tool.
        response = self.anthropic.messages.create(
            model="claude-3-5-sonnet-20241022",
            max_tokens=1000,
            messages=messages,
            tools=available_tools
        )
        tool_results = []
        final_text = []
        # Walk the response content blocks: plain text is collected, tool_use
        # blocks trigger a round-trip through the MCP server.
        for content in response.content:
            if content.type == 'text':
                final_text.append(content.text)
            elif content.type == 'tool_use':
                tool_name = content.name
                tool_args = content.input
                # Execute the requested tool on the MCP server.
                result = await self.session.call_tool(tool_name, tool_args)
                tool_results.append({"call": tool_name, "result": result})
                final_text.append(f"[Calling tool {tool_name} with args {tool_args}]")
                # Extend the conversation with the tool outcome.
                # NOTE(review): this sends the raw MCP result as a plain user
                # turn rather than an Anthropic tool_result block tied to the
                # tool_use id — confirm against the Anthropic tool-use format.
                if hasattr(content, 'text') and content.text:
                    messages.append({"role": "assistant", "content": content.text})
                messages.append({"role": "user", "content": result.content})
                # Follow-up model call to interpret the tool output. It passes
                # no `tools`, so Claude cannot request further tool calls here.
                response = self.anthropic.messages.create(
                    model="claude-3-5-sonnet-20241022",
                    max_tokens=1000,
                    messages=messages,
                )
                final_text.append(response.content[0].text)
        return "\n".join(final_text)

    async def chat_loop(self):
        """Run an interactive chat loop until the user types 'quit'."""
        print("\nMCP Client Started!")
        print("Type your queries or 'quit' to exit.")
        while True:
            try:
                query = input("\nQuery: ").strip()
                if query.lower() == 'quit':
                    break
                response = await self.process_query(query)
                print("\n" + response)
            except Exception as e:
                # Keep the loop alive on per-query failures; report and retry.
                print(f"\nError: {str(e)}")

    async def cleanup(self):
        """Clean up resources (closes the session and stdio transport)."""
        await self.exit_stack.aclose()
Finally, wire up a command-line entry point that connects the client to a server script and starts the chat loop:
async def main():
    """CLI entry point: connect to the given MCP server script and chat.

    Usage: python client.py <path_to_server_script>

    Exits with status 1 when no server script path is supplied.
    """
    if len(sys.argv) < 2:
        print("Usage: python client.py <path_to_server_script>")
        sys.exit(1)
    client = MCPClient()
    try:
        await client.connect_to_server(sys.argv[1])
        await client.chat_loop()
    finally:
        # Always release the stdio transport and session, even on errors.
        await client.cleanup()


if __name__ == "__main__":
    # `sys` is imported at the top of the file. The original imported it only
    # inside this guard, so calling main() from an importing module raised
    # NameError on `sys.argv`.
    asyncio.run(main())
python client.py /path/to/serper_server.py
Type your queries and press Enter in the terminal:
Query: What is Model Context Protocol?
To exit, type:
Query: quit
Suggested Read- MCP Practical Guide with SSE Transport
I hope this guide has equipped you with the practical knowledge to implement MCP with STDIO transport! You've learned how to create seamless integration between AI models and custom tools using a straightforward communication approach. By following this guide, you can integrate MCP into AI-driven applications. This tutorial demonstrated: