Installation & Setup
Add MCP to your project (uv)
uv add "mcp[cli]"
Add MCP to your project (pip)
pip install "mcp[cli]"
Run standalone MCP dev tools
uv run mcp
Quickstart Server
A simple server that exposes a tool and a resource. Save as `server.py`.
# server.py
from mcp.server.fastmcp import FastMCP

# Create an MCP server
mcp = FastMCP("Demo")

# Add an addition tool
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b

# Add a dynamic greeting resource
@mcp.resource("greeting://{name}")
def get_greeting(name: str) -> str:
    """Get a personalized greeting"""
    return f"Hello, {name}!"
Running Your Server
Development Mode (with Inspector)
mcp dev server.py
Development with dependencies
mcp dev server.py --with pandas --with numpy
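Development with editable local code
If the server imports code from your own project, the dev command can also mount that project in editable mode (this assumes a pyproject.toml is present in the current directory):
mcp dev server.py --with-editable .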
Install in Claude Desktop
mcp install server.py
Install with a custom name
mcp install server.py --name "My Analytics Server"
Install with environment variables
# From command line
mcp install server.py -v API_KEY=abc123
# From file
mcp install server.py -f .env
Direct Execution
mcp run server.py
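mcp run executes the file as-is; to make the server runnable with plain python server.py as well, add an entry point that calls mcp.run(). A minimal addition to the Quickstart server:

# Append to server.py so that `python server.py` also works
if __name__ == "__main__":
    mcp.run()  # defaults to the stdio transport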
Core Concepts: Server
Basic Server
from mcp.server.fastmcp import FastMCP
mcp = FastMCP("My App")
Server with Lifespan & Context
from mcp.server.fastmcp import FastMCP
from contextlib import asynccontextmanager
from collections.abc import AsyncIterator

@asynccontextmanager
async def app_lifespan(server: FastMCP) -> AsyncIterator[dict]:
    # db = await Database.connect()
    print("Server starting up...")
    try:
        yield {"db": "fake_db_connection"}
    finally:
        print("Server shutting down...")
        # await db.disconnect()

mcp = FastMCP("My App", lifespan=app_lifespan)

@mcp.tool()
def query_db() -> str:
    """Tool that uses initialized resources"""
    ctx = mcp.get_context()
    db = ctx.request_context.lifespan_context["db"]
    return f"Querying with {db}"
Core Concepts: Resources
Expose data to LLMs (like GET endpoints).
Static Resource
@mcp.resource("config://app")
def get_config() -> str:
"""Static configuration data"""
return "App configuration here"
Dynamic Resource
@mcp.resource("users://{user_id}/profile")
def get_user_profile(user_id: str) -> str:
"""Dynamic user data"""
return f"Profile data for user {user_id}"
Core Concepts: Tools
Allow LLMs to take actions (like POST endpoints).
Synchronous Tool
@mcp.tool()
def calculate_bmi(weight_kg: float, height_m: float) -> float:
    """Calculate BMI"""
    return weight_kg / (height_m**2)
Asynchronous Tool
import httpx

@mcp.tool()
async def fetch_weather(city: str) -> str:
    """Fetch current weather for a city"""
    async with httpx.AsyncClient() as client:
        # Dummy URL for the example; a real tool would do something like:
        # response = await client.get(f"https://api.weather.com/{city}")
        api_url = f"https://api.weather.com/{city}"
        return f"Weather data for {city} from {api_url}"
Tool with Progress Reporting
from mcp.server.fastmcp import Context

@mcp.tool()
async def long_task(files: list[str], ctx: Context) -> str:
    """Process files with progress tracking"""
    for i, file in enumerate(files):
        await ctx.info(f"Processing {file}")
        await ctx.report_progress(i + 1, len(files))
        # data, mime_type = await ctx.read_resource(f"file://{file}")
    return "Processing complete"
Core Concepts: Prompts
Reusable templates for LLM interactions.
Simple Text Prompt
@mcp.prompt()
def review_code(code: str) -> str:
    return f"Please review this code:\n\n{code}"
Multi-Message Prompt
from mcp.server.fastmcp.prompts import base

@mcp.prompt()
def debug_error(error: str) -> list[base.Message]:
    return [
        base.UserMessage("I'm seeing this error:"),
        base.UserMessage(error),
        base.AssistantMessage("I'll help debug that."),
    ]
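Prompts are rendered on demand by the connected client rather than executed by the server. A minimal sketch of fetching them from an initialized ClientSession (see the client examples later in this document; the error string is illustrative):

async def show_prompts(session: ClientSession) -> None:
    # List the prompts the server exposes
    prompts = await session.list_prompts()
    print([p.name for p in prompts.prompts])

    # Render debug_error with its argument filled in
    result = await session.get_prompt(
        "debug_error", arguments={"error": "TypeError: unsupported operand"}
    )
    for message in result.messages:
        print(message.role, message.content)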
Advanced: Transports & Mounting
Stateless HTTP Server
For deployments that don't need session persistence.
# No session persistence
mcp = FastMCP("StatelessServer", stateless_http=True)

# No session persistence, JSON response (no SSE)
mcp = FastMCP(
    "StatelessServer",
    stateless_http=True,
    json_response=True,
)

# Run with streamable-http transport
if __name__ == "__main__":
    mcp.run(transport="streamable-http")
Mounting Multiple MCP Servers (FastAPI)
# main.py
from fastapi import FastAPI

# Assume echo_mcp_app and math_mcp_app are FastMCP instances
# defined in local modules echo.py and math.py respectively.
from mcp.echo import echo_mcp_app
from mcp.math import math_mcp_app

app = FastAPI()
app.mount("/echo", echo_mcp_app.streamable_http_app())
app.mount("/math", math_mcp_app.streamable_http_app())
Writing MCP Clients
Client for Stdio Server
import asyncio

from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client

async def run_client():
    server_params = StdioServerParameters(
        command="python",
        args=["server.py"],
    )
    async with stdio_client(server_params) as (read, write):
        async with ClientSession(read, write) as session:
            await session.initialize()

            tools = await session.list_tools()
            print(f"Available tools: {tools}")

            result = await session.call_tool("add", {"a": 5, "b": 7})
            print(f"Tool result: {result}")

if __name__ == "__main__":
    asyncio.run(run_client())
Client for Streamable HTTP Server
import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

async def main():
    # Connect to a server running at http://localhost:8000/mcp
    async with streamablehttp_client("http://localhost:8000/mcp") as (
        read,
        write,
        _,  # third value is unused here
    ):
        async with ClientSession(read, write) as session:
            await session.initialize()
            result = await session.call_tool("echo", {"message": "hello"})
            print(f"Tool result: {result}")

if __name__ == "__main__":
    asyncio.run(main())