    </body>
    </html>
    """
    return HTMLResponse(html_content)

# Create Starlette application with SSE transport
def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Create a Starlette application that can serve the provided MCP server with SSE."""
    sse = SseServerTransport("/messages/")

    async def handle_sse(request: Request) -> None:
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,
        ) as (read_stream, write_stream):
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )

    return Starlette(
        debug=debug,
        routes=[
            Route("/", endpoint=homepage),  # Add the homepage route
            Route("/sse", endpoint=handle_sse),
            Mount("/messages/", app=sse.handle_post_message),
        ],
    )

if __name__ == "__main__":
    mcp_server = mcp._mcp_server

    # Create and run the Starlette app
    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host="0.0.0.0", port=8080)
```

### 3. Define your tools

Add tool implementations to `server.py`:

```python
import httpx

@mcp.tool()
async def get_company_data(resource_id: str) -> str:
    """Get data from your company API.

    Args:
        resource_id: The ID of the resource to fetch
    """
    # Implement your API call here
    async with httpx.AsyncClient() as client:
        response = await client.get(
            f"https://api.your-company.com/data/{resource_id}",
            headers={"Authorization": "Bearer YOUR_API_KEY"}
        )
        response.raise_for_status()
        return response.text
```

### 4. Add authentication (if required)

For APIs requiring authentication:

```python
import os
from mcp.server.types import LogLevel

# Get API key from environment
API_KEY = os.environ.get("COMPANY_API_KEY")

if not API_KEY:
    mcp.send_log_message(
        level=LogLevel.ERROR,
        data="API key not found. Set COMPANY_API_KEY environment variable."
    )
```
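If every tool needs the same credentials, it can help to build the authentication into one shared client instead of repeating it per call. The sketch below is illustrative only: it assumes the `COMPANY_API_KEY` variable from step 4 and the placeholder `api.your-company.com` endpoint from step 3, and the tool and client names are hypothetical.

```python
import os
import httpx

# Shared client that carries the auth header on every request.
# Assumes COMPANY_API_KEY is set in the environment (see step 4);
# the base URL is the same placeholder used in step 3.
_http_client = httpx.AsyncClient(
    base_url="https://api.your-company.com",
    headers={"Authorization": f"Bearer {os.environ.get('COMPANY_API_KEY', '')}"},
    timeout=30.0,
)

@mcp.tool()
async def get_company_record(record_id: str) -> str:
    """Fetch a record through the shared, authenticated client.

    Args:
        record_id: The ID of the record to fetch
    """
    # Validate input before making the API call
    if not record_id.strip():
        return "Error: record_id must not be empty."
    response = await _http_client.get(f"/data/{record_id}")
    response.raise_for_status()
    return response.text
```

Reusing one `AsyncClient` also gives you connection pooling for free; see the Performance Optimization notes later in this guide.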
### 5. Run your server

```bash
python server.py
```

Your MCP server will now be accessible at `http://localhost:8080` for the web interface and `http://localhost:8080/sse` for the SSE endpoint.

## Example Server: Weather API

Below is a fully annotated implementation of a Weather API MCP server:

```python
from typing import Any
import httpx
from mcp.server.fastmcp import FastMCP          # Main MCP server class
from starlette.applications import Starlette    # ASGI framework
from mcp.server.sse import SseServerTransport   # SSE transport implementation
from starlette.requests import Request
from starlette.responses import HTMLResponse
from starlette.routing import Mount, Route
from mcp.server import Server                   # Base server class
import uvicorn                                  # ASGI server

# Initialize FastMCP server with a name
# This name appears to clients when they connect
mcp = FastMCP("weather")

# Constants for API access
NWS_API_BASE = "https://api.weather.gov"
USER_AGENT = "weather-app/1.0"  # Required by the NWS API

async def make_nws_request(url: str) -> dict[str, Any] | None:
    """Make a request to the NWS API with proper error handling.

    This helper function centralizes API communication logic and error handling.
    """
    headers = {
        "User-Agent": USER_AGENT,         # NWS requires a user agent
        "Accept": "application/geo+json"  # Request GeoJSON format
    }
    async with httpx.AsyncClient() as client:
        try:
            response = await client.get(url, headers=headers, timeout=30.0)
            response.raise_for_status()
            return response.json()
        except Exception:
            return None  # Return None on any error

def format_alert(feature: dict) -> str:
    """Format an alert feature into a readable string.

    Extracts and formats the most important information from an alert.
    """
    props = feature["properties"]
    return f"""
Event: {props.get('event', 'Unknown')}
Area: {props.get('areaDesc', 'Unknown')}
Severity: {props.get('severity', 'Unknown')}
Description: {props.get('description', 'No description available')}
Instructions: {props.get('instruction', 'No specific instructions provided')}
"""
# Define a tool using the @mcp.tool() decorator
# This makes the function available as a callable tool to MCP clients
@mcp.tool()
async def get_alerts(state: str) -> str:
    """Get weather alerts for a US state.

    Args:
        state: Two-letter US state code (e.g. CA, NY)
    """
    url = f"{NWS_API_BASE}/alerts/active/area/{state}"
    data = await make_nws_request(url)

    if not data or "features" not in data:
        return "Unable to fetch alerts or no alerts found."

    if not data["features"]:
        return "No active alerts for this state."

    alerts = [format_alert(feature) for feature in data["features"]]
    return "\n---\n".join(alerts)

# Define another tool
@mcp.tool()
async def get_forecast(latitude: float, longitude: float) -> str:
    """Get weather forecast for a location.

    Args:
        latitude: Latitude of the location
        longitude: Longitude of the location
    """
    # First get the forecast grid endpoint
    points_url = f"{NWS_API_BASE}/points/{latitude},{longitude}"
    points_data = await make_nws_request(points_url)

    if not points_data:
        return "Unable to fetch forecast data for this location."

    # Get the forecast URL from the points response
    forecast_url = points_data["properties"]["forecast"]
    forecast_data = await make_nws_request(forecast_url)

    if not forecast_data:
        return "Unable to fetch detailed forecast."

    # Format the periods into a readable forecast
    periods = forecast_data["properties"]["periods"]
    forecasts = []
    for period in periods[:5]:  # Only show the next 5 periods
        forecast = f"""
{period['name']}:
Temperature: {period['temperature']}°{period['temperatureUnit']}
Wind: {period['windSpeed']} {period['windDirection']}
Forecast: {period['detailedForecast']}
"""
        forecasts.append(forecast)

    return "\n---\n".join(forecasts)

# HTML for the homepage that displays "MCP Server"
async def homepage(request: Request) -> HTMLResponse:
    html_content = """
    <!DOCTYPE html>
    <html>
    <head>
        <title>MCP Server</title>
    </head>
    <body>
        <h1>MCP Server</h1>
        <p>Server is running correctly!</p>
        <div id="status">Connection status will appear here...</div>
    </body>
    </html>
    """
    return HTMLResponse(html_content)

# Create a Starlette application with SSE transport
def create_starlette_app(mcp_server: Server, *, debug: bool = False) -> Starlette:
    """Create a Starlette application that can serve the provided MCP server with SSE.

    This sets up the HTTP routes and SSE connection handling.
    """
    # Create an SSE transport with a path for messages
    sse = SseServerTransport("/messages/")

    # Handler for SSE connections
    async def handle_sse(request: Request) -> None:
        async with sse.connect_sse(
            request.scope,
            request.receive,
            request._send,  # access private attribute
        ) as (read_stream, write_stream):
            # Run the MCP server with the SSE streams
            await mcp_server.run(
                read_stream,
                write_stream,
                mcp_server.create_initialization_options(),
            )

    # Create and return the Starlette application
    return Starlette(
        debug=debug,
        routes=[
            Route("/", endpoint=homepage),                     # Homepage route
            Route("/sse", endpoint=handle_sse),                # Endpoint for SSE connections
            Mount("/messages/", app=sse.handle_post_message),  # Endpoint for client messages
        ],
    )

if __name__ == "__main__":
    # Get the underlying MCP server from the FastMCP wrapper
    mcp_server = mcp._mcp_server

    import argparse

    # Parse command-line arguments
    parser = argparse.ArgumentParser(description='Run MCP SSE-based server')
    parser.add_argument('--host', default='0.0.0.0', help='Host to bind to')
    parser.add_argument('--port', type=int, default=8080, help='Port to listen on')
    args = parser.parse_args()

    # Create and run the Starlette application
    starlette_app = create_starlette_app(mcp_server, debug=True)
    uvicorn.run(starlette_app, host=args.host, port=args.port)
```

## Testing Your MCP Server

### Manual Testing with the MCP Inspector

The MCP Inspector is a command-line tool for testing MCP servers:

```bash
npx @modelcontextprotocol/inspector
```

Connect to your server:

```
> connect sse http://localhost:8080/sse
```

List available tools:

```
> list tools
```

Call a tool:

```
> call get_forecast --latitude 37.7749 --longitude -122.4194
```

## Advanced Configuration

### Adding Resources

Resources provide data to the client application:

```python
@mcp.resource("company-data://{id}")
async def company_data_resource(id: str) -> tuple[str, str]:
    """Provide company data as a resource.

    Args:
        id: Resource identifier

    Returns:
        Tuple of (content, mime_type)
    """
    # Fetch data from your API
    data = await fetch_company_data(id)
    return data, "application/json"
```

### Adding Prompts

Prompts create templates that users can invoke:

```python
@mcp.prompt()
def data_analysis_prompt(resource_id: str) -> str:
    """Create a prompt for analyzing company data.

    Args:
        resource_id: ID of the data to analyze
    """
    return f"""Please analyze the company data with ID {resource_id}.
Focus on key metrics and provide actionable insights."""
```

### Server Lifecycle Management

For more control over server initialization and shutdown:

```python
from contextlib import asynccontextmanager
from collections.abc import AsyncIterator

@asynccontextmanager
async def server_lifespan(server: Server) -> AsyncIterator[dict]:
    """Manage server startup and shutdown lifecycle."""
    # Initialize resources on startup
    api_client = await setup_api_client()
    try:
        yield {"api_client": api_client}
    finally:
        # Clean up on shutdown
        await api_client.close()

# Create server with lifespan
from mcp.server import Server
server = Server("my-company-api", lifespan=server_lifespan)

# Access context in handlers
@server.call_tool()
async def api_tool(name: str, arguments: dict) -> str:
    ctx = server.request_context
    api_client = ctx.lifespan_context["api_client"]
    return await api_client.request(arguments["endpoint"])
```
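The lifespan example above calls `setup_api_client()` without defining it. Here is one possible sketch of such a helper, assuming a bearer-token HTTP API; the class name, base URL, and environment variable are illustrative placeholders rather than part of the MCP SDK. The wrapper mirrors the interface the example uses: an async `request(endpoint)` and an async `close()`.

```python
import os
import httpx

class CompanyApiClient:
    """Thin wrapper matching the interface used in the lifespan example."""

    def __init__(self) -> None:
        # Base URL and environment variable are placeholders for your own API.
        self._http = httpx.AsyncClient(
            base_url="https://api.your-company.com",
            headers={"Authorization": f"Bearer {os.environ.get('COMPANY_API_KEY', '')}"},
            timeout=30.0,
        )

    async def request(self, endpoint: str) -> str:
        # Fetch an endpoint and return its body as text
        response = await self._http.get(endpoint)
        response.raise_for_status()
        return response.text

    async def close(self) -> None:
        # Release pooled connections on shutdown
        await self._http.aclose()

async def setup_api_client() -> CompanyApiClient:
    """Create the client stored in the lifespan context at startup."""
    return CompanyApiClient()
```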
## Best Practices

### Error Handling

Implement comprehensive error handling:

```python
@mcp.tool()
async def api_tool(param: str) -> str:
    try:
        # Your API call goes here; assign its output to `result`
        result = "..."  # placeholder
        return result
    except httpx.RequestError:
        return "Error: Could not connect to the API. Please check your network."
    except httpx.HTTPStatusError as e:
        if e.response.status_code == 401:
            return "Error: Authentication failed. Please check your API key."
        elif e.response.status_code == 404:
            return f"Error: Resource '{param}' not found."
        else:
            return f"Error: HTTP error {e.response.status_code}"
    except Exception as e:
        # Log the full error for debugging
        mcp.send_log_message(level="error", data=f"Unexpected error: {str(e)}")
        return "An unexpected error occurred. Please try again later."
```

### Security Considerations

1. **API Key Management**: Never hardcode API keys; use environment variables
2. **Input Validation**: Validate all inputs before making API calls
3. **Rate Limiting**: Implement rate limiting to prevent abuse
4. **Error Information**: Don't expose sensitive information in error messages

### Performance Optimization

1. **Connection Pooling**: Reuse HTTP connections when possible
2. **Caching**: Cache API responses for frequently requested data
3. **Asynchronous Operations**: Use async for all I/O operations
4. **Timeout Handling**: Set reasonable timeouts for external API calls

## Troubleshooting

### Common Issues and Solutions

1. **Connection Errors**
   - Check network connectivity
   - Verify the server is running and accessible
   - Ensure correct host/port configuration

2. **Authentication Failures**
   - Verify API keys are correct
   - Check for expired credentials
   - Ensure proper authorization headers

3. **Timeout Issues**
   - Increase timeout values for long-running operations (see the sketch after this list)
   - Optimize API calls for performance
   - Consider implementing request chunking

4. **Protocol Errors**
   - Verify MCP version compatibility
   - Check message format compliance
   - Review server and client logs
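For the connection and timeout issues above, raising the client timeout and retrying transient failures usually goes a long way. Below is a minimal sketch along the lines of the `make_nws_request` helper from the weather example; the retry count and backoff values are arbitrary choices, not requirements of MCP or httpx.

```python
import asyncio
import httpx

async def request_with_retry(url: str, retries: int = 3, timeout: float = 60.0) -> dict | None:
    """GET a JSON resource, retrying transient failures with exponential backoff."""
    async with httpx.AsyncClient(timeout=timeout) as client:
        for attempt in range(retries):
            try:
                response = await client.get(url)
                response.raise_for_status()
                return response.json()
            except (httpx.RequestError, httpx.HTTPStatusError):
                if attempt == retries - 1:
                    return None  # out of retries; let the tool report a friendly error
                await asyncio.sleep(2 ** attempt)  # back off 1s, 2s, ...
    return None
```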
### Logging

To enable detailed logging:

```python
import logging
logging.basicConfig(level=logging.DEBUG)
```

To send logs to the MCP client:

```python
@mcp.tool()
async def complex_tool(param: str) -> str:
    mcp.send_log_message(level="info", data=f"Processing request with param: {param}")
    result = f"Processed {param}"  # process the request here (placeholder)
    mcp.send_log_message(level="info", data="Request processing complete")
    return result
```

## Integration with the NANDA Client

Connect to the NANDA client through our [web interface](https://main.dayer1hj1pz2p.amplifyapp.com).

### Adding a New Server

- Server ID
  - Choose a unique identifier to distinguish your server
- Server Name
  - Set a clear, descriptive name for easy identification
- Server URL
  - Specify the SSE endpoint URL for your NANDA server

## API Reference

### FastMCP Class

Main class for creating MCP servers:

```python
from mcp.server.fastmcp import FastMCP

mcp = FastMCP(
    name="my-server",
    description="My API server",
    version="1.0.0"
)
```

### Decorators

- `@mcp.tool()`: Define a function as an MCP tool
- `@mcp.resource(pattern)`: Define a function as an MCP resource
- `@mcp.prompt()`: Define a function as an MCP prompt

### Server Methods

- `mcp.send_log_message(level, data)`: Send log messages to the client
- `mcp.sse_app()`: Create an ASGI app for SSE transport

## Additional Documentation

For more detailed information, refer to:

1. [Model Context Protocol documentation](https://modelcontextprotocol.io/introduction)
2. [Python SDK documentation](https://github.com/modelcontextprotocol/python-sdk)

For questions and community support, email dec-ai@media.mit.edu.