Integrations
OpenRouter
OpenRouter exposes an OpenAI-compatible API, so in addition to retrieving tools from Gatana, each tool definition must be converted from the MCP schema to the OpenAI function-calling format.
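For orientation, the conversion looks like this. The get_weather tool here is purely illustrative; the GatanaClient below performs the same mapping for every tool the server advertises.

# A hypothetical MCP tool definition, as returned by the server's list_tools()...
mcp_tool = {
    "name": "get_weather",
    "description": "Get the current weather for a city.",
    "inputSchema": {
        "type": "object",
        "properties": {"city": {"type": "string"}},
        "required": ["city"],
    },
}

# ...and the OpenAI function-calling format OpenRouter expects.
openai_tool = {
    "type": "function",
    "function": {
        "name": mcp_tool["name"],
        "description": mcp_tool["description"],
        "parameters": mcp_tool["inputSchema"],  # both sides use JSON Schema
    },
}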
Example Usage
"""
OpenRouter + Gatana Integration Example
A minimal example showing how to use Gatana MCP servers with OpenRouter.
Requirements:
pip install mcp openai python-dotenv
Environment variables (.env):
OPENAI_API_KEY=sk-or-v1-your-openrouter-key
GATANA_ORG_ID=your-org-id
GATANA_TOKEN=your-gatana-token
"""
import asyncio
import os
from dotenv import load_dotenv
from gatana import GatanaClient
load_dotenv()
# Configuration
ORG_ID = os.getenv("GATANA_ORG_ID")
TOKEN = os.getenv("GATANA_TOKEN") # Or exchange from your federated IDP or directly with Gatana
MODEL = "anthropic/claude-opus-4.5"
if not ORG_ID or not TOKEN:
print("Error: Please set GATANA_ORG_ID and GATANA_TOKEN in your .env file")
print("Copy .env.example to .env and fill in your values")
exit(1)
if not os.getenv("OPENAI_API_KEY"):
print("Error: Please set OPENAI_API_KEY (OpenRouter key) in your .env file")
exit(1)
async def main():
print("OpenRouter + Gatana Demo")
print("-" * 40)
client = GatanaClient(
org_id=ORG_ID,
token=TOKEN,
model=MODEL
)
async with client:
# Show available tools
tools = await client.list_tools()
print(f"Available tools: {tools}\n")
while True:
query = input("\nYou: ").strip()
if query.lower() in ['quit', 'exit', 'q']:
break
if query:
response = await client.chat(query)
print(f"\nAssistant: {response}")
if __name__ == "__main__":
    asyncio.run(main())

GatanaClient
Create a gatana/ package and re-export the client from gatana/__init__.py so the from gatana import GatanaClient import above resolves:
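# gatana/__init__.py -- a minimal sketch; it assumes the module below is
# saved as gatana/client.py
from .client import GatanaClient

__all__ = ["GatanaClient"]

Then add the client module itself (saved as gatana/client.py to match the import above):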
"""
Gatana Client - Bridge MCP servers with OpenRouter.
"""
import json
from typing import Optional
from contextlib import AsyncExitStack
from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client
from openai import OpenAI
def _convert_tool_format(tool):
"""Convert MCP tool definitions to OpenAI-compatible format."""
return {
"type": "function",
"function": {
"name": tool.name,
"description": tool.description,
"parameters": {
"type": "object",
"properties": tool.inputSchema.get("properties", {}),
"required": tool.inputSchema.get("required", [])
}
}
}
class GatanaClient:
"""
A client that bridges Gatana MCP servers with OpenRouter.
This client:
1. Connects to a Gatana MCP server via Streamable HTTP
2. Converts MCP tools to OpenAI-compatible format
3. Uses OpenRouter to get LLM responses with tool calls
4. Executes tool calls via the MCP server
Example:
client = GatanaClient(
org_id="your-org-id",
token="your-gatana-token",
openrouter_api_key="sk-or-..."
)
async with client:
response = await client.chat("Hello!")
print(response)
"""
def __init__(
self,
org_id: str,
token: str,
openrouter_api_key: Optional[str] = None,
model: str = "anthropic/claude-3.5-sonnet",
openrouter_base_url: str = "https://openrouter.ai/api/v1"
):
"""
Initialize the Gatana client.
Args:
org_id: Your Gatana organization ID
token: Gatana authentication token
openrouter_api_key: OpenRouter API key (or set OPENAI_API_KEY env var)
model: Model to use via OpenRouter (default: anthropic/claude-3.5-sonnet)
openrouter_base_url: OpenRouter API base URL
"""
self.org_id = org_id
self.mcp_server_url = f"https://{org_id}.gatana.ai/mcp"
self.model = model
# Build MCP headers
self.mcp_headers = {"Authorization": f"Bearer {token}"}
# Initialize OpenAI client for OpenRouter
self.openai = OpenAI(
api_key=openrouter_api_key,
base_url=openrouter_base_url
)
# Session management
self.session: Optional[ClientSession] = None
self.exit_stack = AsyncExitStack()
self.messages = []
self._connected = False
async def connect(self):
"""Connect to the MCP server."""
if self._connected:
return
http_transport = await self.exit_stack.enter_async_context(
streamablehttp_client(self.mcp_server_url, headers=self.mcp_headers)
)
read_stream, write_stream, _ = http_transport
self.session = await self.exit_stack.enter_async_context(
ClientSession(read_stream, write_stream)
)
await self.session.initialize()
self._connected = True
async def disconnect(self):
"""Disconnect from the MCP server."""
if self._connected:
await self.exit_stack.aclose()
self._connected = False
async def __aenter__(self):
"""Async context manager entry."""
await self.connect()
return self
async def __aexit__(self, exc_type, exc_val, exc_tb):
"""Async context manager exit."""
await self.disconnect()
async def list_tools(self) -> list:
"""List available tools from the MCP server."""
if not self._connected:
raise RuntimeError("Not connected. Call connect() or use async with.")
response = await self.session.list_tools()
return [tool.name for tool in response.tools]
async def chat(
self,
message: str,
max_tokens: int = 1000,
reset_history: bool = False
) -> str:
"""
Send a message and get a response, with automatic tool execution.
Args:
message: The user message to send
max_tokens: Maximum tokens in the response
reset_history: If True, clear conversation history before this message
Returns:
The assistant's response text
"""
if not self._connected:
raise RuntimeError("Not connected. Call connect() or use async with.")
if reset_history:
self.messages = []
self.messages.append({
"role": "user",
"content": message
})
# Get available tools
response = await self.session.list_tools()
available_tools = [_convert_tool_format(tool) for tool in response.tools]
        # Call OpenRouter; omit the tools parameter entirely when the server
        # exposes no tools, since some backends reject tools=None
        kwargs = {
            "model": self.model,
            "messages": self.messages,
            "max_tokens": max_tokens,
        }
        if available_tools:
            kwargs["tools"] = available_tools
        response = self.openai.chat.completions.create(**kwargs)
        # Record the assistant turn; exclude_none keeps null-only fields
        # (e.g. function_call) out of the history that gets resent
        self.messages.append(response.choices[0].message.model_dump(exclude_none=True))
final_text = []
content = response.choices[0].message
if content.tool_calls:
# Process all tool calls
for tool_call in content.tool_calls:
tool_name = tool_call.function.name
tool_args = json.loads(tool_call.function.arguments) if tool_call.function.arguments else {}
try:
result = await self.session.call_tool(tool_name, tool_args)
# Format result content
result_content = result.content
if isinstance(result_content, list):
result_content = json.dumps([
item.model_dump() if hasattr(item, 'model_dump') else str(item)
for item in result_content
])
self.messages.append({
"role": "tool",
"tool_call_id": tool_call.id,
"name": tool_name,
"content": str(result_content)
})
except Exception as e:
self.messages.append({
"role": "tool",
"tool_call_id": tool_call.id,
"name": tool_name,
"content": f"Error: {str(e)}"
})
            # Get the final response after tool execution
            response = self.openai.chat.completions.create(
                model=self.model,
                max_tokens=max_tokens,
                messages=self.messages,
            )
            # Keep the final answer in history so the next turn sees it
            self.messages.append(response.choices[0].message.model_dump(exclude_none=True))
            final_text.append(response.choices[0].message.content)
else:
            # content can be None in edge cases; avoid joining None below
            final_text.append(content.content or "")
return "\n".join(final_text)
def clear_history(self):
"""Clear the conversation history."""
self.messages = []
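GatanaClient keeps the conversation history in self.messages, so multi-turn usage is just repeated chat() calls. A minimal sketch (the org ID, token, and queries are illustrative):

import asyncio
from gatana import GatanaClient

async def demo():
    client = GatanaClient(org_id="your-org-id", token="your-gatana-token")
    async with client:
        # First turn; any tool calls are executed transparently
        print(await client.chat("Which tools can you use?"))
        # Follow-up turn; the client resends the accumulated history
        print(await client.chat("Use one of them to help me."))
        # Start a fresh conversation without reconnecting
        print(await client.chat("New topic.", reset_history=True))

asyncio.run(demo())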