Skip to main content

Custom Agent

Build your own agent that connects to the Aphelios MCP server programmatically.

Python with the MCP SDK

import asyncio

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

async def main() -> None:
    """Connect to the Aphelios MCP server, list its tools, and fetch one shipment.

    Demonstrates the full client lifecycle: open a streamable-HTTP transport,
    initialize an MCP session, enumerate tools, and invoke ``get_shipment_details``.
    """
    # Authenticate with a bearer API key — replace the placeholder with yours.
    headers = {"Authorization": "Bearer aph_live_YOUR_API_KEY"}

    async with streamablehttp_client(
        "https://api.aphelios.ai/v1/mcp", headers=headers
    ) as (read_stream, write_stream, _):
        async with ClientSession(read_stream, write_stream) as session:
            # The MCP handshake must complete before any other request.
            await session.initialize()

            tools = await session.list_tools()
            print(f"Available tools: {[t.name for t in tools.tools]}")

            result = await session.call_tool(
                "get_shipment_details",
                arguments={"bills_of_lading": ["MAEU123456789"]},
            )
            print(result.content[0].text)


if __name__ == "__main__":
    # Entry point so the snippet is runnable as-is.
    asyncio.run(main())

Install the MCP SDK:

pip install mcp

TypeScript with the MCP SDK

import { Client } from "@modelcontextprotocol/sdk/client/index.js";
import { StreamableHTTPClientTransport } from "@modelcontextprotocol/sdk/client/streamableHttp.js";

// Every request carries the Aphelios API key as a bearer token.
const authHeaders = {
  Authorization: "Bearer aph_live_YOUR_API_KEY",
};

// Streamable-HTTP transport pointed at the Aphelios MCP endpoint.
const transport = new StreamableHTTPClientTransport(
  new URL("https://api.aphelios.ai/v1/mcp"),
  { requestInit: { headers: authHeaders } }
);

// Identify this client to the server and open the connection.
const client = new Client({ name: "my-agent", version: "1.0.0" });
await client.connect(transport);

// Discover what the server exposes.
const { tools } = await client.listTools();
console.log(
  "Available tools:",
  tools.map((t) => t.name)
);

// Ask for up to five shipments currently in exception status.
const result = await client.callTool({
  name: "list_shipments_summary",
  arguments: { shipment_status: "exception", results_per_page: 5 },
});
console.log(result.content);

Install the SDK:

npm install @modelcontextprotocol/sdk

With OpenRouter (LLM + tool calling)

If you want an LLM to autonomously decide when to call Aphelios tools:

import httpx
import json

# Endpoint and credential configuration. Both keys below are placeholders —
# substitute your real OpenRouter and Aphelios API keys before running.
OPENROUTER_API_KEY = "sk-or-v1-..."
MCP_URL = "https://api.aphelios.ai/v1/mcp"
APH_API_KEY = "aph_live_YOUR_API_KEY"


async def get_tools() -> list[dict]:
    """Fetch the MCP server's tool list and convert it to OpenRouter's tool format.

    Returns:
        A list of ``{"type": "function", "function": {...}}`` dicts suitable for
        the ``tools`` field of a chat-completions request.

    Raises:
        httpx.HTTPStatusError: If the MCP server responds with a 4xx/5xx status.
    """
    async with httpx.AsyncClient() as client:
        response = await client.post(
            MCP_URL,
            headers={"Authorization": f"Bearer {APH_API_KEY}"},
            json={"jsonrpc": "2.0", "method": "tools/list", "id": 1},
        )
        # Fail loudly on auth/server errors instead of surfacing an opaque
        # KeyError on "result" below.
        response.raise_for_status()
        mcp_tools = response.json()["result"]["tools"]

    # Map MCP tool metadata onto the "function" tool schema that
    # OpenAI-compatible chat-completions APIs expect.
    return [
        {
            "type": "function",
            "function": {
                "name": tool["name"],
                "description": tool["description"],
                "parameters": tool["inputSchema"],
            },
        }
        for tool in mcp_tools
    ]


async def ask(question: str) -> str:
    """Answer *question* via OpenRouter, executing at most one Aphelios tool call.

    If the model requests a tool, the first tool call is forwarded to the MCP
    server and the tool's text output is returned directly; otherwise the
    model's plain-text reply is returned.

    Raises:
        httpx.HTTPStatusError: If the OpenRouter request fails with 4xx/5xx.
    """
    tools = await get_tools()

    async with httpx.AsyncClient() as client:
        llm_response = await client.post(
            "https://openrouter.ai/api/v1/chat/completions",
            headers={"Authorization": f"Bearer {OPENROUTER_API_KEY}"},
            json={
                "model": "mistralai/mistral-medium-3.1",
                "messages": [{"role": "user", "content": question}],
                "tools": tools,
            },
        )
        llm_response.raise_for_status()

        choice = llm_response.json()["choices"][0]["message"]

        # "tool_calls" may be absent, None, or an empty list when the model
        # answers directly — .get() plus truthiness covers all three, whereas
        # `"tool_calls" in choice` wrongly passes on an explicit null.
        if choice.get("tool_calls"):
            tool_call = choice["tool_calls"][0]
            mcp_result = await client.post(
                MCP_URL,
                headers={"Authorization": f"Bearer {APH_API_KEY}"},
                json={
                    "jsonrpc": "2.0",
                    "method": "tools/call",
                    "params": {
                        "name": tool_call["function"]["name"],
                        # OpenRouter serializes tool arguments as a JSON string.
                        "arguments": json.loads(
                            tool_call["function"]["arguments"]
                        ),
                    },
                    "id": 1,
                },
            )
            return mcp_result.json()["result"]["content"][0]["text"]

        return choice["content"]

See Model Recommendations for which models work best with this approach.

Give your AI agent context

If you're using an AI coding assistant (Cursor, Claude Code, Copilot) to build your integration, point it to our machine-readable docs:

https://docs.aphelios.ai/llms.txt