Documentation Index
Fetch the complete documentation index at: https://docs.agno.com/llms.txt
Use this file to discover all available pages before exploring further.
The new Streamable HTTP transport replaces the HTTP+SSE transport from protocol version 2024-11-05.
This transport enables the MCP server to handle multiple client connections, and can also use SSE for server-to-client streaming.
To use it, initialize MCPTools with the URL of the MCP server and set the transport to `streamable-http`:
from agno.agent import Agent
from agno.models.openai import OpenAIResponses
from agno.tools.mcp import MCPTools
# Initialize and connect to the Streamable HTTP MCP server
mcp_tools = MCPTools(url="https://docs.agno.com/mcp", transport="streamable-http")
await mcp_tools.connect()
try:
agent = Agent(model=OpenAIResponses(id="gpt-5.2"), tools=[mcp_tools])
await agent.aprint_response("What can you tell me about MCP support in Agno?", stream=True)
finally:
# Always close the connection when done
await mcp_tools.close()
You can also use the server_params argument to define the MCP connection. This way you can specify the headers to send to the MCP server with every request, and the timeout values:
from agno.tools.mcp import MCPTools, StreamableHTTPClientParams
server_params = StreamableHTTPClientParams(
url=...,
headers=...,
timeout=...,
sse_read_timeout=...,
terminate_on_close=...,
)
# Initialize and connect using server parameters
mcp_tools = MCPTools(server_params=server_params, transport="streamable-http")
await mcp_tools.connect()
try:
# Use mcp_tools with your agent
pass
finally:
await mcp_tools.close()
Complete example
Let’s set up a simple local server and connect to it using the Streamable HTTP transport:
Setup the server
streamable_http_server.py
from mcp.server.fastmcp import FastMCP

# Minimal MCP server exposing two calendar-themed tools.
mcp = FastMCP("calendar_assistant")


@mcp.tool()
def get_events(day: str) -> str:
    """Return the events scheduled for the given day."""
    return f"There are no events scheduled for {day}."


@mcp.tool()
def get_birthdays_this_week() -> str:
    """Return the birthdays happening this week."""
    return "It is your mom's birthday tomorrow"


if __name__ == "__main__":
    # Serve the tools over the Streamable HTTP transport.
    mcp.run(transport="streamable-http")
Setup the client
streamable_http_client.py
import asyncio
from agno.agent import Agent
from agno.models.openai import OpenAIResponses
from agno.tools.mcp import MCPTools, MultiMCPTools
# This is the URL of the MCP server we want to use.
# NOTE(review): matches the default address of the FastMCP server started by
# streamable_http_server.py — confirm if the server is run elsewhere.
server_url: str = "http://localhost:8000/mcp"
async def run_agent(message: str) -> None:
    """Answer ``message`` with an agent backed by the Streamable HTTP MCP server."""
    # Open a connection to the local server before building the agent.
    mcp_tools = MCPTools(transport="streamable-http", url=server_url)
    await mcp_tools.connect()
    try:
        agent = Agent(
            model=OpenAIResponses(id="gpt-5.2"),
            tools=[mcp_tools],
            markdown=True,
        )
        # Stream the answer to stdout as it is produced.
        await agent.aprint_response(message=message, stream=True, markdown=True)
    finally:
        # Close the MCP connection regardless of how the response went.
        await mcp_tools.close()
# Using MultiMCPTools, we can connect to multiple MCP servers at once, even if they use different transports.
# In this example we connect to both our example server (Streamable HTTP transport), and a different server (stdio transport).
async def run_agent_with_multimcp(message: str) -> None:
    """Answer ``message`` with tools drawn from several MCP servers at once."""
    # One toolkit, two servers: an stdio command plus our Streamable HTTP URL.
    mcp_tools = MultiMCPTools(
        commands=["npx -y @openbnb/mcp-server-airbnb --ignore-robots-txt"],
        urls=[server_url],
        urls_transports=["streamable-http"],
    )
    await mcp_tools.connect()
    try:
        agent = Agent(
            model=OpenAIResponses(id="gpt-5.2"),
            tools=[mcp_tools],
            markdown=True,
        )
        await agent.aprint_response(message=message, stream=True, markdown=True)
    finally:
        # Tear down every underlying server connection.
        await mcp_tools.close()
if __name__ == "__main__":
    # Single-server example first, then the multi-server variant.
    asyncio.run(run_agent("Do I have any birthdays this week?"))
    multi_server_question = (
        "Can you check when is my mom's birthday, and if there are any AirBnb listings in SF for two people for that day?"
    )
    asyncio.run(run_agent_with_multimcp(multi_server_question))
Run the server
python streamable_http_server.py
Run the client
python streamable_http_client.py