Examples
Working examples demonstrating common Stirrup patterns. Full source code is in the examples/ directory.
Web Search Agent
A simple agent using default tools to search the web.
"""Getting started example for Stirrup.
Demonstrates the core pattern:
1. Create a ChatCompletionsClient for your LLM provider
2. Create an Agent with the client
3. Run it in a session context
4. Agent searches the web and creates a chart as output
"""
import asyncio
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
async def main() -> None:
"""Run an agent that searches the web and creates a chart."""
# Create client using ChatCompletionsClient
# Automatically uses OPENROUTER_API_KEY environment variable
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
)
# As no tools are provided, the agent will use the default tools, which consist of:
# - Web tools (web search and web fetching, note web search requires BRAVE_API_KEY)
# - Local code execution tool (to execute shell commands)
agent = Agent(client=client, name="agent", max_turns=15)
# Run with session context - handles tool lifecycle, logging and file outputs
async with agent.session(output_dir="output/getting_started_example") as session:
_finish_params, _history, _metadata = await session.run(
"""
What is the population of the US over the last 3 years? Search the web to
find out and create a chart using matplotlib showing the population per year.
"""
)
if __name__ == "__main__":
asyncio.run(main())
Note
Web search requires BRAVE_API_KEY environment variable.
Web Calculator
An agent with a calculator tool added to the default tools.
"""Example: Web-enabled calculator agent with simplified session API.
This example demonstrates how to create an agent that can:
1. Perform calculations
2. Search the web (requires BRAVE_API_KEY)
3. Fetch web page content
"""
import asyncio
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools import CALCULATOR_TOOL, DEFAULT_TOOLS
# Create client for OpenRouter
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
)
# Create agent with default tools + calculator tool
agent = Agent(
client=client,
name="web_calculator_agent",
tools=[*DEFAULT_TOOLS, CALCULATOR_TOOL],
)
async def main() -> None:
"""Run a simple web-enabled calculator agent."""
# Create client for OpenRouter
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
max_tokens=50_000,
)
# Create agent with default tools (coding env, web_search, web_fetch) + calculator tool
tools = [*DEFAULT_TOOLS, CALCULATOR_TOOL]
agent = Agent(
client=client,
name="web_calculator_agent",
tools=tools,
max_turns=10,
)
# Run with session context - handles all tool lifecycle and logging
async with agent.session() as session:
_finish_params, _history, _metadata = await session.run(
"""Find the current world population and calculate what 10% of it would be.
Use the web_search tool to find the current world population, then use
the calculator to compute 10% of that number.
When you're done, call the finish tool with your findings."""
)
if __name__ == "__main__":
asyncio.run(main())
Code Execution
Execute code in isolated environments with multiple backend options.
"""Example: Code execution agent (E2B, Docker, or local).
This example demonstrates how to create an agent that can execute shell or Python
code in an isolated execution environment using one of several backends (E2B, Docker, or local).
You can switch between backends by commenting/uncommenting the relevant execution
environment instantiation.
Prerequisites for E2B:
- Set E2B_API_KEY environment variable.
See accompanying comments in the file for backend options.
"""
import asyncio
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools.code_backends.local import LocalCodeExecToolProvider
async def main() -> None:
"""Run a simple code execution agent."""
# Create client for OpenRouter
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
)
# Choose your backend:
code_exec_tool_provider = LocalCodeExecToolProvider() # Local
# code_exec_tool_provider = DockerCodeExecToolProvider.from_image("python:3.12-slim") # Docker
# code_exec_tool_provider = E2BCodeExecToolProvider() # E2B (requires E2B_API_KEY)
agent = Agent(
client=client,
name="code_executor_agent",
tools=[code_exec_tool_provider],
max_turns=20,
)
async with agent.session(
input_files="examples/code_executor/task.txt",
output_dir="output/code_executor_example/",
) as session:
task = """
You are a helpful coding assistant with access to a Python execution environment.
Read the task from the input files and execute it. Use the code_exec tool to run the Python code.
When you're done, call the finish tool with the results.
"""
await session.run(task)
if __name__ == "__main__":
asyncio.run(main())
Backend Options
- Local: LocalCodeExecToolProvider() - runs in a temp directory
- Docker: DockerCodeExecToolProvider.from_image("python:3.12-slim") - requires Docker
- E2B: E2BCodeExecToolProvider() - requires E2B_API_KEY
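To switch backends without editing code, you can also select the provider at runtime, for example from an environment variable. The sketch below follows that idea; the CODE_EXEC_BACKEND variable name and the E2B import path are assumptions, while the Docker and local imports match the examples on this page.

import os

from stirrup.tools.code_backends.local import LocalCodeExecToolProvider

# Select a backend at runtime instead of commenting/uncommenting lines.
backend = os.environ.get("CODE_EXEC_BACKEND", "local")

if backend == "docker":
    from stirrup.tools.code_backends.docker import DockerCodeExecToolProvider

    code_exec_tool_provider = DockerCodeExecToolProvider.from_image("python:3.12-slim")
elif backend == "e2b":
    # Assumed import path for the E2B backend; adjust if it differs in your install.
    from stirrup.tools.code_backends.e2b import E2BCodeExecToolProvider

    code_exec_tool_provider = E2BCodeExecToolProvider()  # requires E2B_API_KEY
else:
    code_exec_tool_provider = LocalCodeExecToolProvider()  # runs in a temp directory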
MCP Integration
Connect to MCP servers for additional tools.
"""Example: MCP-powered agent with simplified session API.
This example demonstrates how to create an agent with access to MCP server tools
using the new session-based API.
Prerequisites:
- Create mcp.json in .mcp/ directory with server configuration
Example .mcp/mcp.json:
{
"mcpServers": {
"deepwiki": {
"url": "https://mcp.deepwiki.com/sse"
}
}
}
"""
import asyncio
from pathlib import Path
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools import DEFAULT_TOOLS
from stirrup.tools.mcp import MCPToolProvider
async def main() -> None:
"""Run an agent with MCP tools."""
# Create client for OpenRouter
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
max_tokens=50_000,
)
# Create agent with default tools + MCP tools
agent = Agent(
client=client,
name="mcp_example_agent",
tools=[*DEFAULT_TOOLS, MCPToolProvider.from_config(".mcp/mcp.json")],
max_turns=20,
)
# Run with session context - handles tool lifecycle, logging, and file saving
async with agent.session(output_dir=Path("./output/mcp_example")) as session:
task = """You have access to MCP server tools and a code execution environment.
Using the same implementation as TheAlgorithms/Python (you can use DeepWiki MCP
to research), write a Python file quicksort.py that implements quicksort and
another that tests (and times) it.
When done, call the finish tool including your findings."""
_finish_params, _history, _metadata = await session.run(task)
if __name__ == "__main__":
asyncio.run(main())
Note
Requires pip install stirrup[mcp] (or: uv add stirrup[mcp]).
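The mcpServers mapping in mcp.json can hold more than one entry. A sketch of a config with a second server (the second entry's name and URL are placeholders, not a real endpoint):

{
    "mcpServers": {
        "deepwiki": {
            "url": "https://mcp.deepwiki.com/sse"
        },
        "another_server": {
            "url": "https://example.com/mcp/sse"
        }
    }
}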
Image Processing
An agent that can download and view images.
"""Example: Web search and image viewing.
This example demonstrates an agent that can:
1. Search the web for images
2. Download images to the execution environment
3. View the downloaded images using ViewImageToolProvider
"""
import asyncio
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools import LocalCodeExecToolProvider, ViewImageToolProvider, WebToolProvider
async def main() -> None:
"""Run an agent that can search the web and view images."""
# Create client for OpenRouter
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
)
# ViewImageToolProvider will automatically use the Agent's CodeExecToolProvider
agent = Agent(
client=client,
name="image_agent",
tools=[
LocalCodeExecToolProvider(),
WebToolProvider(),
ViewImageToolProvider(), # Auto-detects exec_env
],
max_turns=20,
)
async with agent.session(output_dir="output/view_image_example/") as session:
_finish_params, _message_history, _run_metadata = await session.run(
"""Download an image of a kangaroo and describe what you see in it."""
)
if __name__ == "__main__":
asyncio.run(main())
Sub-Agent Pattern
Use one agent as a tool for another. This example shows a supervisor agent coordinating specialized sub-agents for research and report writing.
"""Example: Sub-agent delegation.
This example demonstrates how to create a supervisor agent that delegates tasks
to specialized sub-agents. The supervisor coordinates:
- A research sub-agent with web search and local code execution
- A report writing sub-agent with Docker-based code execution
"""
import asyncio
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools import LocalCodeExecToolProvider, WebToolProvider
from stirrup.tools.code_backends.docker import DockerCodeExecToolProvider
async def main() -> None:
"""Run a supervisor agent that delegates tasks to sub-agents."""
# Create client for OpenRouter (shared across all agents)
client = ChatCompletionsClient(
base_url="https://openrouter.ai/api/v1",
model="anthropic/claude-sonnet-4.5",
max_tokens=64_000,
)
## ------- define research sub-agent ------- ##
research_agent = Agent(
client=client,
name="research_sub_agent",
tools=[WebToolProvider(), LocalCodeExecToolProvider()],
max_turns=10,
system_prompt=(
"You are a research agent. When asked to complete research, save it all to a markdown file "
"(using a code executor tool) and pass the filepath to the finish tool and mention it in the "
"finish_reason. Remember you will need a turn to write the markdown file and a separate turn to finish."
),
)
# Convert agent to a tool for use by supervisor
research_subagent_tool = research_agent.to_tool(
description="Agent that can search the web and return the results.",
)
## ------- define report writing sub-agent ------- ##
report_writing_agent = Agent(
client=client,
name="report_writing_sub_agent",
tools=[DockerCodeExecToolProvider.from_image("ghcr.io/astral-sh/uv:python3.13-bookworm-slim")],
max_turns=10,
)
report_writing_subagent_tool = report_writing_agent.to_tool(
description="Create final reports using coding tools. Use uv to install any dependencies needed.",
)
## ------- define supervisor agent ------- ##
supervisor_agent = Agent(
client=client,
name="supervisor_agent",
tools=[research_subagent_tool, report_writing_subagent_tool, LocalCodeExecToolProvider()],
max_turns=5,
)
async with supervisor_agent.session(output_dir="output/sub_agent_example/") as session:
_finish_params, _message_history, _run_metadata = await session.run(
init_msgs="""
Create a report on the latest hallucination benchmarks.
Use your research sub-agent and report writing sub-agent to create the report.
Output the report as a PDF file and pass the path in the finish tool.
"""
)
if __name__ == "__main__":
asyncio.run(main())
File Transfer Requirement
If a sub-agent has a code execution environment and produces files, the parent agent must also have a CodeExecToolProvider to receive those files.
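A minimal sketch of that wiring, mirroring the example above (the agent names and tool descriptions here are placeholders):

from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
from stirrup.tools import LocalCodeExecToolProvider, WebToolProvider

client = ChatCompletionsClient(
    base_url="https://openrouter.ai/api/v1",
    model="anthropic/claude-sonnet-4.5",
)

# Sub-agent that writes files inside its own execution environment.
worker_agent = Agent(
    client=client,
    name="worker_agent",
    tools=[WebToolProvider(), LocalCodeExecToolProvider()],
)

# The parent also gets a CodeExecToolProvider, so files produced by the
# sub-agent have an execution environment to be transferred into.
parent_agent = Agent(
    client=client,
    name="parent_agent",
    tools=[
        worker_agent.to_tool(description="Worker that saves its findings to files."),
        LocalCodeExecToolProvider(),
    ],
)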
OpenAI-Compatible APIs
Connect to any OpenAI-compatible API by specifying a custom base_url. Ensure you have set the correct environment variables required for the specific provider.
"""Example: Using Agent with Deepseek's OpenAI-compatible API.
Demonstrates connecting to Deepseek using ChatCompletionsClient with a custom base_url.
This same pattern works for any OpenAI-compatible API (vLLM, Ollama, Azure OpenAI,
local models, etc.).
Requires: DEEPSEEK_API_KEY environment variable
"""
import asyncio
import os
from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient
async def main() -> None:
"""Run an agent using Deepseek's API."""
# Create client using Deepseek's OpenAI-compatible endpoint
client = ChatCompletionsClient(
base_url="https://api.deepseek.com",
model="deepseek-chat", # or "deepseek-reasoner" for R1
api_key=os.environ["DEEPSEEK_API_KEY"],
)
agent = Agent(client=client, name="deepseek_agent")
async with agent.session(output_dir="./output") as session:
await session.run("What is 2 + 2? Explain your reasoning step by step.")
if __name__ == "__main__":
asyncio.run(main())
Note
Requires DEEPSEEK_API_KEY environment variable (or the appropriate key for your provider).
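The same client works for a local model served behind an OpenAI-compatible endpoint. A sketch assuming Ollama's default OpenAI-compatible endpoint at http://localhost:11434/v1 and a locally pulled model; Ollama ignores the API key, so a placeholder is passed in case the client expects one:

import asyncio

from stirrup import Agent
from stirrup.clients.chat_completions_client import ChatCompletionsClient


async def main() -> None:
    """Run an agent against a local Ollama server."""
    client = ChatCompletionsClient(
        base_url="http://localhost:11434/v1",  # Ollama's OpenAI-compatible endpoint
        model="llama3.1",  # any model you have pulled locally
        api_key="ollama",  # placeholder; Ollama does not check it
    )
    agent = Agent(client=client, name="local_agent")
    async with agent.session(output_dir="./output/ollama_example") as session:
        await session.run("What is 2 + 2? Explain your reasoning step by step.")


if __name__ == "__main__":
    asyncio.run(main())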
LiteLLM Multi-Provider Support
Use LiteLLM to connect to non-OpenAI providers like Anthropic Claude, Google Gemini, and many others.
"""Example: Using Agent with LiteLLM for multi-provider support.
Demonstrates using LiteLLM to connect to non-OpenAI providers like Anthropic Claude,
Google Gemini, etc. For LiteLLM, create the client directly and pass it to the Agent.
Requires:
- uv pip install stirrup[litellm]
- ANTHROPIC_API_KEY environment variable (for this example)
"""
import asyncio
from stirrup import Agent
from stirrup.clients.litellm_client import LiteLLMClient
async def main() -> None:
"""Run an agent using Anthropic Claude via LiteLLM."""
# Create LiteLLM client for Anthropic Claude
# See https://docs.litellm.ai/docs/providers for all supported providers
client = LiteLLMClient(model_slug="anthropic/claude-sonnet-4-5", max_tokens=64_000)
# Pass client to Agent - model info comes from client.model_slug
agent = Agent(
client=client,
name="claude_agent",
)
async with agent.session(output_dir="./output/litellm_example") as session:
await session.run(
"What has the temperature been in the last 3 days in San Francisco? "
"Provide a brief summary and output a pdf file with the summary and a graph."
)
if __name__ == "__main__":
asyncio.run(main())
Note
Requires pip install stirrup[litellm] (or: uv add stirrup[litellm]) and the appropriate API key for your chosen provider (e.g., ANTHROPIC_API_KEY for Claude).
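Switching providers through LiteLLM is usually just a different model_slug plus that provider's key. A short sketch assuming LiteLLM's gemini/ prefix and a GEMINI_API_KEY environment variable:

from stirrup import Agent
from stirrup.clients.litellm_client import LiteLLMClient

# Same agent wiring, different provider: the gemini/ prefix routes the request
# through LiteLLM's Google Gemini integration (requires GEMINI_API_KEY).
client = LiteLLMClient(model_slug="gemini/gemini-2.0-flash", max_tokens=32_000)
agent = Agent(client=client, name="gemini_agent")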
Custom Finish Tool
Define structured output with a custom finish tool:
import asyncio

from pydantic import BaseModel, Field

from stirrup import Agent, Tool, ToolResult
from stirrup.clients.chat_completions_client import ChatCompletionsClient


class AnalysisResult(BaseModel):
    """Structured analysis output."""

    summary: str = Field(description="Brief summary of findings")
    confidence: float = Field(description="Confidence score 0-1")
    sources: list[str] = Field(description="URLs of sources used")


async def main():
    # Create client for OpenRouter
    client = ChatCompletionsClient(
        base_url="https://openrouter.ai/api/v1",
        model="anthropic/claude-sonnet-4.5",
    )
    # Create custom finish tool
    finish_tool = Tool(
        name="finish",
        description="Complete the analysis with structured results",
        parameters=AnalysisResult,
        executor=lambda p: ToolResult(content="Analysis complete", metadata=None),
    )
    agent = Agent(
        client=client,
        name="analyst",
        finish_tool=finish_tool,
    )
    async with agent.session() as session:
        finish_params, _, _ = await session.run(
            "Analyze the current state of renewable energy adoption globally."
        )
        # finish_params is now typed as AnalysisResult
        print(f"Summary: {finish_params.summary}")
        print(f"Confidence: {finish_params.confidence}")
        print(f"Sources: {finish_params.sources}")


asyncio.run(main())