# Add Web Search to CrewAI Agents in 5 Minutes
## The Problem
CrewAI agents are powerful for multi-step reasoning, but they're limited to their training data. A research agent that can't search the web is like a librarian locked out of the library.
## Setup

```bash
pip install crewai httpx
```
Get a free API key from [Searlo Dashboard](https://dashboard.searlo.tech/auth) (includes 3,000 credits).
## Create the Search Tool
import httpx
from crewai.tools import BaseTool
from pydantic import Field
class WebSearchTool(BaseTool):
    """CrewAI tool that queries the Searlo API for live Google results."""

    name: str = "Web Search"
    description: str = "Search Google for real-time information. Use for current events, facts, or research."
    api_key: str = Field(default="your_searlo_api_key")

    def _run(self, query: str) -> str:
        """Search the web and return the top results as readable text.

        Args:
            query: Search query chosen by the agent.

        Returns:
            Up to 5 results formatted as title/snippet/source blocks,
            "No results found." when the search comes back empty, or a
            short error message when the HTTP request fails.
        """
        try:
            response = httpx.get(
                "https://api.searlo.tech/api/v1/search",
                params={"q": query, "num": 5, "format": "toon"},
                headers={"X-API-Key": self.api_key},
                timeout=10.0,
            )
            # Surface HTTP errors (bad key, quota exhausted, 5xx) as a
            # readable message instead of letting .json() blow up on an
            # error body and crash the whole agent run.
            response.raise_for_status()
        except httpx.HTTPError as exc:
            return f"Search failed: {exc}"
        data = response.json()
        results = data.get("organic", [])
        if not results:
            return "No results found."
        # Defensive .get() so one result missing a field doesn't abort the tool.
        return "\n\n".join(
            f"**{r.get('title', '')}**\n{r.get('snippet', '')}\nSource: {r.get('link', '')}"
            for r in results
        )
## Use It in a Crew
import os

from crewai import Agent, Task, Crew

# Read the key from the environment so real keys never get hard-coded or
# committed; the placeholder keeps the snippet runnable exactly as written.
search = WebSearchTool(api_key=os.environ.get("SEARLO_API_KEY", "your_searlo_api_key"))

# Researcher: the only agent with web access; gathers and verifies facts.
researcher = Agent(
    role="Senior Research Analyst",
    goal="Find accurate, current information on the given topic",
    backstory="Expert researcher who always verifies facts with multiple sources.",
    tools=[search],
    verbose=True,
)

# Writer: no tools needed — it works purely from the researcher's output.
writer = Agent(
    role="Content Writer",
    goal="Write clear, well-sourced articles based on research",
    backstory="Experienced writer who transforms research into readable content.",
    verbose=True,
)

research_task = Task(
    description="Research the current state of {topic}. Find at least 3 recent sources.",
    expected_output="A research brief with key findings and source URLs.",
    agent=researcher,
)

write_task = Task(
    description="Write a 500-word article based on the research brief.",
    expected_output="A well-structured article with source citations.",
    agent=writer,
)

# Tasks run sequentially by default: research first, then writing.
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "AI regulation in the EU"})
print(result)
## What Happens
1. The **researcher** agent searches Google via Searlo for current information
2. It can make multiple search calls to verify facts across sources
3. The **writer** agent receives the research and creates the article
4. Total cost: ~5-10 searches = less than $0.01
## Why Searlo for CrewAI
• **Speed**: ~300ms responses don't slow down multi-agent workflows
• **TOON format**: 60% fewer tokens means cheaper LLM calls in the pipeline
• **No subscription**: One-time credits work well for batch agent runs
• **Structured JSON**: Clean data that agents can parse reliably
## More Resources
• [Full integrations guide](/integrations) with LangChain, LlamaIndex, and n8n
• [Search API for AI Agents](/search-api-for-ai-agents) — architecture patterns and use cases
• [Pricing](/pricing) — credit packs start at $2.99
```bash
pip install crewai httpx
```
import httpx
from crewai.tools import BaseTool
from pydantic import Field


class WebSearchTool(BaseTool):
    """CrewAI tool that queries the Searlo API for live Google results."""

    name: str = "Web Search"
    description: str = "Search Google for real-time information. Use for current events, facts, or research."
    api_key: str = Field(default="your_searlo_api_key")

    def _run(self, query: str) -> str:
        """Search the web and return the top results as readable text.

        Args:
            query: Search query chosen by the agent.

        Returns:
            Up to 5 results formatted as title/snippet/source blocks,
            "No results found." when the search comes back empty, or a
            short error message when the HTTP request fails.
        """
        try:
            response = httpx.get(
                "https://api.searlo.tech/api/v1/search",
                params={"q": query, "num": 5, "format": "toon"},
                headers={"X-API-Key": self.api_key},
                timeout=10.0,
            )
            # Surface HTTP errors (bad key, quota exhausted, 5xx) as a
            # readable message instead of letting .json() blow up on an
            # error body and crash the whole agent run.
            response.raise_for_status()
        except httpx.HTTPError as exc:
            return f"Search failed: {exc}"
        data = response.json()
        results = data.get("organic", [])
        if not results:
            return "No results found."
        # Defensive .get() so one result missing a field doesn't abort the tool.
        return "\n\n".join(
            f"**{r.get('title', '')}**\n{r.get('snippet', '')}\nSource: {r.get('link', '')}"
            for r in results
        )
import os

from crewai import Agent, Task, Crew

# Read the key from the environment so real keys never get hard-coded or
# committed; the placeholder keeps the snippet runnable exactly as written.
search = WebSearchTool(api_key=os.environ.get("SEARLO_API_KEY", "your_searlo_api_key"))

# Researcher: the only agent with web access; gathers and verifies facts.
researcher = Agent(
    role="Senior Research Analyst",
    goal="Find accurate, current information on the given topic",
    backstory="Expert researcher who always verifies facts with multiple sources.",
    tools=[search],
    verbose=True,
)

# Writer: no tools needed — it works purely from the researcher's output.
writer = Agent(
    role="Content Writer",
    goal="Write clear, well-sourced articles based on research",
    backstory="Experienced writer who transforms research into readable content.",
    verbose=True,
)

research_task = Task(
    description="Research the current state of {topic}. Find at least 3 recent sources.",
    expected_output="A research brief with key findings and source URLs.",
    agent=researcher,
)

write_task = Task(
    description="Write a 500-word article based on the research brief.",
    expected_output="A well-structured article with source citations.",
    agent=writer,
)

# Tasks run sequentially by default: research first, then writing.
crew = Crew(agents=[researcher, writer], tasks=[research_task, write_task])
result = crew.kickoff(inputs={"topic": "AI regulation in the EU"})
print(result)