The CrewAI integration enables you to create intelligent agents that can automate web interactions using natural language instructions. The StagehandTool wraps the Stagehand Python SDK to provide CrewAI agents with browser automation capabilities.
The CrewAI integration uses the Stagehand Python SDK. For TypeScript/JavaScript projects, use the LangChain integration instead.
# Browserbase credentials (required when running with env="browserbase")
BROWSERBASE_API_KEY="your-browserbase-api-key"
BROWSERBASE_PROJECT_ID="your-browserbase-project-id"
# LLM credentials used by the agent
OPENAI_API_KEY="your-openai-api-key" # or other LLM provider
import asyncio

from crewai import Agent, Task, Crew
from stagehand import Stagehand
from stagehand_crewai import StagehandTool


async def main() -> None:
    """Initialize a Stagehand session and build a browser-capable CrewAI agent.

    The original snippet passed an undefined ``stagehand`` object to
    ``StagehandTool``; the session must be created and initialized first.
    ``Stagehand.init()`` is a coroutine, so the setup runs inside an async
    entry point.
    """
    stagehand = Stagehand(env="browserbase")
    await stagehand.init()

    # Wrap the live browser session in a CrewAI-compatible tool
    stagehand_tool = StagehandTool(stagehand=stagehand)

    # Create an agent with browser automation capabilities
    browser_agent = Agent(
        role="Web Automation Specialist",
        goal="Navigate websites and extract information",
        backstory="Expert at automating web interactions and data extraction",
        tools=[stagehand_tool],
        verbose=True,
    )


if __name__ == "__main__":
    asyncio.run(main())
# Describe the work the browser agent should perform
research_task = Task(
    description="""
    Go to example.com and:
    1. Find the main heading
    2. Extract the page title
    3. Look for any contact information
    """,
    agent=browser_agent,
    expected_output="Structured data with heading, title, and contact info",
)

# Assemble a crew around the single agent/task pair and run it
research_crew = Crew(
    agents=[browser_agent],
    tasks=[research_task],
    verbose=True,
)

outcome = research_crew.kickoff()
print(outcome)
import asyncio

from crewai import Agent, Task, Crew
from stagehand import Stagehand
from stagehand_crewai import StagehandTool


async def main() -> None:
    """End-to-end example: monitor product prices with a CrewAI agent.

    ``await`` is only legal inside a coroutine, so the original top-level
    ``await stagehand.init()`` is wrapped in an async entry point. The
    browser session is closed in ``finally`` so it is released even when a
    task raises.
    """
    # Initialize the shared browser session
    stagehand = Stagehand(env="browserbase")
    await stagehand.init()
    try:
        stagehand_tool = StagehandTool(stagehand=stagehand)

        # Agent specialised for price tracking
        price_monitor = Agent(
            role="Price Monitoring Specialist",
            goal="Track product prices across e-commerce sites",
            backstory="Expert at navigating e-commerce sites and extracting pricing data",
            tools=[stagehand_tool],
            verbose=True,
        )

        # Spell out exactly which fields to extract for each result
        monitor_task = Task(
            description="""
            Go to amazon.com and search for 'wireless headphones'.
            Extract the following for the top 5 results:
            - Product name
            - Price
            - Rating
            - Number of reviews
            """,
            agent=price_monitor,
            expected_output="List of products with name, price, rating, and review count",
        )

        # Execute the crew
        crew = Crew(agents=[price_monitor], tasks=[monitor_task])
        result = crew.kickoff()
        print(result)
    finally:
        # Release the cloud browser session
        await stagehand.close()


if __name__ == "__main__":
    asyncio.run(main())
# Agent dedicated to collecting structured job-posting data
job_scraper = Agent(
    role="Job Data Collector",
    goal="Aggregate job postings from multiple sources",
    backstory="Specialized in extracting structured job posting data",
    tools=[stagehand_tool],
)

# List the exact fields to pull from each listing so the output is uniform
job_task = Task(
    description="""
    Visit linkedin.com/jobs and search for 'software engineer' positions.
    For the first 10 results, extract:
    - Job title
    - Company name
    - Location
    - Posting date
    - Job description summary
    """,
    agent=job_scraper,
    expected_output="Structured list of job postings",
)

scraping_crew = Crew(agents=[job_scraper], tasks=[job_task])
results = scraping_crew.kickoff()
# Agent that understands and completes web forms
form_agent = Agent(
    role="Form Automation Specialist",
    goal="Automatically fill and submit web forms",
    backstory="Expert at understanding form fields and completing them accurately",
    tools=[stagehand_tool],
)

# Provide concrete field values plus a verification step at the end
form_task = Task(
    description="""
    Navigate to contact-form.com and:
    1. Fill out the contact form:
       - Name: John Doe
       - Email: john.doe@example.com
       - Subject: Product Inquiry
       - Message: I'm interested in your enterprise plan
    2. Submit the form
    3. Verify submission was successful
    """,
    agent=form_agent,
    expected_output="Confirmation of successful form submission",
)

form_crew = Crew(agents=[form_agent], tasks=[form_task])
result = form_crew.kickoff()
# Agent that gathers information from several sites and synthesizes it
research_agent = Agent(
    role="Research Analyst",
    goal="Conduct comprehensive web research across multiple sources",
    backstory="Expert at gathering and synthesizing information from various websites",
    tools=[stagehand_tool],
)

# Multi-site research: each numbered step targets a different source
research_task = Task(
    description="""
    Research the latest AI developments by:
    1. Visiting techcrunch.com and extracting top 3 AI-related headlines
    2. Visiting openai.com/blog and extracting latest blog post titles
    3. Visiting arxiv.org and searching for recent AI papers
    4. Synthesize findings into a summary
    """,
    agent=research_agent,
    expected_output="Comprehensive summary of latest AI developments",
)

analysis_crew = Crew(agents=[research_agent], tasks=[research_task])
result = analysis_crew.kickoff()
# Break multi-step instructions into explicit numbered steps so the agent
# performs them in order instead of improvising
navigation_task = Task(
    description="""
    Step 1: Navigate to example.com
    Step 2: Click the 'Products' link in the navigation
    Step 3: Filter by 'Price: Low to High'
    Step 4: Extract the first 10 product names and prices
    """,
    agent=browser_agent,
)
For multiple tasks, reuse the same Stagehand instance:
import asyncio

from crewai import Agent
from stagehand import Stagehand
from stagehand_crewai import StagehandTool


async def main() -> None:
    """Share one Stagehand browser session across multiple agents.

    Top-level ``await`` is invalid in a plain Python script, so the setup is
    wrapped in an async entry point. ``close()`` runs in ``finally`` so the
    session is released even if agent construction or a task fails.
    """
    stagehand = Stagehand(env="browserbase")
    await stagehand.init()
    try:
        # Use for multiple agents/tasks: both tools wrap the same live
        # session, so only one cloud browser is consumed
        agent1 = Agent(tools=[StagehandTool(stagehand=stagehand)])
        agent2 = Agent(tools=[StagehandTool(stagehand=stagehand)])
        # ... run crews using agent1 / agent2 here ...
    finally:
        # Clean up once at the end
        await stagehand.close()


if __name__ == "__main__":
    asyncio.run(main())
# Common configuration knobs for the Stagehand session.
# NOTE(review): option names are taken verbatim from this page; confirm
# against the installed Stagehand SDK version.
stagehand = Stagehand(
    env="browserbase",           # Use Browserbase for cloud browsers
    verbose=True,                # Enable detailed logging
    headless=True,               # Run browser in headless mode
    enable_caching=True,         # Cache for faster execution
    dom_settle_timeout_ms=5000,  # Wait time for page loads
)