// Generate optimal browser configurations for AI agents.
// Inspired by the Agent Browser Protocol (Hacker News discussion #108).
// Playwright Agent Browser Configuration
// Generated by usefultools.ai/agent-browser-config
// Inspired by: https://github.com/theredsix/agent-browser-protocol
const { chromium } = require('playwright');
async function launchAgentBrowser() {
const browser = await chromium.launch({
headless: true,
});
const context = await browser.newContext({
viewport: { width: 1920, height: 1080 },
userAgent: 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36',
// proxy: { server: "http://your-proxy:8080" },
});
const page = await context.newPage();
// Set timeout
page.setDefaultTimeout(30000);
// Stealth mode: Hide automation markers
// Stealth mode disabled
return { browser, context, page };
}
// Usage example
async function runAgent() {
const { browser, page } = await launchAgentBrowser();
try {
await page.goto('https://example.com');
// Your agent logic here
const title = await page.title();
console.log('Page title:', title);
// Screenshot disabled
} finally {
await browser.close();
}
}
runAgent().catch(console.error);
// Hide automation markers to avoid detection — essential for scraping tasks
// that may face anti-bot measures.
// Set appropriate timeouts to prevent hanging: shorter timeouts for fast
// scraping, longer ones for complex automation.
// Rotate proxies for large-scale operations to avoid IP bans and rate limiting.
// Generate undetectable browser profiles for your web automation.
// For developers building web scrapers and browser-automation agents:
// - Bypass basic bot detection.
// - Configure reliable data-scraping pipelines.
// - Give your web-browsing agents realistic footprints.