# Robots.txt for Scamy.io

# Allow all crawlers to access the site
User-agent: *
Allow: /

# Disallow admin and private areas
Disallow: /admin/
Disallow: /dashboard/
Disallow: /api/
Disallow: /auth/
Disallow: /onboarding/

# Allow specific API endpoints that are public
Allow: /api/domain
Allow: /api/extension.check-url
Allow: /api/mobile.check-url

# Crawl-delay for polite crawling (optional; not honored by all crawlers)
Crawl-delay: 1

# Sitemap location
Sitemap: https://scamy.io/sitemap.xml

# Additional directives for major search engines
# A bot-specific group replaces the "User-agent: *" group for that bot,
# so the full rule set is repeated here instead of a bare "Allow: /".
User-agent: Googlebot
User-agent: Bingbot
User-agent: Slurp
Allow: /
Disallow: /admin/
Disallow: /dashboard/
Disallow: /api/
Disallow: /auth/
Disallow: /onboarding/
Allow: /api/domain
Allow: /api/extension.check-url
Allow: /api/mobile.check-url

# Block common bad bots
User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /

User-agent: DotBot
Disallow: /