# Welcome legitimate search engines!
# Frontend is fully crawlable for SEO

User-agent: *
Allow: /

# Block API endpoints from crawlers
Disallow: /api/

# Block transactional pages
Disallow: /thank-you

# Optional: Block common bot paths if you add them later
Disallow: /admin/
Disallow: /private/
Disallow: /.env
Disallow: /config/

# Sitemap location for search engines
Sitemap: https://statenode.com/sitemap.xml

# Crawl delay (be nice to our servers; honored by Bing and others, ignored by Googlebot)
Crawl-delay: 1

# Popular search engines - more specific rules
# Note: a bot that matches a specific User-agent group ignores the * group,
# so the shared Disallow rules are repeated here.
User-agent: Googlebot
Allow: /
Disallow: /api/
Disallow: /thank-you
Disallow: /admin/
Disallow: /private/
Disallow: /.env
Disallow: /config/

User-agent: Bingbot
Allow: /
Disallow: /api/
Disallow: /thank-you
Disallow: /admin/
Disallow: /private/
Disallow: /.env
Disallow: /config/

# Slurp is Yahoo's crawler
User-agent: Slurp
Allow: /
Disallow: /api/
Disallow: /thank-you
Disallow: /admin/
Disallow: /private/
Disallow: /.env
Disallow: /config/

# Block known bad bots (add more as needed)
User-agent: SemrushBot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: MJ12bot
Disallow: /