# robots.txt for https://www.embold.co
# Default: allow site-wide crawling of clean URLs

User-agent: *
Allow: /
# Reduce crawl of low-value or duplicate URLs
Disallow: /search
Disallow: /*?*  # blocks all query-parameter URLs (e.g., UTM, preview)

# Sitemap for canonical URL discovery
Sitemap: https://www.embold.co/sitemap.xml

# AI crawlers (toggle to your preference by switching Allow/Disallow)
User-agent: GPTBot
Allow: /

User-agent: ChatGPT-User
Allow: /

User-agent: ClaudeBot
Allow: /

User-agent: Claude-Web
Allow: /

User-agent: PerplexityBot
Allow: /

User-agent: Google-Extended
Allow: /

Sitemap: https://www.embold.co/sitemap.xml