# ========================================
# Default rules for all bots
# ========================================
User-agent: *
Disallow: /administrator/
Disallow: /api/
Disallow: /bin/
Disallow: /cache/
Disallow: /cli/
Disallow: /components/
Disallow: /files/
Disallow: /includes/
Disallow: /language/
Disallow: /layouts/
Disallow: /libraries/
Disallow: /logs/
Disallow: /modules/
Disallow: /plugins/
Disallow: /styleguide/
Disallow: /templates/
Disallow: /tmp/
Disallow: /index.php?option=com_ajax
Disallow: /component/search
Crawl-delay: 15

# ========================================
# Collections: allow crawling but slow down
# (Google ignores Crawl-delay; Bing honors it)
# Note: a crawler obeys only its most specific matching group,
# so the general Disallow rules are repeated below rather than
# inherited from the * group.
# ========================================
User-agent: Googlebot
Allow: /collections
Disallow: /administrator/
Disallow: /api/
Disallow: /bin/
Disallow: /cache/
Disallow: /cli/
Disallow: /components/
Disallow: /files/
Disallow: /includes/
Disallow: /language/
Disallow: /layouts/
Disallow: /libraries/
Disallow: /logs/
Disallow: /modules/
Disallow: /plugins/
Disallow: /styleguide/
Disallow: /templates/
Disallow: /tmp/
Disallow: /index.php?option=com_ajax
Disallow: /component/search
Crawl-delay: 10

User-agent: Bingbot
Allow: /collections
Disallow: /administrator/
Disallow: /api/
Disallow: /bin/
Disallow: /cache/
Disallow: /cli/
Disallow: /components/
Disallow: /files/
Disallow: /includes/
Disallow: /language/
Disallow: /layouts/
Disallow: /libraries/
Disallow: /logs/
Disallow: /modules/
Disallow: /plugins/
Disallow: /styleguide/
Disallow: /templates/
Disallow: /tmp/
Disallow: /index.php?option=com_ajax
Disallow: /component/search
Crawl-delay: 10

# ========================================
# Block known AI and scraper bots
# ========================================
User-agent: GPTBot
Disallow: /

User-agent: ChatGPT-User
Disallow: /

User-agent: ClaudeBot
Disallow: /

User-agent: Claude-Web
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: PerplexityBot
Disallow: /

User-agent: UpstreamBot
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: CommonCrawler
Disallow: /

User-agent: Amazonbot
Disallow: /

User-agent: Bytespider
Disallow: /

User-agent: TikTokSpider
Disallow: /

User-agent: ByteSpider-image
Disallow: /

User-agent: ByteDanceSpider
Disallow: /

User-agent: AwemeSpider
Disallow: /

User-agent: GoogleOther
Disallow: /