# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site, uncomment the next two lines:
# User-agent: *
# Disallow: /
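
# Googlebot and bingbot may crawl the site but are kept out of /docs/: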
User-agent: Googlebot
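# Note: Googlebot ignores the Crawl-delay directive, so this setting has no effect on Google's crawl rate.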
Crawl-delay: 30
Disallow: /docs/

User-agent: bingbot
Disallow: /docs/
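
# Search, SEO, and scraper crawlers blocked from the entire site: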
User-agent: Yandex
Disallow: /

User-agent: SMTBot
Disallow: /

User-agent: MJ12Bot
Disallow: /

User-agent: AhrefsBot
Disallow: /

User-agent: spbot
Disallow: /

User-agent: Nutch
Disallow: /
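
# AI training, AI assistant, and archive crawlers blocked from the entire site: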
User-agent: GPTBot
Disallow: /

User-agent: ChatGPT-User
Disallow: /

User-agent: ia_archiver
Disallow: /

User-agent: PerplexityBot
Disallow: /

User-agent: anthropic-ai
Disallow: /

User-agent: Claude-Web
Disallow: /

User-agent: ClaudeBot
Disallow: /

User-agent: CCBot
Disallow: /

User-agent: Google-Extended
Disallow: /
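
# All other crawlers may crawl the site but are kept out of /docs/: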
User-agent: *
Disallow: /docs/