# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# Block all crawlers from search, user-profile sub-pages, and article widget/JSON endpoints:
User-agent: *
Disallow: /search
Disallow: /users/*/similar_profiles
Disallow: /invite_colleague
Disallow: /people
Disallow: /cureus_career_center
Disallow: /users/*/coauthors
Disallow: /users/*/activities
Disallow: /articles/*/score/fetch_props
Disallow: /articles/*/suggested_articles
Disallow: /articles/*/fetch_likes_and_comments
Disallow: /articles/*/most_popular_by_category
Sitemap: https://www.cureus.com/sitemaps/sitemap.xml
User-agent: AhrefsBot
Crawl-delay: 8
User-agent: SemrushBot
Crawl-delay: 8
User-agent: dotbot
Crawl-delay: 8
# Allow AI search and agent use
User-agent: OAI-SearchBot
User-agent: ChatGPT-User
User-agent: PerplexityBot
User-agent: FirecrawlAgent
User-agent: AndiBot
User-agent: ExaBot
User-agent: PhindBot
User-agent: YouBot
Crawl-delay: 8
Allow: /
# Disallow AI training data collection
User-agent: GPTBot
User-agent: CCBot
User-agent: Google-Extended
Disallow: /