# See http://www.robotstxt.org/wc/norobots.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
User-agent: *
Disallow: /reports/
Disallow: /documents/
Disallow: /alist/
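# The two rules below use * (wildcard) and $ (end-of-URL anchor), pattern
# extensions to the original standard honored by Google, Bing, and most major crawlers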
Disallow: /projects/*click$
Disallow: /*/*/statement
# Google's AdsBot ignores User-agent: * rules, so it has to be split out by name
User-agent: AdsBot-Google
Disallow: /alist/
Disallow: /*/*/statement
# 80Legs crawler
User-agent: voltron
Disallow: /
# Somehow being used to fetch user profiles?
# Matched on the product token; the full UA string is FeedBurner/1.0 (http://www.FeedBurner.com)
User-agent: FeedBurner
Disallow: /
# Some weird backlink checker (webmeup.com)
User-agent: BLEXBot
Disallow: /
# Another backlink crawler; version dropped so the token matches all releases
User-agent: linkdexbot
Disallow: /
# Another backlink crawler (https://ahrefs.com/robot)
User-Agent: AhrefsBot
Disallow: /
# Ad-platform crawler (getintent.com)
User-Agent: GetintentCrawler
Disallow: /
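# Cliqz search crawler (cliqz.com)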
User-agent: Cliqzbot
Disallow: /
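# Majestic backlink crawler (mj12bot.com)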
User-agent: MJ12bot
Disallow: /
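# Mail.Ru search engine crawler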
User-agent: Mail.RU_Bot
Disallow: /
User-agent: VegeBot
Disallow: /
Crawl-delay: 60
User-agent: Vegi bot
Disallow: /
Crawl-delay: 60
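# Adbeat ad-intelligence crawler (adbeat.com)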
User-agent: adbeat_bot
Disallow: /
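# Jooble job-search aggregator crawler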
User-agent: Jooblebot
Disallow: /
User-agent: CrazyWebCrawler-Spider
Disallow: /