# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-Agent: *
# Disallow: /
User-agent: *
Disallow: /reports/
Disallow: /documents/
Disallow: /alist/
Disallow: /moneyball
Disallow: /projects/*click$
# Google, why do you make me split this out?
User-agent: AdsBot-Google
Disallow: /alist/
# 80Legs crawler
User-agent: voltron
Disallow: /
# Somehow being used to fetch user profiles?
User-agent: FeedBurner
Disallow: /
# Some weird backlink checker (webmeup.com)
User-agent: BLEXBot
Disallow: /
# Another backlink crawler
User-agent: linkdexbot
Disallow: /