# See https://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site, uncomment the next two lines:
# User-agent: *
# Disallow: /
User-agent: *
Disallow: /validate/
Disallow: /retrieve/
Disallow: /api/
Disallow: /blaze/
Disallow: /user/
Disallow: /assets/
Disallow: /files/
User-agent: YandexBot
Disallow: /
User-agent: facebookexternalhit
Disallow: /
User-agent: MJ12bot
Disallow: /
User-agent: DotBot
Disallow: /
User-agent: MauiBot
Disallow: /
User-agent: Domain Re-Animator Bot
Disallow: /
User-agent: BLEXBot
Disallow: /
User-agent: Exabot
Disallow: /
User-agent: MegaIndex.ru
Disallow: /
User-agent: SemrushBot
Disallow: /
User-agent: linkdexbot
Disallow: /
User-agent: oBot
Disallow: /
User-agent: Wotbox
Disallow: /
User-agent: SMTBot
Disallow: /
User-agent: panscient.com
Disallow: /
User-agent: Qwantify
Disallow: /
User-agent: YandexDirect
Disallow: /
User-agent: YandexAntivirus
Disallow: /
User-agent: icevikatam
Disallow: /