# See http://www.robotstxt.org/robotstxt.html for documentation on how to use the robots.txt file
#
# To ban all spiders from the entire site uncomment the next two lines:
# User-agent: *
# Disallow: /
# NOTE: if this URL changes, update it in BlogController#sitemap as well
Sitemap: https://media.offerzen.com/sitemap.xml
# Landing pages sitemap
Sitemap: https://www.offerzen.com/sitemap.xml
User-agent: *
Disallow: /blog/www.educonnect.co.za$
Disallow: /blog/www.linkedin.com/in/duke-coulbanis$
Disallow: /blog/www.pandora.com$
Disallow: /blog/www.nicharalambous.com$
Disallow: /blog/www.codewars.com$
Disallow: /blog/www.spin.com$
Disallow: /blog/www.invisionapp.com$
Disallow: /blog/www.devobsession.com$
Disallow: /blog/www.linkedin.com/in/*$
Disallow: /blog/www.4dicapital.com$
Disallow: /blog/how-i-get-the-most-out-of-hackathons/trackback$
Disallow: /blog/github.com/HypothesisWorks/hypothesis/tree/master/hypothesis-python$
Disallow: /blog/automating-my-development...$
Disallow: /blog/dries@deeplearning-cafe.com$
Disallow: /blog/ben@offerzen.com$
Disallow: /marketing/tools/2020_OfferZen_Remote_Work_Poll_Report_SA_Newsletter.pdf$
Disallow: /marketing/tools/private/*$
# Crawl-delay throttling for the crawlers listed below
User-agent: BrightEdge Crawler
Crawl-delay: 1
User-agent: Python-urllib/2.7
Crawl-delay: 1
User-agent: Python-urllib/2.6
Crawl-delay: 1