#########################################################################
# robots.txt generated at http://www.mcanerin.com                       #
# File Name    : robots.txt                                             #
# Date Created : Feb 15th, 2013                                         #
# Descr        : This file is used for temp sites and symlinked to dir. #
# Author       : Charlie                                                #
#########################################################################

User-agent: Gigabot
Disallow: /

User-agent: Scrubby
Disallow: /

User-agent: Robozilla
Disallow: /

User-agent: Nutch
Disallow: /

User-agent: baiduspider
Disallow: /

User-agent: naverbot
Disallow: /

User-agent: yeti
Disallow: /

User-agent: asterias
Disallow: /

User-agent: *
Crawl-delay: 120
## Block or Allow dirs below ##
Disallow: /cgi-bin/
#Disallow: /css/
Disallow: /docs/
Disallow: /calendar/
Disallow: /includes/
#Disallow: /images/
#Disallow: /js/
Disallow: /lib/
Disallow: /search/
Disallow: /stats/
Disallow: /templates/
#Disallow: /themes/
Disallow: /temp_pa/
Disallow: /show_all_pages.php

## Sitemap: leave or remove (NOTE: the sitemaps spec requires an absolute URL,
## e.g. https://example.com/sitemap.xml, not a relative path)
#Sitemap: /sitemap.xml