# Grant Robots Access
#######################################################################################
# This example allows all robots to visit all files: the wildcard "*" matches every
# robot, and the empty Disallow value means nothing is blocked:
User-agent: *
Disallow:
# To allow only a single robot (and keep all others out) you would use the following.
# "Googlebot" is the user-agent token of Google's crawler; note that each
# User-agent group is separated from the next by a blank line:
#User-agent: Googlebot
#Disallow:
#
#User-agent: *
#Disallow: /
# Deny Robots Access
#######################################################################################
# This example keeps all robots out:
#User-agent: *
#Disallow: /
# The next example tells all crawlers not to enter four directories of the site:
User-agent: *
Disallow: /cgi-bin/
Disallow: /images/
Disallow: /tmp/
Disallow: /private/
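# Disallow rules match by path prefix, so a rule such as "Disallow: /private/"
# also blocks everything below that directory, e.g. /private/archive/old.html
# (that file name is only an illustration).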
# Example that tells a specific crawler not to enter one specific directory:
#User-agent: BadBot
#Disallow: /private/
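# Keep in mind that robots.txt is purely advisory: a crawler such as the
# hypothetical "BadBot" above is free to ignore these rules, so they are not a
# substitute for real access control.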
# Example that tells all crawlers not to enter one specific file called foo.html.
# The path is given relative to the root of the site, not as a server filesystem
# path such as /domains/example.com/html/:
#User-agent: *
#Disallow: /foo.html
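# For any of these rules to take effect, the file must be reachable at the root
# of the site, e.g. https://example.com/robots.txt.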