Add a robots.txt to SAMO that disallows everything

This commit is contained in:
Wil Clouser 2011-05-06 09:44:23 -07:00
Родитель b1fe2c5e3c
Коммит 5e4c1e713e
2 изменённых файлов: 7 добавлений и 2 удалений

Просмотреть файл

@@ -202,7 +202,8 @@ def monitor(request, format=None):
 def robots(request):
     """Generate a robots.txt"""
-    if not settings.ENGAGE_ROBOTS:
+    _service = (request.META['SERVER_NAME'] == settings.SERVICES_DOMAIN)
+    if _service or not settings.ENGAGE_ROBOTS:
         template = "User-agent: *\nDisallow: /"
     else:
         template = jingo.render(request, 'amo/robots.html',

Просмотреть файл

@@ -122,9 +122,13 @@ DOMAIN = HOSTNAME
 # Example: https://addons.mozilla.org
 SITE_URL = 'http://%s' % DOMAIN
+# Domain of the services site. This is where your API, and in-product pages
+# live.
+SERVICES_DOMAIN = 'services.%s' % DOMAIN
+
 # Full URL to your API service. No trailing slash.
 # Example: https://services.addons.mozilla.org
-SERVICES_URL = 'http://services.%s' % DOMAIN
+SERVICES_URL = 'http://%s' % SERVICES_DOMAIN
 # The domain of the mobile site.
 MOBILE_DOMAIN = 'm.%s' % DOMAIN