Serve robots.txt from Django. [bug 616147]

Since the only content we try to disallow is the forums, this lists every
locale explicitly instead of using /*/forums: wildcards are not part of the
robots.txt standard, and enumerating the locales was easy.
James Socol 2010-12-30 15:15:46 -05:00
Parent 7951ad064f
Commit 6113cda85e
6 changed files: 55 additions and 3 deletions

apps/sumo/templates/sumo/robots.html Normal file

@@ -0,0 +1,7 @@
# robots.txt for support.mozilla.com
User-Agent: *
Disallow: /admin/
{% for (l, n) in settings.LANGUAGE_CHOICES %}
Disallow: /{{ l }}/forums/
{% endfor %}
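As an illustration, with a hypothetical LANGUAGE_CHOICES of
(('en-US', 'English'), ('de', 'Deutsch')), the template renders to something
like this (modulo blank lines left by the for tags):

# robots.txt for support.mozilla.com
User-Agent: *
Disallow: /admin/
Disallow: /en-US/forums/
Disallow: /de/forums/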

apps/sumo/tests/test_views.py

@@ -1,3 +1,4 @@
from django.conf import settings
from django.http import HttpResponseRedirect, HttpResponsePermanentRedirect
from nose.tools import eq_
@@ -9,7 +10,6 @@ from sumo.views import redirect_to
class RedirectToTestcase(TestCase):
    rf = RequestFactory()

    def test_redirect_to(self):
@@ -21,3 +21,23 @@ class RedirectToTestcase(TestCase):
        resp = redirect_to(self.rf.get('/'), url='home')
        assert isinstance(resp, HttpResponsePermanentRedirect)
        eq_(reverse('home'), resp['location'])
class RobotsTestCase(TestCase):
    # Use the hard-coded URL because it's well-known.
    old_setting = settings.ENGAGE_ROBOTS

    def tearDown(self):
        settings.ENGAGE_ROBOTS = self.old_setting

    def test_disengaged(self):
        settings.ENGAGE_ROBOTS = False
        response = self.client.get('/robots.txt')
        eq_('Disallow: /', response.content)
        eq_('text/plain', response['content-type'])

    def test_engaged(self):
        settings.ENGAGE_ROBOTS = True
        response = self.client.get('/robots.txt')
        eq_('text/plain', response['content-type'])
        assert len(response.content) > 11
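The magic number 11 in test_engaged is just len('Disallow: /'): once
ENGAGE_ROBOTS is on, the body must be longer than the blanket disallow.
A quick check of that constant:

# 'D','i','s','a','l','l','o','w',':',' ','/' -- 11 characters.
assert len('Disallow: /') == 11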

apps/sumo/urls.py Normal file

@@ -0,0 +1,8 @@
from django.conf.urls.defaults import patterns, url

from sumo import views


urlpatterns = patterns('',
    url(r'^robots\.txt$', views.robots, name='robots.txt'),
)
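Since the pattern is named, callers can reverse it instead of hard-coding the
path (the tests above deliberately use the well-known hard-coded URL anyway).
A minimal sketch, assuming stock Django reversal rather than the project's
locale-aware wrapper:

from django.core.urlresolvers import reverse

reverse('robots.txt')  # '/robots.txt' -- no locale prefix, per SUPPORTED_NONLOCALES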

apps/sumo/views.py

@@ -1,4 +1,6 @@
-from django.http import HttpResponsePermanentRedirect, HttpResponseRedirect
+from django.conf import settings
+from django.http import (HttpResponsePermanentRedirect, HttpResponseRedirect,
+                         HttpResponse)
 import jingo
@@ -41,3 +43,12 @@ def redirect_to(request, url, permanent=True):
        return HttpResponsePermanentRedirect(dest)
    return HttpResponseRedirect(dest)


def robots(request):
    """Generate a robots.txt."""
    if not settings.ENGAGE_ROBOTS:
        template = 'Disallow: /'
    else:
        # jingo.render() builds a full HttpResponse; render the template to
        # a string instead so the text/plain mimetype below takes effect.
        template = jingo.render_to_string(request, 'sumo/robots.html')
    return HttpResponse(template, mimetype='text/plain')
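A sketch of the two code paths, as a hypothetical interactive session
(RequestFactory here is Django's; the test file above imports its own):

from django.conf import settings
from django.test.client import RequestFactory
from sumo.views import robots

request = RequestFactory().get('/robots.txt')

settings.ENGAGE_ROBOTS = False
print robots(request).content   # 'Disallow: /' -- shut crawlers out entirely

settings.ENGAGE_ROBOTS = True
print robots(request).content   # rendered sumo/robots.html with per-locale lines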

settings.py

@@ -89,6 +89,9 @@ SITE_TITLE = _lazy(u'Firefox Support', 'site_title')
USE_I18N = True
USE_L10N = True

# Use the real robots.txt?
ENGAGE_ROBOTS = False

# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = path('media')
@@ -104,7 +107,7 @@ MEDIA_URL = '/media/'
ADMIN_MEDIA_PREFIX = '/admin-media/'

# Paths that don't require a locale prefix.
-SUPPORTED_NONLOCALES = ('media', 'admin')
+SUPPORTED_NONLOCALES = ('media', 'admin', 'robots.txt')

# Make this unique, and don't share it with anybody.
SECRET_KEY = '#%tc(zja8j01!r#h_y)=hy!^k)9az74k+-ib&ij&+**s3-e^_z'
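ENGAGE_ROBOTS defaults to False so dev and staging deployments stay out of
search indexes; production flips it in its local settings override. A sketch,
assuming the conventional settings_local.py layout (the file name is an
assumption about the deployment):

# settings_local.py -- hypothetical production override
ENGAGE_ROBOTS = True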

urls.py

@@ -33,6 +33,9 @@ urlpatterns = patterns('',
    # Users
    ('', include('users.urls')),

    # Services and sundry.
    (r'', include('sumo.urls')),
)
# Handle 404 and 500 errors