diff --git a/apps/search/views.py b/apps/search/views.py
index 8fa4ef2dd..bded98330 100644
--- a/apps/search/views.py
+++ b/apps/search/views.py
@@ -7,6 +7,7 @@ from django import forms
 from django.forms.util import ValidationError
 from django.conf import settings
 from django.http import HttpResponse
+from django.utils.http import urlencode
 
 import jingo
 import jinja2
@@ -15,7 +16,7 @@ from tower import ugettext as _
 from forums.models import Forum as DiscussionForum, Thread, Post
 from sumo.models import WikiPage, Category
 from questions.models import Question
-from sumo.utils import paginate, urlencode
+from sumo.utils import paginate
 from .clients import (QuestionsClient, WikiClient, DiscussionClient,
                       SearchError)
 from .utils import crc32, locale_or_default, sphinx_locale
diff --git a/apps/sumo/helpers.py b/apps/sumo/helpers.py
index dd7d19f5d..eea40b3a6 100644
--- a/apps/sumo/helpers.py
+++ b/apps/sumo/helpers.py
@@ -3,8 +3,9 @@ import urlparse
 import datetime
 import re
 
-from django.utils.encoding import smart_unicode
 from django.conf import settings
+from django.utils.encoding import smart_unicode
+from django.utils.http import urlencode
 
 import jinja2
 from jingo import register, env
@@ -14,7 +15,7 @@ from babel.dates import format_date, format_time, format_datetime
 from pytz import timezone
 
 from .urlresolvers import reverse
-from .utils import urlencode, wiki_to_html
+from .utils import wiki_to_html
 
 
 class DateTimeFormatError(Exception):
diff --git a/apps/sumo/parser.py b/apps/sumo/parser.py
index 2c62d3fad..05021b436 100644
--- a/apps/sumo/parser.py
+++ b/apps/sumo/parser.py
@@ -61,8 +61,13 @@ class WikiParser(object):
         """
         Checks the page exists, and returns its URL, or the URL to create it.
         """
-        return reverse('wiki.document',
-                       kwargs={'document_slug': link.replace(' ', '+')})
+        from wiki.models import Document
+        try:
+            d = Document.objects.get(title=link)
+        except Document.DoesNotExist:
+            from sumo.helpers import urlparams
+            return urlparams(reverse('wiki.new_document'), title=link)
+        return d.get_absolute_url()
 
     def hook_internal_link(self, parser, space, name):
         """Parses text and returns internal link."""
diff --git a/apps/sumo/tests/test__utils.py b/apps/sumo/tests/test__utils.py
deleted file mode 100644
index e94d01a23..000000000
--- a/apps/sumo/tests/test__utils.py
+++ /dev/null
@@ -1,20 +0,0 @@
-# -*- coding: utf-8 -*-
-
-from nose.tools import eq_
-
-from sumo.utils import urlencode
-
-
-def test_urlencode():
-    """Our urlencode is Unicode-safe."""
-    items = [('q', u'Fran\xe7ais')]
-    eq_('q=Fran%C3%A7ais', urlencode(items))
-
-    items = [('q', u'は「着')]
-    eq_('q=%E3%81%AF%E3%80%8C%E7%9D%80', urlencode(items))
-
-
-def test_urlencode_int():
-    """urlencode() should not choke on integers."""
-    items = [('q', 't'), ('a', 1)]
-    eq_('q=t&a=1', urlencode(items))
diff --git a/apps/sumo/tests/test_parser.py b/apps/sumo/tests/test_parser.py
index 53049d0b2..d55159be6 100644
--- a/apps/sumo/tests/test_parser.py
+++ b/apps/sumo/tests/test_parser.py
@@ -52,7 +52,7 @@ class TestWikiParser(TestCase):
         """_buildImageParams handles wiki pages."""
         items = ['page=Installing Firefox']
         params = self.p._buildImageParams(items)
-        eq_('/kb/Installing+Firefox', params['link'])
+        eq_('/en-US/kb/installing-firefox', params['link'])
 
     def test_image_params_link(self):
         """_buildImageParams handles external links."""
@@ -64,7 +64,7 @@ class TestWikiParser(TestCase):
         """_buildImageParams - wiki page overrides link."""
         items = ['page=Installing Firefox', 'link=http://example.com']
         params = self.p._buildImageParams(items)
-        eq_('/kb/Installing+Firefox', params['link'])
+        eq_('/en-US/kb/installing-firefox', params['link'])
 
     def test_image_params_align(self):
         """Align valid options."""
@@ -116,7 +116,7 @@ class TestWikiParser(TestCase):
 
     def test_get_wiki_link(self):
         """Wiki links are properly built for existing pages."""
-        eq_('/kb/Installing+Firefox',
+        eq_('/en-US/kb/installing-firefox',
             self.p._getWikiLink('Installing Firefox'))
 
 
@@ -132,25 +132,25 @@ class TestWikiInternalLinks(TestCase):
     def test_simple(self):
         """Simple internal link markup."""
         link = pq_link(self.p, '[[Installing Firefox]]')
-        eq_('/kb/Installing+Firefox', link.attr('href'))
+        eq_('/en-US/kb/installing-firefox', link.attr('href'))
         eq_('Installing Firefox', link.text())
 
     def test_simple_markup(self):
         text = '[[Installing Firefox]]'
-        eq_('<p><a href="/kb/Installing+Firefox">' +
+        eq_('<p><a href="/en-US/kb/installing-firefox">' +
             'Installing Firefox</a>\n</p>', self.p.parse(text))
 
     def test_link_hash(self):
         """Internal link with hash."""
         link = pq_link(self.p, '[[Installing Firefox#section name]]')
-        eq_('/kb/Installing+Firefox#section_name', link.attr('href'))
+        eq_('/en-US/kb/installing-firefox#section_name', link.attr('href'))
         eq_('Installing Firefox#section name', link.text())
 
     def test_link_hash_markup(self):
         """Internal link with hash."""
         text = '[[Installing Firefox#section name]]'
-        eq_('<p><a href="/kb/Installing+Firefox#section_name">'
+        eq_('<p><a href="/en-US/kb/installing-firefox#section_name">'
             'Installing Firefox#section name</a>\n</p>', self.p.parse(text))
 
@@ -163,12 +163,12 @@ class TestWikiInternalLinks(TestCase):
     def test_link_name(self):
         """Internal link with name."""
         link = pq_link(self.p, '[[Installing Firefox|this name]]')
-        eq_('/kb/Installing+Firefox', link.attr('href'))
+        eq_('/en-US/kb/installing-firefox', link.attr('href'))
         eq_('this name', link.text())
 
     def test_link_with_extra_pipe(self):
         link = pq_link(self.p, '[[Installing Firefox|with|pipe]]')
-        eq_('/kb/Installing+Firefox', link.attr('href'))
+        eq_('/en-US/kb/installing-firefox', link.attr('href'))
         eq_('with|pipe', link.text())
 
     def test_hash_name(self):
@@ -180,25 +180,25 @@ class TestWikiInternalLinks(TestCase):
     def test_link_hash_name(self):
         """Internal link with hash and name."""
         link = pq_link(self.p, '[[Installing Firefox#section 3|this name]]')
-        eq_('/kb/Installing+Firefox#section_3', link.attr('href'))
+        eq_('/en-US/kb/installing-firefox#section_3', link.attr('href'))
         eq_('this name', link.text())
 
     def test_link_hash_name_markup(self):
         """Internal link with hash and name."""
         text = '[[Installing Firefox#section 3|this name]]'
-        eq_('<p><a href="/kb/Installing+Firefox#section_3">'
+        eq_('<p><a href="/en-US/kb/installing-firefox#section_3">'
             'this name</a>\n</p>', self.p.parse(text))
 
     def test_simple_create(self):
         """Simple link for inexistent page."""
         link = pq_link(self.p, '[[A new page]]')
-        eq_('/kb/A+new+page', link.attr('href'))
+        eq_('/kb/new?title=A+new+page', link.attr('href'))
         eq_('A new page', link.text())
 
     def test_link_edit_hash_name(self):
         """Internal link for inexistent page with hash and name."""
         link = pq_link(self.p, '[[A new page#section 3|this name]]')
-        eq_('/kb/A+new+page#section_3', link.attr('href'))
+        eq_('/kb/new?title=A+new+page#section_3', link.attr('href'))
         eq_('this name', link.text())
 
 
@@ -248,7 +248,7 @@ class TestWikiImageTags(TestCase):
         eq_('file.png', img.attr('alt'))
         eq_('file.png', caption)
         eq_('/img/wiki_up/file.png', img.attr('src'))
-        eq_('/kb/Installing+Firefox', img_a.attr('href'))
+        eq_('/en-US/kb/installing-firefox', img_a.attr('href'))
 
     def test_page_link_edit(self):
         """Link to a nonexistent wiki page."""
@@ -260,7 +260,7 @@ class TestWikiImageTags(TestCase):
         eq_('file.png', img.attr('alt'))
         eq_('file.png', caption)
         eq_('/img/wiki_up/file.png', img.attr('src'))
-        eq_('/kb/Article+List', img_a.attr('href'))
+        eq_('/kb/new?title=Article+List', img_a.attr('href'))
 
     def test_page_link_caption(self):
         """Link to a wiki page with caption."""
@@ -273,7 +273,7 @@ class TestWikiImageTags(TestCase):
         eq_('my caption', img.attr('alt'))
         eq_('my caption', caption)
         eq_('/img/wiki_up/file.png', img.attr('src'))
-        eq_('/kb/Article+List', img_a.attr('href'))
+        eq_('/kb/new?title=Article+List', img_a.attr('href'))
 
     def test_link(self):
         """Link to an external page."""
@@ -400,4 +400,4 @@ class TestWikiImageTags(TestCase):
             self.p, '[[Image:img.png|frameless|page=Installing Firefox]]', 'a')
         img = img_a('img')
         eq_('frameless', img.attr('class'))
-        eq_('/kb/Installing+Firefox', img_a.attr('href'))
+        eq_('/en-US/kb/installing-firefox', img_a.attr('href'))
diff --git a/apps/sumo/utils.py b/apps/sumo/utils.py
index e0188e07e..bff13512e 100644
--- a/apps/sumo/utils.py
+++ b/apps/sumo/utils.py
@@ -1,7 +1,5 @@
-import urllib
-
 from django.core import paginator
-from django.utils.encoding import smart_str
+from django.utils.http import urlencode
 
 import jinja2
 
@@ -35,15 +33,6 @@ def paginate(request, queryset, per_page=20):
     return paginated
 
 
-def urlencode(items):
-    """A Unicode-safe URLencoder."""
-
-    try:
-        return urllib.urlencode(items)
-    except UnicodeEncodeError:
-        return urllib.urlencode([(k, smart_str(v)) for k, v in items])
-
-
 def wiki_to_html(wiki_markup, wiki_hooks=False):
     """Wiki Markup -> HTML"""
     parser = WikiParser(wiki_hooks=wiki_hooks)
diff --git a/apps/wiki/forms.py b/apps/wiki/forms.py
index b43daa946..583c5cfac 100644
--- a/apps/wiki/forms.py
+++ b/apps/wiki/forms.py
@@ -13,6 +13,9 @@ KEYWORDS_HELP_TEXT = _lazy(u'Keywords are used to improve searches.')
 TITLE_REQUIRED = _lazy(u'Please provide a title.')
 TITLE_SHORT = _lazy(u'Your title is too short (%(show_value)s characters). It must be at least %(limit_value)s characters.')
 TITLE_LONG = _lazy(u'Please keep the length of your title to %(limit_value)s characters or less. It is currently %(show_value)s characters.')
+SLUG_REQUIRED = _lazy(u'Please provide a slug.')
+SLUG_SHORT = _lazy(u'Your slug is too short (%(show_value)s characters). It must be at least %(limit_value)s characters.')
+SLUG_LONG = _lazy(u'Please keep the length of your slug to %(limit_value)s characters or less. It is currently %(show_value)s characters.')
 SUMMARY_REQUIRED = _lazy(u'Please provide a summary.')
 SUMMARY_SHORT = _lazy(u'The summary is too short (%(show_value)s characters). It must be at least %(limit_value)s characters.')
 SUMMARY_LONG = _lazy(u'Please keep the length of the summary to %(limit_value)s characters or less. It is currently %(show_value)s characters.')
@@ -28,6 +31,11 @@ class DocumentForm(forms.ModelForm):
                                 error_messages={'required': TITLE_REQUIRED,
                                                 'min_length': TITLE_SHORT,
                                                 'max_length': TITLE_LONG})
+    slug = StrippedCharField(min_length=5, max_length=255,
+                             widget=forms.TextInput(),
+                             error_messages={'required': SLUG_REQUIRED,
+                                             'min_length': SLUG_SHORT,
+                                             'max_length': SLUG_LONG})
 
     firefox_versions = forms.MultipleChoiceField(
         label=_('Firefox Version'),
@@ -49,7 +57,7 @@ class DocumentForm(forms.ModelForm):
 
     class Meta:
         model = Document
-        fields = ('title', 'category', 'tags')
+        fields = ('title', 'slug', 'category', 'tags')
 
 
 class RevisionForm(forms.ModelForm):
diff --git a/apps/wiki/models.py b/apps/wiki/models.py
index eb73bf6fc..0f878b6ad 100644
--- a/apps/wiki/models.py
+++ b/apps/wiki/models.py
@@ -5,9 +5,9 @@ from tower import ugettext_lazy as _lazy
 from django.conf import settings
 from django.contrib.auth.models import User
 from django.db import models
+from django.utils.http import urlquote
 
 from sumo.models import ModelBase, TaggableMixin
-from sumo.urlresolvers import reverse
 from sumo.utils import wiki_to_html
 
 
@@ -62,6 +62,7 @@ def _inherited(parent_attr, direct_attr):
 class Document(ModelBase, TaggableMixin):
     """A localized knowledgebase document, not revision-specific."""
     title = models.CharField(max_length=255, db_index=True)
+    slug = models.CharField(max_length=255, db_index=True)
 
     # TODO: validate (against settings.SUMO_LANGUAGES?)
     locale = models.CharField(max_length=7, db_index=True,
@@ -94,7 +95,8 @@ class Document(ModelBase, TaggableMixin):
     # how MySQL uses indexes, we probably don't need individual indexes on
     # title and locale as well as a combined (title, locale) one.
     class Meta(object):
-        unique_together = (('parent', 'locale'), ('title', 'locale'))
+        unique_together = (('parent', 'locale'), ('title', 'locale'),
+                           ('slug', 'locale'))
 
     @property
     def content_parsed(self):
@@ -106,8 +108,7 @@ class Document(ModelBase, TaggableMixin):
     operating_systems = _inherited('operating_systems', 'operating_system_set')
 
     def get_absolute_url(self):
-        return reverse('wiki.document',
-                       kwargs={'document_slug': self.title.replace(' ', '+')})
+        return '/%s/kb/%s' % (self.locale, urlquote(self.slug))
 
     def __unicode__(self):
         return '[%s] %s' % (self.locale, self.title)
diff --git a/apps/wiki/templates/wiki/document_revisions.html b/apps/wiki/templates/wiki/document_revisions.html
index 2fc3ea4d9..35fed27be 100644
--- a/apps/wiki/templates/wiki/document_revisions.html
+++ b/apps/wiki/templates/wiki/document_revisions.html
@@ -28,7 +28,7 @@
   {% if user.has_perm('wiki.add_revision') %}
-    Add a Revision
+    Add a Revision
   {% endif %}
 {% endblock %}
diff --git a/apps/wiki/tests/__init__.py b/apps/wiki/tests/__init__.py
index 9d262b20d..43ade0db0 100644
--- a/apps/wiki/tests/__init__.py
+++ b/apps/wiki/tests/__init__.py
@@ -1,6 +1,9 @@
+from django.template.defaultfilters import slugify
 from django.contrib.auth.models import User
 from django.core.cache import cache
 
+from datetime import datetime
+
 from sumo.tests import LocalizingClient, TestCase
 from wiki.models import Document, Revision, CATEGORIES, SIGNIFICANCES
 
@@ -19,9 +22,11 @@ class TestCaseBase(TestCase):
 def document(**kwargs):
     """Return an empty document with enough stuff filled out that it can be
     saved."""
-    if 'category' not in kwargs:
-        kwargs['category'] = CATEGORIES[0][0]  # arbitrary
-    return Document(**kwargs)
+    auto_title = str(datetime.now())
+    defaults = {'category': CATEGORIES[0][0], 'title': auto_title}
+    defaults.update(kwargs)
+    defaults['slug'] = slugify(defaults['title'])
+    return Document(**defaults)
 
 
 def revision(**kwargs):
diff --git a/apps/wiki/tests/test_templates.py b/apps/wiki/tests/test_templates.py
index b96bfb440..03e5d2074 100644
--- a/apps/wiki/tests/test_templates.py
+++ b/apps/wiki/tests/test_templates.py
@@ -5,7 +5,7 @@ from pyquery import PyQuery as pq
 
 from sumo.urlresolvers import reverse
 from wiki.models import Document, Revision, SIGNIFICANCES, CATEGORIES
-from wiki.tests import TestCaseBase
+from wiki.tests import TestCaseBase, document, revision
 
 
 class DocumentTests(TestCaseBase):
@@ -48,7 +48,7 @@ class NewDocumentTests(TestCaseBase):
         response = self.client.post(reverse('wiki.new_document'), data,
                                     follow=True)
         d = Document.objects.get(title=data['title'])
-        eq_([('http://testserver/en-US/kb/%s/history' % d.id, 302)],
+        eq_([('http://testserver/en-US/kb/%s/history' % d.slug, 302)],
             response.redirect_chain)
         eq_(data['category'], d.category)
         eq_(tags, list(d.tags.values_list('name', flat=True)))
@@ -121,16 +121,14 @@ class NewRevisionTests(TestCaseBase):
         """Trying to create a new revision wihtout permission returns 403."""
         d = _create_document()
         self.client.login(username='rrosario', password='testpass')
-        response = self.client.get(reverse('wiki.new_revision',
-                                           args=[d.title.replace(' ', '+')]))
+        response = self.client.get(reverse('wiki.new_revision', args=[d.slug]))
         eq_(302, response.status_code)
 
     def test_new_revision_GET_with_perm(self):
         """HTTP GET to new revision URL renders the form."""
         d = _create_document()
         self.client.login(username='admin', password='testpass')
-        response = self.client.get(reverse('wiki.new_revision',
-                                           args=[d.title.replace(' ', '+')]))
+        response = self.client.get(reverse('wiki.new_revision', args=[d.slug]))
         eq_(200, response.status_code)
         doc = pq(response.content)
         eq_(1, len(doc('#document-form textarea[name="content"]')))
@@ -149,8 +147,7 @@ class NewRevisionTests(TestCaseBase):
         r.save()
         self.client.login(username='admin', password='testpass')
         response = self.client.get(reverse('wiki.new_revision_based_on',
-                                           args=[d.title.replace(' ', '+'),
-                                                 r.id]))
+                                           args=[d.slug, r.id]))
         eq_(200, response.status_code)
         doc = pq(response.content)
         eq_(doc('#id_keywords')[0].value, r.keywords)
@@ -166,12 +163,10 @@ class NewRevisionTests(TestCaseBase):
         """
         d = _create_document()
         self.client.login(username='admin', password='testpass')
-        response = self.client.post(reverse('wiki.new_revision',
-                                            args=[d.title.replace(' ', '+')]),
-                                    {'summary': 'A brief summary',
-                                     'content': 'The article content',
-                                     'keywords': 'keyword1 keyword2',
-                                     'significance': 10})
+        response = self.client.post(
+            reverse('wiki.new_revision', args=[d.slug]),
+            {'summary': 'A brief summary', 'content': 'The article content',
+             'keywords': 'keyword1 keyword2', 'significance': 10})
         eq_(302, response.status_code)
 
         eq_(2, d.revisions.count())
@@ -188,8 +183,7 @@ class NewRevisionTests(TestCaseBase):
         self.client.login(username='admin', password='testpass')
         tags = ['tag1', 'tag2', 'tag3']
         data = _new_document_data(tags)
-        response = self.client.post(reverse('wiki.new_revision',
-                                            args=[d.title.replace(' ', '+')]),
+        response = self.client.post(reverse('wiki.new_revision', args=[d.slug]),
                                     data)
         eq_(302, response.status_code)
         eq_(2, d.revisions.count())
@@ -226,23 +220,23 @@ class DocumentRevisionsTests(TestCaseBase):
         """Verify the document revisions list view."""
         d = _create_document()
         user = User.objects.get(pk=118533)
-        r1 = Revision(summary="a tweak", content='lorem ipsum dolor',
+        r1 = revision(summary="a tweak", content='lorem ipsum dolor',
                       significance=10, keywords='kw1 kw2', document=d,
                       creator=user)
         r1.save()
-        r2 = Revision(summary="another tweak", content='lorem dimsum dolor',
+        r2 = revision(summary="another tweak", content='lorem dimsum dolor',
                       significance=10, keywords='kw1 kw2', document=d,
                       creator=user)
         r2.save()
         response = self.client.get(reverse('wiki.document_revisions',
-                                           args=[d.title.replace(' ', '+')]))
+                                           args=[d.slug]))
         eq_(200, response.status_code)
         doc = pq(response.content)
         eq_(3, len(doc('#revision-list > ul > li')))
 
 
 def _create_document(title='Test Document'):
-    d = Document(title=title, html='Lorem Ipsum',
+    d = document(title=title, html='Lorem Ipsum',
                  category=1, locale='en-US')
     d.save()
     r = Revision(document=d, keywords='key1, key2', summary='lipsum',
@@ -257,6 +251,7 @@ def _new_document_data(tags):
     return {
         'title': 'A Test Article',
+        'slug': 'a-test-article',
         'tags': ','.join(tags),
         'firefox_versions': [1, 2],
         'operating_systems': [1, 3],
diff --git a/apps/wiki/urls.py b/apps/wiki/urls.py
index 178b2e79d..1a4afa0f9 100644
--- a/apps/wiki/urls.py
+++ b/apps/wiki/urls.py
@@ -6,11 +6,12 @@ urlpatterns = patterns('wiki.views',
     url(r'^/all$', 'list_documents', name='wiki.all_documents'),
     url(r'^/category/(?P<category>\d+)$', 'list_documents',
        name='wiki.category'),
-    url(r'^/(?P<document_slug>[\+\w]+)$', 'document', name='wiki.document'),
-    url(r'^/(?P<document_slug>[\+\w]+)/history$', 'document_revisions',
-        name='wiki.document_revisions'),
-    url(r'^/(?P<document_slug>[\+\w]+)/edit$', 'new_revision',
+    url(r'^/(?P<document_slug>[^\/]+)$', 'document',
+        name='wiki.document'),
+    url(r'^/(?P<document_slug>[^\/]+)/history$',
+        'document_revisions', name='wiki.document_revisions'),
+    url(r'^/(?P<document_slug>[^\/]+)/edit$', 'new_revision',
         name='wiki.new_revision'),
-    url(r'^/(?P<document_slug>[\+\w]+)/edit/(?P<revision_id>\d+)$',
+    url(r'^/(?P<document_slug>[^\/]+)/edit/(?P<revision_id>\d+)$',
         'new_revision', name='wiki.new_revision_based_on'),
 )
diff --git a/apps/wiki/views.py b/apps/wiki/views.py
index 5462fb9c4..406eb9186 100644
--- a/apps/wiki/views.py
+++ b/apps/wiki/views.py
@@ -17,7 +17,8 @@ def document(request, document_slug):
     """View a wiki document."""
     # This may change depending on how we decide to structure
     # the url and handle locales.
-    doc = get_object_or_404(Document, title=document_slug.replace('+', ' '))
+    doc = get_object_or_404(
+        Document, locale=request.locale, slug=document_slug)
 
     return jingo.render(request, 'wiki/document.html', {'document': doc})
 
@@ -66,7 +67,7 @@ def new_document(request):
             rev.save()
 
             return HttpResponseRedirect(reverse('wiki.document_revisions',
-                                                args=[doc.id]))
+                                                args=[doc.slug]))
 
     return jingo.render(request, 'wiki/new_document.html',
                         {'document_form': doc_form,
@@ -77,7 +78,8 @@ def new_document(request):
 @permission_required('wiki.add_revision')
 def new_revision(request, document_slug, revision_id=None):
     """Create a new revision of a wiki document."""
-    doc = get_object_or_404(Document, title=document_slug.replace('+', ' '))
+    doc = get_object_or_404(
+        Document, locale=request.locale, slug=document_slug)
 
     if request.method == 'GET':
         if revision_id:
@@ -158,7 +160,8 @@ def new_revision(request, document_slug, revision_id=None):
 
 def document_revisions(request, document_slug):
     """List all the revisions of a given document."""
-    doc = get_object_or_404(Document, title=document_slug.replace('+', ' '))
+    doc = get_object_or_404(
+        Document, locale=request.locale, slug=document_slug)
     revs = Revision.objects.filter(document=doc)
     return jingo.render(request, 'wiki/document_revisions.html',
                         {'revisions': revs,
diff --git a/media/js/libs/django/prepopulate.js b/media/js/libs/django/prepopulate.js
new file mode 100644
index 000000000..20ea1e088
--- /dev/null
+++ b/media/js/libs/django/prepopulate.js
@@ -0,0 +1,43 @@
+/*
+ * Taken from Django's contrib/admin/media/js folder, thanks Django!
+ * Copyright Django and licensed under BSD, please see django/LICENSE for
+ * license details.
+ * Modified slightly to handle fallback to full title if slug is empty
+ */
+(function($) {
+    $.fn.prepopulate = function(dependencies, maxLength) {
+        /*
+            Depends on urlify.js
+            Populates a selected field with the values of the dependent fields,
+            URLifies and shortens the string.
+            dependencies - selected jQuery object of dependent fields
+            maxLength - maximum length of the URLify'd string
+        */
+        return this.each(function() {
+            var field = $(this);
+
+            field.data('_changed', false);
+            field.change(function() {
+                field.data('_changed', true);
+            });
+
+            var populate = function () {
+                // Bail if the fields value has changed
+                if (field.data('_changed') == true) return;
+
+                var values = [], field_val, field_val_raw;
+                dependencies.each(function() {
+                    if ($(this).val().length > 0) {
+                        values.push($(this).val());
+                    }
+                });
+                field_val_raw = values.join(' ');
+                field_val = URLify(field_val_raw, maxLength) ||
+                            field_val_raw;
+                field.val(field_val);
+            };
+
+            dependencies.keyup(populate).change(populate).focus(populate);
+        });
+    };
+})(jQuery);
diff --git a/media/js/libs/django/urlify.js b/media/js/libs/django/urlify.js
new file mode 100644
index 000000000..26844fe07
--- /dev/null
+++ b/media/js/libs/django/urlify.js
@@ -0,0 +1,145 @@
+/*
+ * Taken from Django's contrib/admin/media/js folder, thanks Django!
+ * Copyright Django and licensed under BSD, please see django/LICENSE for
+ * license details.
+ */
+var LATIN_MAP = {
+    'À': 'A', 'Á': 'A', 'Â': 'A', 'Ã': 'A', 'Ä': 'A', 'Å': 'A', 'Æ': 'AE', 'Ç':
+    'C', 'È': 'E', 'É': 'E', 'Ê': 'E', 'Ë': 'E', 'Ì': 'I', 'Í': 'I', 'Î': 'I',
+    'Ï': 'I', 'Ð': 'D', 'Ñ': 'N', 'Ò': 'O', 'Ó': 'O', 'Ô': 'O', 'Õ': 'O', 'Ö':
+    'O', 'Ő': 'O', 'Ø': 'O', 'Ù': 'U', 'Ú': 'U', 'Û': 'U', 'Ü': 'U', 'Ű': 'U',
+    'Ý': 'Y', 'Þ': 'TH', 'ß': 'ss', 'à':'a', 'á':'a', 'â': 'a', 'ã': 'a', 'ä':
+    'a', 'å': 'a', 'æ': 'ae', 'ç': 'c', 'è': 'e', 'é': 'e', 'ê': 'e', 'ë': 'e',
+    'ì': 'i', 'í': 'i', 'î': 'i', 'ï': 'i', 'ð': 'd', 'ñ': 'n', 'ò': 'o', 'ó':
+    'o', 'ô': 'o', 'õ': 'o', 'ö': 'o', 'ő': 'o', 'ø': 'o', 'ù': 'u', 'ú': 'u',
+    'û': 'u', 'ü': 'u', 'ű': 'u', 'ý': 'y', 'þ': 'th', 'ÿ': 'y'
+}
+var LATIN_SYMBOLS_MAP = {
+    '©':'(c)'
+}
+var GREEK_MAP = {
+    'α':'a', 'β':'b', 'γ':'g', 'δ':'d', 'ε':'e', 'ζ':'z', 'η':'h', 'θ':'8',
+    'ι':'i', 'κ':'k', 'λ':'l', 'μ':'m', 'ν':'n', 'ξ':'3', 'ο':'o', 'π':'p',
+    'ρ':'r', 'σ':'s', 'τ':'t', 'υ':'y', 'φ':'f', 'χ':'x', 'ψ':'ps', 'ω':'w',
+    'ά':'a', 'έ':'e', 'ί':'i', 'ό':'o', 'ύ':'y', 'ή':'h', 'ώ':'w', 'ς':'s',
+    'ϊ':'i', 'ΰ':'y', 'ϋ':'y', 'ΐ':'i',
+    'Α':'A', 'Β':'B', 'Γ':'G', 'Δ':'D', 'Ε':'E', 'Ζ':'Z', 'Η':'H', 'Θ':'8',
+    'Ι':'I', 'Κ':'K', 'Λ':'L', 'Μ':'M', 'Ν':'N', 'Ξ':'3', 'Ο':'O', 'Π':'P',
+    'Ρ':'R', 'Σ':'S', 'Τ':'T', 'Υ':'Y', 'Φ':'F', 'Χ':'X', 'Ψ':'PS', 'Ω':'W',
+    'Ά':'A', 'Έ':'E', 'Ί':'I', 'Ό':'O', 'Ύ':'Y', 'Ή':'H', 'Ώ':'W', 'Ϊ':'I',
+    'Ϋ':'Y'
+}
+var TURKISH_MAP = {
+    'ş':'s', 'Ş':'S', 'ı':'i', 'İ':'I', 'ç':'c', 'Ç':'C', 'ü':'u', 'Ü':'U',
+    'ö':'o', 'Ö':'O', 'ğ':'g', 'Ğ':'G'
+}
+var RUSSIAN_MAP = {
+    'а':'a', 'б':'b', 'в':'v', 'г':'g', 'д':'d', 'е':'e', 'ё':'yo', 'ж':'zh',
+    'з':'z', 'и':'i', 'й':'j', 'к':'k', 'л':'l', 'м':'m', 'н':'n', 'о':'o',
+    'п':'p', 'р':'r', 'с':'s', 'т':'t', 'у':'u', 'ф':'f', 'х':'h', 'ц':'c',
+    'ч':'ch', 'ш':'sh', 'щ':'sh', 'ъ':'', 'ы':'y', 'ь':'', 'э':'e', 'ю':'yu',
+    'я':'ya',
+    'А':'A', 'Б':'B', 'В':'V', 'Г':'G', 'Д':'D', 'Е':'E', 'Ё':'Yo', 'Ж':'Zh',
+    'З':'Z', 'И':'I', 'Й':'J', 'К':'K', 'Л':'L', 'М':'M', 'Н':'N', 'О':'O',
+    'П':'P', 'Р':'R', 'С':'S', 'Т':'T', 'У':'U', 'Ф':'F', 'Х':'H', 'Ц':'C',
+    'Ч':'Ch', 'Ш':'Sh', 'Щ':'Sh', 'Ъ':'', 'Ы':'Y', 'Ь':'', 'Э':'E', 'Ю':'Yu',
+    'Я':'Ya'
+}
+var UKRAINIAN_MAP = {
+    'Є':'Ye', 'І':'I', 'Ї':'Yi', 'Ґ':'G', 'є':'ye', 'і':'i', 'ї':'yi', 'ґ':'g'
+}
+var CZECH_MAP = {
+    'č':'c', 'ď':'d', 'ě':'e', 'ň': 'n', 'ř':'r', 'š':'s', 'ť':'t', 'ů':'u',
+    'ž':'z', 'Č':'C', 'Ď':'D', 'Ě':'E', 'Ň': 'N', 'Ř':'R', 'Š':'S', 'Ť':'T',
+    'Ů':'U', 'Ž':'Z'
+}
+
+var POLISH_MAP = {
+    'ą':'a', 'ć':'c', 'ę':'e', 'ł':'l', 'ń':'n', 'ó':'o', 'ś':'s', 'ź':'z',
+    'ż':'z', 'Ą':'A', 'Ć':'C', 'Ę':'e', 'Ł':'L', 'Ń':'N', 'Ó':'o', 'Ś':'S',
+    'Ź':'Z', 'Ż':'Z'
+}
+
+var LATVIAN_MAP = {
+    'ā':'a', 'č':'c', 'ē':'e', 'ģ':'g', 'ī':'i', 'ķ':'k', 'ļ':'l', 'ņ':'n',
+    'š':'s', 'ū':'u', 'ž':'z', 'Ā':'A', 'Č':'C', 'Ē':'E', 'Ģ':'G', 'Ī':'i',
+    'Ķ':'k', 'Ļ':'L', 'Ņ':'N', 'Š':'S', 'Ū':'u', 'Ž':'Z'
+}
+
+var ALL_DOWNCODE_MAPS=new Array()
+ALL_DOWNCODE_MAPS[0]=LATIN_MAP
+ALL_DOWNCODE_MAPS[1]=LATIN_SYMBOLS_MAP
+ALL_DOWNCODE_MAPS[2]=GREEK_MAP
+ALL_DOWNCODE_MAPS[3]=TURKISH_MAP
+ALL_DOWNCODE_MAPS[4]=RUSSIAN_MAP
+ALL_DOWNCODE_MAPS[5]=UKRAINIAN_MAP
+ALL_DOWNCODE_MAPS[6]=CZECH_MAP
+ALL_DOWNCODE_MAPS[7]=POLISH_MAP
+ALL_DOWNCODE_MAPS[8]=LATVIAN_MAP
+
+var Downcoder = new Object();
+Downcoder.Initialize = function()
+{
+    if (Downcoder.map) // already made
+        return ;
+    Downcoder.map ={}
+    Downcoder.chars = '' ;
+    for(var i in ALL_DOWNCODE_MAPS)
+    {
+        var lookup = ALL_DOWNCODE_MAPS[i]
+        for (var c in lookup)
+        {
+            Downcoder.map[c] = lookup[c] ;
+            Downcoder.chars += c ;
+        }
+    }
+    Downcoder.regex = new RegExp('[' + Downcoder.chars + ']|[^' + Downcoder.chars + ']+','g') ;
+}
+
+downcode= function( slug )
+{
+    Downcoder.Initialize() ;
+    var downcoded =""
+    var pieces = slug.match(Downcoder.regex);
+    if(pieces)
+    {
+        for (var i = 0 ; i < pieces.length ; i++)
+        {
+            if (pieces[i].length == 1)
+            {
+                var mapped = Downcoder.map[pieces[i]] ;
+                if (mapped != null)
+                {
+                    downcoded+=mapped;
+                    continue ;
+                }
+            }
+            downcoded+=pieces[i];
+        }
+    }
+    else
+    {
+        downcoded = slug;
+    }
+    return downcoded;
+}
+
+
+function URLify(s, num_chars) {
+    // changes, e.g., "Petty theft" to "petty_theft"
+    // remove all these words from the string before urlifying
+    s = downcode(s);
+    removelist = ["a", "an", "as", "at", "before", "but", "by", "for", "from",
+                  "is", "in", "into", "like", "of", "off", "on", "onto", "per",
+                  "since", "than", "the", "this", "that", "to", "up", "via",
+                  "with"];
+    r = new RegExp('\\b(' + removelist.join('|') + ')\\b', 'gi');
+    s = s.replace(r, '');
+    // if downcode doesn't hit, the char will be stripped here
+    s = s.replace(/[^-\w\s]/g, ''); // remove unneeded chars
+    s = s.replace(/^\s+|\s+$/g, ''); // trim leading/trailing spaces
+    s = s.replace(/[-\s]+/g, '-'); // convert spaces to hyphens
+    s = s.toLowerCase(); // convert to lowercase
+    return s.substring(0, num_chars);// trim to first num_chars chars
+}
+
diff --git a/media/js/wiki.js b/media/js/wiki.js
new file mode 100644
index 000000000..738492cd1
--- /dev/null
+++ b/media/js/wiki.js
@@ -0,0 +1,18 @@
+(function () {
+    var fields = {
+        title: {
+            id: '#id_slug',
+            dependency_ids: ['#id_title'],
+            dependency_list: ['#id_title'],
+            maxLength: 50
+        }
+    }, field = null;
+
+    for (i in fields) {
+        field = fields[i];
+        $('#id_slug').addClass('prepopulated_field');
+        $(field.id).data('dependency_list', field['dependency_list'])
+                   .prepopulate($(field['dependency_ids'].join(',')),
+                                field.maxLength);
+    };
+}());
diff --git a/migrations/35-document-slug.sql b/migrations/35-document-slug.sql
new file mode 100644
index 000000000..2f5f30e31
--- /dev/null
+++ b/migrations/35-document-slug.sql
@@ -0,0 +1,3 @@
+ALTER TABLE `wiki_document` ADD `slug` varchar(255) NOT NULL;
+CREATE INDEX `wiki_document_slug` ON `wiki_document` (`slug`);
+CREATE UNIQUE INDEX `slug` ON `wiki_document` (`slug`,`locale`);
diff --git a/settings.py b/settings.py
index cebde54ea..93c7c282d 100644
--- a/settings.py
+++ b/settings.py
@@ -305,6 +305,9 @@ MINIFY_BUNDLES = {
             'js/markup.js',
         ),
         'wiki': (
+            'js/libs/django/urlify.js',
+            'js/libs/django/prepopulate.js',
+            'js/wiki.js',
         ),
     },
 }