зеркало из https://github.com/mozilla/bedrock.git
Upgrade south to 0.8.4
Use bug_list and bug_search_url fields in template Add jingo_markdown Upgrade RNA (4ae910da) Add firefox.system_requirements view using RNA Use latest RNA as submodule instead of pip install Adjust views, settings, templates to use latest RNA Restore lib/l10n_utils/helpers.py and tests from rebase mistake Move django-extensions to submodule Cleanup old rna egg-info Move django-rest-framework to submodule Move requests to submodule Move Python-Markdown to submodule Move jingo-markdown to submodule Move six to submodule Test RNA views + flake8 fixes Fix flake8 Try to fix Jenkins test breakage Clean up templates Cleanup template some more Reword test docstring and add comments Update requiremnts/prod.txt to match submodules Use settings.LANGUAGE_CODE for default locale Try to make Jenkins happy
This commit is contained in:
Родитель
c9ca7fb5c9
Коммит
c8c7e1901e
|
@ -19,3 +19,24 @@
|
|||
[submodule "vendor-local/src/django-waffle"]
|
||||
path = vendor-local/src/django-waffle
|
||||
url = https://github.com/jsocol/django-waffle.git
|
||||
[submodule "vendor-local/src/rna"]
|
||||
path = vendor-local/src/rna
|
||||
url = git://github.com/mozilla/rna
|
||||
[submodule "vendor-local/src/django-extensions"]
|
||||
path = vendor-local/src/django-extensions
|
||||
url = https://github.com/django-extensions/django-extensions.git
|
||||
[submodule "vendor-local/src/django-rest-framework"]
|
||||
path = vendor-local/src/django-rest-framework
|
||||
url = https://github.com/tomchristie/django-rest-framework.git
|
||||
[submodule "vendor-local/src/requests"]
|
||||
path = vendor-local/src/requests
|
||||
url = https://github.com/kennethreitz/requests.git
|
||||
[submodule "vendor-local/src/Python-Markdown"]
|
||||
path = vendor-local/src/Python-Markdown
|
||||
url = https://github.com/waylan/Python-Markdown.git
|
||||
[submodule "vendor-local/src/jingo-markdown"]
|
||||
path = vendor-local/src/jingo-markdown
|
||||
url = https://github.com/monkeywarrior/jingo-markdown.git
|
||||
[submodule "vendor-local/src/six"]
|
||||
path = vendor-local/src/six
|
||||
url = https://github.com/kelp404/six.git
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/. #}
|
||||
|
||||
{% extends "firefox/fxos-base.html" %}
|
||||
{% extends "firefox/base-resp.html" %}
|
||||
|
||||
{% block page_title_prefix %}{% endblock %}
|
||||
{% block page_title %}{{ _('Firefox — {version} Notes')|f(version=version) }}{% endblock %}
|
||||
|
@ -15,8 +15,6 @@
|
|||
{{ css('firefox_releasenotes') }}
|
||||
{% endblock %}
|
||||
|
||||
{% block extrahead %}{% endblock %}
|
||||
|
||||
{% block content %}
|
||||
<header id="main-feature">
|
||||
<h1>{{ _('Firefox {version} Notes')|f(version=version) }}</h1>
|
||||
|
@ -25,14 +23,23 @@
|
|||
|
||||
<p>
|
||||
{% trans feedback='https://input.mozilla.org/feedback',
|
||||
bugzilla='https://bugzilla.mozilla.org/',
|
||||
buglist='link-to-bugzilla-search' %}
|
||||
bugzilla='https://bugzilla.mozilla.org/' %}
|
||||
As always, you’re encouraged to
|
||||
<a href="{{ feedback }}">tell us what you think</a>,
|
||||
or <a href="{{ bugzilla }}">file a bug in Bugzilla</a>.
|
||||
If interested, please see the <a href="{{ buglist }}">complete list of changes</a>
|
||||
in this release.
|
||||
{% endtrans %}
|
||||
{% if release.bug_search_url %}
|
||||
{% trans bug_search_url=release.bug_search_url %}
|
||||
If interested, please see the <a href="{{ bug_search_url }}">complete list of changes</a>
|
||||
in this release.
|
||||
{% endtrans %}
|
||||
{% elif release.bug_list %}
|
||||
<form action="https://bugzilla.mozilla.org/buglist.cgi" method="post">
|
||||
<input type="hidden" name="limit" value="0">
|
||||
<input type="hidden" name="bug_id" value="{{ release.bug_list }}">
|
||||
<p>{{ _('If interested, please see the <input type="submit" value="complete list of changes"> in this release.') }}</p>
|
||||
</form>
|
||||
{% endif %}
|
||||
</p>
|
||||
</header>
|
||||
|
||||
|
@ -42,12 +49,12 @@
|
|||
<h3>{{ _('New Features') }}</h3>
|
||||
<ul class="section-items tagged">
|
||||
{% for note in new_features %}
|
||||
<li {% if not note.tag.text %}class="untagged"{% endif %}>
|
||||
<li {% if not note.tag %}class="untagged"{% endif %}>
|
||||
<p>
|
||||
{% if note.tag.text %}
|
||||
<b class="tag tag-{{ note.tag.text.lower() }}">{{ note.tag.text }}</b>
|
||||
{% if note.tag %}
|
||||
<b class="tag tag-{{ note.tag.lower() }}">{{ note.tag }}</b>
|
||||
{% endif %}
|
||||
{{ note.html|safe }}
|
||||
{{ note.note|markdown|safe }}
|
||||
</p>
|
||||
</li>
|
||||
{% endfor %}
|
||||
|
@ -61,12 +68,12 @@
|
|||
<li>
|
||||
<p>
|
||||
<b class="tag tag-unresolved">{{ _('unresolved') }}</b>
|
||||
{{ note.html|safe }}
|
||||
{{ note.note|markdown|safe }}
|
||||
</p>
|
||||
{% if not note.fixed_in_version is none %}
|
||||
{% if note.fixed_in_release %}
|
||||
<p class="note">
|
||||
<a href="{{ url('firefox.releasenotes', '{major_version}.0'|f(major_version=note.fixed_in_version)) }}">
|
||||
{{ _('Resolved in v{version_number}')|f(version_number=note.fixed_in_version) }}
|
||||
<a href="{{ url('firefox.releasenotes', '{major_version}.0'|f(major_version=note.fixed_in_release.version)) }}">
|
||||
{{ _('Resolved in v{version_number}')|f(version_number=note.fixed_in_release.version) }}
|
||||
</a>
|
||||
</p>
|
||||
{% endif %}
|
||||
|
@ -81,8 +88,8 @@
|
|||
<section id="try">
|
||||
<h3>{{ _('Try Firefox') }}</h3>
|
||||
<ol>
|
||||
<li class="try1"><a href="/firefox/26.0/system-requirements/">{{ _('Check') }}</a></li>
|
||||
<li class="try2"><a href="/firefox/">{{ _('Download') }}</a></li>
|
||||
<li class="try1"><a href="{{ url('firefox.system_requirements', version) }}">{{ _('Check') }}</a></li>
|
||||
<li class="try2"><a href="{{ url('firefox') }}">{{ _('Download') }}</a></li>
|
||||
<li class="try3"><a href="https://support.mozilla.org/kb/Installing%20Firefox">{{ _('Install') }}</a></li>
|
||||
<li class="try4"><a href="https://addons.mozilla.org/firefox/">{{ _('Customize') }}</a></li>
|
||||
</ol>
|
||||
|
|
|
@ -24,7 +24,7 @@
|
|||
|
||||
<div id="main-content">
|
||||
<article class="main-column">
|
||||
{{ release.system_requirements|markdown()|safe }}
|
||||
{{ release.system_requirements|markdown|safe }}
|
||||
</article>
|
||||
</div>
|
||||
{% block sidebar %}{% endblock %}
|
||||
|
|
|
@ -1,49 +0,0 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
from datetime import datetime
|
||||
|
||||
import rna.models
|
||||
from factory import DjangoModelFactory, LazyAttribute, Sequence, SubFactory
|
||||
from factory.fuzzy import FuzzyNaiveDateTime, FuzzyInteger
|
||||
|
||||
|
||||
class ChannelFactory(DjangoModelFactory):
|
||||
FACTORY_FOR = rna.models.Channel
|
||||
name = Sequence(lambda n: 'Channel {0}'.format(n))
|
||||
|
||||
|
||||
class ProductFactory(DjangoModelFactory):
|
||||
FACTORY_FOR = rna.models.Product
|
||||
name = Sequence(lambda n: 'Product {0}'.format(n))
|
||||
text = Sequence(lambda n: 'Text {0}'.format(n))
|
||||
|
||||
|
||||
class ReleaseFactory(DjangoModelFactory):
|
||||
FACTORY_FOR = rna.models.Release
|
||||
product = SubFactory(ProductFactory)
|
||||
channel = SubFactory(ChannelFactory)
|
||||
version = FuzzyInteger(0)
|
||||
sub_version = 0
|
||||
release_date = FuzzyNaiveDateTime(datetime(2013, 12, 2))
|
||||
text = ''
|
||||
|
||||
|
||||
class TagFactory(DjangoModelFactory):
|
||||
FACTORY_FOR = rna.models.Tag
|
||||
text = Sequence(lambda n: 'Tag {0}'.format(n))
|
||||
sort_num = Sequence(lambda n: n)
|
||||
|
||||
|
||||
class NoteFactory(DjangoModelFactory):
|
||||
FACTORY_FOR = rna.models.Note
|
||||
bug = None
|
||||
html = '<p>Note!</p>'
|
||||
first_version = Sequence(lambda n: n)
|
||||
first_channel = SubFactory(ChannelFactory)
|
||||
fixed_in_version = LazyAttribute(lambda n: n.first_version + 2)
|
||||
fixed_in_channel = SubFactory(ChannelFactory)
|
||||
tag = SubFactory(TagFactory)
|
||||
product = SubFactory(ProductFactory)
|
||||
sort_num = Sequence(lambda n: n)
|
||||
fixed_in_subversion = None
|
|
@ -8,11 +8,10 @@ from nose.tools import eq_
|
|||
from rna.models import Release
|
||||
|
||||
from bedrock.firefox import views
|
||||
from bedrock.firefox.tests import NoteFactory, ProductFactory, ReleaseFactory
|
||||
from bedrock.mozorg.tests import TestCase
|
||||
|
||||
|
||||
class TestReleaseNotesView(TestCase):
|
||||
class TestRNAViews(TestCase):
|
||||
def setUp(self):
|
||||
self.factory = RequestFactory()
|
||||
self.request = self.factory.get('/')
|
||||
|
@ -31,83 +30,40 @@ class TestReleaseNotesView(TestCase):
|
|||
"""
|
||||
return self.mock_render.call_args[0][2]
|
||||
|
||||
def test_missing_minor_version(self):
|
||||
@patch('bedrock.firefox.views.get_object_or_404')
|
||||
def test_release_notes(self, get_object_or_404):
|
||||
"""
|
||||
If the minor version is missing in the URL, it should default to
|
||||
0.
|
||||
Should use release returned from get_object_or_404 with the
|
||||
correct params and pass the correct context variables and
|
||||
template to l10n_utils.render
|
||||
"""
|
||||
ReleaseFactory.create(version=18, sub_version=0, channel__name='Release',
|
||||
product__name='Firefox')
|
||||
mock_release = get_object_or_404.return_value
|
||||
mock_release.notes.return_value = ('mock new_features',
|
||||
'mock known_issues')
|
||||
|
||||
views.release_notes(self.request, '18.0')
|
||||
eq_(self.last_ctx['minor_version'], 0)
|
||||
views.release_notes(self.request, '27.0')
|
||||
# Should use fixed version for query
|
||||
get_object_or_404.assert_called_with(
|
||||
Release, version='27.0.0', channel='Release', product='Firefox')
|
||||
# Should use original version for context variable
|
||||
eq_(self.last_ctx['version'], '27.0')
|
||||
eq_(self.last_ctx['major_version'], '27')
|
||||
eq_(self.last_ctx['release'], mock_release)
|
||||
eq_(self.last_ctx['new_features'], 'mock new_features')
|
||||
eq_(self.last_ctx['known_issues'], 'mock known_issues')
|
||||
eq_(self.mock_render.call_args[0][1], 'firefox/releases/notes.html')
|
||||
|
||||
def test_no_release_404(self):
|
||||
@patch('bedrock.firefox.views.get_object_or_404')
|
||||
def test_system_requirements(self, get_object_or_404):
|
||||
"""
|
||||
Fetch the release using get_object_or_404, so that an Http404 is
|
||||
raised when the release isn't found.
|
||||
Should use release returned from get_object_or_404, with a
|
||||
default channel of Release and default product of Firefox,
|
||||
and pass the version to l10n_utils.render
|
||||
"""
|
||||
with patch('bedrock.firefox.views.get_object_or_404') as get_object_or_404:
|
||||
views.release_notes(self.request, '18.0')
|
||||
eq_(self.last_ctx['release'], get_object_or_404.return_value)
|
||||
get_object_or_404.assert_called_with(Release, version=18, sub_version=0,
|
||||
channel__name='Release', product__name='Firefox')
|
||||
|
||||
def test_note_first_version_not_fixed(self):
|
||||
"""
|
||||
If a note started on or before the current version, and has yet
|
||||
to be fixed, include it in known issues.
|
||||
"""
|
||||
product = ProductFactory.create(name='Firefox')
|
||||
ReleaseFactory.create(version=19, sub_version=0, channel__name='Release', product=product)
|
||||
note1 = NoteFactory.create(first_version=18, fixed_in_version=None, product=product)
|
||||
note2 = NoteFactory.create(first_version=19, fixed_in_version=None, product=product)
|
||||
|
||||
views.release_notes(self.request, '19.0')
|
||||
eq_(set([note1, note2]), set(self.last_ctx['known_issues']))
|
||||
|
||||
def test_note_first_version_fixed(self):
|
||||
"""
|
||||
If a note started on or before the current version, and was
|
||||
fixed on this version, include it in new features. If it was
|
||||
fixed after this version, include it in known issues.
|
||||
"""
|
||||
product = ProductFactory.create(name='Firefox')
|
||||
ReleaseFactory.create(version=19, sub_version=0, channel__name='Release', product=product)
|
||||
note1 = NoteFactory.create(first_version=18, fixed_in_version=19, product=product)
|
||||
note2 = NoteFactory.create(first_version=19, fixed_in_version=19, product=product)
|
||||
note3 = NoteFactory.create(first_version=18, fixed_in_version=20, product=product)
|
||||
note4 = NoteFactory.create(first_version=19, fixed_in_version=20, product=product)
|
||||
|
||||
views.release_notes(self.request, '19.0')
|
||||
eq_(set([note1, note2]), set(self.last_ctx['new_features']))
|
||||
eq_(set([note3, note4]), set(self.last_ctx['known_issues']))
|
||||
|
||||
def test_note_fixed_no_first_version(self):
|
||||
"""
|
||||
If a note started has no first version but was fixed in the
|
||||
current version, include it in new features.
|
||||
"""
|
||||
product = ProductFactory.create(name='Firefox')
|
||||
ReleaseFactory.create(version=19, sub_version=0, channel__name='Release', product=product)
|
||||
note1 = NoteFactory.create(first_version=None, fixed_in_version=19, product=product)
|
||||
|
||||
views.release_notes(self.request, '19.0')
|
||||
eq_(set([note1]), set(self.last_ctx['new_features']))
|
||||
|
||||
def test_note_fixed_product_name(self):
|
||||
"""
|
||||
If a note does has no product or "Firefox" as the product,
|
||||
include it.
|
||||
"""
|
||||
product = ProductFactory.create(name='Firefox')
|
||||
ReleaseFactory.create(version=19, sub_version=0, channel__name='Release', product=product)
|
||||
note1 = NoteFactory.create(first_version=None, fixed_in_version=19, product=product)
|
||||
note2 = NoteFactory.create(first_version=None, fixed_in_version=19, product=None)
|
||||
|
||||
# Notes that shouldn't appear.
|
||||
NoteFactory.create(first_version=None, fixed_in_version=19, product__name='Fennec')
|
||||
NoteFactory.create(first_version=None, fixed_in_version=19, product__name='FirefoxOS')
|
||||
|
||||
views.release_notes(self.request, '19.0')
|
||||
eq_(set([note1, note2]), set(self.last_ctx['new_features']))
|
||||
views.system_requirements(self.request, '27.0.1')
|
||||
get_object_or_404.assert_called_with(
|
||||
Release, version='27.0.1', channel='Release', product='Firefox')
|
||||
eq_(self.last_ctx['release'], get_object_or_404.return_value)
|
||||
eq_(self.last_ctx['version'], '27.0.1')
|
||||
eq_(self.mock_render.call_args[0][1],
|
||||
'firefox/releases/system_requirements.html')
|
||||
|
|
|
@ -16,6 +16,7 @@ whatsnew_re = latest_re % (version_re, 'whatsnew')
|
|||
product_re = '(?P<product>firefox|mobile)'
|
||||
channel_re = '(?P<channel>beta|aurora|organizations)'
|
||||
releasenotes_re = latest_re % (version_re, 'releasenotes')
|
||||
sysreq_re = latest_re % (version_re, 'releasenotes/system-requirements')
|
||||
|
||||
|
||||
urlpatterns = patterns('',
|
||||
|
@ -80,8 +81,10 @@ urlpatterns = patterns('',
|
|||
page('mwc', 'firefox/os/mwc-2014-preview.html'),
|
||||
page('firefox/os/devices', 'firefox/os/devices.html'),
|
||||
|
||||
url(releasenotes_re, views.release_notes, name='firefox.releasenotes'),
|
||||
|
||||
# temporary URL for Aurora 29 survey
|
||||
page('firefox/aurora/up-to-date', 'firefox/whatsnew-aurora-29-survey.html'),
|
||||
|
||||
url(releasenotes_re, views.release_notes, name='firefox.releasenotes'),
|
||||
url(sysreq_re, views.system_requirements,
|
||||
name='firefox.system_requirements'),
|
||||
)
|
||||
|
|
|
@ -410,32 +410,36 @@ class WhatsnewView(LatestFxView):
|
|||
return template
|
||||
|
||||
|
||||
def release_notes(request, version, channel='Release', product='Firefox'):
|
||||
if len(version.split('.')) == 2:
|
||||
query_version = version + '.0'
|
||||
def fix_fx_version(fx_version):
|
||||
if len(fx_version.split('.')) == 2:
|
||||
return fx_version + '.0'
|
||||
else:
|
||||
query_version = version
|
||||
release = get_object_or_404(Release, version=query_version,
|
||||
channel=channel, product=product)
|
||||
return fx_version
|
||||
|
||||
|
||||
def release_notes_template(channel, product):
|
||||
#TODO: if product == 'Firefox OS':
|
||||
#TODO: different templates based on channel with default
|
||||
return 'firefox/releases/notes.html'
|
||||
|
||||
|
||||
def release_notes(request, fx_version, channel='Release', product='Firefox'):
|
||||
release = get_object_or_404(Release, version=fix_fx_version(fx_version),
|
||||
channel=channel, product=product)
|
||||
new_features, known_issues = release.notes()
|
||||
return l10n_utils.render(
|
||||
request, 'firefox/releases/notes.html', {
|
||||
'version': version,
|
||||
'major_version': version.split('.', 1)[0],
|
||||
request, release_notes_template(channel, product), {
|
||||
'version': fx_version,
|
||||
'major_version': fx_version.split('.', 1)[0],
|
||||
'release': release,
|
||||
'new_features': new_features,
|
||||
'known_issues': known_issues})
|
||||
|
||||
|
||||
def system_requirements(request, version, channel='Release',
|
||||
def system_requirements(request, fx_version, channel='Release',
|
||||
product='Firefox'):
|
||||
if len(version.split('.')) == 2:
|
||||
query_version = version + '.0'
|
||||
else:
|
||||
query_version = version
|
||||
release = get_object_or_404(Release, version=query_version,
|
||||
release = get_object_or_404(Release, version=fix_fx_version(fx_version),
|
||||
channel=channel, product=product)
|
||||
return l10n_utils.render(
|
||||
request, 'firefox/releases/system_requirements.html',
|
||||
{'release': release, 'version': version})
|
||||
{'release': release, 'version': fx_version})
|
||||
|
|
|
@ -82,7 +82,8 @@ def JINJA_CONFIG():
|
|||
'extensions': [
|
||||
'lib.l10n_utils.template.i18n', 'jinja2.ext.do', 'jinja2.ext.with_',
|
||||
'jinja2.ext.loopcontrols', 'lib.l10n_utils.template.l10n_blocks',
|
||||
'lib.l10n_utils.template.lang_blocks'
|
||||
'lib.l10n_utils.template.lang_blocks',
|
||||
'jingo_markdown.extensions.MarkdownExtension',
|
||||
],
|
||||
# Make None in templates render as ''
|
||||
'finalize': lambda x: x if x is not None else '',
|
||||
|
@ -758,6 +759,7 @@ INSTALLED_APPS = get_apps(exclude=(
|
|||
'djcelery',
|
||||
), append=(
|
||||
# Local apps
|
||||
'jingo_markdown',
|
||||
'jingo_minify',
|
||||
'django_statsd',
|
||||
'waffle',
|
||||
|
@ -979,5 +981,7 @@ USE_GRUNT_LIVERELOAD = False
|
|||
# Publishing system config
|
||||
RNA = {
|
||||
'BASE_URL': os.environ.get('RNA_BASE_URL', 'https://nucleus.mozilla.org/rna/'),
|
||||
'LEGACY_API': os.environ.get('RNA_LEGACY_API', False),
|
||||
|
||||
# default False as temporary workaround for bug 973499
|
||||
'VERIFY_SSL_CERT': os.environ.get('VERIFY_SSL_CERT', False),
|
||||
}
|
||||
|
|
|
@ -4,8 +4,11 @@
|
|||
|
||||
import jingo
|
||||
import jinja2
|
||||
from babel.core import Locale, UnknownLocaleError
|
||||
from babel.dates import format_date
|
||||
|
||||
from django.conf import settings
|
||||
from django.utils.translation import get_language
|
||||
|
||||
from dotlang import translate, lang_file_has_tag
|
||||
|
||||
|
@ -70,3 +73,24 @@ def l10n_has_tag(ctx, tag, langfile=None):
|
|||
"""Return boolean whether the given lang file has the given tag."""
|
||||
langfile = langfile or ctx.get('langfile')
|
||||
return lang_file_has_tag(langfile, tag=tag)
|
||||
|
||||
|
||||
def current_locale():
|
||||
"""
|
||||
Return the current Locale object (from Babel). Defaults to locale
|
||||
based on settings.LANGUAGE_CODE if locale does not exist.
|
||||
"""
|
||||
try:
|
||||
return Locale.parse(get_language(), sep='-')
|
||||
except (UnknownLocaleError, ValueError):
|
||||
return Locale(*settings.LANGUAGE_CODE.split('-'))
|
||||
|
||||
|
||||
@jingo.register.filter
|
||||
def l10n_format_date(date, format='long'):
|
||||
"""
|
||||
Formats a date according to the current locale. Wraps around
|
||||
babel.dates.format_date.
|
||||
"""
|
||||
locale = current_locale()
|
||||
return format_date(date, locale=locale, format=format)
|
||||
|
|
|
@ -3,11 +3,13 @@
|
|||
# This Source Code Form is subject to the terms of the Mozilla Public
|
||||
# License, v. 2.0. If a copy of the MPL was not distributed with this
|
||||
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
|
||||
|
||||
from mock import patch
|
||||
import jingo
|
||||
from babel.core import UnknownLocaleError
|
||||
from mock import patch
|
||||
from nose.tools import eq_
|
||||
|
||||
from bedrock.mozorg.tests import TestCase
|
||||
from l10n_utils import helpers
|
||||
|
||||
|
||||
def render(s, context=None):
|
||||
|
@ -15,18 +17,57 @@ def render(s, context=None):
|
|||
return t.render(context or {})
|
||||
|
||||
|
||||
@patch('lib.l10n_utils.helpers.lang_file_has_tag')
|
||||
class TestL10nHasTag(TestCase):
|
||||
def test_gets_right_langfile(self, lfht_mock):
|
||||
lfht_mock.return_value = True
|
||||
res = render('{{ "nihilist" if l10n_has_tag("abide") }}',
|
||||
{'langfile': 'dude'})
|
||||
self.assertEqual(res, 'nihilist')
|
||||
lfht_mock.assert_called_with('dude', tag='abide')
|
||||
def test_gets_right_langfile(self):
|
||||
with patch('lib.l10n_utils.helpers.lang_file_has_tag') as lfht_mock:
|
||||
lfht_mock.return_value = True
|
||||
res = render('{{ "nihilist" if l10n_has_tag("abide") }}',
|
||||
{'langfile': 'dude'})
|
||||
self.assertEqual(res, 'nihilist')
|
||||
lfht_mock.assert_called_with('dude', tag='abide')
|
||||
|
||||
def test_override_langfile(self, lfht_mock):
|
||||
lfht_mock.return_value = True
|
||||
res = render('{{ "nihilist" if l10n_has_tag("abide", "uli") }}',
|
||||
{'langfile': 'dude'})
|
||||
self.assertEqual(res, 'nihilist')
|
||||
lfht_mock.assert_called_with('uli', tag='abide')
|
||||
def test_override_langfile(self):
|
||||
with patch('lib.l10n_utils.helpers.lang_file_has_tag') as lfht_mock:
|
||||
lfht_mock.return_value = True
|
||||
res = render('{{ "nihilist" if l10n_has_tag("abide", "uli") }}',
|
||||
{'langfile': 'dude'})
|
||||
self.assertEqual(res, 'nihilist')
|
||||
lfht_mock.assert_called_with('uli', tag='abide')
|
||||
|
||||
|
||||
class TestCurrentLocale(TestCase):
|
||||
@patch('l10n_utils.helpers.Locale')
|
||||
def test_unknown_locale(self, Locale):
|
||||
"""
|
||||
If Locale.parse raises an UnknownLocaleError, return the en-US
|
||||
locale object.
|
||||
"""
|
||||
Locale.parse.side_effect = UnknownLocaleError('foo')
|
||||
eq_(helpers.current_locale(), Locale.return_value)
|
||||
Locale.assert_called_with('en', 'US')
|
||||
|
||||
@patch('l10n_utils.helpers.Locale')
|
||||
def test_value_error(self, Locale):
|
||||
"""
|
||||
If Locale.parse raises a ValueError, return the en-US locale
|
||||
object.
|
||||
"""
|
||||
Locale.parse.side_effect = ValueError
|
||||
eq_(helpers.current_locale(), Locale.return_value)
|
||||
Locale.assert_called_with('en', 'US')
|
||||
|
||||
@patch('l10n_utils.helpers.get_language')
|
||||
@patch('l10n_utils.helpers.Locale')
|
||||
def test_success(self, Locale, get_language):
|
||||
eq_(helpers.current_locale(), Locale.parse.return_value)
|
||||
Locale.parse.assert_called_with(get_language.return_value, sep='-')
|
||||
|
||||
|
||||
class TestL10nFormatDate(TestCase):
|
||||
@patch('l10n_utils.helpers.current_locale')
|
||||
@patch('l10n_utils.helpers.format_date')
|
||||
def test_success(self, format_date, current_locale):
|
||||
eq_(helpers.l10n_format_date('somedate', format='long'),
|
||||
format_date.return_value)
|
||||
format_date.assert_called_with(
|
||||
'somedate', locale=current_locale.return_value, format='long')
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
# Django 1.4.5 is also in vendor, but needs to be kept here so packages that
|
||||
# Django 1.4.8 is also in vendor, but needs to be kept here so packages that
|
||||
# depend on Django do not install a newer version
|
||||
Django==1.4.5
|
||||
Django==1.4.8
|
||||
|
||||
# Templates
|
||||
-e git://github.com/jbalogh/jingo.git#egg=jingo
|
||||
|
@ -29,4 +29,4 @@ python-memcached==1.53
|
|||
django-statsd-mozilla==0.3.5
|
||||
|
||||
# Publishing system
|
||||
git+git://github.com/mozilla/rna.git@2b9dd508197ab2ef2b74c02a16fa8ef6c3fbab0d
|
||||
git+git://github.com/mozilla/rna.git@39e9260428560a677f59db8d12dde8073f7fe7a9
|
||||
|
|
|
@ -1,22 +0,0 @@
|
|||
Metadata-Version: 1.1
|
||||
Name: django-extensions
|
||||
Version: 1.2.0
|
||||
Summary: Extensions for Django
|
||||
Home-page: http://github.com/django-extensions/django-extensions
|
||||
Author: Bas van Oostveen
|
||||
Author-email: v.oostveen@gmail.com
|
||||
License: New BSD License
|
||||
Description: django-extensions bundles several useful
|
||||
additions for Django projects. See the project page for more information:
|
||||
http://github.com/django-extensions/django-extensions
|
||||
Platform: any
|
||||
Classifier: Development Status :: 4 - Beta
|
||||
Classifier: Development Status :: 5 - Production/Stable
|
||||
Classifier: Environment :: Web Environment
|
||||
Classifier: Framework :: Django
|
||||
Classifier: Intended Audience :: Developers
|
||||
Classifier: License :: OSI Approved :: BSD License
|
||||
Classifier: Operating System :: OS Independent
|
||||
Classifier: Programming Language :: Python
|
||||
Classifier: Programming Language :: Python :: 3
|
||||
Classifier: Topic :: Utilities
|
|
@ -1,156 +0,0 @@
|
|||
LICENSE
|
||||
MANIFEST.in
|
||||
README.rst
|
||||
setup.cfg
|
||||
setup.py
|
||||
django_extensions/__init__.py
|
||||
django_extensions/future_1_5.py
|
||||
django_extensions/models.py
|
||||
django_extensions/settings.py
|
||||
django_extensions.egg-info/PKG-INFO
|
||||
django_extensions.egg-info/SOURCES.txt
|
||||
django_extensions.egg-info/dependency_links.txt
|
||||
django_extensions.egg-info/requires.txt
|
||||
django_extensions.egg-info/top_level.txt
|
||||
django_extensions/admin/__init__.py
|
||||
django_extensions/admin/widgets.py
|
||||
django_extensions/conf/app_template/__init__.py.tmpl
|
||||
django_extensions/conf/app_template/forms.py.tmpl
|
||||
django_extensions/conf/app_template/models.py.tmpl
|
||||
django_extensions/conf/app_template/urls.py.tmpl
|
||||
django_extensions/conf/app_template/views.py.tmpl
|
||||
django_extensions/conf/command_template/management/__init__.py.tmpl
|
||||
django_extensions/conf/command_template/management/commands/__init__.py.tmpl
|
||||
django_extensions/conf/command_template/management/commands/sample.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/__init__.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/sample.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/daily/__init__.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/hourly/__init__.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/monthly/__init__.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/weekly/__init__.py.tmpl
|
||||
django_extensions/conf/jobs_template/jobs/yearly/__init__.py.tmpl
|
||||
django_extensions/db/__init__.py
|
||||
django_extensions/db/models.py
|
||||
django_extensions/db/fields/__init__.py
|
||||
django_extensions/db/fields/encrypted.py
|
||||
django_extensions/db/fields/json.py
|
||||
django_extensions/jobs/__init__.py
|
||||
django_extensions/jobs/daily/__init__.py
|
||||
django_extensions/jobs/daily/cache_cleanup.py
|
||||
django_extensions/jobs/daily/daily_cleanup.py
|
||||
django_extensions/jobs/hourly/__init__.py
|
||||
django_extensions/jobs/monthly/__init__.py
|
||||
django_extensions/jobs/weekly/__init__.py
|
||||
django_extensions/jobs/yearly/__init__.py
|
||||
django_extensions/management/__init__.py
|
||||
django_extensions/management/base.py
|
||||
django_extensions/management/color.py
|
||||
django_extensions/management/jobs.py
|
||||
django_extensions/management/modelviz.py
|
||||
django_extensions/management/notebook_extension.py
|
||||
django_extensions/management/shells.py
|
||||
django_extensions/management/signals.py
|
||||
django_extensions/management/technical_response.py
|
||||
django_extensions/management/utils.py
|
||||
django_extensions/management/commands/__init__.py
|
||||
django_extensions/management/commands/clean_pyc.py
|
||||
django_extensions/management/commands/compile_pyc.py
|
||||
django_extensions/management/commands/create_app.py
|
||||
django_extensions/management/commands/create_command.py
|
||||
django_extensions/management/commands/create_jobs.py
|
||||
django_extensions/management/commands/describe_form.py
|
||||
django_extensions/management/commands/dumpscript.py
|
||||
django_extensions/management/commands/export_emails.py
|
||||
django_extensions/management/commands/find_template.py
|
||||
django_extensions/management/commands/generate_secret_key.py
|
||||
django_extensions/management/commands/graph_models.py
|
||||
django_extensions/management/commands/mail_debug.py
|
||||
django_extensions/management/commands/notes.py
|
||||
django_extensions/management/commands/passwd.py
|
||||
django_extensions/management/commands/pipchecker.py
|
||||
django_extensions/management/commands/print_settings.py
|
||||
django_extensions/management/commands/print_user_for_session.py
|
||||
django_extensions/management/commands/reset_db.py
|
||||
django_extensions/management/commands/runjob.py
|
||||
django_extensions/management/commands/runjobs.py
|
||||
django_extensions/management/commands/runprofileserver.py
|
||||
django_extensions/management/commands/runscript.py
|
||||
django_extensions/management/commands/runserver_plus.py
|
||||
django_extensions/management/commands/set_fake_emails.py
|
||||
django_extensions/management/commands/set_fake_passwords.py
|
||||
django_extensions/management/commands/shell_plus.py
|
||||
django_extensions/management/commands/show_templatetags.py
|
||||
django_extensions/management/commands/show_urls.py
|
||||
django_extensions/management/commands/sqlcreate.py
|
||||
django_extensions/management/commands/sqldiff.py
|
||||
django_extensions/management/commands/sync_media_s3.py
|
||||
django_extensions/management/commands/syncdata.py
|
||||
django_extensions/management/commands/unreferenced_files.py
|
||||
django_extensions/management/commands/update_permissions.py
|
||||
django_extensions/management/commands/validate_templates.py
|
||||
django_extensions/migrations/0001_empty.py
|
||||
django_extensions/migrations/__init__.py
|
||||
django_extensions/mongodb/__init__.py
|
||||
django_extensions/mongodb/models.py
|
||||
django_extensions/mongodb/fields/__init__.py
|
||||
django_extensions/mongodb/fields/encrypted.py
|
||||
django_extensions/mongodb/fields/json.py
|
||||
django_extensions/static/django_extensions/css/jquery.autocomplete.css
|
||||
django_extensions/static/django_extensions/img/indicator.gif
|
||||
django_extensions/static/django_extensions/js/jquery.ajaxQueue.js
|
||||
django_extensions/static/django_extensions/js/jquery.autocomplete.js
|
||||
django_extensions/static/django_extensions/js/jquery.bgiframe.min.js
|
||||
django_extensions/static/django_extensions/js/jquery.js
|
||||
django_extensions/templates/django_extensions/graph_models/body.html
|
||||
django_extensions/templates/django_extensions/graph_models/head.html
|
||||
django_extensions/templates/django_extensions/graph_models/rel.html
|
||||
django_extensions/templates/django_extensions/graph_models/tail.html
|
||||
django_extensions/templates/django_extensions/widgets/foreignkey_searchinput.html
|
||||
django_extensions/templatetags/__init__.py
|
||||
django_extensions/templatetags/highlighting.py
|
||||
django_extensions/templatetags/syntax_color.py
|
||||
django_extensions/templatetags/truncate_letters.py
|
||||
django_extensions/templatetags/widont.py
|
||||
django_extensions/tests/__init__.py
|
||||
django_extensions/tests/encrypted_fields.py
|
||||
django_extensions/tests/fields.py
|
||||
django_extensions/tests/json_field.py
|
||||
django_extensions/tests/management_command.py
|
||||
django_extensions/tests/models.py
|
||||
django_extensions/tests/test_dumpscript.py
|
||||
django_extensions/tests/urls.py
|
||||
django_extensions/tests/utils.py
|
||||
django_extensions/tests/uuid_field.py
|
||||
django_extensions/tests/management/__init__.py
|
||||
django_extensions/tests/management/commands/__init__.py
|
||||
django_extensions/tests/management/commands/error_raising_command.py
|
||||
django_extensions/utils/__init__.py
|
||||
django_extensions/utils/dia2django.py
|
||||
django_extensions/utils/text.py
|
||||
django_extensions/utils/validatingtemplatetags.py
|
||||
docs/AUTHORS
|
||||
docs/Makefile
|
||||
docs/admin_extensions.rst
|
||||
docs/command_extension_ideas.rst
|
||||
docs/command_extensions.rst
|
||||
docs/conf.py
|
||||
docs/create_app.rst
|
||||
docs/creating_release.txt
|
||||
docs/dumpscript.rst
|
||||
docs/export_emails.rst
|
||||
docs/field_extensions.rst
|
||||
docs/graph_models.rst
|
||||
docs/index.rst
|
||||
docs/installation_instructions.rst
|
||||
docs/jobs_scheduling.rst
|
||||
docs/model_extensions.rst
|
||||
docs/namespace_proposal.rst
|
||||
docs/print_settings.rst
|
||||
docs/runprofileserver.rst
|
||||
docs/runscript.rst
|
||||
docs/runserver_plus.rst
|
||||
docs/shell_plus.rst
|
||||
docs/sqlcreate.rst
|
||||
docs/sqldiff.rst
|
||||
docs/sync_media_s3.rst
|
||||
docs/validate_templates.rst
|
|
@ -1 +0,0 @@
|
|||
|
|
@ -1,220 +0,0 @@
|
|||
../django_extensions/__init__.py
|
||||
../django_extensions/future_1_5.py
|
||||
../django_extensions/models.py
|
||||
../django_extensions/settings.py
|
||||
../django_extensions/admin/__init__.py
|
||||
../django_extensions/admin/widgets.py
|
||||
../django_extensions/db/__init__.py
|
||||
../django_extensions/db/models.py
|
||||
../django_extensions/db/fields/__init__.py
|
||||
../django_extensions/db/fields/encrypted.py
|
||||
../django_extensions/db/fields/json.py
|
||||
../django_extensions/jobs/__init__.py
|
||||
../django_extensions/jobs/daily/__init__.py
|
||||
../django_extensions/jobs/daily/cache_cleanup.py
|
||||
../django_extensions/jobs/daily/daily_cleanup.py
|
||||
../django_extensions/jobs/hourly/__init__.py
|
||||
../django_extensions/jobs/monthly/__init__.py
|
||||
../django_extensions/jobs/weekly/__init__.py
|
||||
../django_extensions/jobs/yearly/__init__.py
|
||||
../django_extensions/management/__init__.py
|
||||
../django_extensions/management/base.py
|
||||
../django_extensions/management/color.py
|
||||
../django_extensions/management/jobs.py
|
||||
../django_extensions/management/modelviz.py
|
||||
../django_extensions/management/notebook_extension.py
|
||||
../django_extensions/management/shells.py
|
||||
../django_extensions/management/signals.py
|
||||
../django_extensions/management/technical_response.py
|
||||
../django_extensions/management/utils.py
|
||||
../django_extensions/management/commands/__init__.py
|
||||
../django_extensions/management/commands/clean_pyc.py
|
||||
../django_extensions/management/commands/compile_pyc.py
|
||||
../django_extensions/management/commands/create_app.py
|
||||
../django_extensions/management/commands/create_command.py
|
||||
../django_extensions/management/commands/create_jobs.py
|
||||
../django_extensions/management/commands/describe_form.py
|
||||
../django_extensions/management/commands/dumpscript.py
|
||||
../django_extensions/management/commands/export_emails.py
|
||||
../django_extensions/management/commands/find_template.py
|
||||
../django_extensions/management/commands/generate_secret_key.py
|
||||
../django_extensions/management/commands/graph_models.py
|
||||
../django_extensions/management/commands/mail_debug.py
|
||||
../django_extensions/management/commands/notes.py
|
||||
../django_extensions/management/commands/passwd.py
|
||||
../django_extensions/management/commands/pipchecker.py
|
||||
../django_extensions/management/commands/print_settings.py
|
||||
../django_extensions/management/commands/print_user_for_session.py
|
||||
../django_extensions/management/commands/reset_db.py
|
||||
../django_extensions/management/commands/runjob.py
|
||||
../django_extensions/management/commands/runjobs.py
|
||||
../django_extensions/management/commands/runprofileserver.py
|
||||
../django_extensions/management/commands/runscript.py
|
||||
../django_extensions/management/commands/runserver_plus.py
|
||||
../django_extensions/management/commands/set_fake_emails.py
|
||||
../django_extensions/management/commands/set_fake_passwords.py
|
||||
../django_extensions/management/commands/shell_plus.py
|
||||
../django_extensions/management/commands/show_templatetags.py
|
||||
../django_extensions/management/commands/show_urls.py
|
||||
../django_extensions/management/commands/sqlcreate.py
|
||||
../django_extensions/management/commands/sqldiff.py
|
||||
../django_extensions/management/commands/sync_media_s3.py
|
||||
../django_extensions/management/commands/syncdata.py
|
||||
../django_extensions/management/commands/unreferenced_files.py
|
||||
../django_extensions/management/commands/update_permissions.py
|
||||
../django_extensions/management/commands/validate_templates.py
|
||||
../django_extensions/migrations/0001_empty.py
|
||||
../django_extensions/migrations/__init__.py
|
||||
../django_extensions/mongodb/__init__.py
|
||||
../django_extensions/mongodb/models.py
|
||||
../django_extensions/mongodb/fields/__init__.py
|
||||
../django_extensions/mongodb/fields/encrypted.py
|
||||
../django_extensions/mongodb/fields/json.py
|
||||
../django_extensions/templatetags/__init__.py
|
||||
../django_extensions/templatetags/highlighting.py
|
||||
../django_extensions/templatetags/syntax_color.py
|
||||
../django_extensions/templatetags/truncate_letters.py
|
||||
../django_extensions/templatetags/widont.py
|
||||
../django_extensions/tests/__init__.py
|
||||
../django_extensions/tests/encrypted_fields.py
|
||||
../django_extensions/tests/fields.py
|
||||
../django_extensions/tests/json_field.py
|
||||
../django_extensions/tests/management_command.py
|
||||
../django_extensions/tests/models.py
|
||||
../django_extensions/tests/test_dumpscript.py
|
||||
../django_extensions/tests/urls.py
|
||||
../django_extensions/tests/utils.py
|
||||
../django_extensions/tests/uuid_field.py
|
||||
../django_extensions/tests/management/__init__.py
|
||||
../django_extensions/tests/management/commands/__init__.py
|
||||
../django_extensions/tests/management/commands/error_raising_command.py
|
||||
../django_extensions/utils/__init__.py
|
||||
../django_extensions/utils/dia2django.py
|
||||
../django_extensions/utils/text.py
|
||||
../django_extensions/utils/validatingtemplatetags.py
|
||||
../django_extensions/__init__.pyc
|
||||
../django_extensions/future_1_5.pyc
|
||||
../django_extensions/models.pyc
|
||||
../django_extensions/settings.pyc
|
||||
../django_extensions/admin/__init__.pyc
|
||||
../django_extensions/admin/widgets.pyc
|
||||
../django_extensions/db/__init__.pyc
|
||||
../django_extensions/db/models.pyc
|
||||
../django_extensions/db/fields/__init__.pyc
|
||||
../django_extensions/db/fields/encrypted.pyc
|
||||
../django_extensions/db/fields/json.pyc
|
||||
../django_extensions/jobs/__init__.pyc
|
||||
../django_extensions/jobs/daily/__init__.pyc
|
||||
../django_extensions/jobs/daily/cache_cleanup.pyc
|
||||
../django_extensions/jobs/daily/daily_cleanup.pyc
|
||||
../django_extensions/jobs/hourly/__init__.pyc
|
||||
../django_extensions/jobs/monthly/__init__.pyc
|
||||
../django_extensions/jobs/weekly/__init__.pyc
|
||||
../django_extensions/jobs/yearly/__init__.pyc
|
||||
../django_extensions/management/__init__.pyc
|
||||
../django_extensions/management/base.pyc
|
||||
../django_extensions/management/color.pyc
|
||||
../django_extensions/management/jobs.pyc
|
||||
../django_extensions/management/modelviz.pyc
|
||||
../django_extensions/management/notebook_extension.pyc
|
||||
../django_extensions/management/shells.pyc
|
||||
../django_extensions/management/signals.pyc
|
||||
../django_extensions/management/technical_response.pyc
|
||||
../django_extensions/management/utils.pyc
|
||||
../django_extensions/management/commands/__init__.pyc
|
||||
../django_extensions/management/commands/clean_pyc.pyc
|
||||
../django_extensions/management/commands/compile_pyc.pyc
|
||||
../django_extensions/management/commands/create_app.pyc
|
||||
../django_extensions/management/commands/create_command.pyc
|
||||
../django_extensions/management/commands/create_jobs.pyc
|
||||
../django_extensions/management/commands/describe_form.pyc
|
||||
../django_extensions/management/commands/dumpscript.pyc
|
||||
../django_extensions/management/commands/export_emails.pyc
|
||||
../django_extensions/management/commands/find_template.pyc
|
||||
../django_extensions/management/commands/generate_secret_key.pyc
|
||||
../django_extensions/management/commands/graph_models.pyc
|
||||
../django_extensions/management/commands/mail_debug.pyc
|
||||
../django_extensions/management/commands/notes.pyc
|
||||
../django_extensions/management/commands/passwd.pyc
|
||||
../django_extensions/management/commands/pipchecker.pyc
|
||||
../django_extensions/management/commands/print_settings.pyc
|
||||
../django_extensions/management/commands/print_user_for_session.pyc
|
||||
../django_extensions/management/commands/reset_db.pyc
|
||||
../django_extensions/management/commands/runjob.pyc
|
||||
../django_extensions/management/commands/runjobs.pyc
|
||||
../django_extensions/management/commands/runprofileserver.pyc
|
||||
../django_extensions/management/commands/runscript.pyc
|
||||
../django_extensions/management/commands/runserver_plus.pyc
|
||||
../django_extensions/management/commands/set_fake_emails.pyc
|
||||
../django_extensions/management/commands/set_fake_passwords.pyc
|
||||
../django_extensions/management/commands/shell_plus.pyc
|
||||
../django_extensions/management/commands/show_templatetags.pyc
|
||||
../django_extensions/management/commands/show_urls.pyc
|
||||
../django_extensions/management/commands/sqlcreate.pyc
|
||||
../django_extensions/management/commands/sqldiff.pyc
|
||||
../django_extensions/management/commands/sync_media_s3.pyc
|
||||
../django_extensions/management/commands/syncdata.pyc
|
||||
../django_extensions/management/commands/unreferenced_files.pyc
|
||||
../django_extensions/management/commands/update_permissions.pyc
|
||||
../django_extensions/management/commands/validate_templates.pyc
|
||||
../django_extensions/migrations/0001_empty.pyc
|
||||
../django_extensions/migrations/__init__.pyc
|
||||
../django_extensions/mongodb/__init__.pyc
|
||||
../django_extensions/mongodb/models.pyc
|
||||
../django_extensions/mongodb/fields/__init__.pyc
|
||||
../django_extensions/mongodb/fields/encrypted.pyc
|
||||
../django_extensions/mongodb/fields/json.pyc
|
||||
../django_extensions/templatetags/__init__.pyc
|
||||
../django_extensions/templatetags/highlighting.pyc
|
||||
../django_extensions/templatetags/syntax_color.pyc
|
||||
../django_extensions/templatetags/truncate_letters.pyc
|
||||
../django_extensions/templatetags/widont.pyc
|
||||
../django_extensions/tests/__init__.pyc
|
||||
../django_extensions/tests/encrypted_fields.pyc
|
||||
../django_extensions/tests/fields.pyc
|
||||
../django_extensions/tests/json_field.pyc
|
||||
../django_extensions/tests/management_command.pyc
|
||||
../django_extensions/tests/models.pyc
|
||||
../django_extensions/tests/test_dumpscript.pyc
|
||||
../django_extensions/tests/urls.pyc
|
||||
../django_extensions/tests/utils.pyc
|
||||
../django_extensions/tests/uuid_field.pyc
|
||||
../django_extensions/tests/management/__init__.pyc
|
||||
../django_extensions/tests/management/commands/__init__.pyc
|
||||
../django_extensions/tests/management/commands/error_raising_command.pyc
|
||||
../django_extensions/utils/__init__.pyc
|
||||
../django_extensions/utils/dia2django.pyc
|
||||
../django_extensions/utils/text.pyc
|
||||
../django_extensions/utils/validatingtemplatetags.pyc
|
||||
../django_extensions/conf/app_template/__init__.py.tmpl
|
||||
../django_extensions/conf/app_template/forms.py.tmpl
|
||||
../django_extensions/conf/app_template/models.py.tmpl
|
||||
../django_extensions/conf/app_template/urls.py.tmpl
|
||||
../django_extensions/conf/app_template/views.py.tmpl
|
||||
../django_extensions/conf/command_template/management/__init__.py.tmpl
|
||||
../django_extensions/conf/command_template/management/commands/__init__.py.tmpl
|
||||
../django_extensions/conf/command_template/management/commands/sample.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/__init__.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/sample.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/daily/__init__.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/hourly/__init__.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/monthly/__init__.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/weekly/__init__.py.tmpl
|
||||
../django_extensions/conf/jobs_template/jobs/yearly/__init__.py.tmpl
|
||||
../django_extensions/static/django_extensions/css/jquery.autocomplete.css
|
||||
../django_extensions/static/django_extensions/img/indicator.gif
|
||||
../django_extensions/static/django_extensions/js/jquery.ajaxQueue.js
|
||||
../django_extensions/static/django_extensions/js/jquery.autocomplete.js
|
||||
../django_extensions/static/django_extensions/js/jquery.bgiframe.min.js
|
||||
../django_extensions/static/django_extensions/js/jquery.js
|
||||
../django_extensions/templates/django_extensions/graph_models/body.html
|
||||
../django_extensions/templates/django_extensions/graph_models/head.html
|
||||
../django_extensions/templates/django_extensions/graph_models/rel.html
|
||||
../django_extensions/templates/django_extensions/graph_models/tail.html
|
||||
../django_extensions/templates/django_extensions/widgets/foreignkey_searchinput.html
|
||||
./
|
||||
dependency_links.txt
|
||||
PKG-INFO
|
||||
requires.txt
|
||||
SOURCES.txt
|
||||
top_level.txt
|
|
@ -1 +0,0 @@
|
|||
six
|
|
@ -1 +0,0 @@
|
|||
django_extensions
|
|
@ -1,13 +0,0 @@
|
|||
|
||||
VERSION = (1, 2, 0)
|
||||
|
||||
# Dynamically calculate the version based on VERSION tuple
|
||||
if len(VERSION) > 2 and VERSION[2] is not None:
|
||||
if isinstance(VERSION[2], int):
|
||||
str_version = "%s.%s.%s" % VERSION[:3]
|
||||
else:
|
||||
str_version = "%s.%s_%s" % VERSION[:3]
|
||||
else:
|
||||
str_version = "%s.%s" % VERSION[:2]
|
||||
|
||||
__version__ = str_version
|
|
@ -1,145 +0,0 @@
|
|||
#
|
||||
# Autocomplete feature for admin panel
|
||||
#
|
||||
# Most of the code has been written by Jannis Leidel and was updated a bit
|
||||
# for django_extensions.
|
||||
# http://jannisleidel.com/2008/11/autocomplete-form-widget-foreignkey-model-fields/
|
||||
#
|
||||
# to_string_function, Satchmo adaptation and some comments added by emes
|
||||
# (Michal Salaban)
|
||||
#
|
||||
|
||||
import six
|
||||
import operator
|
||||
from six.moves import reduce
|
||||
from django.http import HttpResponse, HttpResponseNotFound
|
||||
from django.db import models
|
||||
from django.db.models.query import QuerySet
|
||||
from django.utils.encoding import smart_str
|
||||
from django.utils.translation import ugettext as _
|
||||
from django.utils.text import get_text_list
|
||||
try:
|
||||
from functools import update_wrapper
|
||||
assert update_wrapper
|
||||
except ImportError:
|
||||
from django.utils.functional import update_wrapper
|
||||
|
||||
from django_extensions.admin.widgets import ForeignKeySearchInput
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
if 'reversion' in settings.INSTALLED_APPS:
|
||||
from reversion.admin import VersionAdmin as ModelAdmin
|
||||
assert ModelAdmin
|
||||
else:
|
||||
from django.contrib.admin import ModelAdmin
|
||||
|
||||
|
||||
class ForeignKeyAutocompleteAdmin(ModelAdmin):
|
||||
"""Admin class for models using the autocomplete feature.
|
||||
|
||||
There are two additional fields:
|
||||
- related_search_fields: defines fields of managed model that
|
||||
have to be represented by autocomplete input, together with
|
||||
a list of target model fields that are searched for
|
||||
input string, e.g.:
|
||||
|
||||
related_search_fields = {
|
||||
'author': ('first_name', 'email'),
|
||||
}
|
||||
|
||||
- related_string_functions: contains optional functions which
|
||||
take target model instance as only argument and return string
|
||||
representation. By default __unicode__() method of target
|
||||
object is used.
|
||||
"""
|
||||
|
||||
related_search_fields = {}
|
||||
related_string_functions = {}
|
||||
|
||||
def get_urls(self):
|
||||
try:
|
||||
from django.conf.urls import patterns, url
|
||||
except ImportError: # django < 1.4
|
||||
from django.conf.urls.defaults import patterns, url
|
||||
|
||||
def wrap(view):
|
||||
def wrapper(*args, **kwargs):
|
||||
return self.admin_site.admin_view(view)(*args, **kwargs)
|
||||
return update_wrapper(wrapper, view)
|
||||
|
||||
info = self.model._meta.app_label, self.model._meta.module_name
|
||||
|
||||
urlpatterns = patterns('', url(r'foreignkey_autocomplete/$', wrap(self.foreignkey_autocomplete), name='%s_%s_autocomplete' % info))
|
||||
urlpatterns += super(ForeignKeyAutocompleteAdmin, self).get_urls()
|
||||
return urlpatterns
|
||||
|
||||
def foreignkey_autocomplete(self, request):
|
||||
"""
|
||||
Searches in the fields of the given related model and returns the
|
||||
result as a simple string to be used by the jQuery Autocomplete plugin
|
||||
"""
|
||||
query = request.GET.get('q', None)
|
||||
app_label = request.GET.get('app_label', None)
|
||||
model_name = request.GET.get('model_name', None)
|
||||
search_fields = request.GET.get('search_fields', None)
|
||||
object_pk = request.GET.get('object_pk', None)
|
||||
try:
|
||||
to_string_function = self.related_string_functions[model_name]
|
||||
except KeyError:
|
||||
to_string_function = lambda x: x.__unicode__()
|
||||
if search_fields and app_label and model_name and (query or object_pk):
|
||||
def construct_search(field_name):
|
||||
# use different lookup methods depending on the notation
|
||||
if field_name.startswith('^'):
|
||||
return "%s__istartswith" % field_name[1:]
|
||||
elif field_name.startswith('='):
|
||||
return "%s__iexact" % field_name[1:]
|
||||
elif field_name.startswith('@'):
|
||||
return "%s__search" % field_name[1:]
|
||||
else:
|
||||
return "%s__icontains" % field_name
|
||||
model = models.get_model(app_label, model_name)
|
||||
queryset = model._default_manager.all()
|
||||
data = ''
|
||||
if query:
|
||||
for bit in query.split():
|
||||
or_queries = [models.Q(**{construct_search(smart_str(field_name)): smart_str(bit)}) for field_name in search_fields.split(',')]
|
||||
other_qs = QuerySet(model)
|
||||
other_qs.dup_select_related(queryset)
|
||||
other_qs = other_qs.filter(reduce(operator.or_, or_queries))
|
||||
queryset = queryset & other_qs
|
||||
data = ''.join([six.u('%s|%s\n' % (to_string_function(f), f.pk)) for f in queryset])
|
||||
elif object_pk:
|
||||
try:
|
||||
obj = queryset.get(pk=object_pk)
|
||||
except:
|
||||
pass
|
||||
else:
|
||||
data = to_string_function(obj)
|
||||
return HttpResponse(data)
|
||||
return HttpResponseNotFound()
|
||||
|
||||
def get_help_text(self, field_name, model_name):
|
||||
searchable_fields = self.related_search_fields.get(field_name, None)
|
||||
if searchable_fields:
|
||||
help_kwargs = {
|
||||
'model_name': model_name,
|
||||
'field_list': get_text_list(searchable_fields, _('and')),
|
||||
}
|
||||
return _('Use the left field to do %(model_name)s lookups in the fields %(field_list)s.') % help_kwargs
|
||||
return ''
|
||||
|
||||
def formfield_for_dbfield(self, db_field, **kwargs):
|
||||
"""
|
||||
Overrides the default widget for Foreignkey fields if they are
|
||||
specified in the related_search_fields class attribute.
|
||||
"""
|
||||
if (isinstance(db_field, models.ForeignKey) and db_field.name in self.related_search_fields):
|
||||
model_name = db_field.rel.to._meta.object_name
|
||||
help_text = self.get_help_text(db_field.name, model_name)
|
||||
if kwargs.get('help_text'):
|
||||
help_text = six.u('%s %s' % (kwargs['help_text'], help_text))
|
||||
kwargs['widget'] = ForeignKeySearchInput(db_field.rel, self.related_search_fields[db_field.name])
|
||||
kwargs['help_text'] = help_text
|
||||
return super(ForeignKeyAutocompleteAdmin, self).formfield_for_dbfield(db_field, **kwargs)
|
|
@ -1,96 +0,0 @@
|
|||
import six
|
||||
import django
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
from django.contrib.admin.sites import site
|
||||
from django.utils.safestring import mark_safe
|
||||
if django.get_version() >= "1.4":
|
||||
from django.utils.text import Truncator
|
||||
else:
|
||||
from django.utils.text import truncate_words
|
||||
from django.template.loader import render_to_string
|
||||
from django.contrib.admin.widgets import ForeignKeyRawIdWidget
|
||||
|
||||
|
||||
class ForeignKeySearchInput(ForeignKeyRawIdWidget):
|
||||
"""
|
||||
A Widget for displaying ForeignKeys in an autocomplete search input
|
||||
instead in a <select> box.
|
||||
"""
|
||||
# Set in subclass to render the widget with a different template
|
||||
widget_template = None
|
||||
# Set this to the patch of the search view
|
||||
search_path = '../foreignkey_autocomplete/'
|
||||
|
||||
def _media(self):
|
||||
js_files = ['django_extensions/js/jquery.bgiframe.min.js',
|
||||
'django_extensions/js/jquery.ajaxQueue.js',
|
||||
'django_extensions/js/jquery.autocomplete.js']
|
||||
if django.get_version() < "1.3":
|
||||
js_files.append('django_extensions/js/jquery.js')
|
||||
return forms.Media(css={'all': ('django_extensions/css/jquery.autocomplete.css',)},
|
||||
js=js_files)
|
||||
|
||||
media = property(_media)
|
||||
|
||||
def label_for_value(self, value):
|
||||
key = self.rel.get_related_field().name
|
||||
obj = self.rel.to._default_manager.get(**{key: value})
|
||||
if django.get_version() >= "1.4":
|
||||
return Truncator(obj).words(14, truncate='...')
|
||||
else:
|
||||
return truncate_words(obj, 14)
|
||||
|
||||
def __init__(self, rel, search_fields, attrs=None):
|
||||
self.search_fields = search_fields
|
||||
if django.get_version() >= "1.4":
|
||||
super(ForeignKeySearchInput, self).__init__(rel, site, attrs)
|
||||
else:
|
||||
super(ForeignKeySearchInput, self).__init__(rel, attrs)
|
||||
|
||||
def render(self, name, value, attrs=None):
|
||||
if attrs is None:
|
||||
attrs = {}
|
||||
#output = [super(ForeignKeySearchInput, self).render(name, value, attrs)]
|
||||
opts = self.rel.to._meta
|
||||
app_label = opts.app_label
|
||||
model_name = opts.object_name.lower()
|
||||
related_url = '../../../%s/%s/' % (app_label, model_name)
|
||||
params = self.url_parameters()
|
||||
if params:
|
||||
url = '?' + '&'.join(['%s=%s' % (k, v) for k, v in params.items()])
|
||||
else:
|
||||
url = ''
|
||||
if not 'class' in attrs:
|
||||
attrs['class'] = 'vForeignKeyRawIdAdminField'
|
||||
# Call the TextInput render method directly to have more control
|
||||
output = [forms.TextInput.render(self, name, value, attrs)]
|
||||
if value:
|
||||
label = self.label_for_value(value)
|
||||
else:
|
||||
label = six.u('')
|
||||
|
||||
try:
|
||||
admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
|
||||
except AttributeError:
|
||||
admin_media_prefix = settings.STATIC_URL + "admin/"
|
||||
|
||||
context = {
|
||||
'url': url,
|
||||
'related_url': related_url,
|
||||
'admin_media_prefix': admin_media_prefix,
|
||||
'search_path': self.search_path,
|
||||
'search_fields': ','.join(self.search_fields),
|
||||
'model_name': model_name,
|
||||
'app_label': app_label,
|
||||
'label': label,
|
||||
'name': name,
|
||||
'pre_django_14': (django.VERSION[:2] < (1, 4)),
|
||||
}
|
||||
output.append(render_to_string(self.widget_template or (
|
||||
'django_extensions/widgets/%s/%s/foreignkey_searchinput.html' % (app_label, model_name),
|
||||
'django_extensions/widgets/%s/foreignkey_searchinput.html' % app_label,
|
||||
'django_extensions/widgets/foreignkey_searchinput.html',
|
||||
), context))
|
||||
output.reverse()
|
||||
return mark_safe(six.u(''.join(output)))
|
|
@ -1,3 +0,0 @@
|
|||
from django import forms
|
||||
|
||||
# place form definition here
|
|
@ -1,3 +0,0 @@
|
|||
from django.db import models
|
||||
|
||||
# Create your models here.
|
|
@ -1,6 +0,0 @@
|
|||
try:
|
||||
from django.conf.urls import *
|
||||
except ImportError: # django < 1.4
|
||||
from django.conf.urls.defaults import *
|
||||
|
||||
# place app url patterns here
|
|
@ -1 +0,0 @@
|
|||
# Create your views here.
|
|
@ -1,7 +0,0 @@
|
|||
from django.core.management.base import {{ base_command }}
|
||||
|
||||
class Command({{ base_command }}):
|
||||
help = "My shiny new management command."
|
||||
|
||||
def {{ handle_method }}:
|
||||
raise NotImplementedError()
|
|
@ -1,8 +0,0 @@
|
|||
from django_extensions.management.jobs import BaseJob
|
||||
|
||||
class Job(BaseJob):
|
||||
help = "My sample job."
|
||||
|
||||
def execute(self):
|
||||
# executing empty sample job
|
||||
pass
|
|
@ -1,287 +0,0 @@
|
|||
"""
|
||||
Django Extensions additional model fields
|
||||
"""
|
||||
import re
|
||||
import six
|
||||
try:
|
||||
import uuid
|
||||
HAS_UUID = True
|
||||
except ImportError:
|
||||
HAS_UUID = False
|
||||
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django.template.defaultfilters import slugify
|
||||
from django.db.models import DateTimeField, CharField, SlugField
|
||||
|
||||
try:
|
||||
from django.utils.timezone import now as datetime_now
|
||||
assert datetime_now
|
||||
except ImportError:
|
||||
import datetime
|
||||
datetime_now = datetime.datetime.now
|
||||
|
||||
try:
|
||||
from django.utils.encoding import force_unicode # NOQA
|
||||
except ImportError:
|
||||
from django.utils.encoding import force_text as force_unicode # NOQA
|
||||
|
||||
|
||||
class AutoSlugField(SlugField):
|
||||
""" AutoSlugField
|
||||
|
||||
By default, sets editable=False, blank=True.
|
||||
|
||||
Required arguments:
|
||||
|
||||
populate_from
|
||||
Specifies which field or list of fields the slug is populated from.
|
||||
|
||||
Optional arguments:
|
||||
|
||||
separator
|
||||
Defines the used separator (default: '-')
|
||||
|
||||
overwrite
|
||||
If set to True, overwrites the slug on every save (default: False)
|
||||
|
||||
Inspired by SmileyChris' Unique Slugify snippet:
|
||||
http://www.djangosnippets.org/snippets/690/
|
||||
"""
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.setdefault('blank', True)
|
||||
kwargs.setdefault('editable', False)
|
||||
|
||||
populate_from = kwargs.pop('populate_from', None)
|
||||
if populate_from is None:
|
||||
raise ValueError("missing 'populate_from' argument")
|
||||
else:
|
||||
self._populate_from = populate_from
|
||||
self.separator = kwargs.pop('separator', six.u('-'))
|
||||
self.overwrite = kwargs.pop('overwrite', False)
|
||||
self.allow_duplicates = kwargs.pop('allow_duplicates', False)
|
||||
super(AutoSlugField, self).__init__(*args, **kwargs)
|
||||
|
||||
def _slug_strip(self, value):
|
||||
"""
|
||||
Cleans up a slug by removing slug separator characters that occur at
|
||||
the beginning or end of a slug.
|
||||
|
||||
If an alternate separator is used, it will also replace any instances
|
||||
of the default '-' separator with the new separator.
|
||||
"""
|
||||
re_sep = '(?:-|%s)' % re.escape(self.separator)
|
||||
value = re.sub('%s+' % re_sep, self.separator, value)
|
||||
return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)
|
||||
|
||||
def get_queryset(self, model_cls, slug_field):
|
||||
for field, model in model_cls._meta.get_fields_with_model():
|
||||
if model and field == slug_field:
|
||||
return model._default_manager.all()
|
||||
return model_cls._default_manager.all()
|
||||
|
||||
def slugify_func(self, content):
|
||||
if content:
|
||||
return slugify(content)
|
||||
return ''
|
||||
|
||||
def create_slug(self, model_instance, add):
|
||||
# get fields to populate from and slug field to set
|
||||
if not isinstance(self._populate_from, (list, tuple)):
|
||||
self._populate_from = (self._populate_from, )
|
||||
slug_field = model_instance._meta.get_field(self.attname)
|
||||
|
||||
if add or self.overwrite:
|
||||
# slugify the original field content and set next step to 2
|
||||
slug_for_field = lambda field: self.slugify_func(getattr(model_instance, field))
|
||||
slug = self.separator.join(map(slug_for_field, self._populate_from))
|
||||
next = 2
|
||||
else:
|
||||
# get slug from the current model instance
|
||||
slug = getattr(model_instance, self.attname)
|
||||
# model_instance is being modified, and overwrite is False,
|
||||
# so instead of doing anything, just return the current slug
|
||||
return slug
|
||||
|
||||
# strip slug depending on max_length attribute of the slug field
|
||||
# and clean-up
|
||||
slug_len = slug_field.max_length
|
||||
if slug_len:
|
||||
slug = slug[:slug_len]
|
||||
slug = self._slug_strip(slug)
|
||||
original_slug = slug
|
||||
|
||||
if self.allow_duplicates:
|
||||
return slug
|
||||
|
||||
# exclude the current model instance from the queryset used in finding
|
||||
# the next valid slug
|
||||
queryset = self.get_queryset(model_instance.__class__, slug_field)
|
||||
if model_instance.pk:
|
||||
queryset = queryset.exclude(pk=model_instance.pk)
|
||||
|
||||
# form a kwarg dict used to impliment any unique_together contraints
|
||||
kwargs = {}
|
||||
for params in model_instance._meta.unique_together:
|
||||
if self.attname in params:
|
||||
for param in params:
|
||||
kwargs[param] = getattr(model_instance, param, None)
|
||||
kwargs[self.attname] = slug
|
||||
|
||||
# increases the number while searching for the next valid slug
|
||||
# depending on the given slug, clean-up
|
||||
while not slug or queryset.filter(**kwargs):
|
||||
slug = original_slug
|
||||
end = '%s%s' % (self.separator, next)
|
||||
end_len = len(end)
|
||||
if slug_len and len(slug) + end_len > slug_len:
|
||||
slug = slug[:slug_len - end_len]
|
||||
slug = self._slug_strip(slug)
|
||||
slug = '%s%s' % (slug, end)
|
||||
kwargs[self.attname] = slug
|
||||
next += 1
|
||||
return slug
|
||||
|
||||
def pre_save(self, model_instance, add):
|
||||
value = force_unicode(self.create_slug(model_instance, add))
|
||||
setattr(model_instance, self.attname, value)
|
||||
return value
|
||||
|
||||
def get_internal_type(self):
|
||||
return "SlugField"
|
||||
|
||||
def south_field_triple(self):
|
||||
"Returns a suitable description of this field for South."
|
||||
# We'll just introspect the _actual_ field.
|
||||
from south.modelsinspector import introspector
|
||||
field_class = '%s.AutoSlugField' % self.__module__
|
||||
args, kwargs = introspector(self)
|
||||
kwargs.update({
|
||||
'populate_from': repr(self._populate_from),
|
||||
'separator': repr(self.separator),
|
||||
'overwrite': repr(self.overwrite),
|
||||
'allow_duplicates': repr(self.allow_duplicates),
|
||||
})
|
||||
# That's our definition!
|
||||
return (field_class, args, kwargs)
|
||||
|
||||
|
||||
class CreationDateTimeField(DateTimeField):
|
||||
""" CreationDateTimeField
|
||||
|
||||
By default, sets editable=False, blank=True, default=datetime.now
|
||||
"""
|
||||
|
||||
def __init__(self, *args, **kwargs):
|
||||
kwargs.setdefault('editable', False)
|
||||
kwargs.setdefault('blank', True)
|
||||
kwargs.setdefault('default', datetime_now)
|
||||
DateTimeField.__init__(self, *args, **kwargs)
|
||||
|
||||
def get_internal_type(self):
|
||||
return "DateTimeField"
|
||||
|
||||
def south_field_triple(self):
|
||||
"Returns a suitable description of this field for South."
|
||||
# We'll just introspect ourselves, since we inherit.
|
||||
from south.modelsinspector import introspector
|
||||
field_class = "django.db.models.fields.DateTimeField"
|
||||
args, kwargs = introspector(self)
|
||||
return (field_class, args, kwargs)
|
||||
|
||||
|
||||
class ModificationDateTimeField(CreationDateTimeField):
|
||||
""" ModificationDateTimeField
|
||||
|
||||
By default, sets editable=False, blank=True, default=datetime.now
|
||||
|
||||
Sets value to datetime.now() on each save of the model.
|
||||
"""
|
||||
|
||||
def pre_save(self, model, add):
|
||||
value = datetime_now()
|
||||
setattr(model, self.attname, value)
|
||||
return value
|
||||
|
||||
def get_internal_type(self):
|
||||
return "DateTimeField"
|
||||
|
||||
def south_field_triple(self):
|
||||
"Returns a suitable description of this field for South."
|
||||
# We'll just introspect ourselves, since we inherit.
|
||||
from south.modelsinspector import introspector
|
||||
field_class = "django.db.models.fields.DateTimeField"
|
||||
args, kwargs = introspector(self)
|
||||
return (field_class, args, kwargs)
|
||||
|
||||
|
||||
class UUIDVersionError(Exception):
|
||||
pass
|
||||
|
||||
|
||||
class UUIDField(CharField):
    """CharField that stores a UUID.

    Defaults to UUID version 4 (randomly generated).  Every version
    natively supported by the ``uuid`` module is accepted except
    version 2.  See http://docs.python.org/lib/module-uuid.html.
    """

    def __init__(self, verbose_name=None, name=None, auto=True, version=4, node=None, clock_seq=None, namespace=None, **kwargs):
        if not HAS_UUID:
            raise ImproperlyConfigured("'uuid' module is required for UUIDField. (Do you have Python 2.5 or higher installed ?)")
        kwargs.setdefault('max_length', 36)
        if auto:
            # Auto-generated values are never "missing" and are not
            # meant to be hand-edited.
            self.empty_strings_allowed = False
            kwargs['blank'] = True
            kwargs.setdefault('editable', False)
        self.auto = auto
        self.version = version
        if version == 1:
            self.node, self.clock_seq = node, clock_seq
        elif version in (3, 5):
            self.namespace, self.name = namespace, name
        CharField.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return CharField.__name__

    def create_uuid(self):
        """Generate a fresh UUID according to ``self.version``."""
        version = self.version
        if not version or version == 4:
            return uuid.uuid4()
        if version == 1:
            return uuid.uuid1(self.node, self.clock_seq)
        if version == 3:
            return uuid.uuid3(self.namespace, self.name)
        if version == 5:
            return uuid.uuid5(self.namespace, self.name)
        if version == 2:
            raise UUIDVersionError("UUID version 2 is not supported.")
        raise UUIDVersionError("UUID version %s is not valid." % version)

    def pre_save(self, model_instance, add):
        value = super(UUIDField, self).pre_save(model_instance, add)
        if self.auto and add and value is None:
            # First save with no explicit value: generate one.
            value = force_unicode(self.create_uuid())
            setattr(model_instance, self.attname, value)
        elif self.auto and not value:
            # Later saves: only fill in when the value is empty.
            value = force_unicode(self.create_uuid())
            setattr(model_instance, self.attname, value)
        return value

    def formfield(self, **kwargs):
        # Auto-managed fields are not user-editable: expose no form field.
        if self.auto:
            return None
        return super(UUIDField, self).formfield(**kwargs)

    def south_field_triple(self):
        """Describe this field to South as a plain CharField."""
        from south.modelsinspector import introspector
        positional, keyword = introspector(self)
        return ("django.db.models.fields.CharField", positional, keyword)
|
|
@ -1,136 +0,0 @@
|
|||
import six
|
||||
from django.db import models
|
||||
from django.core.exceptions import ImproperlyConfigured
|
||||
from django import forms
|
||||
from django.conf import settings
|
||||
import warnings
|
||||
|
||||
try:
|
||||
from keyczar import keyczar
|
||||
except ImportError:
|
||||
raise ImportError('Using an encrypted field requires the Keyczar module. '
|
||||
'You can obtain Keyczar from http://www.keyczar.org/.')
|
||||
|
||||
|
||||
class EncryptionWarning(RuntimeWarning):
    """Warning emitted when a value must be truncated before encryption."""
    pass
|
||||
|
||||
|
||||
class BaseEncryptedField(models.Field):
    """Base class for fields stored Keyczar-encrypted in the database.

    Encrypted values are stored as ``prefix + ciphertext`` so that
    already-encrypted values can be recognized and left alone.
    """
    prefix = 'enc_str:::'

    def __init__(self, *args, **kwargs):
        if not hasattr(settings, 'ENCRYPTED_FIELD_KEYS_DIR'):
            raise ImproperlyConfigured('You must set the ENCRYPTED_FIELD_KEYS_DIR '
                                       'setting to your Keyczar keys directory.')

        crypt_class = self.get_crypt_class()
        self.crypt = crypt_class.Read(settings.ENCRYPTED_FIELD_KEYS_DIR)

        # Encrypted size is larger than unencrypted, so widen max_length
        # to hold the prefix plus the ciphertext of a maximal plaintext.
        self.unencrypted_length = max_length = kwargs.get('max_length', None)
        if max_length:
            max_length = len(self.prefix) + len(self.crypt.Encrypt('x' * max_length))
            # TODO: Re-examine if this logic will actually make a large-enough
            # max-length for unicode strings that have non-ascii characters in them.
            kwargs['max_length'] = max_length

        super(BaseEncryptedField, self).__init__(*args, **kwargs)

    def get_crypt_class(self):
        """
        Get the Keyczar class to use.

        The class can be customized with the ENCRYPTED_FIELD_MODE setting. By default,
        this setting is DECRYPT_AND_ENCRYPT. Set this to ENCRYPT to disable decryption.
        This is necessary if you are only providing public keys to Keyczar.

        Returns:
            keyczar.Encrypter if ENCRYPTED_FIELD_MODE is ENCRYPT.
            keyczar.Crypter if ENCRYPTED_FIELD_MODE is DECRYPT_AND_ENCRYPT.

        Override this method to customize the type of Keyczar class returned.
        """
        crypt_type = getattr(settings, 'ENCRYPTED_FIELD_MODE', 'DECRYPT_AND_ENCRYPT')
        if crypt_type == 'ENCRYPT':
            crypt_class_name = 'Encrypter'
        elif crypt_type == 'DECRYPT_AND_ENCRYPT':
            crypt_class_name = 'Crypter'
        else:
            raise ImproperlyConfigured(
                'ENCRYPTED_FIELD_MODE must be either DECRYPT_AND_ENCRYPT '
                'or ENCRYPT, not %s.' % crypt_type)
        return getattr(keyczar, crypt_class_name)

    def to_python(self, value):
        """Decrypt a stored (prefixed) value; pass other values through."""
        if isinstance(self.crypt.primary_key, keyczar.keys.RsaPublicKey):
            # A public key cannot decrypt; return ciphertext as-is.
            retval = value
        elif value and (value.startswith(self.prefix)):
            retval = self.crypt.Decrypt(value[len(self.prefix):])
            if retval:
                retval = retval.decode('utf-8')
        else:
            retval = value
        return retval

    def get_db_prep_value(self, value, connection, prepared=False):
        """Encrypt (and prefix) the value on its way to the database."""
        if value and not value.startswith(self.prefix):
            # We need to encode a unicode string into a byte string, first:
            # keyczar expects a bytestring, not a unicode string.
            # FIX: the original tested ``type(value) == six.types.UnicodeType``,
            # which only resolved on Python 2 by accident (six merely imports
            # the stdlib ``types`` module) and missed str subclasses;
            # ``six.text_type`` is the documented portable spelling.
            if isinstance(value, six.text_type):
                value = value.encode('utf-8')
            # Truncated encrypted content is unreadable,
            # so truncate before encryption
            max_length = self.unencrypted_length
            if max_length and len(value) > max_length:
                warnings.warn("Truncating field %s from %d to %d bytes" % (
                    self.name, len(value), max_length), EncryptionWarning
                )
                value = value[:max_length]

            value = self.prefix + self.crypt.Encrypt(value)
        return value
|
||||
|
||||
|
||||
class EncryptedTextField(six.with_metaclass(models.SubfieldBase,
                                            BaseEncryptedField)):
    """TextField whose contents are stored encrypted (see BaseEncryptedField)."""

    def get_internal_type(self):
        return 'TextField'

    def formfield(self, **kwargs):
        # Present a textarea widget unless the caller overrides it.
        form_options = {'widget': forms.Textarea}
        form_options.update(kwargs)
        return super(EncryptedTextField, self).formfield(**form_options)

    def south_field_triple(self):
        """Describe this field to South as a plain TextField."""
        from south.modelsinspector import introspector
        positional, keyword = introspector(self)
        return ("django.db.models.fields.TextField", positional, keyword)
|
||||
|
||||
|
||||
class EncryptedCharField(six.with_metaclass(models.SubfieldBase,
                                            BaseEncryptedField)):
    """CharField whose contents are stored encrypted (see BaseEncryptedField).

    The redundant ``__init__`` override that only forwarded to ``super``
    has been removed; behavior is unchanged.
    """

    def get_internal_type(self):
        return "CharField"

    def formfield(self, **kwargs):
        # Surface the (encryption-widened) max_length to the form layer.
        defaults = {'max_length': self.max_length}
        defaults.update(kwargs)
        return super(EncryptedCharField, self).formfield(**defaults)

    def south_field_triple(self):
        "Returns a suitable description of this field for South."
        # We'll just introspect the _actual_ field.
        from south.modelsinspector import introspector
        field_class = "django.db.models.fields.CharField"
        args, kwargs = introspector(self)
        return (field_class, args, kwargs)
|
|
@ -1,102 +0,0 @@
|
|||
"""
|
||||
JSONField automatically serializes most Python terms to JSON data.
|
||||
Creates a TEXT field with a default value of "{}". See test_json.py for
|
||||
more information.
|
||||
|
||||
from django.db import models
|
||||
from django_extensions.db.fields import json
|
||||
|
||||
class LOL(models.Model):
|
||||
extra = json.JSONField()
|
||||
"""
|
||||
|
||||
import six
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
from django.db import models
|
||||
from django.conf import settings
|
||||
from django.utils import simplejson
|
||||
|
||||
|
||||
class JSONEncoder(simplejson.JSONEncoder):
    """JSON encoder that additionally handles Decimal and datetime values."""

    def default(self, obj):
        if isinstance(obj, Decimal):
            # Serialize decimals as strings to avoid precision loss.
            return str(obj)
        if isinstance(obj, datetime.datetime):
            # Timestamp format below is only meaningful in UTC.
            assert settings.TIME_ZONE == 'UTC'
            return obj.strftime('%Y-%m-%dT%H:%M:%SZ')
        return simplejson.JSONEncoder.default(self, obj)
|
||||
|
||||
|
||||
def dumps(value):
    """Serialize *value* to a JSON string using the custom JSONEncoder."""
    return JSONEncoder().encode(value)
|
||||
|
||||
|
||||
def loads(txt):
    """Deserialize JSON text, parsing floats as Decimal for exactness."""
    return simplejson.loads(txt,
                            parse_float=Decimal,
                            encoding=settings.DEFAULT_CHARSET)
|
||||
|
||||
|
||||
class JSONDict(dict):
    """dict whose repr() is JSON.

    Hack so repr() called by dumpdata will output JSON instead of
    Python formatted data.  This way fixtures will work!
    """

    def __repr__(self):
        return dumps(self)
|
||||
|
||||
|
||||
class JSONList(list):
    """list whose repr() is JSON (same dumpdata/fixture hack as JSONDict)."""

    def __repr__(self):
        return dumps(self)
|
||||
|
||||
|
||||
class JSONField(six.with_metaclass(models.SubfieldBase, models.TextField)):
    """TextField that transparently (de)serializes JSON objects.

    The main value must be a dict (or list); it is stored as JSON text
    with a default of ``"{}"``.
    """

    def __init__(self, *args, **kwargs):
        default = kwargs.get('default', None)
        if default is None:
            kwargs['default'] = '{}'
        elif isinstance(default, (list, dict)):
            # Structured defaults are stored in serialized form.
            kwargs['default'] = dumps(default)
        models.TextField.__init__(self, *args, **kwargs)

    def to_python(self, value):
        """Convert the stored string back to JSON after loading from the DB."""
        if value is None or value == '':
            return {}
        if isinstance(value, six.string_types):
            decoded = loads(value)
            if isinstance(decoded, dict):
                return JSONDict(**decoded)
            return JSONList(decoded)
        return value

    def get_db_prep_save(self, value, connection):
        """Serialize the JSON object to a string before saving."""
        if isinstance(value, (list, dict)):
            prepared = dumps(value)
        else:
            # Non-JSON values degrade to an empty string.
            prepared = ""
        return super(JSONField, self).get_db_prep_save(prepared,
                                                       connection=connection)

    def south_field_triple(self):
        """Describe this field to South as a plain TextField."""
        from south.modelsinspector import introspector
        positional, keyword = introspector(self)
        return ("django.db.models.fields.TextField", positional, keyword)
|
|
@ -1,78 +0,0 @@
|
|||
"""
|
||||
Django Extensions abstract base model classes.
|
||||
"""
|
||||
from django.db import models
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
from django_extensions.db.fields import (ModificationDateTimeField,
|
||||
CreationDateTimeField, AutoSlugField)
|
||||
|
||||
try:
|
||||
from django.utils.timezone import now as datetime_now
|
||||
assert datetime_now
|
||||
except ImportError:
|
||||
import datetime
|
||||
datetime_now = datetime.datetime.now
|
||||
|
||||
|
||||
class TimeStampedModel(models.Model):
    """Abstract base model with self-managed "created" and "modified" stamps."""
    created = CreationDateTimeField(_('created'))
    modified = ModificationDateTimeField(_('modified'))

    class Meta:
        # Newest modification first; "latest()" keys off "modified".
        get_latest_by = 'modified'
        ordering = ('-modified', '-created',)
        abstract = True
|
||||
|
||||
|
||||
class TitleSlugDescriptionModel(models.Model):
    """Abstract base model with title, description, and a self-managed
    "slug" field populated from the title."""
    title = models.CharField(_('title'), max_length=255)
    slug = AutoSlugField(_('slug'), populate_from='title')
    description = models.TextField(_('description'), blank=True, null=True)

    class Meta:
        abstract = True
|
||||
|
||||
|
||||
class ActivatorModelManager(models.Manager):
    """Manager exposing ``active()`` / ``inactive()`` querysets for ActivatorModel."""

    def active(self):
        """Return only instances whose status is ACTIVE."""
        return self.get_query_set().filter(status=ActivatorModel.ACTIVE_STATUS)

    def inactive(self):
        """Return only instances whose status is INACTIVE."""
        return self.get_query_set().filter(status=ActivatorModel.INACTIVE_STATUS)
|
||||
|
||||
|
||||
class ActivatorModel(models.Model):
    """Abstract base model providing activate/deactivate bookkeeping fields."""
    INACTIVE_STATUS, ACTIVE_STATUS = range(2)
    STATUS_CHOICES = (
        (INACTIVE_STATUS, _('Inactive')),
        (ACTIVE_STATUS, _('Active')),
    )
    status = models.IntegerField(_('status'), choices=STATUS_CHOICES, default=ACTIVE_STATUS)
    activate_date = models.DateTimeField(blank=True, null=True, help_text=_('keep empty for an immediate activation'))
    deactivate_date = models.DateTimeField(blank=True, null=True, help_text=_('keep empty for indefinite activation'))
    objects = ActivatorModelManager()

    class Meta:
        ordering = ('status', '-activate_date',)
        abstract = True

    def save(self, *args, **kwargs):
        # Default the activation date to "now" if it was left empty.
        if not self.activate_date:
            self.activate_date = datetime_now()
        super(ActivatorModel, self).save(*args, **kwargs)
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
A forwards compatibility module.
|
||||
|
||||
Implements some features of Django 1.5 related to the 'Custom User Model' feature
|
||||
when the application is run with a lower version of Django.
|
||||
"""
|
||||
from __future__ import unicode_literals
|
||||
|
||||
from django.contrib.auth.models import User
|
||||
|
||||
User.USERNAME_FIELD = "username"
|
||||
User.get_username = lambda self: self.username
|
||||
|
||||
|
||||
def get_user_model():
    """Forwards-compatible stand-in for Django 1.5's get_user_model()."""
    return User
|
|
@ -1,50 +0,0 @@
|
|||
"""
|
||||
Daily cleanup job.
|
||||
|
||||
Can be run as a cronjob to clean out old data from the database (only expired
|
||||
sessions at the moment).
|
||||
"""
|
||||
|
||||
from django_extensions.management.jobs import DailyJob
|
||||
|
||||
|
||||
class Job(DailyJob):
    help = "Cache (db) cleanup Job"

    def execute(self):
        """Cull expired entries from every database-backed cache."""
        from django.conf import settings
        from django.db import transaction
        import os

        try:
            from django.utils import timezone
        except ImportError:
            timezone = None

        if hasattr(settings, 'CACHES') and timezone:
            # Modern multi-cache configuration (settings.CACHES).
            from django.core.cache import get_cache
            from django.db import router, connections

            for cache_name, cache_options in settings.CACHES.iteritems():
                if cache_options['BACKEND'].endswith("DatabaseCache"):
                    cache = get_cache(cache_name)
                    db = router.db_for_write(cache.cache_model_class)
                    cursor = connections[db].cursor()
                    cache._cull(db, cursor, timezone.now())
                    transaction.commit_unless_managed(using=db)
            return

        # Legacy single CACHE_BACKEND configuration ("db://<table>").
        if hasattr(settings, 'CACHE_BACKEND'):
            if settings.CACHE_BACKEND.startswith('db://'):
                from django.db import connection
                os.environ['TZ'] = settings.TIME_ZONE
                table_name = settings.CACHE_BACKEND[5:]
                cursor = connection.cursor()
                cursor.execute(
                    "DELETE FROM %s WHERE %s < current_timestamp;" % (
                        connection.ops.quote_name(table_name),
                        connection.ops.quote_name('expires')
                    )
                )
                transaction.commit_unless_managed()
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
Daily cleanup job.
|
||||
|
||||
Can be run as a cronjob to clean out old data from the database (only expired
|
||||
sessions at the moment).
|
||||
"""
|
||||
|
||||
from django_extensions.management.jobs import DailyJob
|
||||
|
||||
|
||||
class Job(DailyJob):
    help = "Django Daily Cleanup Job"

    def execute(self):
        """Delegate to Django's built-in ``cleanup`` management command."""
        from django.core import management
        management.call_command("cleanup")
|
|
@ -1,53 +0,0 @@
|
|||
import sys
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.utils.log import getLogger
|
||||
|
||||
|
||||
logger = getLogger('django.commands')
|
||||
|
||||
|
||||
class LoggingBaseCommand(BaseCommand):
    """
    A subclass of BaseCommand that logs run time errors to `django.commands`.
    To use this, create a management command subclassing LoggingBaseCommand:

        from django_extensions.management.base import LoggingBaseCommand

        class Command(LoggingBaseCommand):
            help = 'Test error'

            def handle(self, *args, **options):
                raise Exception

    and route the ``django.commands`` logger to a handler (for example
    ``mail_admins``) in your settings ``LOGGING`` configuration so that
    failures are reported.
    """

    def execute(self, *args, **options):
        try:
            super(LoggingBaseCommand, self).execute(*args, **options)
        except Exception as e:
            # Record the failure with a 500-style status code, then let
            # the normal command error handling proceed.
            logger.error(e, exc_info=sys.exc_info(), extra={'status_code': 500})
            raise
|
|
@ -1,16 +0,0 @@
|
|||
"""
|
||||
Sets up the terminal color scheme.
|
||||
"""
|
||||
|
||||
from django.core.management import color
|
||||
from django.utils import termcolors
|
||||
|
||||
|
||||
def color_style():
    """Return Django's color style, extended with URL/module styles
    when the terminal supports color."""
    style = color.color_style()
    if color.supports_color():
        style.URL = termcolors.make_style(fg='green', opts=('bold',))
        style.MODULE = termcolors.make_style(fg='yellow')
        style.MODULE_NAME = termcolors.make_style(opts=('bold',))
        style.URL_NAME = termcolors.make_style(fg='red')
    return style
|
|
@ -1,44 +0,0 @@
|
|||
from django.core.management.base import NoArgsCommand
|
||||
from django_extensions.management.utils import get_project_root
|
||||
from optparse import make_option
|
||||
from os.path import join as _j
|
||||
import os
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    option_list = NoArgsCommand.option_list + (
        make_option('--optimize', '-o', '-O', action='store_true', dest='optimize',
                    help='Remove optimized python bytecode files'),
        make_option('--path', '-p', action='store', dest='path',
                    help='Specify path to recurse into'),
    )
    help = "Removes all python bytecode compiled files from the project."

    requires_model_validation = False

    def handle_noargs(self, **options):
        """Walk the project tree and delete compiled bytecode files."""
        project_root = options.get("path", None) or get_project_root()
        if options.get("optimize", False):
            exts = [".pyc", ".pyo"]
        else:
            exts = [".pyc"]
        verbose = int(options.get("verbosity", 1))

        if verbose > 1:
            print("Project Root: %s" % project_root)

        for root, dirs, files in os.walk(project_root):
            for name in files:
                if os.path.splitext(name)[1] in exts:
                    full_path = _j(root, name)
                    if verbose > 1:
                        print(full_path)
                    os.remove(full_path)

# Backwards compatibility for Django r9110: add --verbosity if the base
# command does not already define it.
if not any(opt.dest == 'verbosity' for opt in Command.option_list):
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
                    default='1', type='choice', choices=['0', '1', '2'],
                    help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,38 +0,0 @@
|
|||
from django.core.management.base import NoArgsCommand
|
||||
from django_extensions.management.utils import get_project_root
|
||||
from optparse import make_option
|
||||
from os.path import join as _j
|
||||
import py_compile
|
||||
import os
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    option_list = NoArgsCommand.option_list + (
        make_option('--path', '-p', action='store', dest='path', help='Specify path to recurse into'),
    )
    help = "Compile python bytecode files for the project."

    requires_model_validation = False

    def handle_noargs(self, **options):
        """Walk the project tree and byte-compile every .py file."""
        project_root = options.get("path", None) or get_project_root()
        verbose = int(options.get("verbosity", 1)) > 1

        for root, dirs, files in os.walk(project_root):
            for name in files:
                if os.path.splitext(name)[1] == ".py":
                    full_path = _j(root, name)
                    if verbose:
                        print("%sc" % full_path)
                    py_compile.compile(full_path)

# Backwards compatibility for Django r9110: add --verbosity if the base
# command does not already define it.
if not any(opt.dest == 'verbosity' for opt in Command.option_list):
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
                    default='1', type='choice', choices=['0', '1', '2'],
                    help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,138 +0,0 @@
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
import django_extensions
|
||||
from django.conf import settings
|
||||
from django.db import connection
|
||||
from django.core.management.base import CommandError, LabelCommand
|
||||
from django.template import Template, Context
|
||||
from django_extensions.settings import REPLACEMENTS
|
||||
from django_extensions.utils.dia2django import dia2django
|
||||
from django_extensions.management.utils import _make_writeable
|
||||
from optparse import make_option
|
||||
|
||||
|
||||
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (
        make_option('--template', '-t', action='store', dest='app_template',
                    help='The path to the app template'),
        make_option('--parent_path', '-p', action='store', dest='parent_path',
                    help='The parent path of the application to be created'),
        make_option('-d', action='store_true', dest='dia_parse',
                    help='Generate model.py and admin.py from [APP_NAME].dia file'),
        make_option('--diagram', action='store', dest='dia_path',
                    help='The diagram path of the app to be created. -d is implied'),
    )

    help = ("Creates an application directory structure for the specified application name.")
    args = "APP_NAME"
    label = 'application name'

    requires_model_validation = False
    can_import_settings = True

    def handle_label(self, label, **options):
        """Create the app directory, optionally generating models/admin
        from a Dia diagram."""
        project_dir = os.getcwd()
        project_name = os.path.split(project_dir)[-1]
        app_name = label
        app_template = options.get('app_template') or os.path.join(django_extensions.__path__[0], 'conf', 'app_template')
        app_dir = os.path.join(options.get('parent_path') or project_dir, app_name)
        dia_path = options.get('dia_path') or os.path.join(project_dir, '%s.dia' % app_name)

        if not os.path.exists(app_template):
            raise CommandError("The template path, %r, does not exist." % app_template)

        if not re.search(r'^\w+$', label):
            raise CommandError("%r is not a valid application name. Please use only numbers, letters and underscores." % label)

        # --diagram implies -d.
        dia_parse = options.get('dia_path') or options.get('dia_parse')
        if dia_parse:
            # Diagram-driven creation needs a clean slate: the diagram must
            # exist, the app must not be installed yet, and no tables for
            # it may already exist in the database.
            if not os.path.exists(dia_path):
                raise CommandError("The diagram path, %r, does not exist." % dia_path)
            if app_name in settings.INSTALLED_APPS:
                raise CommandError("The application %s should not be defined in the settings file. Please remove %s now, and add it after using this command." % (app_name, app_name))
            tables = [name for name in connection.introspection.table_names() if name.startswith('%s_' % app_name)]
            if tables:
                raise CommandError("%r application has tables in the database. Please delete them." % app_name)

        try:
            os.makedirs(app_dir)
        except OSError as e:
            raise CommandError(e)

        copy_template(app_template, app_dir, project_name, app_name)

        if dia_parse:
            generate_models_and_admin(dia_path, app_dir, project_name, app_name)
            print("Application %r created." % app_name)
            print("Please add now %r and any other dependent application in settings.INSTALLED_APPS, and run 'manage syncdb'" % app_name)
||||
|
||||
|
||||
def copy_template(app_template, copy_to, project_name, app_name):
    """copies the specified template directory to the copy_to location"""
    import shutil

    app_template = os.path.normpath(app_template)
    # walks the template structure and copies it
    for d, subdirs, files in os.walk(app_template):
        relative_dir = d[len(app_template) + 1:]
        # 'app_name' in directory names is substituted with the real app name.
        d_new = os.path.join(copy_to, relative_dir).replace('app_name', app_name)
        if relative_dir and not os.path.exists(d_new):
            os.mkdir(d_new)
        # Prune hidden subdirectories so os.walk skips them.
        # NOTE(review): deleting while enumerating can skip a sibling when
        # two hidden dirs are adjacent — confirm against upstream behavior.
        for i, subdir in enumerate(subdirs):
            if subdir.startswith('.'):
                del subdirs[i]
        replacements = {'app_name': app_name, 'project_name': project_name}
        replacements.update(REPLACEMENTS)
        for f in files:
            # Skip compiled files and macOS metadata.
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(d_new, f.replace('app_name', app_name))
            if os.path.exists(path_new):
                # Fall back to the unsubstituted name; never overwrite.
                path_new = os.path.join(d_new, f)
                if os.path.exists(path_new):
                    continue
            # Strip a literal '.tmpl' suffix from rendered templates.
            if path_new.endswith('.tmpl'):
                path_new = path_new[:-5]
            fp_old = open(path_old, 'r')
            fp_new = open(path_new, 'w')
            # Render through Django's template engine so {{ app_name }} /
            # {{ project_name }} placeholders expand.
            fp_new.write(Template(fp_old.read()).render(Context(replacements)))
            fp_old.close()
            fp_new.close()
            try:
                # Preserve permissions and make the new file writeable.
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except OSError:
                sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
|
||||
|
||||
|
||||
def generate_models_and_admin(dia_path, app_dir, project_name, app_name):
    """Generates the models.py and admin.py files"""

    def format_text(string, indent=False):
        """format string in lines of 80 or less characters"""
        retval = ''
        while string:
            # Consider at most 77 chars so " \" continuation still fits in 80.
            line = string[:77]
            last_space = line.rfind(' ')
            if last_space != -1 and len(string) > 77:
                # Break at the last space and continue with a "\" line wrap.
                retval += "%s \\\n" % string[:last_space]
                string = string[last_space + 1:]
            else:
                retval += "%s\n" % string
                string = ''
            if string and indent:
                # NOTE(review): leading whitespace in this literal may have
                # been lost in formatting — confirm the continuation indent
                # width against the original source.
                string = ' %s' % string
        return retval

    model_path = os.path.join(app_dir, 'models.py')
    admin_path = os.path.join(app_dir, 'admin.py')

    # models.py: standard import header plus the Dia-derived model classes.
    models_txt = 'from django.db import models\n' + dia2django(dia_path)
    open(model_path, 'w').write(models_txt)

    # admin.py: import every generated model class and register each one.
    classes = re.findall('class (\w+)', models_txt)
    admin_txt = 'from django.contrib.admin import site, ModelAdmin\n' + format_text('from %s.%s.models import %s' % (project_name, app_name, ', '.join(classes)), indent=True)
    admin_txt += format_text('\n\n%s' % '\n'.join(map((lambda t: 'site.register(%s)' % t), classes)))
    open(admin_path, 'w').write(admin_txt)
|
|
@ -1,81 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
from django.core.management.base import CommandError, AppCommand
|
||||
from django_extensions.management.utils import _make_writeable
|
||||
from optparse import make_option
|
||||
|
||||
|
||||
class Command(AppCommand):
    option_list = AppCommand.option_list + (
        make_option('--name', '-n', action='store', dest='command_name', default='sample',
                    help='The name to use for the management command'),
        make_option('--base', '-b', action='store', dest='base_command', default='Base',
                    help='The base class used for implementation of this command. Should be one of Base, App, Label, or NoArgs'),
    )

    help = ("Creates a Django management command directory structure for the given app name"
            " in the current directory.")
    args = "[appname]"
    label = 'application name'

    requires_model_validation = False
    # Can't import settings during this command, because they haven't
    # necessarily been created.
    can_import_settings = True

    def handle_app(self, app, **options):
        """Create a management/commands skeleton inside the given app."""
        directory = os.getcwd()
        app_name = app.__name__.split('.')[-2]
        project_dir = os.path.join(directory, app_name)
        if not os.path.exists(project_dir):
            try:
                os.mkdir(project_dir)
            except OSError as e:
                raise CommandError(e)

        copy_template('command_template', project_dir,
                      options.get('command_name'),
                      '%sCommand' % options.get('base_command'))
|
||||
|
||||
|
||||
def copy_template(template_name, copy_to, command_name, base_command):
    """copies the specified template directory to the copy_to location"""
    import django_extensions
    import shutil

    template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)

    # Pick the handle method signature matching the chosen base command.
    handle_method = "handle(self, *args, **options)"
    if base_command == 'AppCommand':
        handle_method = "handle_app(self, app, **options)"
    elif base_command == 'LabelCommand':
        handle_method = "handle_label(self, label, **options)"
    elif base_command == 'NoArgsCommand':
        handle_method = "handle_noargs(self, **options)"

    # walks the template structure and copies it
    for d, subdirs, files in os.walk(template_dir):
        relative_dir = d[len(template_dir) + 1:]
        if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
            os.mkdir(os.path.join(copy_to, relative_dir))
        # Prune hidden subdirectories so os.walk skips them.
        for i, subdir in enumerate(subdirs):
            if subdir.startswith('.'):
                del subdirs[i]
        for f in files:
            # Skip compiled files and macOS metadata.
            if f.endswith('.pyc') or f.startswith('.DS_Store'):
                continue
            path_old = os.path.join(d, f)
            path_new = os.path.join(copy_to, relative_dir, f.replace('sample', command_name))
            if os.path.exists(path_new):
                # Fall back to the unsubstituted name; never overwrite.
                path_new = os.path.join(copy_to, relative_dir, f)
                if os.path.exists(path_new):
                    continue
            # BUG FIX: str.rstrip(".tmpl") strips any trailing run of the
            # characters ".", "t", "m", "p", "l" (e.g. "html.tmpl" -> "htm"),
            # not the literal suffix.  Strip the exact ".tmpl" suffix instead
            # (matching the create_app command's copy_template).
            if path_new.endswith('.tmpl'):
                path_new = path_new[:-5]
            fp_old = open(path_old, 'r')
            fp_new = open(path_new, 'w')
            # Expand the command/base-class placeholders in the template.
            fp_new.write(fp_old.read().replace('{{ command_name }}', command_name).replace('{{ base_command }}', base_command).replace('{{ handle_method }}', handle_method))
            fp_old.close()
            fp_new.close()
            try:
                # Preserve permissions and make the new file writeable.
                shutil.copymode(path_old, path_new)
                _make_writeable(path_new)
            except OSError:
                sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
|
|
@ -1,56 +0,0 @@
|
|||
import os
|
||||
import sys
|
||||
from django.core.management.base import AppCommand
|
||||
from django_extensions.management.utils import _make_writeable
|
||||
|
||||
|
||||
class Command(AppCommand):
|
||||
help = ("Creates a Django jobs command directory structure for the given app name in the current directory.")
|
||||
args = "[appname]"
|
||||
label = 'application name'
|
||||
|
||||
requires_model_validation = False
|
||||
# Can't import settings during this command, because they haven't
|
||||
# necessarily been created.
|
||||
can_import_settings = True
|
||||
|
||||
def handle_app(self, app, **options):
|
||||
app_dir = os.path.dirname(app.__file__)
|
||||
copy_template('jobs_template', app_dir)
|
||||
|
||||
|
||||
def copy_template(template_name, copy_to):
|
||||
"""copies the specified template directory to the copy_to location"""
|
||||
import django_extensions
|
||||
import shutil
|
||||
|
||||
template_dir = os.path.join(django_extensions.__path__[0], 'conf', template_name)
|
||||
|
||||
# walks the template structure and copies it
|
||||
for d, subdirs, files in os.walk(template_dir):
|
||||
relative_dir = d[len(template_dir) + 1:]
|
||||
if relative_dir and not os.path.exists(os.path.join(copy_to, relative_dir)):
|
||||
os.mkdir(os.path.join(copy_to, relative_dir))
|
||||
for i, subdir in enumerate(subdirs):
|
||||
if subdir.startswith('.'):
|
||||
del subdirs[i]
|
||||
for f in files:
|
||||
if f.endswith('.pyc') or f.startswith('.DS_Store'):
|
||||
continue
|
||||
path_old = os.path.join(d, f)
|
||||
path_new = os.path.join(copy_to, relative_dir, f)
|
||||
if os.path.exists(path_new):
|
||||
path_new = os.path.join(copy_to, relative_dir, f)
|
||||
if os.path.exists(path_new):
|
||||
continue
|
||||
path_new = path_new.rstrip(".tmpl")
|
||||
fp_old = open(path_old, 'r')
|
||||
fp_new = open(path_new, 'w')
|
||||
fp_new.write(fp_old.read())
|
||||
fp_old.close()
|
||||
fp_new.close()
|
||||
try:
|
||||
shutil.copymode(path_old, path_new)
|
||||
_make_writeable(path_new)
|
||||
except OSError:
|
||||
sys.stderr.write("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new)
|
|
@ -1,66 +0,0 @@
|
|||
from django.core.management.base import LabelCommand, CommandError
|
||||
from django.utils.encoding import force_unicode
|
||||
|
||||
|
||||
class Command(LabelCommand):
|
||||
help = "Outputs the specified model as a form definition to the shell."
|
||||
args = "[app.model]"
|
||||
label = 'application name and model name'
|
||||
|
||||
requires_model_validation = True
|
||||
can_import_settings = True
|
||||
|
||||
def handle_label(self, label, **options):
|
||||
return describe_form(label)
|
||||
|
||||
|
||||
def describe_form(label, fields=None):
|
||||
"""
|
||||
Returns a string describing a form based on the model
|
||||
"""
|
||||
from django.db.models.loading import get_model
|
||||
try:
|
||||
app_name, model_name = label.split('.')[-2:]
|
||||
except (IndexError, ValueError):
|
||||
raise CommandError("Need application and model name in the form: appname.model")
|
||||
model = get_model(app_name, model_name)
|
||||
|
||||
opts = model._meta
|
||||
field_list = []
|
||||
for f in opts.fields + opts.many_to_many:
|
||||
if not f.editable:
|
||||
continue
|
||||
if fields and not f.name in fields:
|
||||
continue
|
||||
formfield = f.formfield()
|
||||
if not '__dict__' in dir(formfield):
|
||||
continue
|
||||
attrs = {}
|
||||
valid_fields = ['required', 'initial', 'max_length', 'min_length', 'max_value', 'min_value', 'max_digits', 'decimal_places', 'choices', 'help_text', 'label']
|
||||
for k, v in formfield.__dict__.items():
|
||||
if k in valid_fields and v is not None:
|
||||
# ignore defaults, to minimize verbosity
|
||||
if k == 'required' and v:
|
||||
continue
|
||||
if k == 'help_text' and not v:
|
||||
continue
|
||||
if k == 'widget':
|
||||
attrs[k] = v.__class__
|
||||
elif k in ['help_text', 'label']:
|
||||
attrs[k] = force_unicode(v).strip()
|
||||
else:
|
||||
attrs[k] = v
|
||||
|
||||
params = ', '.join(['%s=%r' % (k, v) for k, v in attrs.items()])
|
||||
field_list.append(' %(field_name)s = forms.%(field_type)s(%(params)s)' % {
|
||||
'field_name': f.name,
|
||||
'field_type': formfield.__class__.__name__,
|
||||
'params': params
|
||||
})
|
||||
return '''
|
||||
from django import forms
|
||||
from %(app_name)s.models import %(object_name)s
|
||||
|
||||
class %(object_name)sForm(forms.Form):
|
||||
%(field_list)s
|
||||
''' % {'app_name': app_name, 'object_name': opts.object_name, 'field_list': '\n'.join(field_list)}
|
|
@ -1,751 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
# -*- coding: UTF-8 -*-
|
||||
"""
|
||||
Title: Dumpscript management command
|
||||
Project: Hardytools (queryset-refactor version)
|
||||
Author: Will Hardy (http://willhardy.com.au)
|
||||
Date: June 2008
|
||||
Usage: python manage.py dumpscript appname > scripts/scriptname.py
|
||||
$Revision: 217 $
|
||||
|
||||
Description:
|
||||
Generates a Python script that will repopulate the database using objects.
|
||||
The advantage of this approach is that it is easy to understand, and more
|
||||
flexible than directly populating the database, or using XML.
|
||||
|
||||
* It also allows for new defaults to take effect and only transfers what is
|
||||
needed.
|
||||
* If a new database schema has a NEW ATTRIBUTE, it is simply not
|
||||
populated (using a default value will make the transition smooth :)
|
||||
* If a new database schema REMOVES AN ATTRIBUTE, it is simply ignored
|
||||
and the data moves across safely (I'm assuming we don't want this
|
||||
attribute anymore.
|
||||
* Problems may only occur if there is a new model and is now a required
|
||||
ForeignKey for an existing model. But this is easy to fix by editing the
|
||||
populate script. Half of the job is already done as all ForeingKey
|
||||
lookups occur though the locate_object() function in the generated script.
|
||||
|
||||
Improvements:
|
||||
See TODOs and FIXMEs scattered throughout :-)
|
||||
|
||||
"""
|
||||
|
||||
import sys
|
||||
import datetime
|
||||
import six
|
||||
|
||||
import django
|
||||
from django.db.models import AutoField, BooleanField, FileField, ForeignKey
|
||||
from django.core.exceptions import ObjectDoesNotExist
|
||||
from django.core.management.base import BaseCommand
|
||||
|
||||
# conditional import, force_unicode was renamed in Django 1.5
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
try:
|
||||
from django.utils.encoding import smart_unicode, force_unicode # NOQA
|
||||
except ImportError:
|
||||
from django.utils.encoding import smart_text as smart_unicode, force_text as force_unicode # NOQA
|
||||
|
||||
|
||||
def orm_item_locator(orm_obj):
|
||||
"""
|
||||
This function is called every time an object that will not be exported is required.
|
||||
Where orm_obj is the referred object.
|
||||
We postpone the lookup to locate_object() which will be run on the generated script
|
||||
|
||||
"""
|
||||
|
||||
the_class = orm_obj._meta.object_name
|
||||
original_class = the_class
|
||||
pk_name = orm_obj._meta.pk.name
|
||||
original_pk_name = pk_name
|
||||
pk_value = getattr(orm_obj, pk_name)
|
||||
|
||||
while hasattr(pk_value, "_meta") and hasattr(pk_value._meta, "pk") and hasattr(pk_value._meta.pk, "name"):
|
||||
the_class = pk_value._meta.object_name
|
||||
pk_name = pk_value._meta.pk.name
|
||||
pk_value = getattr(pk_value, pk_name)
|
||||
|
||||
clean_dict = make_clean_dict(orm_obj.__dict__)
|
||||
|
||||
for key in clean_dict:
|
||||
v = clean_dict[key]
|
||||
if v is not None and not isinstance(v, (six.string_types, six.integer_types, float, datetime.datetime)):
|
||||
clean_dict[key] = six.u("%s" % v)
|
||||
|
||||
output = """ importer.locate_object(%s, "%s", %s, "%s", %s, %s ) """ % (
|
||||
original_class, original_pk_name,
|
||||
the_class, pk_name, pk_value, clean_dict
|
||||
)
|
||||
return output
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = 'Dumps the data as a customised python script.'
|
||||
args = '[appname ...]'
|
||||
|
||||
def handle(self, *app_labels, **options):
|
||||
|
||||
# Get the models we want to export
|
||||
models = get_models(app_labels)
|
||||
|
||||
# A dictionary is created to keep track of all the processed objects,
|
||||
# so that foreign key references can be made using python variable names.
|
||||
# This variable "context" will be passed around like the town bicycle.
|
||||
context = {}
|
||||
|
||||
# Create a dumpscript object and let it format itself as a string
|
||||
self.stdout.write(str(Script(models=models, context=context, stdout=self.stdout, stderr=self.stderr)))
|
||||
self.stdout.write("\n")
|
||||
|
||||
|
||||
def get_models(app_labels):
|
||||
""" Gets a list of models for the given app labels, with some exceptions.
|
||||
TODO: If a required model is referenced, it should also be included.
|
||||
Or at least discovered with a get_or_create() call.
|
||||
"""
|
||||
|
||||
from django.db.models import get_app, get_apps, get_model
|
||||
from django.db.models import get_models as get_all_models
|
||||
|
||||
# These models are not to be output, e.g. because they can be generated automatically
|
||||
# TODO: This should be "appname.modelname" string
|
||||
EXCLUDED_MODELS = (ContentType, )
|
||||
|
||||
models = []
|
||||
|
||||
# If no app labels are given, return all
|
||||
if not app_labels:
|
||||
for app in get_apps():
|
||||
models += [m for m in get_all_models(app) if m not in EXCLUDED_MODELS]
|
||||
|
||||
# Get all relevant apps
|
||||
for app_label in app_labels:
|
||||
# If a specific model is mentioned, get only that model
|
||||
if "." in app_label:
|
||||
app_label, model_name = app_label.split(".", 1)
|
||||
models.append(get_model(app_label, model_name))
|
||||
# Get all models for a given app
|
||||
else:
|
||||
models += [m for m in get_all_models(get_app(app_label)) if m not in EXCLUDED_MODELS]
|
||||
|
||||
return models
|
||||
|
||||
|
||||
class Code(object):
|
||||
""" A snippet of python script.
|
||||
This keeps track of import statements and can be output to a string.
|
||||
In the future, other features such as custom indentation might be included
|
||||
in this class.
|
||||
"""
|
||||
|
||||
def __init__(self, indent=-1, stdout=None, stderr=None):
|
||||
|
||||
if not stdout:
|
||||
stdout = sys.stdout
|
||||
if not stderr:
|
||||
stderr = sys.stderr
|
||||
|
||||
self.indent = indent
|
||||
self.stdout = stdout
|
||||
self.stderr = stderr
|
||||
|
||||
def __str__(self):
|
||||
""" Returns a string representation of this script.
|
||||
"""
|
||||
if self.imports:
|
||||
self.stderr.write(repr(self.import_lines))
|
||||
return flatten_blocks([""] + self.import_lines + [""] + self.lines, num_indents=self.indent)
|
||||
else:
|
||||
return flatten_blocks(self.lines, num_indents=self.indent)
|
||||
|
||||
def get_import_lines(self):
|
||||
""" Takes the stored imports and converts them to lines
|
||||
"""
|
||||
if self.imports:
|
||||
return ["from %s import %s" % (value, key) for key, value in self.imports.items()]
|
||||
else:
|
||||
return []
|
||||
import_lines = property(get_import_lines)
|
||||
|
||||
|
||||
class ModelCode(Code):
|
||||
" Produces a python script that can recreate data for a given model class. "
|
||||
|
||||
def __init__(self, model, context=None, stdout=None, stderr=None):
|
||||
super(ModelCode, self).__init__(indent=0, stdout=stdout, stderr=stderr)
|
||||
self.model = model
|
||||
if context is None:
|
||||
context = {}
|
||||
self.context = context
|
||||
self.instances = []
|
||||
|
||||
def get_imports(self):
|
||||
""" Returns a dictionary of import statements, with the variable being
|
||||
defined as the key.
|
||||
"""
|
||||
return {self.model.__name__: smart_unicode(self.model.__module__)}
|
||||
imports = property(get_imports)
|
||||
|
||||
def get_lines(self):
|
||||
""" Returns a list of lists or strings, representing the code body.
|
||||
Each list is a block, each string is a statement.
|
||||
"""
|
||||
code = []
|
||||
|
||||
for counter, item in enumerate(self.model._default_manager.all()):
|
||||
instance = InstanceCode(instance=item, id=counter + 1, context=self.context, stdout=self.stdout, stderr=self.stderr)
|
||||
self.instances.append(instance)
|
||||
if instance.waiting_list:
|
||||
code += instance.lines
|
||||
|
||||
# After each instance has been processed, try again.
|
||||
# This allows self referencing fields to work.
|
||||
for instance in self.instances:
|
||||
if instance.waiting_list:
|
||||
code += instance.lines
|
||||
|
||||
return code
|
||||
|
||||
lines = property(get_lines)
|
||||
|
||||
|
||||
class InstanceCode(Code):
|
||||
" Produces a python script that can recreate data for a given model instance. "
|
||||
|
||||
def __init__(self, instance, id, context=None, stdout=None, stderr=None):
|
||||
""" We need the instance in question and an id """
|
||||
|
||||
super(InstanceCode, self).__init__(indent=0, stdout=stdout, stderr=stderr)
|
||||
self.imports = {}
|
||||
|
||||
self.instance = instance
|
||||
self.model = self.instance.__class__
|
||||
if context is None:
|
||||
context = {}
|
||||
self.context = context
|
||||
self.variable_name = "%s_%s" % (self.instance._meta.db_table, id)
|
||||
self.skip_me = None
|
||||
self.instantiated = False
|
||||
|
||||
self.waiting_list = list(self.model._meta.fields)
|
||||
|
||||
self.many_to_many_waiting_list = {}
|
||||
for field in self.model._meta.many_to_many:
|
||||
self.many_to_many_waiting_list[field] = list(getattr(self.instance, field.name).all())
|
||||
|
||||
def get_lines(self, force=False):
|
||||
""" Returns a list of lists or strings, representing the code body.
|
||||
Each list is a block, each string is a statement.
|
||||
|
||||
force (True or False): if an attribute object cannot be included,
|
||||
it is usually skipped to be processed later. With 'force' set, there
|
||||
will be no waiting: a get_or_create() call is written instead.
|
||||
"""
|
||||
code_lines = []
|
||||
|
||||
# Don't return anything if this is an instance that should be skipped
|
||||
if self.skip():
|
||||
return []
|
||||
|
||||
# Initialise our new object
|
||||
# e.g. model_name_35 = Model()
|
||||
code_lines += self.instantiate()
|
||||
|
||||
# Add each field
|
||||
# e.g. model_name_35.field_one = 1034.91
|
||||
# model_name_35.field_two = "text"
|
||||
code_lines += self.get_waiting_list()
|
||||
|
||||
if force:
|
||||
# TODO: Check that M2M are not affected
|
||||
code_lines += self.get_waiting_list(force=force)
|
||||
|
||||
# Print the save command for our new object
|
||||
# e.g. model_name_35.save()
|
||||
if code_lines:
|
||||
code_lines.append("%s = importer.save_or_locate(%s)\n" % (self.variable_name, self.variable_name))
|
||||
|
||||
code_lines += self.get_many_to_many_lines(force=force)
|
||||
|
||||
return code_lines
|
||||
lines = property(get_lines)
|
||||
|
||||
def skip(self):
|
||||
""" Determine whether or not this object should be skipped.
|
||||
If this model instance is a parent of a single subclassed
|
||||
instance, skip it. The subclassed instance will create this
|
||||
parent instance for us.
|
||||
|
||||
TODO: Allow the user to force its creation?
|
||||
"""
|
||||
|
||||
if self.skip_me is not None:
|
||||
return self.skip_me
|
||||
|
||||
def get_skip_version():
|
||||
""" Return which version of the skip code should be run
|
||||
|
||||
Django's deletion code was refactored in r14507 which
|
||||
was just two days before 1.3 alpha 1 (r14519)
|
||||
"""
|
||||
if not hasattr(self, '_SKIP_VERSION'):
|
||||
version = django.VERSION
|
||||
# no, it isn't lisp. I swear.
|
||||
self._SKIP_VERSION = (
|
||||
version[0] > 1 or ( # django 2k... someday :)
|
||||
version[0] == 1 and ( # 1.x
|
||||
version[1] >= 4 or # 1.4+
|
||||
version[1] == 3 and not ( # 1.3.x
|
||||
(version[3] == 'alpha' and version[1] == 0)
|
||||
)
|
||||
)
|
||||
)
|
||||
) and 2 or 1 # NOQA
|
||||
return self._SKIP_VERSION
|
||||
|
||||
if get_skip_version() == 1:
|
||||
try:
|
||||
# Django trunk since r7722 uses CollectedObjects instead of dict
|
||||
from django.db.models.query import CollectedObjects
|
||||
sub_objects = CollectedObjects()
|
||||
except ImportError:
|
||||
# previous versions don't have CollectedObjects
|
||||
sub_objects = {}
|
||||
self.instance._collect_sub_objects(sub_objects)
|
||||
sub_objects = sub_objects.keys()
|
||||
|
||||
elif get_skip_version() == 2:
|
||||
from django.db.models.deletion import Collector
|
||||
from django.db import router
|
||||
cls = self.instance.__class__
|
||||
using = router.db_for_write(cls, instance=self.instance)
|
||||
collector = Collector(using=using)
|
||||
collector.collect([self.instance], collect_related=False)
|
||||
|
||||
# collector stores its instances in two places. I *think* we
|
||||
# only need collector.data, but using the batches is needed
|
||||
# to perfectly emulate the old behaviour
|
||||
# TODO: check if batches are really needed. If not, remove them.
|
||||
sub_objects = sum([list(i) for i in collector.data.values()], [])
|
||||
|
||||
for batch in collector.batches.values():
|
||||
# batch.values can be sets, which must be converted to lists
|
||||
sub_objects += sum([list(i) for i in batch.values()], [])
|
||||
|
||||
sub_objects_parents = [so._meta.parents for so in sub_objects]
|
||||
if [self.model in p for p in sub_objects_parents].count(True) == 1:
|
||||
# since this instance isn't explicitly created, it's variable name
|
||||
# can't be referenced in the script, so record None in context dict
|
||||
pk_name = self.instance._meta.pk.name
|
||||
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
|
||||
self.context[key] = None
|
||||
self.skip_me = True
|
||||
else:
|
||||
self.skip_me = False
|
||||
|
||||
return self.skip_me
|
||||
|
||||
def instantiate(self):
|
||||
" Write lines for instantiation "
|
||||
# e.g. model_name_35 = Model()
|
||||
code_lines = []
|
||||
|
||||
if not self.instantiated:
|
||||
code_lines.append("%s = %s()" % (self.variable_name, self.model.__name__))
|
||||
self.instantiated = True
|
||||
|
||||
# Store our variable name for future foreign key references
|
||||
pk_name = self.instance._meta.pk.name
|
||||
key = '%s_%s' % (self.model.__name__, getattr(self.instance, pk_name))
|
||||
self.context[key] = self.variable_name
|
||||
|
||||
return code_lines
|
||||
|
||||
def get_waiting_list(self, force=False):
|
||||
" Add lines for any waiting fields that can be completed now. "
|
||||
|
||||
code_lines = []
|
||||
|
||||
# Process normal fields
|
||||
for field in list(self.waiting_list):
|
||||
try:
|
||||
# Find the value, add the line, remove from waiting list and move on
|
||||
value = get_attribute_value(self.instance, field, self.context, force=force)
|
||||
code_lines.append('%s.%s = %s' % (self.variable_name, field.name, value))
|
||||
self.waiting_list.remove(field)
|
||||
except SkipValue:
|
||||
# Remove from the waiting list and move on
|
||||
self.waiting_list.remove(field)
|
||||
continue
|
||||
except DoLater:
|
||||
# Move on, maybe next time
|
||||
continue
|
||||
|
||||
return code_lines
|
||||
|
||||
def get_many_to_many_lines(self, force=False):
|
||||
""" Generates lines that define many to many relations for this instance. """
|
||||
|
||||
lines = []
|
||||
|
||||
for field, rel_items in self.many_to_many_waiting_list.items():
|
||||
for rel_item in list(rel_items):
|
||||
try:
|
||||
pk_name = rel_item._meta.pk.name
|
||||
key = '%s_%s' % (rel_item.__class__.__name__, getattr(rel_item, pk_name))
|
||||
value = "%s" % self.context[key]
|
||||
lines.append('%s.%s.add(%s)' % (self.variable_name, field.name, value))
|
||||
self.many_to_many_waiting_list[field].remove(rel_item)
|
||||
except KeyError:
|
||||
if force:
|
||||
item_locator = orm_item_locator(rel_item)
|
||||
self.context["__extra_imports"][rel_item._meta.object_name] = rel_item.__module__
|
||||
lines.append('%s.%s.add( %s )' % (self.variable_name, field.name, item_locator))
|
||||
self.many_to_many_waiting_list[field].remove(rel_item)
|
||||
|
||||
if lines:
|
||||
lines.append("")
|
||||
|
||||
return lines
|
||||
|
||||
|
||||
class Script(Code):
|
||||
" Produces a complete python script that can recreate data for the given apps. "
|
||||
|
||||
def __init__(self, models, context=None, stdout=None, stderr=None):
|
||||
super(Script, self).__init__(stdout=stdout, stderr=stderr)
|
||||
self.imports = {}
|
||||
|
||||
self.models = models
|
||||
if context is None:
|
||||
context = {}
|
||||
self.context = context
|
||||
|
||||
self.context["__avaliable_models"] = set(models)
|
||||
self.context["__extra_imports"] = {}
|
||||
|
||||
def _queue_models(self, models, context):
|
||||
""" Works an an appropriate ordering for the models.
|
||||
This isn't essential, but makes the script look nicer because
|
||||
more instances can be defined on their first try.
|
||||
"""
|
||||
|
||||
# Max number of cycles allowed before we call it an infinite loop.
|
||||
MAX_CYCLES = 5
|
||||
|
||||
model_queue = []
|
||||
number_remaining_models = len(models)
|
||||
allowed_cycles = MAX_CYCLES
|
||||
|
||||
while number_remaining_models > 0:
|
||||
previous_number_remaining_models = number_remaining_models
|
||||
|
||||
model = models.pop(0)
|
||||
|
||||
# If the model is ready to be processed, add it to the list
|
||||
if check_dependencies(model, model_queue, context["__avaliable_models"]):
|
||||
model_class = ModelCode(model=model, context=context, stdout=self.stdout, stderr=self.stderr)
|
||||
model_queue.append(model_class)
|
||||
|
||||
# Otherwise put the model back at the end of the list
|
||||
else:
|
||||
models.append(model)
|
||||
|
||||
# Check for infinite loops.
|
||||
# This means there is a cyclic foreign key structure
|
||||
# That cannot be resolved by re-ordering
|
||||
number_remaining_models = len(models)
|
||||
if number_remaining_models == previous_number_remaining_models:
|
||||
allowed_cycles -= 1
|
||||
if allowed_cycles <= 0:
|
||||
# Add the remaining models, but do not remove them from the model list
|
||||
missing_models = [ModelCode(model=m, context=context, stdout=self.stdout, stderr=self.stderr) for m in models]
|
||||
model_queue += missing_models
|
||||
# Replace the models with the model class objects
|
||||
# (sure, this is a little bit of hackery)
|
||||
models[:] = missing_models
|
||||
break
|
||||
else:
|
||||
allowed_cycles = MAX_CYCLES
|
||||
|
||||
return model_queue
|
||||
|
||||
def get_lines(self):
|
||||
""" Returns a list of lists or strings, representing the code body.
|
||||
Each list is a block, each string is a statement.
|
||||
"""
|
||||
code = [self.FILE_HEADER.strip()]
|
||||
|
||||
# Queue and process the required models
|
||||
for model_class in self._queue_models(self.models, context=self.context):
|
||||
msg = 'Processing model: %s\n' % model_class.model.__name__
|
||||
self.stderr.write(msg)
|
||||
code.append(" #" + msg)
|
||||
code.append(model_class.import_lines)
|
||||
code.append("")
|
||||
code.append(model_class.lines)
|
||||
|
||||
# Process left over foreign keys from cyclic models
|
||||
for model in self.models:
|
||||
msg = 'Re-processing model: %s\n' % model.model.__name__
|
||||
self.stderr.write(msg)
|
||||
code.append(" #" + msg)
|
||||
for instance in model.instances:
|
||||
if instance.waiting_list or instance.many_to_many_waiting_list:
|
||||
code.append(instance.get_lines(force=True))
|
||||
|
||||
code.insert(1, " #initial imports")
|
||||
code.insert(2, "")
|
||||
for key, value in self.context["__extra_imports"].items():
|
||||
code.insert(2, " from %s import %s" % (value, key))
|
||||
|
||||
return code
|
||||
|
||||
lines = property(get_lines)
|
||||
|
||||
# A user-friendly file header
|
||||
FILE_HEADER = """
|
||||
|
||||
#!/usr/bin/env python
|
||||
# -*- coding: utf-8 -*-
|
||||
|
||||
# This file has been automatically generated.
|
||||
# Instead of changing it, create a file called import_helper.py
|
||||
# and put there a class called ImportHelper(object) in it.
|
||||
#
|
||||
# This class will be specially casted so that instead of extending object,
|
||||
# it will actually extend the class BasicImportHelper()
|
||||
#
|
||||
# That means you just have to overload the methods you want to
|
||||
# change, leaving the other ones inteact.
|
||||
#
|
||||
# Something that you might want to do is use transactions, for example.
|
||||
#
|
||||
# Also, don't forget to add the necessary Django imports.
|
||||
#
|
||||
# This file was generated with the following command:
|
||||
# %s
|
||||
#
|
||||
# to restore it, run
|
||||
# manage.py runscript module_name.this_script_name
|
||||
#
|
||||
# example: if manage.py is at ./manage.py
|
||||
# and the script is at ./some_folder/some_script.py
|
||||
# you must make sure ./some_folder/__init__.py exists
|
||||
# and run ./manage.py runscript some_folder.some_script
|
||||
|
||||
from django.db import transaction
|
||||
|
||||
class BasicImportHelper(object):
|
||||
|
||||
def pre_import(self):
|
||||
pass
|
||||
|
||||
# You probably want to uncomment on of these two lines
|
||||
# @transaction.atomic # Django 1.6
|
||||
# @transaction.commit_on_success # Django <1.6
|
||||
def run_import(self, import_data):
|
||||
import_data()
|
||||
|
||||
def post_import(self):
|
||||
pass
|
||||
|
||||
def locate_similar(self, current_object, search_data):
|
||||
#you will probably want to call this method from save_or_locate()
|
||||
#example:
|
||||
#new_obj = self.locate_similar(the_obj, {"national_id": the_obj.national_id } )
|
||||
|
||||
the_obj = current_object.__class__.objects.get(**search_data)
|
||||
return the_obj
|
||||
|
||||
def locate_object(self, original_class, original_pk_name, the_class, pk_name, pk_value, obj_content):
|
||||
#You may change this function to do specific lookup for specific objects
|
||||
#
|
||||
#original_class class of the django orm's object that needs to be located
|
||||
#original_pk_name the primary key of original_class
|
||||
#the_class parent class of original_class which contains obj_content
|
||||
#pk_name the primary key of original_class
|
||||
#pk_value value of the primary_key
|
||||
#obj_content content of the object which was not exported.
|
||||
#
|
||||
#you should use obj_content to locate the object on the target db
|
||||
#
|
||||
#and example where original_class and the_class are different is
|
||||
#when original_class is Farmer and
|
||||
#the_class is Person. The table may refer to a Farmer but you will actually
|
||||
#need to locate Person in order to instantiate that Farmer
|
||||
#
|
||||
#example:
|
||||
#if the_class == SurveyResultFormat or the_class == SurveyType or the_class == SurveyState:
|
||||
# pk_name="name"
|
||||
# pk_value=obj_content[pk_name]
|
||||
#if the_class == StaffGroup:
|
||||
# pk_value=8
|
||||
|
||||
search_data = { pk_name: pk_value }
|
||||
the_obj = the_class.objects.get(**search_data)
|
||||
#print(the_obj)
|
||||
return the_obj
|
||||
|
||||
|
||||
def save_or_locate(self, the_obj):
|
||||
#change this if you want to locate the object in the database
|
||||
try:
|
||||
the_obj.save()
|
||||
except:
|
||||
print("---------------")
|
||||
print("Error saving the following object:")
|
||||
print(the_obj.__class__)
|
||||
print(" ")
|
||||
print(the_obj.__dict__)
|
||||
print(" ")
|
||||
print(the_obj)
|
||||
print(" ")
|
||||
print("---------------")
|
||||
|
||||
raise
|
||||
return the_obj
|
||||
|
||||
|
||||
importer = None
|
||||
try:
|
||||
import import_helper
|
||||
#we need this so ImportHelper can extend BasicImportHelper, although import_helper.py
|
||||
#has no knowlodge of this class
|
||||
importer = type("DynamicImportHelper", (import_helper.ImportHelper, BasicImportHelper ) , {} )()
|
||||
except ImportError as e:
|
||||
if str(e) == "No module named import_helper":
|
||||
importer = BasicImportHelper()
|
||||
else:
|
||||
raise
|
||||
|
||||
import datetime
|
||||
from decimal import Decimal
|
||||
from django.contrib.contenttypes.models import ContentType
|
||||
|
||||
def run():
|
||||
importer.pre_import()
|
||||
importer.run_import(import_data)
|
||||
importer.post_import()
|
||||
|
||||
def import_data():
|
||||
|
||||
""" % " ".join(sys.argv)
|
||||
|
||||
|
||||
# HELPER FUNCTIONS
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
def flatten_blocks(lines, num_indents=-1):
|
||||
""" Takes a list (block) or string (statement) and flattens it into a string
|
||||
with indentation.
|
||||
"""
|
||||
|
||||
# The standard indent is four spaces
|
||||
INDENTATION = " " * 4
|
||||
|
||||
if not lines:
|
||||
return ""
|
||||
|
||||
# If this is a string, add the indentation and finish here
|
||||
if isinstance(lines, six.string_types):
|
||||
return INDENTATION * num_indents + lines
|
||||
|
||||
# If this is not a string, join the lines and recurse
|
||||
return "\n".join([flatten_blocks(line, num_indents + 1) for line in lines])
|
||||
|
||||
|
||||
def get_attribute_value(item, field, context, force=False):
|
||||
""" Gets a string version of the given attribute's value, like repr() might. """
|
||||
|
||||
# Find the value of the field, catching any database issues
|
||||
try:
|
||||
value = getattr(item, field.name)
|
||||
except ObjectDoesNotExist:
|
||||
raise SkipValue('Could not find object for %s.%s, ignoring.\n' % (item.__class__.__name__, field.name))
|
||||
|
||||
# AutoField: We don't include the auto fields, they'll be automatically recreated
|
||||
if isinstance(field, AutoField):
|
||||
raise SkipValue()
|
||||
|
||||
# Some databases (eg MySQL) might store boolean values as 0/1, this needs to be cast as a bool
|
||||
elif isinstance(field, BooleanField) and value is not None:
|
||||
return repr(bool(value))
|
||||
|
||||
# Post file-storage-refactor, repr() on File/ImageFields no longer returns the path
|
||||
elif isinstance(field, FileField):
|
||||
return repr(force_unicode(value))
|
||||
|
||||
# ForeignKey fields, link directly using our stored python variable name
|
||||
elif isinstance(field, ForeignKey) and value is not None:
|
||||
|
||||
# Special case for contenttype foreign keys: no need to output any
|
||||
# content types in this script, as they can be generated again
|
||||
# automatically.
|
||||
# NB: Not sure if "is" will always work
|
||||
if field.rel.to is ContentType:
|
||||
return 'ContentType.objects.get(app_label="%s", model="%s")' % (value.app_label, value.model)
|
||||
|
||||
# Generate an identifier (key) for this foreign object
|
||||
pk_name = value._meta.pk.name
|
||||
key = '%s_%s' % (value.__class__.__name__, getattr(value, pk_name))
|
||||
|
||||
if key in context:
|
||||
variable_name = context[key]
|
||||
# If the context value is set to None, this should be skipped.
|
||||
# This identifies models that have been skipped (inheritance)
|
||||
if variable_name is None:
|
||||
raise SkipValue()
|
||||
# Return the variable name listed in the context
|
||||
return "%s" % variable_name
|
||||
elif value.__class__ not in context["__avaliable_models"] or force:
|
||||
context["__extra_imports"][value._meta.object_name] = value.__module__
|
||||
item_locator = orm_item_locator(value)
|
||||
return item_locator
|
||||
else:
|
||||
raise DoLater('(FK) %s.%s\n' % (item.__class__.__name__, field.name))
|
||||
|
||||
# A normal field (e.g. a python built-in)
|
||||
else:
|
||||
return repr(value)
|
||||
|
||||
|
||||
def make_clean_dict(the_dict):
|
||||
if "_state" in the_dict:
|
||||
clean_dict = the_dict.copy()
|
||||
del clean_dict["_state"]
|
||||
return clean_dict
|
||||
return the_dict
|
||||
|
||||
|
||||
def check_dependencies(model, model_queue, avaliable_models):
    """Check that all the dependencies for this model are already in the queue."""
    # A link is allowed to anything already queued, to the model itself and
    # to the special-cased ContentType model.
    allowed_links = set(entry.model.__name__ for entry in model_queue)
    allowed_links.add(model.__name__)
    allowed_links.add('ContentType')

    # Every ForeignKey must point at an allowed target before this model can
    # be emitted; targets outside the available models are ignored entirely.
    for field in model._meta.fields:
        if not field.rel or field.rel.to.__name__ in allowed_links:
            continue
        if field.rel.to in avaliable_models:
            return False

    # ManyToMany targets must be allowed unconditionally.
    for m2m_field in model._meta.many_to_many:
        if m2m_field.rel and m2m_field.rel.to.__name__ not in allowed_links:
            return False

    return True
|
||||
|
||||
|
||||
# EXCEPTIONS
|
||||
#-------------------------------------------------------------------------------
|
||||
|
||||
class SkipValue(Exception):
    """Raised when a value cannot be serialized and should simply be skipped."""
|
||||
|
||||
|
||||
class DoLater(Exception):
    """Raised when a value cannot be serialized yet and must be handled later.

    Unlike SkipValue, the value is not dropped: the caller is expected to
    come back to it once its dependencies (e.g. the target of a foreign
    key) have been emitted.
    """
|
|
@ -1,133 +0,0 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
try:
|
||||
from django.contrib.auth import get_user_model # Django 1.5
|
||||
except ImportError:
|
||||
from django_extensions.future_1_5 import get_user_model
|
||||
from django.contrib.auth.models import Group
|
||||
from optparse import make_option
|
||||
from sys import stdout
|
||||
from csv import writer
|
||||
import six
|
||||
|
||||
# Supported values for the --format option; the first entry is the default.
# Each name corresponds to a formatter method on Command below.
FORMATS = [
    'address',
    'emails',
    'google',
    'outlook',
    'linkedin',
    'vcard',
]
|
||||
|
||||
|
||||
def full_name(first_name, last_name, username, **extra):
    """Return "first last" for a user, falling back to the username.

    Empty name parts are omitted; when both are empty the username is
    returned instead. Extra keyword arguments (the remaining columns of a
    ``values()`` row) are accepted and ignored so rows can be splatted in
    via ``full_name(**row)``.
    """
    # A u"" literal is valid on Python 2 and Python 3.3+, so six.u() is an
    # unnecessary indirection for a constant ASCII separator.
    name = u" ".join(part for part in (first_name, last_name) if part)
    return name if name else username
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Export user e-mail addresses in one of several list/CSV/vCard formats."""

    option_list = BaseCommand.option_list + (
        make_option('--group', '-g', action='store', dest='group', default=None,
                    help='Limit to users which are part of the supplied group name'),
        make_option('--format', '-f', action='store', dest='format', default=FORMATS[0],
                    help="output format. May be one of '" + "', '".join(FORMATS) + "'."),
    )

    help = ("Export user email address list in one of a number of formats.")
    args = "[output file]"
    label = 'filename to save to'

    requires_model_validation = True
    can_import_settings = True
    encoding = 'utf-8'  # RED_FLAG: add as an option -DougN

    def handle(self, *args, **options):
        """Validate options, build the user queryset and dispatch to a formatter."""
        if len(args) > 1:
            raise CommandError("extra arguments supplied")

        group = options['group']
        if group and not Group.objects.filter(name=group).count() == 1:
            names = six.u("', '").join(g['name'] for g in Group.objects.values('name')).encode('utf-8')
            if names:
                names = "'" + names + "'."
            raise CommandError("Unknown group '" + group + "'. Valid group names are: " + names)

        # Write to the named file unless "-" (or nothing) was given, in which
        # case the export goes to stdout.
        if args and args[0] != '-':
            outfile = open(args[0], 'w')
        else:
            outfile = stdout

        User = get_user_model()
        users = User.objects.all().order_by('last_name', 'first_name', 'username', 'email')
        if group:
            users = users.filter(group__name=group).distinct()
        users = users.values('last_name', 'first_name', 'username', 'email')

        # Dispatch to the formatter method named after the requested format.
        getattr(self, options['format'])(users, outfile)

    def address(self, qs, out):
        """simple single entry per line in the format of:
            "full name" <my@address.com>;
        """
        entries = (six.u('"%s" <%s>;' % (full_name(**ent), ent['email'])) for ent in qs)
        out.write(six.u("\n").join(entries).encode(self.encoding))
        out.write("\n")

    def emails(self, qs, out):
        """simpler single entry with email only in the format of:
            my@address.com,
        """
        addresses = (six.u('%s' % (ent['email'])) for ent in qs)
        out.write(six.u(",\n").join(addresses).encode(self.encoding))
        out.write("\n")

    def google(self, qs, out):
        """CSV format suitable for importing into google GMail"""
        sheet = writer(out)
        sheet.writerow(['Name', 'Email'])
        for row in qs:
            sheet.writerow([full_name(**row).encode(self.encoding),
                            row['email'].encode(self.encoding)])

    def outlook(self, qs, out):
        """CSV format suitable for importing into outlook"""
        sheet = writer(out)
        columns = ['Name', 'E-mail Address', 'Notes', 'E-mail 2 Address', 'E-mail 3 Address',
                   'Mobile Phone', 'Pager', 'Company', 'Job Title', 'Home Phone', 'Home Phone 2',
                   'Home Fax', 'Home Address', 'Business Phone', 'Business Phone 2',
                   'Business Fax', 'Business Address', 'Other Phone', 'Other Fax', 'Other Address']
        sheet.writerow(columns)
        # Only the first two columns carry data; the rest are padded empty.
        padding = [''] * (len(columns) - 2)
        for row in qs:
            sheet.writerow([full_name(**row).encode(self.encoding),
                            row['email'].encode(self.encoding)] + padding)

    def linkedin(self, qs, out):
        """CSV format suitable for importing into linkedin Groups.
        perfect for pre-approving members of a linkedin group.
        """
        sheet = writer(out)
        sheet.writerow(['First Name', 'Last Name', 'Email'])
        for row in qs:
            sheet.writerow([row['first_name'].encode(self.encoding),
                            row['last_name'].encode(self.encoding),
                            row['email'].encode(self.encoding)])

    def vcard(self, qs, out):
        """vCard export; requires the third-party python-vobject library."""
        try:
            import vobject
        except ImportError:
            print(self.style.ERROR("Please install python-vobject to use the vcard export format."))
            import sys
            sys.exit(1)
        for row in qs:
            card = vobject.vCard()
            card.add('fn').value = full_name(**row)
            if not row['last_name'] and not row['first_name']:
                # fallback to fullname, if both first and lastname are not declared
                card.add('n').value = vobject.vcard.Name(full_name(**row))
            else:
                card.add('n').value = vobject.vcard.Name(row['last_name'], row['first_name'])
            email_field = card.add('email')
            email_field.value = row['email']
            email_field.type_param = 'INTERNET'
            out.write(card.serialize().encode(self.encoding))
|
|
@ -1,35 +0,0 @@
|
|||
from django.core.management.base import LabelCommand
|
||||
from django.template import loader
|
||||
from django.template import TemplateDoesNotExist
|
||||
import sys
|
||||
|
||||
|
||||
def get_template_path(path):
    """Resolve *path* to the concrete location of the template.

    Returns None when no loader can find the template.
    """
    try:
        template, template_origin = loader.find_template(path)
        if template_origin:
            return template_origin.name
        # work around https://code.djangoproject.com/ticket/17199: some
        # configurations return no origin, so ask each source loader directly.
        for template_loader in loader.template_source_loaders:
            try:
                return template_loader.load_template_source(path)[1]
            except TemplateDoesNotExist:
                pass
        raise TemplateDoesNotExist(path)
    except TemplateDoesNotExist:
        return None
|
||||
|
||||
|
||||
class Command(LabelCommand):
    """Print the filesystem location of a named template."""

    help = "Finds the location of the given template by resolving its path"
    args = "[template_path]"
    label = 'template path'

    def handle_label(self, template_path, **options):
        """Resolve the template path; exit with status 1 when it is unknown."""
        resolved = get_template_path(template_path)
        if resolved is None:
            sys.stderr.write("No template found\n")
            sys.exit(1)
        print(resolved)
|
|
@ -1,11 +0,0 @@
|
|||
from random import choice
|
||||
from django.core.management.base import NoArgsCommand
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    """Generate a random SECRET_KEY suitable for a project settings file."""

    help = "Generates a new SECRET_KEY that can be used in a project settings file."

    requires_model_validation = False

    def handle_noargs(self, **options):
        """Return a 50-character key of lowercase letters, digits and punctuation.

        Uses the OS-backed SystemRandom rather than the default Mersenne
        Twister: SECRET_KEY is a security credential, so its characters must
        come from a cryptographically secure source.
        """
        from random import SystemRandom
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        secure_choice = SystemRandom().choice
        return ''.join(secure_choice(chars) for _ in range(50))
|
|
@ -1,71 +0,0 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from django_extensions.management.modelviz import generate_dot
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Render the project's models as a GraphViz dot file (or image)."""

    option_list = BaseCommand.option_list + (
        make_option('--disable-fields', '-d', action='store_true', dest='disable_fields',
                    help='Do not show the class member fields'),
        make_option('--group-models', '-g', action='store_true', dest='group_models',
                    help='Group models together respective to their application'),
        make_option('--all-applications', '-a', action='store_true', dest='all_applications',
                    help='Automatically include all applications from INSTALLED_APPS'),
        make_option('--output', '-o', action='store', dest='outputfile',
                    help='Render output file. Type of output dependend on file extensions. Use png or jpg to render graph to image.'),
        make_option('--layout', '-l', action='store', dest='layout', default='dot',
                    help='Layout to be used by GraphViz for visualization. Layouts: circo dot fdp neato nop nop1 nop2 twopi'),
        make_option('--verbose-names', '-n', action='store_true', dest='verbose_names',
                    help='Use verbose_name of models and fields'),
        make_option('--language', '-L', action='store', dest='language',
                    help='Specify language used for verbose_name localization'),
        make_option('--exclude-columns', '-x', action='store', dest='exclude_columns',
                    help='Exclude specific column(s) from the graph. Can also load exclude list from file.'),
        make_option('--exclude-models', '-X', action='store', dest='exclude_models',
                    help='Exclude specific model(s) from the graph. Can also load exclude list from file.'),
        make_option('--inheritance', '-e', action='store_true', dest='inheritance',
                    help='Include inheritance arrows'),
    )

    help = ("Creates a GraphViz dot file for the specified app names. You can pass multiple app names and they will all be combined into a single model. Output is usually directed to a dot file.")
    args = "[appname]"
    label = 'application name'

    requires_model_validation = True
    can_import_settings = True

    def handle(self, *args, **options):
        """Generate dot source and either print it or render it to a file."""
        if not args and not options['all_applications']:
            raise CommandError("need one or more arguments for appname")

        dot_source = generate_dot(args, **options)
        if options['outputfile']:
            self.render_output(dot_source, **options)
        else:
            self.print_output(dot_source)

    def print_output(self, dotdata):
        """Write the dot source to stdout."""
        print(dotdata.encode('utf-8'))

    def render_output(self, dotdata, **kwargs):
        """Render the dot source to the requested output file via pygraphviz."""
        try:
            import pygraphviz
        except ImportError:
            raise CommandError("You need to install pygraphviz python module")

        flattened = ' '.join(dotdata.split("\n")).strip().encode('utf-8')
        version = pygraphviz.__version__.rstrip("-svn")
        try:
            if tuple(int(part) for part in version.split('.')) < (0, 36):
                # HACK around old/broken AGraph before version 0.36 (ubuntu ships with this old version)
                import tempfile
                tmpfile = tempfile.NamedTemporaryFile()
                tmpfile.write(flattened)
                tmpfile.seek(0)
                flattened = tmpfile.name
        except ValueError:
            # Unparseable version string: assume a modern pygraphviz.
            pass

        graph = pygraphviz.AGraph(flattened)
        graph.layout(prog=kwargs['layout'])
        graph.draw(kwargs['outputfile'])
|
|
@ -1,80 +0,0 @@
|
|||
from django_extensions.management.utils import setup_logger
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from smtpd import SMTPServer
|
||||
import sys
|
||||
import asyncore
|
||||
from logging import getLogger
|
||||
|
||||
|
||||
logger = getLogger(__name__)
|
||||
|
||||
|
||||
class ExtensionDebuggingServer(SMTPServer):
    """Duplication of smtpd.DebuggingServer, but using logging instead of print."""

    def process_message(self, peer, mailfrom, rcpttos, data):
        """Log each received message to the module logger at INFO level."""
        logger.info('---------- MESSAGE FOLLOWS ----------')
        in_headers = True
        for line in data.split('\n'):
            # The blank line ending the header section triggers an extra
            # X-Peer header noting where the message came from.
            if in_headers and not line:
                logger.info('X-Peer: %s' % peer[0])
                in_headers = False
            logger.info(line)
        logger.info('------------ END MESSAGE ------------')
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Run a dummy SMTP server that logs incoming mail instead of delivering it."""

    option_list = BaseCommand.option_list + (
        make_option('--output', dest='output_file', default=None,
                    help='Specifies an output file to send a copy of all messages (not flushed immediately).'),
        make_option('--use-settings', dest='use_settings',
                    action='store_true', default=False,
                    help='Uses EMAIL_HOST and HOST_PORT from Django settings.'),
    )
    help = "Starts a test mail server for development."
    args = '[optional port number or ippaddr:port]'

    requires_model_validation = False

    def handle(self, addrport='', *args, **options):
        """Parse the bind address, set up logging and serve until interrupted."""
        if args:
            raise CommandError('Usage is mail_debug %s' % self.args)

        # Work out the address/port to bind: explicit argument first, then
        # Django settings (with --use-settings), then the defaults.
        if not addrport:
            if options.get('use_settings', False):
                from django.conf import settings
                addr = getattr(settings, 'EMAIL_HOST', '')
                port = str(getattr(settings, 'EMAIL_PORT', '1025'))
            else:
                addr, port = '', '1025'
        else:
            try:
                addr, port = addrport.split(':')
            except ValueError:
                addr, port = '', addrport
        if not addr:
            addr = '127.0.0.1'

        if not port.isdigit():
            raise CommandError("%r is not a valid port number." % port)
        else:
            port = int(port)

        # Add console handler
        setup_logger(logger, stream=self.stdout, filename=options.get('output_file', None))

        def serve():
            quit_command = 'CTRL-BREAK' if sys.platform == 'win32' else 'CONTROL-C'
            print("Now accepting mail at %s:%s -- use %s to quit" % (addr, port, quit_command))

            ExtensionDebuggingServer((addr, port), None)
            asyncore.loop()

        try:
            serve()
        except KeyboardInterrupt:
            pass
|
|
@ -1,48 +0,0 @@
|
|||
from __future__ import with_statement
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.conf import settings
|
||||
import os
|
||||
import re
|
||||
|
||||
ANNOTATION_RE = re.compile("\{?#[\s]*?(TODO|FIXME|BUG|HACK|WARNING|NOTE|XXX)[\s:]?(.+)")
|
||||
ANNOTATION_END_RE = re.compile("(.*)#\}(.*)")
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Collect TODO/FIXME-style annotations from the project's .py and .html files."""

    help = 'Show all annotations like TODO, FIXME, BUG, HACK, WARNING, NOTE or XXX in your py and HTML files.'
    args = 'tag'
    label = 'annotation tag (TODO, FIXME, BUG, HACK, WARNING, NOTE, XXX)'

    def handle(self, *args, **options):
        """Walk the app (and template) directories and print matching annotations."""
        # don't add django internal code
        # list() so this also works on Python 3, where filter() is lazy and
        # the += below would fail; identical behavior on Python 2.
        apps = list(filter(lambda app: not app.startswith('django.contrib'), settings.INSTALLED_APPS))
        template_dirs = getattr(settings, 'TEMPLATE_DIRS', [])
        if template_dirs:
            apps += template_dirs
        for app_dir in apps:
            app_dir = app_dir.replace(".", "/")
            for top, dirs, files in os.walk(app_dir):
                for filename in files:
                    if os.path.splitext(filename)[1] in ('.py', '.html'):
                        fpath = os.path.join(top, filename)
                        annotation_lines = []
                        # A distinct name for the handle avoids shadowing the
                        # loop variable (the original reused it).
                        with open(fpath, 'r') as fobj:
                            for i, line in enumerate(fobj, 1):
                                if ANNOTATION_RE.search(line):
                                    tag, msg = ANNOTATION_RE.findall(line)[0]
                                    if len(args) == 1:
                                        search_for_tag = args[0].upper()
                                        if not search_for_tag == tag:
                                            # NOTE(review): `break` abandons the rest of
                                            # this file on the first non-matching tag;
                                            # kept as-is, but `continue` may be intended.
                                            break

                                    # Strip a trailing "#}" template-comment closer.
                                    if ANNOTATION_END_RE.search(msg.strip()):
                                        msg = ANNOTATION_END_RE.findall(msg.strip())[0][0]

                                    annotation_lines.append("[%3s] %-5s %s" % (i, tag, msg.strip()))
                        if annotation_lines:
                            print("%s:" % fpath)
                            for annotation in annotation_lines:
                                print(" * %s" % annotation)
                            print("")
|
|
@ -1,42 +0,0 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
try:
|
||||
from django.contrib.auth import get_user_model # Django 1.5
|
||||
except ImportError:
|
||||
from django_extensions.future_1_5 import get_user_model
|
||||
import getpass
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Interactively change a user's password, like the UNIX passwd utility."""

    help = "Clone of the UNIX program ``passwd'', for django.contrib.auth."

    requires_model_validation = False

    def handle(self, *args, **options):
        """Prompt twice for a new password and save it on the named user."""
        if len(args) > 1:
            raise CommandError("need exactly one or zero arguments for username")

        # Default to the OS-level login name when no username was given.
        username = args[0] if args else getpass.getuser()

        User = get_user_model()
        try:
            user = User.objects.get(username=username)
        except User.DoesNotExist:
            raise CommandError("user %s does not exist" % username)

        print("Changing password for user: %s" % user.username)
        first = second = ""
        # Loop until both prompts match and are non-empty; an empty matching
        # pair aborts instead of setting a blank password.
        while "" in (first, second) or first != second:
            first = getpass.getpass()
            second = getpass.getpass("Password (again): ")
            if first != second:
                print("Passwords do not match, try again")
            elif "" in (first, second):
                raise CommandError("aborted")

        user.set_password(first)
        user.save()

        return "Password changed successfully for user %s\n" % user.username
|
|
@ -1,246 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
import os
|
||||
import pip
|
||||
import sys
|
||||
import json
|
||||
import urllib2
|
||||
import urlparse
|
||||
import xmlrpclib
|
||||
from distutils.version import LooseVersion
|
||||
|
||||
try:
|
||||
import requests
|
||||
except ImportError:
|
||||
print("""The requests library is not installed. To continue:
|
||||
pip install requests""")
|
||||
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.management.base import NoArgsCommand
|
||||
|
||||
from pip.req import parse_requirements
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    """Report which pip requirements (pypi or github based) are out of date."""

    option_list = NoArgsCommand.option_list + (
        make_option(
            "-t", "--github-api-token", action="store", dest="github_api_token",
            help="A github api authentication token."
        ),
        make_option(
            "-r", "--requirement", action="append", dest="requirements",
            default=[], metavar="FILENAME",
            help="Check all the packages listed in the given requirements file. "
                 "This option can be used multiple times."
        ),
        make_option(
            "-n", "--newer", action="store_true", dest="show_newer",
            help="Also show when newer version then available is installed."
        ),
    )
    help = "Scan pip requirement files for out-of-date packages."

    def handle_noargs(self, **options):
        """Collect the requirements to check, then run the pypi/github/other passes."""
        self.options = options
        # Which requirement files to scan: explicit -r options first, then
        # ./requirements.txt, then every *.txt under ./requirements.
        if options["requirements"]:
            req_files = options["requirements"]
        elif os.path.exists("requirements.txt"):
            req_files = ["requirements.txt"]
        elif os.path.exists("requirements"):
            req_files = ["requirements/{0}".format(f) for f in os.listdir("requirements")
                         if os.path.isfile(os.path.join("requirements", f)) and
                         f.lower().endswith(".txt")]
        else:
            sys.exit("requirements not found")

        # parse_requirements() wants an options object; a minimal stand-in
        # with just the attributes it reads is enough.
        class MockOptions(object):
            pass
        mockoptions = MockOptions()
        mockoptions.default_vcs = "git"
        mockoptions.skip_requirements_regex = None

        self.reqs = {}
        for filename in req_files:
            for req in parse_requirements(filename, options=mockoptions):
                self.reqs[req.name] = {
                    "pip_req": req,
                    "url": req.url,
                }

        # Token from the command line, then the environment, else anonymous.
        self.github_api_token = (options["github_api_token"] or
                                 os.environ.get("GITHUB_API_TOKEN") or
                                 None)  # anonymous access: only 50 requests per hour

        self.check_pypi()
        self.check_github()
        self.check_other()

    def _urlopen_as_json(self, url, headers=None):
        """Shortcut for fetching *url* and decoding the response as JSON."""
        request = urllib2.Request(url, headers=headers)
        return json.loads(urllib2.urlopen(request).read())

    def check_pypi(self):
        """
        If the requirement is frozen to pypi, check for a new version.
        """
        # Attach the locally installed distribution (if any) to each entry.
        for dist in pip.get_installed_distributions():
            if dist.project_name in self.reqs:
                self.reqs[dist.project_name]["dist"] = dist

        pypi = xmlrpclib.ServerProxy("http://pypi.python.org/pypi")
        for name, req in self.reqs.items():
            if req["url"]:
                continue  # url-based (github) requirements are handled elsewhere
            elif "dist" in req:
                dist = req["dist"]
                dist_version = LooseVersion(dist.version)
                available = pypi.package_releases(req["pip_req"].url_name)
                try:
                    available_version = LooseVersion(available[0])
                except IndexError:
                    available_version = None

                if not available_version:
                    msg = "release is not on pypi (check capitalization and/or --extra-index-url)"
                elif self.options['show_newer'] and dist_version > available_version:
                    msg = "{0} available (newer installed)".format(available_version)
                elif available_version > dist_version:
                    msg = "{0} available".format(available_version)
                else:
                    # Up-to-date entries are removed silently.
                    del self.reqs[name]
                    continue
                pkg_info = "{dist.project_name} {dist.version}".format(dist=dist)
            else:
                msg = "not installed"
                pkg_info = name
            print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
            del self.reqs[name]

    def check_github(self):
        """
        If the requirement is frozen to a github url, check for new commits.

        Without an api token (``--github-api-token`` or the
        ``GITHUB_API_TOKEN`` environment variable) github allows only 50
        api calls per hour.

        The check takes the sha the repo is frozen at and reports the
        requirement as out of date when that sha is not the head of any
        branch. Freezing at a commit sha therefore gives exact results;
        freezing at a branch or tag name is only approximate, and an
        unfrozen repo cannot be checked at all.
        """
        for name, req in self.reqs.items():
            req_url = req["url"]
            if req_url.startswith("git") and "github.com/" not in req_url:
                continue

            headers = {
                "content-type": "application/json",
            }
            if self.github_api_token:
                headers["Authorization"] = "token {0}".format(self.github_api_token)
            try:
                user, repo = urlparse.urlparse(req_url).path.split("#")[0].strip("/").rstrip("/").split("/")
            except (ValueError, IndexError) as e:
                print("\nFailed to parse %r: %s\n" % (req_url, e))
                continue

            # Probe the api once so bad credentials / rate limiting abort
            # the whole pass instead of failing per requirement.
            try:
                test_auth = requests.get("https://api.github.com/django/", headers=headers).json()
            except urllib2.HTTPError as e:
                print("\n%s\n" % str(e))
                return

            if "message" in test_auth and test_auth["message"] == "Bad credentials":
                print("\nGithub API: Bad credentials. Aborting!\n")
                return
            elif "message" in test_auth and test_auth["message"].startswith("API Rate Limit Exceeded"):
                print("\nGithub API: Rate Limit Exceeded. Aborting!\n")
                return

            # Extract the repo name and the frozen sha (if any) from the
            # "<repo>.git@<sha>" or "<repo>@<ref>" forms.
            if ".git" in repo:
                repo_name, frozen_commit_full = repo.split(".git")
                if frozen_commit_full.startswith("@"):
                    frozen_commit_sha = frozen_commit_full[1:]
            elif "@" in repo:
                repo_name, frozen_commit_sha = repo.split("@")
            else:
                frozen_commit_sha = None
                msg = "repo is not frozen"

            if frozen_commit_sha:
                branch_url = "https://api.github.com/repos/{0}/{1}/branches".format(user, repo_name)
                branch_data = requests.get(branch_url, headers=headers).json()

                frozen_commit_url = "https://api.github.com/repos/{0}/{1}/commits/{2}".format(
                    user, repo_name, frozen_commit_sha
                )
                frozen_commit_data = requests.get(frozen_commit_url, headers=headers).json()

                if "message" in frozen_commit_data and frozen_commit_data["message"] == "Not Found":
                    msg = "{0} not found in {1}. Repo may be private.".format(frozen_commit_sha[:10], name)
                elif frozen_commit_sha in [branch["commit"]["sha"] for branch in branch_data]:
                    msg = "up to date"
                else:
                    msg = "{0} is not the head of any branch".format(frozen_commit_data["sha"][:10])

            if "dist" in req:
                pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
            else:
                pkg_info = "{0} {1}".format(name, frozen_commit_sha[:10])
            print("{pkg_info:40} {msg}".format(pkg_info=pkg_info, msg=msg))
            del self.reqs[name]

    def check_other(self):
        """
        If the requirement is frozen somewhere other than pypi or github, skip.

        If you have a private pypi or use --extra-index-url, consider contributing
        support here.
        """
        if not self.reqs:
            return
        print("\nOnly pypi and github based requirements are supported:")
        for name, req in self.reqs.items():
            if "dist" in req:
                pkg_info = "{dist.project_name} {dist.version}".format(dist=req["dist"])
            elif "url" in req:
                pkg_info = "{url}".format(url=req["url"])
            else:
                pkg_info = "unknown package"
            print("{pkg_info:40} is not a pypi or github requirement".format(pkg_info=pkg_info))
|
|
@ -1,72 +0,0 @@
|
|||
"""
|
||||
print_settings
|
||||
==============
|
||||
|
||||
Django command similar to 'diffsettings' but shows all active Django settings.
|
||||
"""
|
||||
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.conf import settings
|
||||
from optparse import make_option
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    """Print every active Django setting in one of several output formats."""

    help = "Print the active Django settings."

    option_list = NoArgsCommand.option_list + (
        make_option('--format', default='simple', dest='format',
                    help='Specifies output format.'),
        make_option('--indent', default=4, dest='indent', type='int',
                    help='Specifies indent level for JSON and YAML'),
    )

    def handle_noargs(self, **options):
        """Snapshot the settings into a dict and render it in the chosen format."""
        settings_dict = {attr: getattr(settings, attr)
                         for attr in dir(settings) if self.include_attr(attr)}

        output_format = options.get('format', 'json')
        indent = options.get('indent', 4)

        if output_format == 'json':
            json = self.import_json()
            print(json.dumps(settings_dict, indent=indent))
        elif output_format == 'yaml':
            import yaml  # requires PyYAML
            print(yaml.dump(settings_dict, indent=indent))
        elif output_format == 'pprint':
            from pprint import pprint
            pprint(settings_dict)
        else:
            self.print_simple(settings_dict)

    @staticmethod
    def include_attr(attr):
        """Whether or not to include attribute in output"""
        # Dunder attributes are internal and excluded.
        return not attr.startswith('__')

    @staticmethod
    def print_simple(a_dict):
        """A very simple output format"""
        for key, value in a_dict.items():
            print('%-40s = %r' % (key, value))

    @staticmethod
    def import_json():
        """Import a module for JSON"""
        try:
            import json
        except ImportError:
            import simplejson as json  # NOQA
        return json
|
|
@ -1,51 +0,0 @@
|
|||
from django.core.management.base import BaseCommand, CommandError
|
||||
try:
|
||||
from django.contrib.auth import get_user_model # Django 1.5
|
||||
except ImportError:
|
||||
from django_extensions.future_1_5 import get_user_model
|
||||
from django.contrib.sessions.models import Session
|
||||
import re
|
||||
|
||||
SESSION_RE = re.compile("^[0-9a-f]{20,40}$")
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Look up the user behind a session key and print their details."""

    help = ("print the user information for the provided session key. "
            "this is very helpful when trying to track down the person who "
            "experienced a site crash.")
    args = "session_key"
    label = 'session key for the user'

    requires_model_validation = True
    can_import_settings = True

    def handle(self, *args, **options):
        """Validate the key, decode the session and print the owning user."""
        if len(args) > 1:
            raise CommandError("extra arguments supplied")
        if len(args) < 1:
            raise CommandError("session_key argument missing")

        key = args[0].lower()
        if not SESSION_RE.match(key):
            raise CommandError("malformed session key")

        try:
            session = Session.objects.get(pk=key)
        except Session.DoesNotExist:
            print("Session Key does not exist. Expired?")
            return

        data = session.get_decoded()
        print('Session to Expire: %s' % session.expire_date)
        print('Raw Data: %s' % data)

        uid = data.get('_auth_user_id', None)
        if uid is None:
            print('No user associated with session')
            return
        print("User id: %s" % uid)

        User = get_user_model()
        try:
            user = User.objects.get(pk=uid)
        except User.DoesNotExist:
            print("No user associated with that id.")
            return

        # Print the most useful identifying fields for the user.
        for field in ('username', 'email', 'first_name', 'last_name'):
            print("%s: %s" % (field, getattr(user, field)))
|
|
@ -1,177 +0,0 @@
|
|||
"""
|
||||
originally from http://www.djangosnippets.org/snippets/828/ by dnordberg
|
||||
"""
|
||||
|
||||
from six.moves import input
|
||||
from django.conf import settings
|
||||
from django.core.management.base import CommandError, BaseCommand
|
||||
import django
|
||||
import logging
|
||||
import re
|
||||
from optparse import make_option
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Drops and recreates the project's database. Supports sqlite3 /
    # spatialite (file unlink), MySQL, and PostgreSQL variants
    # (psycopg, psycopg2, PostGIS).
    option_list = BaseCommand.option_list + (
        make_option('--noinput', action='store_false',
                    dest='interactive', default=True,
                    help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--no-utf8', action='store_true',
                    dest='no_utf8_support', default=False,
                    help='Tells Django to not create a UTF-8 charset database'),
        make_option('-U', '--user', action='store',
                    dest='user', default=None,
                    help='Use another user for the database then defined in settings.py'),
        make_option('-P', '--password', action='store',
                    dest='password', default=None,
                    help='Use another password for the database then defined in settings.py'),
        make_option('-D', '--dbname', action='store',
                    dest='dbname', default=None,
                    help='Use another database name then defined in settings.py (For PostgreSQL this defaults to "template1")'),
        make_option('-R', '--router', action='store',
                    dest='router', default=None,
                    help='Use this router-database other then defined in settings.py'),
    )
    help = "Resets the database for this project."

    def set_db_settings(self, *args, **options):
        # Copy the selected router's settings.DATABASES entry onto the
        # legacy settings.DATABASE_* attributes that the rest of this
        # command reads. Returns False when no --router was supplied on
        # Django >= 1.2 (the caller treats this as a usage error).
        if django.get_version() >= "1.2":
            router = options.get('router')
            if router is None:
                return False

            # retrieve this with the 'using' argument
            dbinfo = settings.DATABASES.get(router)
            settings.DATABASE_ENGINE = dbinfo.get('ENGINE').split('.')[-1]
            settings.DATABASE_USER = dbinfo.get('USER')
            settings.DATABASE_PASSWORD = dbinfo.get('PASSWORD')
            settings.DATABASE_NAME = dbinfo.get('NAME')
            settings.DATABASE_HOST = dbinfo.get('HOST')
            settings.DATABASE_PORT = dbinfo.get('PORT')
            return True
        else:
            # settings are set for django < 1.2 no modification needed
            return True

    def handle(self, *args, **options):
        """
        Resets the database for this project.

        Note: Transaction wrappers are in reverse as a work around for
        autocommit, anybody know how to do this the right way?
        """

        if django.get_version() >= "1.2":
            got_db_settings = self.set_db_settings(*args, **options)
            if not got_db_settings:
                raise CommandError("You are using Django %s which requires to specify the db-router.\nPlease specify the router by adding --router=<routername> to this command." % django.get_version())
                # NOTE(review): unreachable — the raise above exits first.
                return

        verbosity = int(options.get('verbosity', 1))
        # Interactive runs require explicit confirmation before the
        # irreversible drop; --noinput assumes 'yes'.
        if options.get('interactive'):
            confirm = input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY
ALL data in the database "%s".
Are you sure you want to do this?

Type 'yes' to continue, or 'no' to cancel: """ % (settings.DATABASE_NAME,))
        else:
            confirm = 'yes'

        if confirm != 'yes':
            print("Reset cancelled.")
            return

        # PostGIS engines are matched by substring, e.g.
        # 'django.contrib.gis.db.backends.postgis' -> 'postgis'.
        postgis = re.compile('.*postgis')
        engine = settings.DATABASE_ENGINE
        user = options.get('user', settings.DATABASE_USER)
        if user is None:
            user = settings.DATABASE_USER
        password = options.get('password', settings.DATABASE_PASSWORD)
        if password is None:
            password = settings.DATABASE_PASSWORD

        if engine in ('sqlite3', 'spatialite'):
            # File-based databases: just delete the file if present.
            import os
            try:
                logging.info("Unlinking %s database" % engine)
                os.unlink(settings.DATABASE_NAME)
            except OSError:
                pass
        elif engine == 'mysql':
            import MySQLdb as Database
            kwargs = {
                'user': user,
                'passwd': password,
            }
            # A leading '/' means a unix socket path rather than a host.
            if settings.DATABASE_HOST.startswith('/'):
                kwargs['unix_socket'] = settings.DATABASE_HOST
            else:
                kwargs['host'] = settings.DATABASE_HOST
            if settings.DATABASE_PORT:
                kwargs['port'] = int(settings.DATABASE_PORT)

            connection = Database.connect(**kwargs)
            drop_query = 'DROP DATABASE IF EXISTS `%s`' % settings.DATABASE_NAME
            utf8_support = options.get('no_utf8_support', False) and '' or 'CHARACTER SET utf8'
            create_query = 'CREATE DATABASE `%s` %s' % (settings.DATABASE_NAME, utf8_support)
            logging.info('Executing... "' + drop_query + '"')
            connection.query(drop_query)
            logging.info('Executing... "' + create_query + '"')
            connection.query(create_query)

        elif engine == 'postgresql' or engine == 'postgresql_psycopg2' or postgis.match(engine):
            if engine == 'postgresql':
                import psycopg as Database  # NOQA
            elif engine == 'postgresql_psycopg2' or postgis.match(engine):
                import psycopg2 as Database  # NOQA

            if settings.DATABASE_NAME == '':
                from django.core.exceptions import ImproperlyConfigured
                raise ImproperlyConfigured("You need to specify DATABASE_NAME in your Django settings file.")

            # Connect to a maintenance database (template1 by default)
            # because the target database is about to be dropped.
            database_name = options.get('dbname', 'template1')
            if options.get('dbname') is None:
                database_name = 'template1'
            conn_string = "dbname=%s" % database_name
            if settings.DATABASE_USER:
                conn_string += " user=%s" % user
            if settings.DATABASE_PASSWORD:
                # NOTE(review): password is interpolated unescaped into the
                # libpq connection string — quotes in it would break parsing.
                conn_string += " password='%s'" % password
            if settings.DATABASE_HOST:
                conn_string += " host=%s" % settings.DATABASE_HOST
            if settings.DATABASE_PORT:
                conn_string += " port=%s" % settings.DATABASE_PORT

            connection = Database.connect(conn_string)
            connection.set_isolation_level(0)  # autocommit false
            cursor = connection.cursor()
            drop_query = 'DROP DATABASE %s' % settings.DATABASE_NAME
            logging.info('Executing... "' + drop_query + '"')

            try:
                cursor.execute(drop_query)
            except Database.ProgrammingError as e:
                # e.g. the database did not exist yet; log and continue.
                logging.info("Error: %s" % str(e))

            # Encoding should be SQL_ASCII (7-bit postgres default) or prefered UTF8 (8-bit)
            create_query = "CREATE DATABASE %s" % settings.DATABASE_NAME
            if settings.DATABASE_USER:
                create_query += " WITH OWNER = %s " % settings.DATABASE_USER
            create_query += " ENCODING = 'UTF8'"

            if postgis.match(engine):
                create_query += ' TEMPLATE = template_postgis'
            if settings.DEFAULT_TABLESPACE:
                create_query += ' TABLESPACE = %s;' % settings.DEFAULT_TABLESPACE
            else:
                create_query += ';'
            logging.info('Executing... "' + create_query + '"')
            cursor.execute(create_query)

        else:
            raise CommandError("Unknown database engine %s" % engine)

        if verbosity >= 2 or options.get('interactive'):
            print("Reset successful.")
|
|
@ -1,60 +0,0 @@
|
|||
from django.core.management.base import LabelCommand
|
||||
from optparse import make_option
|
||||
from django_extensions.management.jobs import get_job, print_jobs
|
||||
|
||||
|
||||
class Command(LabelCommand):
    # Runs a single django-extensions maintenance job, optionally scoped
    # to one app ("./manage.py runjob [app] job"), or lists all known
    # jobs with --list.
    option_list = LabelCommand.option_list + (
        make_option('--list', '-l', action="store_true", dest="list_jobs",
                    help="List all jobs with their description"),
    )
    help = "Run a single maintenance job."
    args = "[app_name] job_name"
    label = ""

    requires_model_validation = True

    def runjob(self, app_name, job_name, options):
        """Look up and execute one job.

        Lookup failures and job exceptions are reported on stdout but
        never propagated, so a broken job cannot abort the command.
        """
        verbosity = int(options.get('verbosity', 1))
        if verbosity > 1:
            print("Executing job: %s (app: %s)" % (job_name, app_name))
        try:
            job = get_job(app_name, job_name)
        except KeyError:
            if app_name:
                # BUGFIX: format arguments were swapped — the app label was
                # printed in the job slot and vice versa.
                print("Error: Job %s for applabel %s not found" % (job_name, app_name))
            else:
                print("Error: Job %s not found" % job_name)
            print("Use -l option to view all the available jobs")
            return
        try:
            job().execute()
        except Exception:
            import traceback
            print("ERROR OCCURED IN JOB: %s (APP: %s)" % (job_name, app_name))
            print("START TRACEBACK:")
            traceback.print_exc()
            print("END TRACEBACK\n")

    def handle(self, *args, **options):
        """Dispatch on the positional forms: "job" or "app job"."""
        app_name = None
        job_name = None
        if len(args) == 1:
            job_name = args[0]
        elif len(args) == 2:
            app_name, job_name = args
        if options.get('list_jobs'):
            print_jobs(only_scheduled=False, show_when=True, show_appname=True)
        else:
            if not job_name:
                print("Run a single maintenance job. Please specify the name of the job.")
                return
            self.runjob(app_name, job_name, options)


# Backwards compatibility for Django r9110: older Django did not define
# a --verbosity option on management commands, so add one if missing.
if not [opt for opt in Command.option_list if opt.dest == 'verbosity']:
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
                    default='1', type='choice', choices=['0', '1', '2'],
                    help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,97 +0,0 @@
|
|||
from django.core.management.base import LabelCommand
|
||||
from optparse import make_option
|
||||
from django_extensions.management.jobs import get_jobs, print_jobs
|
||||
|
||||
|
||||
class Command(LabelCommand):
    # Runs every maintenance job scheduled for a given time slot
    # ("minutely" ... "yearly"), then emits the matching per-app signal
    # so apps can hook scheduled work without registering a job class.
    option_list = LabelCommand.option_list + (
        make_option('--list', '-l', action="store_true", dest="list_jobs",
                    help="List all jobs with their description"),
    )
    help = "Runs scheduled maintenance jobs."
    args = "[minutely quarter_hourly hourly daily weekly monthly yearly]"
    label = ""

    requires_model_validation = True

    def usage_msg(self):
        """Print the accepted time-slot keywords."""
        print("Run scheduled jobs. Please specify 'minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly' or 'yearly'")

    def runjobs(self, when, options):
        """Execute all jobs scheduled for *when*, isolating failures.

        Each job's exceptions are printed (with traceback) but never
        propagated, so one broken job cannot stop the rest.
        """
        verbosity = int(options.get('verbosity', 1))
        jobs = get_jobs(when, only_scheduled=True)
        # BUGFIX: the original did `list = jobs.keys(); list.sort()`, which
        # shadows the builtin `list` and breaks on Python 3 where dict
        # views have no .sort(). sorted() handles both and is clearer.
        for app_name, job_name in sorted(jobs.keys()):
            job = jobs[(app_name, job_name)]
            if verbosity > 1:
                print("Executing %s job: %s (app: %s)" % (when, job_name, app_name))
            try:
                job().execute()
            except Exception:
                import traceback
                print("ERROR OCCURED IN %s JOB: %s (APP: %s)" % (when.upper(), job_name, app_name))
                print("START TRACEBACK:")
                traceback.print_exc()
                print("END TRACEBACK\n")

    def runjobs_by_signals(self, when, options):
        """ Run jobs from the signals """
        # Thanks for Ian Holsman for the idea and code
        from django_extensions.management import signals
        from django.db import models
        from django.conf import settings

        verbosity = int(options.get('verbosity', 1))
        # Import each app's management module so signal receivers that
        # live there get registered before we send.
        for app_name in settings.INSTALLED_APPS:
            try:
                __import__(app_name + '.management', '', '', [''])
            except ImportError:
                pass

        for app in models.get_apps():
            if verbosity > 1:
                app_name = '.'.join(app.__name__.rsplit('.')[:-1])
                print("Sending %s job signal for: %s" % (when, app_name))
            if when == 'minutely':
                signals.run_minutely_jobs.send(sender=app, app=app)
            elif when == 'quarter_hourly':
                signals.run_quarter_hourly_jobs.send(sender=app, app=app)
            elif when == 'hourly':
                signals.run_hourly_jobs.send(sender=app, app=app)
            elif when == 'daily':
                signals.run_daily_jobs.send(sender=app, app=app)
            elif when == 'weekly':
                signals.run_weekly_jobs.send(sender=app, app=app)
            elif when == 'monthly':
                signals.run_monthly_jobs.send(sender=app, app=app)
            elif when == 'yearly':
                signals.run_yearly_jobs.send(sender=app, app=app)

    def handle(self, *args, **options):
        """Validate the single time-slot argument, then run jobs + signals."""
        when = None
        if len(args) > 1:
            self.usage_msg()
            return
        elif len(args) == 1:
            if not args[0] in ['minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly', 'yearly']:
                self.usage_msg()
                return
            else:
                when = args[0]
        if options.get('list_jobs'):
            print_jobs(when, only_scheduled=True, show_when=True, show_appname=True)
        else:
            if not when:
                self.usage_msg()
                return
            self.runjobs(when, options)
            self.runjobs_by_signals(when, options)


# Backwards compatibility for Django r9110: older Django did not define
# a --verbosity option on management commands, so add one if missing.
if not [opt for opt in Command.option_list if opt.dest == 'verbosity']:
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
                    default='1', type='choice', choices=['0', '1', '2'],
                    help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,287 +0,0 @@
|
|||
"""
|
||||
runprofileserver.py
|
||||
|
||||
Starts a lightweight Web server with profiling enabled.
|
||||
|
||||
Credits for kcachegrind support taken from lsprofcalltree.py go to:
|
||||
David Allouche
|
||||
Jp Calderone & Itamar Shtull-Trauring
|
||||
Johan Dahlin
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from optparse import make_option
|
||||
from datetime import datetime
|
||||
from django.conf import settings
|
||||
import sys
|
||||
|
||||
try:
|
||||
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
||||
USE_STATICFILES = 'django.contrib.staticfiles' in settings.INSTALLED_APPS
|
||||
except ImportError as e:
|
||||
USE_STATICFILES = False
|
||||
|
||||
# Provide any() on interpreters that predate it (Python < 2.5).
try:
    any
except NameError:
    # backwards compatibility for <2.5
    def any(iterable):
        # True as soon as one element is truthy; False for an empty iterable.
        for element in iterable:
            if element:
                return True
        return False
|
||||
|
||||
|
||||
def label(code):
    """Build a display label for a profiler stats entry's code object.

    Built-in functions appear in profiler stats as plain strings; they
    get a tuple beginning with '~' so they sort after real code objects,
    which are rendered as "name filename:firstlineno".
    """
    if not isinstance(code, str):
        return '%s %s:%d' % (code.co_name,
                             code.co_filename,
                             code.co_firstlineno)
    return ('~', 0, code)  # built-in functions ('~' sorts at the end)
|
||||
|
||||
|
||||
class KCacheGrind(object):
    # Serializes cProfile-style profiler statistics into the
    # callgrind text format readable by kcachegrind: an "events: Ticks"
    # header, a summary line, then one record per profiled entry.
    def __init__(self, profiler):
        # Snapshot the stats immediately; output() is called later.
        self.data = profiler.getstats()
        self.out_file = None

    def output(self, out_file):
        """Write the complete callgrind file to the given open file object."""
        self.out_file = out_file
        self.out_file.write('events: Ticks\n')
        self._print_summary()
        for entry in self.data:
            self._entry(entry)

    def _print_summary(self):
        # "summary:" is the largest single entry total time, in ms ticks.
        max_cost = 0
        for entry in self.data:
            totaltime = int(entry.totaltime * 1000)
            max_cost = max(max_cost, totaltime)
        self.out_file.write('summary: %d\n' % (max_cost,))

    def _entry(self, entry):
        """Emit one function record: file, name, self-cost, then callees."""
        out_file = self.out_file

        code = entry.code
        #print >> out_file, 'ob=%s' % (code.co_filename,)
        # Built-in functions appear as plain strings and have no file.
        if isinstance(code, str):
            out_file.write('fi=~\n')
        else:
            out_file.write('fi=%s\n' % (code.co_filename,))
        out_file.write('fn=%s\n' % (label(code),))

        inlinetime = int(entry.inlinetime * 1000)
        if isinstance(code, str):
            out_file.write('0 %s\n' % inlinetime)
        else:
            out_file.write('%d %d\n' % (code.co_firstlineno, inlinetime))

        # recursive calls are counted in entry.calls
        if entry.calls:
            calls = entry.calls
        else:
            calls = []

        # Call-site line number for the callee records below.
        if isinstance(code, str):
            lineno = 0
        else:
            lineno = code.co_firstlineno

        for subentry in calls:
            self._subentry(lineno, subentry)
        out_file.write("\n")

    def _subentry(self, lineno, subentry):
        """Emit one callee record (cfn/cfi/calls + cost line)."""
        out_file = self.out_file
        code = subentry.code
        #out_file.write('cob=%s\n' % (code.co_filename,))
        out_file.write('cfn=%s\n' % (label(code),))
        if isinstance(code, str):
            out_file.write('cfi=~\n')
            out_file.write('calls=%d 0\n' % (subentry.callcount,))
        else:
            out_file.write('cfi=%s\n' % (code.co_filename,))
            out_file.write('calls=%d %d\n' % (subentry.callcount, code.co_firstlineno))

        totaltime = int(subentry.totaltime * 1000)
        out_file.write('%d %d\n' % (lineno, totaltime))
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Development server that wraps the WSGI handler so every request is
    # profiled (hotshot or cProfile) and the result written to a file
    # named after the request path and duration; --kcachegrind emits
    # callgrind-format files via KCacheGrind instead.
    option_list = BaseCommand.option_list + (
        make_option('--noreload', action='store_false', dest='use_reloader', default=True,
                    help='Tells Django to NOT use the auto-reloader.'),
        make_option('--adminmedia', dest='admin_media_path', default='',
                    help='Specifies the directory from which to serve admin media.'),
        make_option('--prof-path', dest='prof_path', default='/tmp',
                    help='Specifies the directory which to save profile information in.'),
        make_option('--prof-file', dest='prof_file', default='{path}.{duration:06d}ms.{time}',
                    help='Set filename format, default if "{path}.{duration:06d}ms.{time}".'),
        make_option('--nomedia', action='store_true', dest='no_media', default=False,
                    help='Do not profile MEDIA_URL and ADMIN_MEDIA_URL'),
        make_option('--use-cprofile', action='store_true', dest='use_cprofile', default=False,
                    help='Use cProfile if available, this is disabled per default because of incompatibilities.'),
        make_option('--kcachegrind', action='store_true', dest='use_lsprof', default=False,
                    help='Create kcachegrind compatible lsprof files, this requires and automatically enables cProfile.'),
    )
    if USE_STATICFILES:
        option_list += (
            make_option('--nostatic', action="store_false", dest='use_static_handler', default=True,
                        help='Tells Django to NOT automatically serve static files at STATIC_URL.'),
            make_option('--insecure', action="store_true", dest='insecure_serving', default=False,
                        help='Allows serving static files even if DEBUG is False.'),
        )
    help = "Starts a lightweight Web server with profiling enabled."
    args = '[optional port number, or ipaddr:port]'

    # Validation is called explicitly each time the server is reloaded.
    requires_model_validation = False

    def handle(self, addrport='', *args, **options):
        """Parse addr:port, build the (profiled) handler stack, and serve."""
        import django
        from django.core.servers.basehttp import run, WSGIServerException
        try:
            from django.core.servers.basehttp import AdminMediaHandler
            HAS_ADMINMEDIAHANDLER = True
        except ImportError:
            HAS_ADMINMEDIAHANDLER = False
        from django.core.handlers.wsgi import WSGIHandler

        if args:
            raise CommandError('Usage is runserver %s' % self.args)
        # Default to 127.0.0.1:8000 when no address/port is given.
        if not addrport:
            addr = ''
            port = '8000'
        else:
            try:
                addr, port = addrport.split(':')
            except ValueError:
                # Bare port number — no colon present.
                addr, port = '', addrport
        if not addr:
            addr = '127.0.0.1'

        if not port.isdigit():
            raise CommandError("%r is not a valid port number." % port)

        use_reloader = options.get('use_reloader', True)
        shutdown_message = options.get('shutdown_message', '')
        no_media = options.get('no_media', False)
        quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'

        def inner_run():
            # Runs the actual server (possibly inside the autoreloader).
            import os
            import time
            import hotshot
            USE_CPROFILE = options.get('use_cprofile', False)
            USE_LSPROF = options.get('use_lsprof', False)
            # kcachegrind output is produced from cProfile stats, so
            # --kcachegrind implies --use-cprofile.
            if USE_LSPROF:
                USE_CPROFILE = True
            if USE_CPROFILE:
                try:
                    import cProfile
                    USE_CPROFILE = True
                except ImportError:
                    print("cProfile disabled, module cannot be imported!")
                    USE_CPROFILE = False
            if USE_LSPROF and not USE_CPROFILE:
                raise SystemExit("Kcachegrind compatible output format required cProfile from Python 2.5")
            prof_path = options.get('prof_path', '/tmp')

            prof_file = options.get('prof_file', '{path}.{duration:06d}ms.{time}')
            # Probe the user-supplied format once; fall back to the
            # default if it produces an empty/invalid result.
            if not prof_file.format(path='1', duration=2, time=3):
                prof_file = '{path}.{duration:06d}ms.{time}'
                print("Filename format is wrong. Default format used: '{path}.{duration:06d}ms.{time}'.")

            def get_exclude_paths():
                # URL prefixes that --nomedia should skip profiling for.
                exclude_paths = []
                media_url = getattr(settings, 'MEDIA_URL', None)
                if media_url:
                    exclude_paths.append(media_url)
                static_url = getattr(settings, 'STATIC_URL', None)
                if static_url:
                    exclude_paths.append(static_url)
                admin_media_prefix = getattr(settings, 'ADMIN_MEDIA_PREFIX', None)
                if admin_media_prefix:
                    exclude_paths.append(admin_media_prefix)
                return exclude_paths

            def make_profiler_handler(inner_handler):
                # Wrap a WSGI app so each request runs under a profiler
                # and its stats land in prof_path.
                def handler(environ, start_response):
                    path_info = environ['PATH_INFO']
                    # when using something like a dynamic site middleware is could be necessary
                    # to refetch the exclude_paths every time since they could change per site.
                    if no_media and any(path_info.startswith(p) for p in get_exclude_paths()):
                        return inner_handler(environ, start_response)
                    path_name = path_info.strip("/").replace('/', '.') or "root"
                    profname = "%s.%d.prof" % (path_name, time.time())
                    profname = os.path.join(prof_path, profname)
                    if USE_CPROFILE:
                        prof = cProfile.Profile()
                    else:
                        prof = hotshot.Profile(profname)
                    start = datetime.now()
                    try:
                        return prof.runcall(inner_handler, environ, start_response)
                    finally:
                        # seeing how long the request took is important!
                        elap = datetime.now() - start
                        elapms = elap.seconds * 1000.0 + elap.microseconds / 1000.0
                        if USE_LSPROF:
                            kg = KCacheGrind(prof)
                            kg.output(open(profname, 'w'))
                        elif USE_CPROFILE:
                            prof.dump_stats(profname)
                        # Rename the temp profile to the duration-stamped name.
                        profname2 = prof_file.format(path=path_name, duration=int(elapms), time=int(time.time()))
                        profname2 = os.path.join(prof_path, "%s.prof" % profname2)
                        if not USE_CPROFILE:
                            prof.close()
                        os.rename(profname, profname2)
                return handler

            print("Validating models...")
            self.validate(display_num_errors=True)
            print("\nDjango version %s, using settings %r" % (django.get_version(), settings.SETTINGS_MODULE))
            print("Development server is running at http://%s:%s/" % (addr, port))
            print("Quit the server with %s." % quit_command)
            # Locate admin media: prefer the staticfiles-era path, fall
            # back to the pre-1.4 'contrib/admin/media' layout.
            path = options.get('admin_media_path', '')
            if not path:
                admin_media_path = os.path.join(django.__path__[0], 'contrib/admin/static/admin')
                if os.path.isdir(admin_media_path):
                    path = admin_media_path
                else:
                    path = os.path.join(django.__path__[0], 'contrib/admin/media')
            try:
                # Handler stack (inside-out): WSGI app -> admin media ->
                # static files -> profiler.
                handler = WSGIHandler()
                if HAS_ADMINMEDIAHANDLER:
                    handler = AdminMediaHandler(handler, path)
                if USE_STATICFILES:
                    use_static_handler = options.get('use_static_handler', True)
                    insecure_serving = options.get('insecure_serving', False)
                    if (use_static_handler and (settings.DEBUG or insecure_serving)):
                        handler = StaticFilesHandler(handler)
                handler = make_profiler_handler(handler)
                run(addr, int(port), handler)
            except WSGIServerException as e:
                # Use helpful error messages instead of ugly tracebacks.
                ERRORS = {
                    13: "You don't have permission to access that port.",
                    98: "That port is already in use.",
                    99: "That IP address can't be assigned-to.",
                }
                try:
                    error_text = ERRORS[e.args[0].args[0]]
                except (AttributeError, KeyError):
                    error_text = str(e)
                sys.stderr.write(self.style.ERROR("Error: %s" % error_text) + '\n')
                # Need to use an OS exit because sys.exit doesn't work in a thread
                os._exit(1)
            except KeyboardInterrupt:
                if shutdown_message:
                    print(shutdown_message)
                sys.exit(0)
        if use_reloader:
            from django.utils import autoreload
            autoreload.main(inner_run)
        else:
            inner_run()
|
|
@ -1,158 +0,0 @@
|
|||
from django.core.management.base import BaseCommand
|
||||
from optparse import make_option
|
||||
import imp
|
||||
|
||||
|
||||
def vararg_callback(option, opt_str, opt_value, parser):
    """optparse callback that greedily consumes following positional args.

    Collects *opt_value* plus every subsequent argument up to (but not
    including) the next option flag, stores the list on parser.values
    under the option's dest, and removes the consumed args from
    parser.rargs.
    """
    parser.rargs.insert(0, opt_value)
    collected = []
    for candidate in parser.rargs:
        # stop on --foo like options
        if candidate.startswith("--") and len(candidate) > 2:
            break
        # stop on -a like options
        if candidate.startswith("-"):
            break
        collected.append(candidate)

    del parser.rargs[:len(collected)]
    setattr(parser.values, option.dest, collected)
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Runs one or more named scripts inside the Django context. A script
    # is any importable module exposing a run() callable, looked up in
    # each app's "scripts" (and optionally "fixtures") subpackage, or
    # imported directly by dotted path.
    option_list = BaseCommand.option_list + (
        make_option('--fixtures', action='store_true', dest='infixtures', default=False,
                    help='Only look in app.fixtures subdir'),
        make_option('--noscripts', action='store_true', dest='noscripts', default=False,
                    help='Look in app.scripts subdir'),
        make_option('-s', '--silent', action='store_true', dest='silent', default=False,
                    help='Run silently, do not show errors and tracebacks'),
        make_option('--no-traceback', action='store_true', dest='no_traceback', default=False,
                    help='Do not show tracebacks'),
        make_option('--script-args', action='callback', callback=vararg_callback, type='string',
                    help='Space-separated argument list to be passed to the scripts. Note that the '
                    'same arguments will be passed to all named scripts.'),
    )
    help = 'Runs a script in django context.'
    args = "script [script ...]"

    def handle(self, *scripts, **options):
        """Resolve each named script to module(s) and call their run()."""
        from django.db.models import get_apps

        # Reuse Django's SQL color styles for output categories.
        NOTICE = self.style.SQL_TABLE
        NOTICE2 = self.style.SQL_FIELD
        ERROR = self.style.ERROR
        ERROR2 = self.style.NOTICE

        # Subpackages of each app to search for the script module.
        subdirs = []

        if not options.get('noscripts'):
            subdirs.append('scripts')
        if options.get('infixtures'):
            subdirs.append('fixtures')
        verbosity = int(options.get('verbosity', 1))
        show_traceback = options.get('traceback', True)
        if show_traceback is None:
            # XXX: traceback is set to None from Django ?
            show_traceback = True
        no_traceback = options.get('no_traceback', False)
        if no_traceback:
            show_traceback = False
        silent = options.get('silent', False)
        if silent:
            verbosity = 0

        if len(subdirs) < 1:
            print(NOTICE("No subdirs to run left."))
            return

        if len(scripts) < 1:
            print(ERROR("Script name required."))
            return

        def run_script(mod, *script_args):
            # Execute one module's run(); re-raise only when tracebacks
            # are wanted and we are not silent.
            try:
                mod.run(*script_args)
            except Exception:
                if silent:
                    return
                if verbosity > 0:
                    print(ERROR("Exception while running run() in '%s'" % mod.__name__))
                if show_traceback:
                    raise

        def my_import(mod):
            # Import the dotted module path, returning the module if it
            # defines run(), or a falsy value otherwise.
            if verbosity > 1:
                print(NOTICE("Check for %s" % mod))
            # check if module exists before importing
            try:
                path = None
                # Walk package by package so a missing intermediate
                # package fails cleanly instead of raising at __import__.
                for package in mod.split('.')[:-1]:
                    module_tuple = imp.find_module(package, path)
                    path = imp.load_module(package, *module_tuple).__path__
                imp.find_module(mod.split('.')[-1], path)
            except (ImportError, AttributeError):
                return False

            t = __import__(mod, [], [], [" "])
            #if verbosity > 1:
            #    print(NOTICE("Found script %s ..." % mod))
            if hasattr(t, "run"):
                if verbosity > 1:
                    print(NOTICE2("Found script '%s' ..." % mod))
                #if verbosity > 1:
                #    print(NOTICE("found run() in %s. executing..." % mod))
                return t
            else:
                # NOTE: implicitly returns None (falsy) here.
                if verbosity > 1:
                    print(ERROR2("Find script '%s' but no run() function found." % mod))

        def find_modules_for_script(script):
            """ find script module which contains 'run' attribute """
            modules = []
            # first look in apps
            for app in get_apps():
                app_name = app.__name__.split(".")[:-1]  # + ['fixtures']
                for subdir in subdirs:
                    mod = my_import(".".join(app_name + [subdir, script]))
                    if mod:
                        modules.append(mod)

            # try app.DIR.script import
            sa = script.split(".")
            for subdir in subdirs:
                nn = ".".join(sa[:-1] + [subdir, sa[-1]])
                mod = my_import(nn)
                if mod:
                    modules.append(mod)

            # try direct import
            if script.find(".") != -1:
                mod = my_import(script)
                if mod:
                    modules.append(mod)

            return modules

        if options.get('script_args'):
            script_args = options['script_args']
        else:
            script_args = []
        for script in scripts:
            modules = find_modules_for_script(script)
            if not modules:
                if verbosity > 0 and not silent:
                    print(ERROR("No module for script '%s' found" % script))
            for mod in modules:
                if verbosity > 1:
                    print(NOTICE2("Running script '%s' ..." % mod.__name__))
                run_script(mod, *script_args)


# Backwards compatibility for Django r9110
if not [opt for opt in Command.option_list if opt.dest == 'verbosity']:
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
                    default='1', type='choice', choices=['0', '1', '2'],
                    help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,235 +0,0 @@
|
|||
from django.conf import settings
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django_extensions.management.utils import setup_logger, RedirectHandler
|
||||
from optparse import make_option
|
||||
import os
|
||||
import re
|
||||
import socket
|
||||
import sys
|
||||
import time
|
||||
|
||||
try:
|
||||
if 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
|
||||
from django.contrib.staticfiles.handlers import StaticFilesHandler
|
||||
USE_STATICFILES = True
|
||||
elif 'staticfiles' in settings.INSTALLED_APPS:
|
||||
from staticfiles.handlers import StaticFilesHandler # noqa
|
||||
USE_STATICFILES = True
|
||||
else:
|
||||
USE_STATICFILES = False
|
||||
except ImportError:
|
||||
USE_STATICFILES = False
|
||||
|
||||
# Matches "addr:port" where addr is optional and may be an IPv4 address,
# a bracketed IPv6 address, or a fully-qualified domain name; the port
# is required. Verbose mode (re.X) ignores the literal whitespace.
naiveip_re = re.compile(r"""^(?:
(?P<addr>
    (?P<ipv4>\d{1,3}(?:\.\d{1,3}){3}) |   # IPv4 address
    (?P<ipv6>\[[a-fA-F0-9:]+\]) |         # IPv6 address
    (?P<fqdn>[a-zA-Z0-9-]+(?:\.[a-zA-Z0-9-]+)*)  # FQDN
):)?(?P<port>\d+)$""", re.X)
DEFAULT_PORT = "8000"
|
||||
|
||||
import logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
from django_extensions.management.technical_response import null_technical_500_response
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
|
||||
help='Tells Django to use a IPv6 address.'),
|
||||
make_option('--noreload', action='store_false', dest='use_reloader', default=True,
|
||||
help='Tells Django to NOT use the auto-reloader.'),
|
||||
make_option('--browser', action='store_true', dest='open_browser',
|
||||
help='Tells Django to open a browser.'),
|
||||
make_option('--adminmedia', dest='admin_media_path', default='',
|
||||
help='Specifies the directory from which to serve admin media.'),
|
||||
make_option('--threaded', action='store_true', dest='threaded',
|
||||
help='Run in multithreaded mode.'),
|
||||
make_option('--output', dest='output_file', default=None,
|
||||
help='Specifies an output file to send a copy of all messages (not flushed immediately).'),
|
||||
make_option('--print-sql', action='store_true', default=False,
|
||||
help="Print SQL queries as they're executed"),
|
||||
make_option('--cert', dest='cert_path', action="store", type="string",
|
||||
help='To use SSL, specify certificate path.'),
|
||||
|
||||
)
|
||||
if USE_STATICFILES:
|
||||
option_list += (
|
||||
make_option('--nostatic', action="store_false", dest='use_static_handler', default=True,
|
||||
help='Tells Django to NOT automatically serve static files at STATIC_URL.'),
|
||||
make_option('--insecure', action="store_true", dest='insecure_serving', default=False,
|
||||
help='Allows serving static files even if DEBUG is False.'),
|
||||
)
|
||||
help = "Starts a lightweight Web server for development."
|
||||
args = '[optional port number, or ipaddr:port]'
|
||||
|
||||
# Validation is called explicitly each time the server is reloaded.
|
||||
requires_model_validation = False
|
||||
|
||||
def handle(self, addrport='', *args, **options):
|
||||
import django
|
||||
|
||||
setup_logger(logger, self.stderr, filename=options.get('output_file', None)) # , fmt="[%(name)s] %(message)s")
|
||||
logredirect = RedirectHandler(__name__)
|
||||
|
||||
# Redirect werkzeug log items
|
||||
werklogger = logging.getLogger('werkzeug')
|
||||
werklogger.setLevel(logging.INFO)
|
||||
werklogger.addHandler(logredirect)
|
||||
werklogger.propagate = False
|
||||
|
||||
if options.get("print_sql", False):
|
||||
from django.db.backends import util
|
||||
try:
|
||||
import sqlparse
|
||||
except ImportError:
|
||||
sqlparse = None # noqa
|
||||
|
||||
class PrintQueryWrapper(util.CursorDebugWrapper):
|
||||
def execute(self, sql, params=()):
|
||||
starttime = time.time()
|
||||
try:
|
||||
return self.cursor.execute(sql, params)
|
||||
finally:
|
||||
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
|
||||
execution_time = time.time() - starttime
|
||||
therest = ' -- [Execution time: %.6fs] [Database: %s]' % (execution_time, self.db.alias)
|
||||
if sqlparse:
|
||||
logger.info(sqlparse.format(raw_sql, reindent=True) + therest)
|
||||
else:
|
||||
logger.info(raw_sql + therest)
|
||||
|
||||
util.CursorDebugWrapper = PrintQueryWrapper
|
||||
|
||||
try:
|
||||
from django.core.servers.basehttp import AdminMediaHandler
|
||||
USE_ADMINMEDIAHANDLER = True
|
||||
except ImportError:
|
||||
USE_ADMINMEDIAHANDLER = False
|
||||
|
||||
try:
|
||||
from django.core.servers.basehttp import get_internal_wsgi_application as WSGIHandler
|
||||
except ImportError:
|
||||
from django.core.handlers.wsgi import WSGIHandler # noqa
|
||||
try:
|
||||
from werkzeug import run_simple, DebuggedApplication
|
||||
except ImportError:
|
||||
raise CommandError("Werkzeug is required to use runserver_plus. Please visit http://werkzeug.pocoo.org/ or install via pip. (pip install Werkzeug)")
|
||||
|
||||
# usurp django's handler
|
||||
from django.views import debug
|
||||
debug.technical_500_response = null_technical_500_response
|
||||
|
||||
self.use_ipv6 = options.get('use_ipv6')
|
||||
if self.use_ipv6 and not socket.has_ipv6:
|
||||
raise CommandError('Your Python does not support IPv6.')
|
||||
self._raw_ipv6 = False
|
||||
if not addrport:
|
||||
try:
|
||||
addrport = settings.RUNSERVERPLUS_SERVER_ADDRESS_PORT
|
||||
except AttributeError:
|
||||
pass
|
||||
if not addrport:
|
||||
self.addr = ''
|
||||
self.port = DEFAULT_PORT
|
||||
else:
|
||||
m = re.match(naiveip_re, addrport)
|
||||
if m is None:
|
||||
raise CommandError('"%s" is not a valid port number '
|
||||
'or address:port pair.' % addrport)
|
||||
self.addr, _ipv4, _ipv6, _fqdn, self.port = m.groups()
|
||||
if not self.port.isdigit():
|
||||
raise CommandError("%r is not a valid port number." %
|
||||
self.port)
|
||||
if self.addr:
|
||||
if _ipv6:
|
||||
self.addr = self.addr[1:-1]
|
||||
self.use_ipv6 = True
|
||||
self._raw_ipv6 = True
|
||||
elif self.use_ipv6 and not _fqdn:
|
||||
raise CommandError('"%s" is not a valid IPv6 address.'
|
||||
% self.addr)
|
||||
if not self.addr:
|
||||
self.addr = '::1' if self.use_ipv6 else '127.0.0.1'
|
||||
|
||||
threaded = options.get('threaded', False)
|
||||
use_reloader = options.get('use_reloader', True)
|
||||
open_browser = options.get('open_browser', False)
|
||||
cert_path = options.get("cert_path")
|
||||
quit_command = (sys.platform == 'win32') and 'CTRL-BREAK' or 'CONTROL-C'
|
||||
bind_url = "http://%s:%s/" % (
|
||||
self.addr if not self._raw_ipv6 else '[%s]' % self.addr, self.port)
|
||||
|
||||
def inner_run():
|
||||
print("Validating models...")
|
||||
self.validate(display_num_errors=True)
|
||||
print("\nDjango version %s, using settings %r" % (django.get_version(), settings.SETTINGS_MODULE))
|
||||
print("Development server is running at %s" % (bind_url,))
|
||||
print("Using the Werkzeug debugger (http://werkzeug.pocoo.org/)")
|
||||
print("Quit the server with %s." % quit_command)
|
||||
path = options.get('admin_media_path', '')
|
||||
if not path:
|
||||
admin_media_path = os.path.join(django.__path__[0], 'contrib/admin/static/admin')
|
||||
if os.path.isdir(admin_media_path):
|
||||
path = admin_media_path
|
||||
else:
|
||||
path = os.path.join(django.__path__[0], 'contrib/admin/media')
|
||||
handler = WSGIHandler()
|
||||
if USE_ADMINMEDIAHANDLER:
|
||||
handler = AdminMediaHandler(handler, path)
|
||||
if USE_STATICFILES:
|
||||
use_static_handler = options.get('use_static_handler', True)
|
||||
insecure_serving = options.get('insecure_serving', False)
|
||||
if use_static_handler and (settings.DEBUG or insecure_serving):
|
||||
handler = StaticFilesHandler(handler)
|
||||
if open_browser:
|
||||
import webbrowser
|
||||
webbrowser.open(bind_url)
|
||||
if cert_path:
|
||||
"""
|
||||
OpenSSL is needed for SSL support.
|
||||
|
||||
This will make flakes8 throw warning since OpenSSL is not used
|
||||
directly, alas, this is the only way to show meaningful error
|
||||
messages. See:
|
||||
http://lucumr.pocoo.org/2011/9/21/python-import-blackbox/
|
||||
for more information on python imports.
|
||||
"""
|
||||
try:
|
||||
import OpenSSL # NOQA
|
||||
except ImportError:
|
||||
raise CommandError("Python OpenSSL Library is "
|
||||
"required to use runserver_plus with ssl support. "
|
||||
"Install via pip (pip install pyOpenSSL).")
|
||||
|
||||
dir_path, cert_file = os.path.split(cert_path)
|
||||
if not dir_path:
|
||||
dir_path = os.getcwd()
|
||||
root, ext = os.path.splitext(cert_file)
|
||||
certfile = os.path.join(dir_path, root + ".crt")
|
||||
keyfile = os.path.join(dir_path, root + ".key")
|
||||
try:
|
||||
from werkzeug.serving import make_ssl_devcert
|
||||
if os.path.exists(certfile) and \
|
||||
os.path.exists(keyfile):
|
||||
ssl_context = (certfile, keyfile)
|
||||
else: # Create cert, key files ourselves.
|
||||
ssl_context = make_ssl_devcert(
|
||||
os.path.join(dir_path, root), host='localhost')
|
||||
except ImportError:
|
||||
print("Werkzeug version is less than 0.9, trying adhoc certificate.")
|
||||
ssl_context = "adhoc"
|
||||
|
||||
else:
|
||||
ssl_context = None
|
||||
run_simple(
|
||||
self.addr,
|
||||
int(self.port),
|
||||
DebuggedApplication(handler, True),
|
||||
use_reloader=use_reloader,
|
||||
use_debugger=True,
|
||||
threaded=threaded,
|
||||
ssl_context=ssl_context
|
||||
)
|
||||
inner_run()
|
|
@ -1,81 +0,0 @@
|
|||
"""
|
||||
set_fake_emails.py
|
||||
|
||||
Give all users a new email account. Useful for testing in a
|
||||
development environment. As such, this command is only available when
|
||||
setting.DEBUG is True.
|
||||
|
||||
"""
|
||||
from optparse import make_option
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
|
||||
DEFAULT_FAKE_EMAIL = '%(username)s@example.com'
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--email', dest='default_email', default=DEFAULT_FAKE_EMAIL,
|
||||
help='Use this as the new email format.'),
|
||||
make_option('-a', '--no-admin', action="store_true", dest='no_admin', default=False,
|
||||
help='Do not change administrator accounts'),
|
||||
make_option('-s', '--no-staff', action="store_true", dest='no_staff', default=False,
|
||||
help='Do not change staff accounts'),
|
||||
make_option('--include', dest='include_regexp', default=None,
|
||||
help='Include usernames matching this regexp.'),
|
||||
make_option('--exclude', dest='exclude_regexp', default=None,
|
||||
help='Exclude usernames matching this regexp.'),
|
||||
make_option('--include-groups', dest='include_groups', default=None,
|
||||
help='Include users matching this group. (use comma seperation for multiple groups)'),
|
||||
make_option('--exclude-groups', dest='exclude_groups', default=None,
|
||||
help='Exclude users matching this group. (use comma seperation for multiple groups)'),
|
||||
)
|
||||
help = '''DEBUG only: give all users a new email based on their account data ("%s" by default). Possible parameters are: username, first_name, last_name''' % (DEFAULT_FAKE_EMAIL, )
|
||||
requires_model_validation = False
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
if not settings.DEBUG:
|
||||
raise CommandError('Only available in debug mode')
|
||||
|
||||
try:
|
||||
from django.contrib.auth import get_user_model # Django 1.5
|
||||
except ImportError:
|
||||
from django_extensions.future_1_5 import get_user_model
|
||||
from django.contrib.auth.models import Group
|
||||
email = options.get('default_email', DEFAULT_FAKE_EMAIL)
|
||||
include_regexp = options.get('include_regexp', None)
|
||||
exclude_regexp = options.get('exclude_regexp', None)
|
||||
include_groups = options.get('include_groups', None)
|
||||
exclude_groups = options.get('exclude_groups', None)
|
||||
no_admin = options.get('no_admin', False)
|
||||
no_staff = options.get('no_staff', False)
|
||||
|
||||
User = get_user_model()
|
||||
users = User.objects.all()
|
||||
if no_admin:
|
||||
users = users.exclude(is_superuser=True)
|
||||
if no_staff:
|
||||
users = users.exclude(is_staff=True)
|
||||
if exclude_groups:
|
||||
groups = Group.objects.filter(name__in=exclude_groups.split(","))
|
||||
if groups:
|
||||
users = users.exclude(groups__in=groups)
|
||||
else:
|
||||
raise CommandError("No group matches filter: %s" % exclude_groups)
|
||||
if include_groups:
|
||||
groups = Group.objects.filter(name__in=include_groups.split(","))
|
||||
if groups:
|
||||
users = users.filter(groups__in=groups)
|
||||
else:
|
||||
raise CommandError("No groups matches filter: %s" % include_groups)
|
||||
if exclude_regexp:
|
||||
users = users.exclude(username__regex=exclude_regexp)
|
||||
if include_regexp:
|
||||
users = users.filter(username__regex=include_regexp)
|
||||
for user in users:
|
||||
user.email = email % {'username': user.username,
|
||||
'first_name': user.first_name,
|
||||
'last_name': user.last_name}
|
||||
user.save()
|
||||
print('Changed %d emails' % users.count())
|
|
@ -1,49 +0,0 @@
|
|||
"""
|
||||
set_fake_passwords.py
|
||||
|
||||
Reset all user passwords to a common value. Useful for testing in a
|
||||
development environment. As such, this command is only available when
|
||||
setting.DEBUG is True.
|
||||
|
||||
"""
|
||||
from optparse import make_option
|
||||
|
||||
from django.conf import settings
|
||||
from django.core.management.base import NoArgsCommand, CommandError
|
||||
|
||||
DEFAULT_FAKE_PASSWORD = 'password'
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--prompt', dest='prompt_passwd', default=False, action='store_true',
|
||||
help='Prompts for the new password to apply to all users'),
|
||||
make_option('--password', dest='default_passwd', default=DEFAULT_FAKE_PASSWORD,
|
||||
help='Use this as default password.'),
|
||||
)
|
||||
help = 'DEBUG only: sets all user passwords to a common value ("%s" by default)' % (DEFAULT_FAKE_PASSWORD, )
|
||||
requires_model_validation = False
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
if not settings.DEBUG:
|
||||
raise CommandError('Only available in debug mode')
|
||||
|
||||
try:
|
||||
from django.contrib.auth import get_user_model # Django 1.5
|
||||
except ImportError:
|
||||
from django_extensions.future_1_5 import get_user_model
|
||||
|
||||
if options.get('prompt_passwd', False):
|
||||
from getpass import getpass
|
||||
passwd = getpass('Password: ')
|
||||
if not passwd:
|
||||
raise CommandError('You must enter a valid password')
|
||||
else:
|
||||
passwd = options.get('default_passwd', DEFAULT_FAKE_PASSWORD)
|
||||
|
||||
User = get_user_model()
|
||||
user = User()
|
||||
user.set_password(passwd)
|
||||
count = User.objects.all().update(password=user.password)
|
||||
|
||||
print('Reset %d passwords' % count)
|
|
@ -1,166 +0,0 @@
|
|||
import os
|
||||
import six
|
||||
import time
|
||||
from optparse import make_option
|
||||
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.conf import settings
|
||||
|
||||
from django_extensions.management.shells import import_objects
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
|
||||
option_list = NoArgsCommand.option_list + (
|
||||
make_option('--plain', action='store_true', dest='plain',
|
||||
help='Tells Django to use plain Python, not BPython nor IPython.'),
|
||||
make_option('--bpython', action='store_true', dest='bpython',
|
||||
help='Tells Django to use BPython, not IPython.'),
|
||||
make_option('--ipython', action='store_true', dest='ipython',
|
||||
help='Tells Django to use IPython, not BPython.'),
|
||||
make_option('--notebook', action='store_true', dest='notebook',
|
||||
help='Tells Django to use IPython Notebook.'),
|
||||
make_option('--use-pythonrc', action='store_true', dest='use_pythonrc',
|
||||
help='Tells Django to execute PYTHONSTARTUP file (BE CAREFULL WITH THIS!)'),
|
||||
make_option('--print-sql', action='store_true', default=False,
|
||||
help="Print SQL queries as they're executed"),
|
||||
make_option('--dont-load', action='append', dest='dont_load', default=[],
|
||||
help='Ignore autoloading of some apps/models. Can be used several times.'),
|
||||
make_option('--quiet-load', action='store_true', default=False, dest='quiet_load',
|
||||
help='Do not display loaded models messages'),
|
||||
)
|
||||
help = "Like the 'shell' command but autoloads the models of all installed Django apps."
|
||||
|
||||
requires_model_validation = True
|
||||
|
||||
def handle_noargs(self, **options):
|
||||
use_notebook = options.get('notebook', False)
|
||||
use_ipython = options.get('ipython', False)
|
||||
use_bpython = options.get('bpython', False)
|
||||
use_plain = options.get('plain', False)
|
||||
use_pythonrc = options.get('use_pythonrc', True)
|
||||
|
||||
if options.get("print_sql", False):
|
||||
# Code from http://gist.github.com/118990
|
||||
from django.db.backends import util
|
||||
sqlparse = None
|
||||
try:
|
||||
import sqlparse
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
class PrintQueryWrapper(util.CursorDebugWrapper):
|
||||
def execute(self, sql, params=()):
|
||||
starttime = time.time()
|
||||
try:
|
||||
return self.cursor.execute(sql, params)
|
||||
finally:
|
||||
execution_time = time.time() - starttime
|
||||
raw_sql = self.db.ops.last_executed_query(self.cursor, sql, params)
|
||||
if sqlparse:
|
||||
print(sqlparse.format(raw_sql, reindent=True))
|
||||
else:
|
||||
print(raw_sql)
|
||||
print("")
|
||||
print('Execution time: %.6fs [Database: %s]' % (execution_time, self.db.alias))
|
||||
print("")
|
||||
|
||||
util.CursorDebugWrapper = PrintQueryWrapper
|
||||
|
||||
def run_notebook():
|
||||
from django.conf import settings
|
||||
from IPython.frontend.html.notebook import notebookapp
|
||||
app = notebookapp.NotebookApp.instance()
|
||||
ipython_arguments = getattr(settings, 'IPYTHON_ARGUMENTS', ['--ext', 'django_extensions.management.notebook_extension'])
|
||||
app.initialize(ipython_arguments)
|
||||
app.start()
|
||||
|
||||
def run_plain():
|
||||
# Using normal Python shell
|
||||
import code
|
||||
imported_objects = import_objects(options, self.style)
|
||||
try:
|
||||
# Try activating rlcompleter, because it's handy.
|
||||
import readline
|
||||
except ImportError:
|
||||
pass
|
||||
else:
|
||||
# We don't have to wrap the following import in a 'try', because
|
||||
# we already know 'readline' was imported successfully.
|
||||
import rlcompleter
|
||||
readline.set_completer(rlcompleter.Completer(imported_objects).complete)
|
||||
readline.parse_and_bind("tab:complete")
|
||||
|
||||
# We want to honor both $PYTHONSTARTUP and .pythonrc.py, so follow system
|
||||
# conventions and get $PYTHONSTARTUP first then import user.
|
||||
if use_pythonrc:
|
||||
pythonrc = os.environ.get("PYTHONSTARTUP")
|
||||
if pythonrc and os.path.isfile(pythonrc):
|
||||
global_ns = {}
|
||||
with open(pythonrc) as rcfile:
|
||||
try:
|
||||
six.exec_(compile(rcfile.read(), pythonrc, 'exec'), global_ns)
|
||||
imported_objects.update(global_ns)
|
||||
except NameError:
|
||||
pass
|
||||
# This will import .pythonrc.py as a side-effect
|
||||
try:
|
||||
import user # NOQA
|
||||
except ImportError:
|
||||
pass
|
||||
code.interact(local=imported_objects)
|
||||
|
||||
def run_bpython():
|
||||
from bpython import embed
|
||||
imported_objects = import_objects(options, self.style)
|
||||
embed(imported_objects)
|
||||
|
||||
def run_ipython():
|
||||
try:
|
||||
from IPython import embed
|
||||
imported_objects = import_objects(options, self.style)
|
||||
embed(user_ns=imported_objects)
|
||||
except ImportError:
|
||||
# IPython < 0.11
|
||||
# Explicitly pass an empty list as arguments, because otherwise
|
||||
# IPython would use sys.argv from this script.
|
||||
# Notebook not supported for IPython < 0.11.
|
||||
from IPython.Shell import IPShell
|
||||
imported_objects = import_objects(options, self.style)
|
||||
shell = IPShell(argv=[], user_ns=imported_objects)
|
||||
shell.mainloop()
|
||||
|
||||
shells = (
|
||||
('bpython', run_bpython),
|
||||
('ipython', run_ipython),
|
||||
('plain', run_plain),
|
||||
)
|
||||
SETTINGS_SHELL_PLUS = getattr(settings, 'SHELL_PLUS', None)
|
||||
|
||||
if use_notebook:
|
||||
run_notebook()
|
||||
elif use_plain:
|
||||
run_plain()
|
||||
elif use_ipython:
|
||||
run_ipython()
|
||||
elif use_bpython:
|
||||
run_bpython()
|
||||
elif SETTINGS_SHELL_PLUS:
|
||||
try:
|
||||
dict(shells)[SETTINGS_SHELL_PLUS]()
|
||||
except ImportError:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
print(self.style.ERROR("Could not load '%s' Python environment." % SETTINGS_SHELL_PLUS))
|
||||
else:
|
||||
for shell_name, func in shells:
|
||||
try:
|
||||
func()
|
||||
except ImportError:
|
||||
continue
|
||||
else:
|
||||
break
|
||||
else:
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
print(self.style.ERROR("Could not load any interactive Python environment."))
|
||||
|
|
@ -1,100 +0,0 @@
|
|||
from django.conf import settings
|
||||
from django.template import get_library
|
||||
import os
|
||||
import inspect
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management import color
|
||||
from django.utils import termcolors
|
||||
|
||||
|
||||
def color_style():
|
||||
style = color.color_style()
|
||||
style.FILTER = termcolors.make_style(fg='yellow', opts=('bold',))
|
||||
style.MODULE_NAME = termcolors.make_style(fg='green', opts=('bold',))
|
||||
style.TAG = termcolors.make_style(fg='red', opts=('bold',))
|
||||
style.TAGLIB = termcolors.make_style(fg='blue', opts=('bold',))
|
||||
return style
|
||||
|
||||
|
||||
def format_block(block, nlspaces=0):
|
||||
'''Format the given block of text, trimming leading/trailing
|
||||
empty lines and any leading whitespace that is common to all lines.
|
||||
The purpose is to let us list a code block as a multiline,
|
||||
triple-quoted Python string, taking care of
|
||||
indentation concerns.
|
||||
http://code.activestate.com/recipes/145672/'''
|
||||
|
||||
import re
|
||||
|
||||
# separate block into lines
|
||||
lines = str(block).split('\n')
|
||||
|
||||
# remove leading/trailing empty lines
|
||||
while lines and not lines[0]:
|
||||
del lines[0]
|
||||
while lines and not lines[-1]:
|
||||
del lines[-1]
|
||||
|
||||
# look at first line to see how much indentation to trim
|
||||
ws = re.match(r'\s*', lines[0]).group(0)
|
||||
if ws:
|
||||
lines = map(lambda x: x.replace(ws, '', 1), lines)
|
||||
|
||||
# remove leading/trailing blank lines (after leading ws removal)
|
||||
# we do this again in case there were pure-whitespace lines
|
||||
while lines and not lines[0]:
|
||||
del lines[0]
|
||||
while lines and not lines[-1]:
|
||||
del lines[-1]
|
||||
|
||||
# account for user-specified leading spaces
|
||||
flines = ['%s%s' % (' ' * nlspaces, line) for line in lines]
|
||||
|
||||
return '\n'.join(flines) + '\n'
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
help = "Displays template tags and filters available in the current project."
|
||||
results = ""
|
||||
|
||||
def add_result(self, s, depth=0):
|
||||
self.results += '\n%s\n' % s.rjust(depth * 4 + len(s))
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if args:
|
||||
appname, = args
|
||||
|
||||
style = color_style()
|
||||
|
||||
if settings.ADMIN_FOR:
|
||||
settings_modules = [__import__(m, {}, {}, ['']) for m in settings.ADMIN_FOR]
|
||||
else:
|
||||
settings_modules = [settings]
|
||||
|
||||
for settings_mod in settings_modules:
|
||||
for app in settings_mod.INSTALLED_APPS:
|
||||
try:
|
||||
templatetag_mod = __import__(app + '.templatetags', {}, {}, [''])
|
||||
except ImportError:
|
||||
continue
|
||||
mod_path = inspect.getabsfile(templatetag_mod)
|
||||
mod_files = os.listdir(os.path.dirname(mod_path))
|
||||
tag_files = [i.rstrip('.py') for i in mod_files if i.endswith('.py') and i[0] != '_']
|
||||
app_labeled = False
|
||||
for taglib in tag_files:
|
||||
try:
|
||||
lib = get_library(taglib)
|
||||
except:
|
||||
continue
|
||||
if not app_labeled:
|
||||
self.add_result('\nApp: %s' % style.MODULE_NAME(app))
|
||||
app_labeled = True
|
||||
self.add_result('load: %s' % style.TAGLIB(taglib), 1)
|
||||
for items, label, style_func in [(lib.tags, 'Tag:', style.TAG), (lib.filters, 'Filter:', style.FILTER)]:
|
||||
for item in items:
|
||||
self.add_result('%s %s' % (label, style_func(item)), 2)
|
||||
doc = inspect.getdoc(items[item])
|
||||
if doc:
|
||||
self.add_result(format_block(doc, 12))
|
||||
return self.results
|
||||
# return "\n".join(results)
|
|
@ -1,103 +0,0 @@
|
|||
from django.conf import settings
|
||||
from django.core.exceptions import ViewDoesNotExist
|
||||
from django.core.urlresolvers import RegexURLPattern, RegexURLResolver
|
||||
from django.core.management.base import BaseCommand
|
||||
from optparse import make_option
|
||||
|
||||
try:
|
||||
# 2008-05-30 admindocs found in newforms-admin brand
|
||||
from django.contrib.admindocs.views import simplify_regex
|
||||
assert simplify_regex
|
||||
except ImportError:
|
||||
# fall back to trunk, pre-NFA merge
|
||||
from django.contrib.admin.views.doc import simplify_regex
|
||||
import re
|
||||
|
||||
from django_extensions.management.color import color_style
|
||||
|
||||
|
||||
def extract_views_from_urlpatterns(urlpatterns, base=''):
|
||||
"""
|
||||
Return a list of views from a list of urlpatterns.
|
||||
|
||||
Each object in the returned list is a two-tuple: (view_func, regex)
|
||||
"""
|
||||
views = []
|
||||
for p in urlpatterns:
|
||||
if isinstance(p, RegexURLPattern):
|
||||
try:
|
||||
views.append((p.callback, base + p.regex.pattern, p.name))
|
||||
except ViewDoesNotExist:
|
||||
continue
|
||||
elif isinstance(p, RegexURLResolver):
|
||||
try:
|
||||
patterns = p.url_patterns
|
||||
except ImportError:
|
||||
continue
|
||||
views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern))
|
||||
elif hasattr(p, '_get_callback'):
|
||||
try:
|
||||
views.append((p._get_callback(), base + p.regex.pattern, p.name))
|
||||
except ViewDoesNotExist:
|
||||
continue
|
||||
elif hasattr(p, 'url_patterns') or hasattr(p, '_get_url_patterns'):
|
||||
try:
|
||||
patterns = p.url_patterns
|
||||
except ImportError:
|
||||
continue
|
||||
views.extend(extract_views_from_urlpatterns(patterns, base + p.regex.pattern))
|
||||
else:
|
||||
raise TypeError("%s does not appear to be a urlpattern object" % p)
|
||||
return views
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option("--unsorted", "-u", action="store_true", dest="unsorted",
|
||||
help="Show urls unsorted but same order as found in url patterns"),
|
||||
)
|
||||
|
||||
help = "Displays all of the url matching routes for the project."
|
||||
|
||||
requires_model_validation = True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
if args:
|
||||
appname, = args
|
||||
|
||||
style = color_style()
|
||||
|
||||
if settings.ADMIN_FOR:
|
||||
settings_modules = [__import__(m, {}, {}, ['']) for m in settings.ADMIN_FOR]
|
||||
else:
|
||||
settings_modules = [settings]
|
||||
|
||||
views = []
|
||||
for settings_mod in settings_modules:
|
||||
try:
|
||||
urlconf = __import__(settings_mod.ROOT_URLCONF, {}, {}, [''])
|
||||
except Exception as e:
|
||||
if options.get('traceback', None):
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
print(style.ERROR("Error occurred while trying to load %s: %s" % (settings_mod.ROOT_URLCONF, str(e))))
|
||||
continue
|
||||
view_functions = extract_views_from_urlpatterns(urlconf.urlpatterns)
|
||||
for (func, regex, url_name) in view_functions:
|
||||
if hasattr(func, '__name__'):
|
||||
func_name = func.__name__
|
||||
elif hasattr(func, '__class__'):
|
||||
func_name = '%s()' % func.__class__.__name__
|
||||
else:
|
||||
func_name = re.sub(r' at 0x[0-9a-f]+', '', repr(func))
|
||||
views.append("%(url)s\t%(module)s.%(name)s\t%(url_name)s" % {
|
||||
'name': style.MODULE_NAME(func_name),
|
||||
'module': style.MODULE(func.__module__),
|
||||
'url_name': style.URL_NAME(url_name or ''),
|
||||
'url': style.URL(simplify_regex(regex))
|
||||
})
|
||||
|
||||
if not options.get('unsorted', False):
|
||||
views = sorted(views)
|
||||
|
||||
return "\n".join([v for v in views]) + "\n"
|
|
@ -1,89 +0,0 @@
|
|||
from optparse import make_option
|
||||
import sys
|
||||
import socket
|
||||
|
||||
import django
|
||||
from django.core.management.base import CommandError, BaseCommand
|
||||
from django.conf import settings
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
option_list = BaseCommand.option_list + (
|
||||
make_option('-R', '--router', action='store',
|
||||
dest='router', default=None,
|
||||
help='Use this router-database other then defined in settings.py'),
|
||||
make_option('-D', '--drop', action='store_true',
|
||||
dest='drop', default=False,
|
||||
help='If given, includes commands to drop any existing user and database.'),
|
||||
)
|
||||
help = """Generates the SQL to create your database for you, as specified in settings.py
|
||||
The envisioned use case is something like this:
|
||||
|
||||
./manage.py sqlcreate [--router=<routername>] | mysql -u <db_administrator> -p
|
||||
./manage.py sqlcreate [--router=<routername>] | psql -U <db_administrator> -W"""
|
||||
|
||||
requires_model_validation = False
|
||||
can_import_settings = True
|
||||
|
||||
@staticmethod
|
||||
def set_db_settings(**options):
|
||||
if django.get_version() >= "1.2":
|
||||
router = options.get('router')
|
||||
if router is None:
|
||||
return False
|
||||
|
||||
# retrieve this with the 'using' argument
|
||||
dbinfo = settings.DATABASES.get(router)
|
||||
settings.DATABASE_ENGINE = dbinfo.get('ENGINE').split('.')[-1]
|
||||
settings.DATABASE_USER = dbinfo.get('USER')
|
||||
settings.DATABASE_PASSWORD = dbinfo.get('PASSWORD')
|
||||
settings.DATABASE_NAME = dbinfo.get('NAME')
|
||||
settings.DATABASE_HOST = dbinfo.get('HOST')
|
||||
settings.DATABASE_PORT = dbinfo.get('PORT')
|
||||
return True
|
||||
else:
|
||||
# settings are set for django < 1.2 no modification needed
|
||||
return True
|
||||
|
||||
def handle(self, *args, **options):
|
||||
|
||||
if django.get_version() >= "1.2":
|
||||
got_db_settings = self.set_db_settings(**options)
|
||||
if not got_db_settings:
|
||||
raise CommandError("You are using Django %s which requires to specify the db-router.\nPlease specify the router by adding --router=<routername> to this command." % django.get_version())
|
||||
|
||||
#print("%s %s %s %s" % (settings.DATABASE_ENGINE, settings.DATABASE_NAME, settings.DATABASE_USER, settings.DATABASE_PASSWORD))
|
||||
engine = settings.DATABASE_ENGINE
|
||||
dbname = settings.DATABASE_NAME
|
||||
dbuser = settings.DATABASE_USER
|
||||
dbpass = settings.DATABASE_PASSWORD
|
||||
dbhost = settings.DATABASE_HOST
|
||||
dbclient = socket.gethostname()
|
||||
|
||||
# django settings file tells you that localhost should be specified by leaving
|
||||
# the DATABASE_HOST blank
|
||||
if not dbhost:
|
||||
dbhost = 'localhost'
|
||||
|
||||
if engine == 'mysql':
|
||||
sys.stderr.write("""-- WARNING!: https://docs.djangoproject.com/en/dev/ref/databases/#collation-settings
|
||||
-- Please read this carefully! Collation will be set to utf8_bin to have case-sensitive data.
|
||||
""")
|
||||
print("CREATE DATABASE %s CHARACTER SET utf8 COLLATE utf8_bin;" % dbname)
|
||||
print("GRANT ALL PRIVILEGES ON %s.* to '%s'@'%s' identified by '%s';" % (
|
||||
dbname, dbuser, dbclient, dbpass
|
||||
))
|
||||
elif engine == 'postgresql_psycopg2':
|
||||
if options.get('drop'):
|
||||
print("DROP DATABASE IF EXISTS %s;" % (dbname,))
|
||||
print("DROP USER IF EXISTS %s;" % (dbuser,))
|
||||
print("CREATE USER %s WITH ENCRYPTED PASSWORD '%s' CREATEDB;" % (dbuser, dbpass))
|
||||
print("CREATE DATABASE %s WITH ENCODING 'UTF-8' OWNER \"%s\";" % (dbname, dbuser))
|
||||
print("GRANT ALL PRIVILEGES ON DATABASE %s TO %s;" % (dbname, dbuser))
|
||||
elif engine == 'sqlite3':
|
||||
sys.stderr.write("-- manage.py syncdb will automatically create a sqlite3 database file.\n")
|
||||
else:
|
||||
# CREATE DATABASE is not SQL standard, but seems to be supported by most.
|
||||
sys.stderr.write("-- Don't know how to handle '%s' falling back to SQL.\n" % engine)
|
||||
print("CREATE DATABASE %s;" % dbname)
|
||||
print("GRANT ALL PRIVILEGES ON DATABASE %s to %s" % (dbname, dbuser))
|
|
@ -1,678 +0,0 @@
|
|||
"""
|
||||
sqldiff.py - Prints the (approximated) difference between models and database
|
||||
|
||||
TODO:
|
||||
- better support for relations
|
||||
- better support for constraints (mainly postgresql?)
|
||||
- support for table spaces with postgresql
|
||||
- when a table is not managed (meta.managed==False) then only do a one-way
|
||||
sqldiff ? show differences from db->table but not the other way around since
|
||||
it's not managed.
|
||||
|
||||
KNOWN ISSUES:
|
||||
- MySQL has by far the most problems with introspection. Please be
|
||||
carefull when using MySQL with sqldiff.
|
||||
- Booleans are reported back as Integers, so there's know way to know if
|
||||
there was a real change.
|
||||
- Varchar sizes are reported back without unicode support so their size
|
||||
may change in comparison to the real length of the varchar.
|
||||
- Some of the 'fixes' to counter these problems might create false
|
||||
positives or false negatives.
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management import sql as _sql
|
||||
from django.core.management import CommandError
|
||||
from django.core.management.color import no_style
|
||||
from django.db import transaction, connection
|
||||
from django.db.models.fields import IntegerField
|
||||
from optparse import make_option
|
||||
|
||||
ORDERING_FIELD = IntegerField('_order', null=True)
|
||||
|
||||
|
||||
def flatten(l, ltypes=(list, tuple)):
|
||||
ltype = type(l)
|
||||
l = list(l)
|
||||
i = 0
|
||||
while i < len(l):
|
||||
while isinstance(l[i], ltypes):
|
||||
if not l[i]:
|
||||
l.pop(i)
|
||||
i -= 1
|
||||
break
|
||||
else:
|
||||
l[i:i + 1] = l[i]
|
||||
i += 1
|
||||
return ltype(l)
|
||||
|
||||
|
||||
def all_local_fields(meta):
|
||||
all_fields = meta.local_fields[:]
|
||||
for parent in meta.parents:
|
||||
all_fields.extend(all_local_fields(parent._meta))
|
||||
return all_fields
|
||||
|
||||
|
||||
class SQLDiff(object):
    """Engine-agnostic diff between Django model definitions and the live DB schema.

    For every model in ``app_models`` the matching table is introspected and
    each mismatch is recorded via ``add_difference`` as a ``(diff_type, args)``
    tuple, grouped under an (app_label, model_name) marker.  ``print_diff``
    renders the collected differences either as descriptive text (DIFF_TEXTS)
    or as approximate ALTER/CREATE SQL (DIFF_SQL).  Backend-specific
    subclasses override hooks such as ``get_field_db_type`` to compensate for
    introspection quirks of individual database engines.
    """

    # Subclasses may map raw cursor type_codes straight to Django field class
    # names, short-circuiting the backend's data_types_reverse lookup.
    DATA_TYPES_REVERSE_OVERRIDE = {}

    # Every diff_type that add_difference() accepts.
    DIFF_TYPES = [
        'error',
        'comment',
        'table-missing-in-db',
        'field-missing-in-db',
        'field-missing-in-model',
        'index-missing-in-db',
        'index-missing-in-model',
        'unique-missing-in-db',
        'unique-missing-in-model',
        'field-type-differ',
        'field-parameter-differ',
        'notnull-differ',
    ]
    # Human-readable templates; the %(N)s placeholders are filled positionally
    # from the diff's args tuple (see print_diff_text).
    DIFF_TEXTS = {
        'error': 'error: %(0)s',
        'comment': 'comment: %(0)s',
        'table-missing-in-db': "table '%(0)s' missing in database",
        'field-missing-in-db': "field '%(1)s' defined in model but missing in database",
        'field-missing-in-model': "field '%(1)s' defined in database but missing in model",
        'index-missing-in-db': "field '%(1)s' INDEX defined in model but missing in database",
        'index-missing-in-model': "field '%(1)s' INDEX defined in database schema but missing in model",
        'unique-missing-in-db': "field '%(1)s' UNIQUE defined in model but missing in database",
        'unique-missing-in-model': "field '%(1)s' UNIQUE defined in database schema but missing in model",
        'field-type-differ': "field '%(1)s' not of same type: db='%(3)s', model='%(2)s'",
        'field-parameter-differ': "field '%(1)s' parameters differ: db='%(3)s', model='%(2)s'",
        'notnull-differ': "field '%(1)s' null differ: db='%(3)s', model='%(2)s'",
    }

    # SQL renderers: each takes (style, qn, args) and returns one statement.
    # Defined as class-level lambdas so subclasses can replace them wholesale
    # (see PostgresqlSQLDiff).
    SQL_FIELD_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD'), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
    SQL_FIELD_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP COLUMN'), style.SQL_FIELD(qn(args[1])))
    SQL_INDEX_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('CREATE INDEX'), style.SQL_TABLE(qn("%s_idx" % '_'.join(args[0:2]))), style.SQL_KEYWORD('ON'), style.SQL_TABLE(qn(args[0])), style.SQL_FIELD(qn(args[1])))
    # FIXME: need to lookup index name instead of just appending _idx to table + fieldname
    SQL_INDEX_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s;" % (style.SQL_KEYWORD('DROP INDEX'), style.SQL_TABLE(qn("%s_idx" % '_'.join(args[0:2]))))
    SQL_UNIQUE_MISSING_IN_DB = lambda self, style, qn, args: "%s %s\n\t%s %s (%s);" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ADD'), style.SQL_KEYWORD('UNIQUE'), style.SQL_FIELD(qn(args[1])))
    # FIXME: need to lookup unique constraint name instead of appending _key to table + fieldname
    SQL_UNIQUE_MISSING_IN_MODEL = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('DROP'), style.SQL_KEYWORD('CONSTRAINT'), style.SQL_TABLE(qn("%s_key" % ('_'.join(args[:2])))))
    SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
    SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD("MODIFY"), style.SQL_FIELD(qn(args[1])), style.SQL_COLTYPE(args[2]))
    SQL_NOTNULL_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('MODIFY'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD(args[2]), style.SQL_KEYWORD('NOT NULL'))
    SQL_ERROR = lambda self, style, qn, args: style.NOTICE('-- Error: %s' % style.ERROR(args[0]))
    SQL_COMMENT = lambda self, style, qn, args: style.NOTICE('-- Comment: %s' % style.SQL_TABLE(args[0]))
    SQL_TABLE_MISSING_IN_DB = lambda self, style, qn, args: style.NOTICE('-- Table missing: %s' % args[0])

    def __init__(self, app_models, options):
        self.app_models = app_models
        self.options = options
        self.dense = options.get('dense_output', False)

        try:
            self.introspection = connection.introspection
        except AttributeError:
            # Pre-r8296 Django kept introspection in a standalone module.
            from django.db import get_introspection_module
            self.introspection = get_introspection_module()

        self.cursor = connection.cursor()
        self.django_tables = self.get_django_tables(options.get('only_existing', True))
        self.db_tables = self.introspection.get_table_list(self.cursor)
        # List of (app_label, model_name, [(diff_type, args), ...]) entries,
        # appended to by add_app_model_marker / add_difference.
        self.differences = []
        # De-dup guard so an unknown type_code is only reported once per field.
        self.unknown_db_fields = {}

        # diff_type -> SQL renderer; bound here (not at class level) so
        # subclass lambda overrides are picked up.
        self.DIFF_SQL = {
            'error': self.SQL_ERROR,
            'comment': self.SQL_COMMENT,
            'table-missing-in-db': self.SQL_TABLE_MISSING_IN_DB,
            'field-missing-in-db': self.SQL_FIELD_MISSING_IN_DB,
            'field-missing-in-model': self.SQL_FIELD_MISSING_IN_MODEL,
            'index-missing-in-db': self.SQL_INDEX_MISSING_IN_DB,
            'index-missing-in-model': self.SQL_INDEX_MISSING_IN_MODEL,
            'unique-missing-in-db': self.SQL_UNIQUE_MISSING_IN_DB,
            'unique-missing-in-model': self.SQL_UNIQUE_MISSING_IN_MODEL,
            'field-type-differ': self.SQL_FIELD_TYPE_DIFFER,
            'field-parameter-differ': self.SQL_FIELD_PARAMETER_DIFFER,
            'notnull-differ': self.SQL_NOTNULL_DIFFER,
        }

    def add_app_model_marker(self, app_label, model_name):
        """Open a new (app, model) group that subsequent differences attach to."""
        self.differences.append((app_label, model_name, []))

    def add_difference(self, diff_type, *args):
        """Record one difference under the most recently added app/model marker."""
        assert diff_type in self.DIFF_TYPES, 'Unknown difference type'
        self.differences[-1][-1].append((diff_type, args))

    def get_django_tables(self, only_existing):
        """Return the table names Django expects, across several Django versions."""
        try:
            django_tables = self.introspection.django_table_names(only_existing=only_existing)
        except AttributeError:
            # backwards compatibility for before introspection refactoring (r8296)
            try:
                django_tables = _sql.django_table_names(only_existing=only_existing)
            except AttributeError:
                # backwards compatibility for before svn r7568
                django_tables = _sql.django_table_list(only_existing=only_existing)
        return django_tables

    def sql_to_dict(self, query, param):
        """ sql_to_dict(query, param) -> list of dicts

        Execute *query* with *param* and return each result row as a
        dict keyed by the cursor's column names.

        code from snippet at http://www.djangosnippets.org/snippets/1383/
        """
        cursor = connection.cursor()
        cursor.execute(query, param)
        fieldnames = [name[0] for name in cursor.description]
        result = []
        for row in cursor.fetchall():
            rowset = []
            for field in zip(fieldnames, row):
                rowset.append(field)
            result.append(dict(rowset))
        return result

    def get_field_model_type(self, field):
        """Return the column type Django would generate for *field*."""
        return field.db_type(connection=connection)

    def get_field_db_type(self, description, field=None, table_name=None):
        """Reverse-map one DB-API ``cursor.description`` row to a Django column type.

        Returns the column type string the database column corresponds to, or
        None when the type_code cannot be resolved (in which case a 'comment'
        difference is recorded once per unknown field).
        """
        from django.db import models
        # DB-API cursor.description
        #(name, type_code, display_size, internal_size, precision, scale, null_ok) = description
        type_code = description[1]
        if type_code in self.DATA_TYPES_REVERSE_OVERRIDE:
            reverse_type = self.DATA_TYPES_REVERSE_OVERRIDE[type_code]
        else:
            try:
                try:
                    reverse_type = self.introspection.data_types_reverse[type_code]
                except AttributeError:
                    # backwards compatibility for before introspection refactoring (r8296)
                    reverse_type = self.introspection.DATA_TYPES_REVERSE.get(type_code)
            except KeyError:
                # Give subclasses a chance (e.g. Postgres catalog lookup).
                reverse_type = self.get_field_db_type_lookup(type_code)
                if not reverse_type:
                    # type_code not found in data_types_reverse map
                    key = (self.differences[-1][:2], description[:2])
                    if key not in self.unknown_db_fields:
                        self.unknown_db_fields[key] = 1
                        self.add_difference('comment', "Unknown database type for field '%s' (%s)" % (description[0], type_code))
                    return None

        kwargs = {}
        # A reverse_type may be (field_name, extra_kwargs).
        if isinstance(reverse_type, tuple):
            kwargs.update(reverse_type[1])
            reverse_type = reverse_type[0]

        # description[3] is internal_size, [4] precision, [5] scale,
        # [6] null_ok (per DB-API 2.0).
        if reverse_type == "CharField" and description[3]:
            kwargs['max_length'] = description[3]

        if reverse_type == "DecimalField":
            kwargs['max_digits'] = description[4]
            kwargs['decimal_places'] = description[5] and abs(description[5]) or description[5]

        if description[6]:
            kwargs['blank'] = True
            if not reverse_type in ('TextField', 'CharField'):
                kwargs['null'] = True

        # Dotted reverse types name a field class outside django.db.models
        # (e.g. GIS fields) and are imported dynamically.
        if '.' in reverse_type:
            from django.utils import importlib
            # TODO: when was importlib added to django.utils ? and do we
            # need to add backwards compatibility code ?
            module_path, package_name = reverse_type.rsplit('.', 1)
            module = importlib.import_module(module_path)
            field_db_type = getattr(module, package_name)(**kwargs).db_type(connection=connection)
        else:
            field_db_type = getattr(models, reverse_type)(**kwargs).db_type(connection=connection)
        return field_db_type

    def get_field_db_type_lookup(self, type_code):
        """Backend-specific fallback for unknown type_codes; base returns None."""
        return None

    def strip_parameters(self, field_type):
        """Reduce a column type to its bare name (e.g. 'varchar(80)' -> 'varchar')."""
        if field_type and field_type != 'double precision':
            return field_type.split(" ")[0].split("(")[0].lower()
        return field_type

    def find_unique_missing_in_db(self, meta, table_indexes, table_name):
        """Report model fields marked unique that have no unique index in the DB."""
        for field in all_local_fields(meta):
            if field.unique:
                attname = field.db_column or field.attname
                if attname in table_indexes and table_indexes[attname]['unique']:
                    continue
                self.add_difference('unique-missing-in-db', table_name, attname)

    def find_unique_missing_in_model(self, meta, table_indexes, table_name):
        """Report DB unique indexes on fields the model does not declare unique."""
        # TODO: Postgresql does not list unique_togethers in table_indexes
        # MySQL does
        fields = dict([(field.db_column or field.name, field.unique) for field in all_local_fields(meta)])
        for att_name, att_opts in table_indexes.iteritems():
            if att_opts['unique'] and att_name in fields and not fields[att_name]:
                # A unique_together member is expected to be unique in the DB.
                if att_name in flatten(meta.unique_together):
                    continue
                self.add_difference('unique-missing-in-model', table_name, att_name)

    def find_index_missing_in_db(self, meta, table_indexes, table_name):
        """Report model fields with db_index=True that have no DB index."""
        for field in all_local_fields(meta):
            if field.db_index:
                attname = field.db_column or field.attname
                if not attname in table_indexes:
                    self.add_difference('index-missing-in-db', table_name, attname)

    def find_index_missing_in_model(self, meta, table_indexes, table_name):
        """Report DB indexes not accounted for by the model definition."""
        fields = dict([(field.name, field) for field in all_local_fields(meta)])
        for att_name, att_opts in table_indexes.iteritems():
            if att_name in fields:
                field = fields[att_name]
                # Indexes implied by the model (db_index, pk, unique,
                # unique_together) are not differences.
                if field.db_index:
                    continue
                if att_opts['primary_key'] and field.primary_key:
                    continue
                if att_opts['unique'] and field.unique:
                    continue
                if att_opts['unique'] and att_name in flatten(meta.unique_together):
                    continue
                self.add_difference('index-missing-in-model', table_name, att_name)

    def find_field_missing_in_model(self, fieldmap, table_description, table_name):
        """Report DB columns (row[0] is the column name) absent from the model."""
        for row in table_description:
            if row[0] not in fieldmap:
                self.add_difference('field-missing-in-model', table_name, row[0])

    def find_field_missing_in_db(self, fieldmap, table_description, table_name):
        """Report model fields that have no corresponding DB column."""
        db_fields = [row[0] for row in table_description]
        for field_name, field in fieldmap.iteritems():
            if field_name not in db_fields:
                self.add_difference('field-missing-in-db', table_name, field_name, field.db_type(connection=connection))

    def find_field_type_differ(self, meta, table_description, table_name, func=None):
        """Report fields whose base column type (parameters stripped) differs.

        *func*, when given, may adjust (model_type, db_type) before comparison
        — used by backends to paper over introspection quirks.
        """
        db_fields = dict([(row[0], row) for row in table_description])
        for field in all_local_fields(meta):
            if field.name not in db_fields:
                continue
            description = db_fields[field.name]

            model_type = self.get_field_model_type(field)
            db_type = self.get_field_db_type(description, field)

            # use callback function if defined
            if func:
                model_type, db_type = func(field, description, model_type, db_type)

            if not self.strip_parameters(db_type) == self.strip_parameters(model_type):
                self.add_difference('field-type-differ', table_name, field.name, model_type, db_type)

    def find_field_parameter_differ(self, meta, table_description, table_name, func=None):
        """Report fields whose type matches but whose parameters (e.g. length) differ."""
        db_fields = dict([(row[0], row) for row in table_description])
        for field in all_local_fields(meta):
            if field.name not in db_fields:
                continue
            description = db_fields[field.name]

            model_type = self.get_field_model_type(field)
            db_type = self.get_field_db_type(description, field, table_name)

            # Base types that differ are already handled by
            # find_field_type_differ; only compare parameters when they match.
            if not self.strip_parameters(model_type) == self.strip_parameters(db_type):
                continue

            # use callback function if defined
            if func:
                model_type, db_type = func(field, description, model_type, db_type)

            if not model_type == db_type:
                self.add_difference('field-parameter-differ', table_name, field.name, model_type, db_type)

    @transaction.commit_manually
    def find_differences(self):
        """Scan every model and populate ``self.differences``.

        Runs under manual transaction control so a failed introspection can be
        rolled back without aborting the whole scan.
        """
        cur_app_label = None
        # NOTE(review): cur_app_label is never reassigned here, so the marker
        # below is added for every model, not once per app — confirm intended.
        for app_model in self.app_models:
            meta = app_model._meta
            table_name = meta.db_table
            app_label = meta.app_label

            if cur_app_label != app_label:
                # Marker indicating start of difference scan for this table_name
                self.add_app_model_marker(app_label, app_model.__name__)

            #if not table_name in self.django_tables:
            if not table_name in self.db_tables:
                # Table is missing from database
                self.add_difference('table-missing-in-db', table_name)
                continue

            table_indexes = self.introspection.get_indexes(self.cursor, table_name)
            fieldmap = dict([(field.db_column or field.get_attname(), field) for field in all_local_fields(meta)])

            # add ordering field if model uses order_with_respect_to
            if meta.order_with_respect_to:
                fieldmap['_order'] = ORDERING_FIELD

            try:
                table_description = self.introspection.get_table_description(self.cursor, table_name)
            except Exception as e:
                self.add_difference('error', 'unable to introspect table: %s' % str(e).strip())
                transaction.rollback()  # reset transaction
                continue
            else:
                transaction.commit()
            # Fields which are defined in database but not in model
            # 1) find: 'unique-missing-in-model'
            self.find_unique_missing_in_model(meta, table_indexes, table_name)
            # 2) find: 'index-missing-in-model'
            self.find_index_missing_in_model(meta, table_indexes, table_name)
            # 3) find: 'field-missing-in-model'
            self.find_field_missing_in_model(fieldmap, table_description, table_name)

            # Fields which are defined in models but not in database
            # 4) find: 'field-missing-in-db'
            self.find_field_missing_in_db(fieldmap, table_description, table_name)
            # 5) find: 'unique-missing-in-db'
            self.find_unique_missing_in_db(meta, table_indexes, table_name)
            # 6) find: 'index-missing-in-db'
            self.find_index_missing_in_db(meta, table_indexes, table_name)

            # Fields which have a different type or parameters
            # 7) find: 'type-differs'
            self.find_field_type_differ(meta, table_description, table_name)
            # 8) find: 'type-parameter-differs'
            self.find_field_parameter_differ(meta, table_description, table_name)

    # NOTE(review): no_style() is evaluated once at definition time as the
    # default — harmless if no_style() is stateless, but worth confirming.
    def print_diff(self, style=no_style()):
        """ print differences to stdout """
        if self.options.get('sql', True):
            self.print_diff_sql(style)
        else:
            self.print_diff_text(style)

    def print_diff_text(self, style):
        """Render the collected differences as human-readable text."""
        cur_app_label = None
        for app_label, model_name, diffs in self.differences:
            if not diffs:
                continue
            if not self.dense and cur_app_label != app_label:
                print("%s %s" % (style.NOTICE("+ Application:"), style.SQL_TABLE(app_label)))
                cur_app_label = app_label
            if not self.dense:
                print("%s %s" % (style.NOTICE("|-+ Differences for model:"), style.SQL_TABLE(model_name)))
            for diff in diffs:
                diff_type, diff_args = diff
                # Fill %(0)s, %(1)s, ... placeholders positionally, then
                # alternate error styling on quoted segments.
                text = self.DIFF_TEXTS[diff_type] % dict((str(i), style.SQL_TABLE(e)) for i, e in enumerate(diff_args))
                text = "'".join(i % 2 == 0 and style.ERROR(e) or e for i, e in enumerate(text.split("'")))
                if not self.dense:
                    print("%s %s" % (style.NOTICE("|--+"), text))
                else:
                    print("%s %s %s %s %s" % (style.NOTICE("App"), style.SQL_TABLE(app_label), style.NOTICE('Model'), style.SQL_TABLE(model_name), text))

    def print_diff_sql(self, style):
        """Render the collected differences as a BEGIN/COMMIT SQL script."""
        cur_app_label = None
        qn = connection.ops.quote_name
        has_differences = max([len(diffs) for app_label, model_name, diffs in self.differences])
        if not has_differences:
            if not self.dense:
                print(style.SQL_KEYWORD("-- No differences"))
        else:
            print(style.SQL_KEYWORD("BEGIN;"))
            for app_label, model_name, diffs in self.differences:
                if not diffs:
                    continue
                if not self.dense and cur_app_label != app_label:
                    print(style.NOTICE("-- Application: %s" % style.SQL_TABLE(app_label)))
                    cur_app_label = app_label
                if not self.dense:
                    print(style.NOTICE("-- Model: %s" % style.SQL_TABLE(model_name)))
                for diff in diffs:
                    diff_type, diff_args = diff
                    text = self.DIFF_SQL[diff_type](style, qn, diff_args)
                    if self.dense:
                        text = text.replace("\n\t", " ")
                    print(text)
            print(style.SQL_KEYWORD("COMMIT;"))
|
||||
|
||||
|
||||
class GenericSQLDiff(SQLDiff):
    """Fallback diff with no backend-specific adjustments.

    Used for engines without a dedicated subclass (mapped to 'oracle' in
    DATABASE_SQLDIFF_CLASSES, and the default for unknown engines).
    """
    pass
|
||||
|
||||
|
||||
class MySQLDiff(SQLDiff):
    """MySQL-specific diff, working around several MySQLdb introspection quirks."""

    # All the MySQL hacks together create something of a problem
    # Fixing one bug in MySQL creates another issue. So just keep in mind
    # that this is way unreliable for MySQL atm.
    def get_field_db_type(self, description, field=None, table_name=None):
        """Resolve the column type, compensating for MySQLdb's reported sizes.

        Adjustments on top of the base implementation: divide reported
        lengths by 3 (see comment below), append AUTO_INCREMENT to integer
        primary keys, normalize char/varchar, and map tinyint(1) back to bool.
        """
        from MySQLdb.constants import FIELD_TYPE
        # weird bug? in mysql db-api where it returns three times the correct value for field length
        # if i remember correctly it had something todo with unicode strings
        # TODO: Fix this is a more meaningful and better understood manner
        description = list(description)
        if description[1] not in [FIELD_TYPE.TINY, FIELD_TYPE.SHORT]:  # exclude tinyints from conversion.
            description[3] = description[3] / 3
            description[4] = description[4] / 3
        db_type = super(MySQLDiff, self).get_field_db_type(description)
        if not db_type:
            return
        if field:
            if field.primary_key and (db_type == 'integer' or db_type == 'bigint'):
                db_type += ' AUTO_INCREMENT'
            # MySQL isn't really sure about char's and varchar's like sqlite
            field_type = self.get_field_model_type(field)
            # Fix char/varchar inconsistencies
            if self.strip_parameters(field_type) == 'char' and self.strip_parameters(db_type) == 'varchar':
                db_type = db_type.lstrip("var")
            # They like to call 'bool's 'tinyint(1)' and introspection makes that a integer
            # just convert it back to it's proper type, a bool is a bool and nothing else.
            if db_type == 'integer' and description[1] == FIELD_TYPE.TINY and description[4] == 1:
                db_type = 'bool'
            if db_type == 'integer' and description[1] == FIELD_TYPE.SHORT:
                db_type = 'smallint UNSIGNED'  # FIXME: what about if it's not UNSIGNED ?
        return db_type
|
||||
|
||||
|
||||
class SqliteSQLDiff(SQLDiff):
    """SQLite-specific diff: skips index checks and tolerates pk-implied uniqueness."""

    # Unique does not seem to be implied on Sqlite for Primary_key's
    # if this is more generic among databases this might be usefull
    # to add to the superclass's find_unique_missing_in_db method
    def find_unique_missing_in_db(self, meta, table_indexes, table_name):
        """Like the base version, but also accepts a primary-key index as 'unique'."""
        for field in all_local_fields(meta):
            if field.unique:
                attname = field.db_column or field.attname
                if attname in table_indexes and table_indexes[attname]['unique']:
                    continue
                if attname in table_indexes and table_indexes[attname]['primary_key']:
                    continue
                self.add_difference('unique-missing-in-db', table_name, attname)

    # Finding Indexes by using the get_indexes dictionary doesn't seem to work
    # for sqlite.
    def find_index_missing_in_db(self, meta, table_indexes, table_name):
        """Deliberate no-op: SQLite index introspection is unreliable here."""
        pass

    def find_index_missing_in_model(self, meta, table_indexes, table_name):
        """Deliberate no-op: SQLite index introspection is unreliable here."""
        pass

    def get_field_db_type(self, description, field=None, table_name=None):
        """Resolve the column type, normalizing char/varchar like MySQLDiff does."""
        db_type = super(SqliteSQLDiff, self).get_field_db_type(description)
        if not db_type:
            return
        if field:
            field_type = self.get_field_model_type(field)
            # Fix char/varchar inconsistencies
            if self.strip_parameters(field_type) == 'char' and self.strip_parameters(db_type) == 'varchar':
                db_type = db_type.lstrip("var")
        return db_type
|
||||
|
||||
|
||||
class PostgresqlSQLDiff(SQLDiff):
    """PostgreSQL-specific diff: loads CHECK constraints and NOT NULL state
    from the system catalogs and folds them into the reported column types.
    """

    # NOTE(review): the postgis OIDs below are installation-specific, not
    # stable across databases — confirm they match the target cluster.
    DATA_TYPES_REVERSE_OVERRIDE = {
        1042: 'CharField',
        # postgis types (TODO: support is very incomplete)
        17506: 'django.contrib.gis.db.models.fields.PointField',
        55902: 'django.contrib.gis.db.models.fields.MultiPolygonField',
    }

    # Maps pg_type.typname values (stripped of the leading array '_') to
    # dotted field-class paths, used by get_field_db_type_lookup.
    DATA_TYPES_REVERSE_NAME = {
        'hstore': 'django_hstore.hstore.DictionaryField',
    }

    # Hopefully in the future we can add constraint checking and other more
    # advanced checks based on this database.
    SQL_LOAD_CONSTRAINTS = """
    SELECT nspname, relname, conname, attname, pg_get_constraintdef(pg_constraint.oid)
    FROM pg_constraint
    INNER JOIN pg_attribute ON pg_constraint.conrelid = pg_attribute.attrelid AND pg_attribute.attnum = any(pg_constraint.conkey)
    INNER JOIN pg_class ON conrelid=pg_class.oid
    INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace
    ORDER BY CASE WHEN contype='f' THEN 0 ELSE 1 END,contype,nspname,relname,conname;
    """
    SQL_LOAD_NULL = """
    SELECT nspname, relname, attname, attnotnull
    FROM pg_attribute
    INNER JOIN pg_class ON attrelid=pg_class.oid
    INNER JOIN pg_namespace ON pg_namespace.oid=pg_class.relnamespace;
    """

    # Postgres uses ALTER ... TYPE rather than MODIFY (see base-class lambdas).
    SQL_FIELD_TYPE_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2]))
    SQL_FIELD_PARAMETER_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD("TYPE"), style.SQL_COLTYPE(args[2]))
    SQL_NOTNULL_DIFFER = lambda self, style, qn, args: "%s %s\n\t%s %s %s %s;" % (style.SQL_KEYWORD('ALTER TABLE'), style.SQL_TABLE(qn(args[0])), style.SQL_KEYWORD('ALTER COLUMN'), style.SQL_FIELD(qn(args[1])), style.SQL_KEYWORD(args[2]), style.SQL_KEYWORD('NOT NULL'))

    def __init__(self, app_models, options):
        SQLDiff.__init__(self, app_models, options)
        # (nspname, relname, attname) -> constraint row / null flag caches,
        # filled once up front from the system catalogs.
        self.check_constraints = {}
        self.null = {}
        self.load_constraints()
        self.load_null()

    def load_null(self):
        """Cache per-column NOT NULL state keyed by (schema, table, column)."""
        for dct in self.sql_to_dict(self.SQL_LOAD_NULL, []):
            key = (dct['nspname'], dct['relname'], dct['attname'])
            self.null[key] = not dct['attnotnull']

    def load_constraints(self):
        """Cache CHECK constraints keyed by (schema, table, column)."""
        for dct in self.sql_to_dict(self.SQL_LOAD_CONSTRAINTS, []):
            key = (dct['nspname'], dct['relname'], dct['attname'])
            if 'CHECK' in dct['pg_get_constraintdef']:
                self.check_constraints[key] = dct

    def get_field_db_type(self, description, field=None, table_name=None):
        """Resolve the column type, mapping pk integers to serial types and
        appending CHECK-constraint text; also records notnull differences.
        """
        db_type = super(PostgresqlSQLDiff, self).get_field_db_type(description)
        if not db_type:
            return
        if field:
            if field.primary_key:
                if db_type == 'integer':
                    db_type = 'serial'
                elif db_type == 'bigint':
                    db_type = 'bigserial'
            if table_name:
                tablespace = field.db_tablespace
                if tablespace == "":
                    tablespace = "public"
                check_constraint = self.check_constraints.get((tablespace, table_name, field.attname), {}).get('pg_get_constraintdef', None)
                if check_constraint:
                    # Normalize doubled parens and quote the column reference
                    # so the constraint text matches Django's output format.
                    check_constraint = check_constraint.replace("((", "(")
                    check_constraint = check_constraint.replace("))", ")")
                    check_constraint = '("'.join([')' in e and '" '.join(e.split(" ", 1)) or e for e in check_constraint.split("(")])
                    # TODO: might be more then one constraint in definition ?
                    db_type += ' ' + check_constraint
                # 'fixme' sentinel: column absent from the null cache will
                # compare unequal to both True and False.
                null = self.null.get((tablespace, table_name, field.attname), 'fixme')
                if field.null != null:
                    action = field.null and 'DROP' or 'SET'
                    self.add_difference('notnull-differ', table_name, field.name, action)
        return db_type

    @transaction.autocommit
    def get_field_db_type_lookup(self, type_code):
        """Resolve an unknown type_code via pg_type, for e.g. hstore columns."""
        try:
            name = self.sql_to_dict("SELECT typname FROM pg_type WHERE typelem=%s;", [type_code])[0]['typname']
            return self.DATA_TYPES_REVERSE_NAME.get(name.strip('_'))
        except (IndexError, KeyError):
            pass

    """
    def find_field_type_differ(self, meta, table_description, table_name):
        def callback(field, description, model_type, db_type):
            if field.primary_key and db_type=='integer':
                db_type = 'serial'
            return model_type, db_type
        super(PostgresqlSQLDiff, self).find_field_type_differ(meta, table_description, table_name, callback)
    """
|
||||
|
||||
# Maps the engine name (last dotted component of settings.DATABASE_ENGINE or
# DATABASES['default']['ENGINE'], see Command.handle) to the diff class.
# Unknown engines fall back to GenericSQLDiff.
DATABASE_SQLDIFF_CLASSES = {
    'postgis': PostgresqlSQLDiff,
    'postgresql_psycopg2': PostgresqlSQLDiff,
    'postgresql': PostgresqlSQLDiff,
    'mysql': MySQLDiff,
    'sqlite3': SqliteSQLDiff,
    'oracle': GenericSQLDiff
}
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """Management command entry point for ``sqldiff``.

    Resolves the active database engine, collects the target models, then
    delegates to the engine-appropriate SQLDiff subclass.
    """

    option_list = BaseCommand.option_list + (
        make_option('--all-applications', '-a', action='store_true', dest='all_applications',
                    help="Automaticly include all application from INSTALLED_APPS."),
        make_option('--not-only-existing', '-e', action='store_false', dest='only_existing',
                    help="Check all tables that exist in the database, not only tables that should exist based on models."),
        make_option('--dense-output', '-d', action='store_true', dest='dense_output',
                    help="Shows the output in dense format, normally output is spreaded over multiple lines."),
        make_option('--output_text', '-t', action='store_false', dest='sql', default=True,
                    help="Outputs the differences as descriptive text instead of SQL"),
    )

    help = """Prints the (approximated) difference between models and fields in the database for the given app name(s).

It indicates how columns in the database are different from the sql that would
be generated by Django. This command is not a database migration tool. (Though
it can certainly help) It's purpose is to show the current differences as a way
to check/debug ur models compared to the real database tables and columns."""

    output_transaction = False
    args = '<appname appname ...>'

    def handle(self, *app_labels, **options):
        """Run the diff for the given app labels (or all apps with -a)."""
        from django import VERSION
        if VERSION[:2] < (1, 0):
            raise CommandError("SQLDiff only support Django 1.0 or higher!")

        from django.db import models
        from django.conf import settings

        # Support both the multi-db DATABASES dict (Django >= 1.2) and the
        # older single DATABASE_ENGINE setting.
        engine = None
        if hasattr(settings, 'DATABASES'):
            engine = settings.DATABASES['default']['ENGINE']
        else:
            engine = settings.DATABASE_ENGINE

        if engine == 'dummy':
            # This must be the "dummy" database backend, which means the user
            # hasn't set DATABASE_ENGINE.
            raise CommandError("""Django doesn't know which syntax to use for your SQL statements,
because you haven't specified the DATABASE_ENGINE setting.
Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.""")

        if options.get('all_applications', False):
            app_models = models.get_models()
        else:
            if not app_labels:
                raise CommandError('Enter at least one appname.')
            try:
                app_list = [models.get_app(app_label) for app_label in app_labels]
            except (models.ImproperlyConfigured, ImportError) as e:
                raise CommandError("%s. Are you sure your INSTALLED_APPS setting is correct?" % e)

            app_models = []
            for app in app_list:
                app_models.extend(models.get_models(app))

        ## remove all models that are not managed by Django
        #app_models = [model for model in app_models if getattr(model._meta, 'managed', True)]

        if not app_models:
            raise CommandError('Unable to execute sqldiff no models founds.')

        # Fall back to inferring the engine from the connection module path
        # (e.g. django.db.backends.sqlite3.base -> 'sqlite3').
        if not engine:
            engine = connection.__module__.split('.')[-2]

        # DATABASES-style engines are dotted paths; keep the last component.
        if '.' in engine:
            engine = engine.split('.')[-1]

        cls = DATABASE_SQLDIFF_CLASSES.get(engine, GenericSQLDiff)
        sqldiff_instance = cls(app_models, options)
        sqldiff_instance.find_differences()
        sqldiff_instance.print_diff(self.style)
        return
|
|
@ -1,340 +0,0 @@
|
|||
"""
|
||||
Sync Media to S3
|
||||
================
|
||||
|
||||
Django command that scans all files in your settings.MEDIA_ROOT folder and
|
||||
uploads them to S3 with the same directory structure.
|
||||
|
||||
This command can optionally do the following but it is off by default:
|
||||
* gzip compress any CSS and Javascript files it finds and adds the appropriate
|
||||
'Content-Encoding' header.
|
||||
* set a far future 'Expires' header for optimal caching.
|
||||
|
||||
Note: This script requires the Python boto library and valid Amazon Web
|
||||
Services API keys.
|
||||
|
||||
Required settings.py variables:
|
||||
AWS_ACCESS_KEY_ID = ''
|
||||
AWS_SECRET_ACCESS_KEY = ''
|
||||
AWS_BUCKET_NAME = ''
|
||||
|
||||
When you call this command with the `--renamegzip` param, it will add
|
||||
the '.gz' extension to the file name. But Safari just doesn't recognize
|
||||
'.gz' files and your site won't work on it! To fix this problem, you can
|
||||
set any other extension (like .jgz) in the `SYNC_S3_RENAME_GZIP_EXT`
|
||||
variable.
|
||||
|
||||
Command options are:
|
||||
-p PREFIX, --prefix=PREFIX
|
||||
The prefix to prepend to the path on S3.
|
||||
--gzip Enables gzipping CSS and Javascript files.
|
||||
--expires Enables setting a far future expires header.
|
||||
--force Skip the file mtime check to force upload of all
|
||||
files.
|
||||
--filter-list Override default directory and file exclusion
|
||||
filters. (enter as comma seperated line)
|
||||
--renamegzip Enables renaming of gzipped files by appending '.gz'.
|
||||
to the original file name. This way your original
|
||||
assets will not be replaced by the gzipped ones.
|
||||
You can change the extension setting the
|
||||
`SYNC_S3_RENAME_GZIP_EXT` var in your settings.py
|
||||
file.
|
||||
--invalidate Invalidates the objects in CloudFront after uploaading
|
||||
stuff to s3.
|
||||
|
||||
|
||||
TODO:
|
||||
* Use fnmatch (or regex) to allow more complex FILTER_LIST rules.
|
||||
|
||||
"""
|
||||
import datetime
import email
import email.utils  # explicit: attribute-style lazy access to email submodules is Python 2 only
import gzip
import mimetypes
import os
import time
from optparse import make_option

try:
    from cStringIO import StringIO
    assert StringIO
except ImportError:
    from StringIO import StringIO


from django.conf import settings
from django.core.management.base import BaseCommand, CommandError

# Make sure boto is available
try:
    import boto
    import boto.exception
except ImportError:
    raise ImportError("The boto Python library is not installed.")
|
||||
|
||||
|
||||
class Command(BaseCommand):
    # Extra variables to avoid passing these around
    # These are populated in handle() from the Django settings module.
    AWS_ACCESS_KEY_ID = ''
    AWS_SECRET_ACCESS_KEY = ''
    AWS_BUCKET_NAME = ''
    AWS_CLOUDFRONT_DISTRIBUTION = ''
    SYNC_S3_RENAME_GZIP_EXT = ''

    # Root directory to walk; handle() sets it from --dir (default MEDIA_ROOT).
    DIRECTORY = ''
    # Directory/file names that are never uploaded. Overridable via
    # settings.FILTER_LIST or the --filter-list command-line option.
    FILTER_LIST = ['.DS_Store', '.svn', '.hg', '.git', 'Thumbs.db']
    # Content types eligible for gzip compression when --gzip is passed.
    GZIP_CONTENT_TYPES = (
        'text/css',
        'application/javascript',
        'application/x-javascript',
        'text/javascript'
    )

    # Upload bookkeeping, reported at the end of handle() and used for
    # CloudFront invalidation.
    uploaded_files = []
    upload_count = 0
    skip_count = 0

    option_list = BaseCommand.option_list + (
        make_option('-p', '--prefix',
                    dest='prefix',
                    default=getattr(settings, 'SYNC_MEDIA_S3_PREFIX', ''),
                    help="The prefix to prepend to the path on S3."),
        make_option('-d', '--dir',
                    dest='dir', default=settings.MEDIA_ROOT,
                    help="The root directory to use instead of your MEDIA_ROOT"),
        make_option('--gzip',
                    action='store_true', dest='gzip', default=False,
                    help="Enables gzipping CSS and Javascript files."),
        make_option('--renamegzip',
                    action='store_true', dest='renamegzip', default=False,
                    help="Enables renaming of gzipped assets to have '.gz' appended to the filename."),
        make_option('--expires',
                    action='store_true', dest='expires', default=False,
                    help="Enables setting a far future expires header."),
        make_option('--force',
                    action='store_true', dest='force', default=False,
                    help="Skip the file mtime check to force upload of all files."),
        make_option('--filter-list', dest='filter_list',
                    action='store', default='',
                    help="Override default directory and file exclusion filters. (enter as comma seperated line)"),
        make_option('--invalidate', dest='invalidate', default=False,
                    action='store_true',
                    help='Invalidates the associated objects in CloudFront')
    )

    help = 'Syncs the complete MEDIA_ROOT structure and files to S3 into the given bucket name.'
    args = 'bucket_name'

    # Tell Django this command may safely import settings at option-parse time.
    can_import_settings = True
|
||||
|
||||
def handle(self, *args, **options):
    """Validate required settings, copy configuration onto the instance,
    run the sync, and optionally invalidate CloudFront afterwards."""
    # Guard clauses: fail fast when required settings are missing or empty.
    if not hasattr(settings, 'AWS_ACCESS_KEY_ID') or not hasattr(settings, 'AWS_SECRET_ACCESS_KEY'):
        raise CommandError('Missing AWS keys from settings file. Please supply both AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY.')
    self.AWS_ACCESS_KEY_ID = settings.AWS_ACCESS_KEY_ID
    self.AWS_SECRET_ACCESS_KEY = settings.AWS_SECRET_ACCESS_KEY

    if not hasattr(settings, 'AWS_BUCKET_NAME'):
        raise CommandError('Missing bucket name from settings file. Please add the AWS_BUCKET_NAME to your settings file.')
    if not settings.AWS_BUCKET_NAME:
        raise CommandError('AWS_BUCKET_NAME cannot be empty.')
    self.AWS_BUCKET_NAME = settings.AWS_BUCKET_NAME

    # Both "unset" and "empty" MEDIA_ROOT raise the same message.
    if not hasattr(settings, 'MEDIA_ROOT') or not settings.MEDIA_ROOT:
        raise CommandError('MEDIA_ROOT must be set in your settings.')

    # Optional settings fall back to sensible defaults.
    self.AWS_CLOUDFRONT_DISTRIBUTION = getattr(settings, 'AWS_CLOUDFRONT_DISTRIBUTION', '')
    self.SYNC_S3_RENAME_GZIP_EXT = getattr(settings, 'SYNC_S3_RENAME_GZIP_EXT', '.gz')

    # Copy command-line options onto the instance so the worker methods
    # don't need them passed around.
    self.verbosity = int(options.get('verbosity'))
    self.prefix = options.get('prefix')
    self.do_gzip = options.get('gzip')
    self.rename_gzip = options.get('renamegzip')
    self.do_expires = options.get('expires')
    self.do_force = options.get('force')
    self.invalidate = options.get('invalidate')
    self.DIRECTORY = options.get('dir')

    # Exclusion filters: a settings value overrides the class default and
    # the command-line option overrides both.
    self.FILTER_LIST = getattr(settings, 'FILTER_LIST', self.FILTER_LIST)
    cli_filters = options.get('filter_list')
    if cli_filters:
        self.FILTER_LIST = cli_filters.split(',')

    # Walk the media directory and upload everything that needs it.
    self.sync_s3()

    # Send the CloudFront invalidation request if the user asked for it.
    if self.invalidate:
        self.invalidate_objects_cf()

    print("")
    print("%d files uploaded." % self.upload_count)
    print("%d files skipped." % self.skip_count)
|
||||
|
||||
def open_cf(self):
    """
    Returns an open connection to CloudFront
    """
    # Uses the credentials copied from settings in handle().
    return boto.connect_cloudfront(
        self.AWS_ACCESS_KEY_ID, self.AWS_SECRET_ACCESS_KEY)
|
||||
|
||||
def invalidate_objects_cf(self):
    """
    Split the invalidation request in groups of 1000 objects
    """
    if not self.AWS_CLOUDFRONT_DISTRIBUTION:
        raise CommandError(
            'An object invalidation was requested but the variable '
            'AWS_CLOUDFRONT_DISTRIBUTION is not present in your settings.')

    # CloudFront rejects invalidation batches larger than 1000 paths,
    # so issue one request per 1000-path slice of the uploaded files.
    batch_size = 1000
    connection = self.open_cf()
    uploaded = self.uploaded_files
    for start in range(0, len(uploaded), batch_size):
        connection.create_invalidation_request(
            self.AWS_CLOUDFRONT_DISTRIBUTION,
            uploaded[start:start + batch_size])
|
||||
|
||||
def sync_s3(self):
    """
    Walks the media directory and syncs files to S3
    """
    bucket, key = self.open_s3()
    # NOTE(review): os.path.walk is Python 2 only (removed in Python 3).
    # upload_s3 is invoked as the visitor callback for every directory,
    # receiving the (bucket, key, bucket_name, root_dir) tuple as its arg.
    os.path.walk(self.DIRECTORY, self.upload_s3, (bucket, key, self.AWS_BUCKET_NAME, self.DIRECTORY))
|
||||
|
||||
def compress_string(self, s):
    """Gzip a given string."""
    buf = StringIO()
    # compresslevel=6 matches the original behavior (gzip's own default is 9).
    archive = gzip.GzipFile(mode='wb', compresslevel=6, fileobj=buf)
    try:
        archive.write(s)
    finally:
        archive.close()
    return buf.getvalue()
|
||||
|
||||
def open_s3(self):
    """
    Opens connection to S3 returning bucket and key
    """
    conn = boto.connect_s3(self.AWS_ACCESS_KEY_ID, self.AWS_SECRET_ACCESS_KEY)
    try:
        bucket = conn.get_bucket(self.AWS_BUCKET_NAME)
    except boto.exception.S3ResponseError:
        # Bucket missing (or inaccessible): try to create it.
        bucket = conn.create_bucket(self.AWS_BUCKET_NAME)
    # A single reusable Key object; upload_s3 re-points key.name per file.
    return bucket, boto.s3.key.Key(bucket)
|
||||
|
||||
def upload_s3(self, arg, dirname, names):
    """
    This is the callback to os.path.walk and where much of the work happens

    arg is the (bucket, key, bucket_name, root_dir) tuple passed to
    os.path.walk; dirname is the directory being visited and names the
    entries inside it (mutable: clearing it prunes the walk).
    """
    bucket, key, bucket_name, root_dir = arg

    # Skip directories we don't want to sync
    if os.path.basename(dirname) in self.FILTER_LIST:
        # prevent walk from processing subfiles/subdirs below the ignored one
        del names[:]
        return

    # Later we assume the MEDIA_ROOT ends with a trailing slash
    if not root_dir.endswith(os.path.sep):
        root_dir = root_dir + os.path.sep

    for name in names:  # renamed from 'file', which shadowed the builtin
        headers = {}

        if name in self.FILTER_LIST:
            continue  # Skip files we don't want to sync

        filename = os.path.join(dirname, name)
        if os.path.isdir(filename):
            continue  # Don't try to upload directories

        # S3 key: path relative to the media root, optionally prefixed.
        file_key = filename[len(root_dir):]
        if self.prefix:
            file_key = '%s/%s' % (self.prefix, file_key)

        # Check if file on S3 is older than local file, if so, upload
        if not self.do_force:
            s3_key = bucket.get_key(file_key)
            if s3_key:
                s3_datetime = datetime.datetime(*time.strptime(
                    s3_key.last_modified, '%a, %d %b %Y %H:%M:%S %Z')[0:6])
                local_datetime = datetime.datetime.utcfromtimestamp(
                    os.stat(filename).st_mtime)
                if local_datetime < s3_datetime:
                    self.skip_count += 1
                    if self.verbosity > 1:
                        print("File %s hasn't been modified since last being uploaded" % file_key)
                    continue

        # File is newer, let's process and upload
        if self.verbosity > 0:
            print("Uploading %s..." % file_key)

        content_type = mimetypes.guess_type(filename)[0]
        if content_type:
            headers['Content-Type'] = content_type
        file_obj = open(filename, 'rb')
        try:
            file_size = os.fstat(file_obj.fileno()).st_size
            filedata = file_obj.read()
            if self.do_gzip:
                # Gzipping only if file is large enough (>1K is recommended)
                # and only if file is a common text type (not a binary file)
                if file_size > 1024 and content_type in self.GZIP_CONTENT_TYPES:
                    filedata = self.compress_string(filedata)
                    if self.rename_gzip:
                        # Append the configured extension (e.g. '.gz') to the
                        # key so the original asset is not replaced.
                        # BUGFIX: the extension setting already contains its
                        # leading dot; the old '%s.%s' format produced names
                        # like 'style.css..gz'.
                        file_key = '%s%s' % (
                            file_key, self.SYNC_S3_RENAME_GZIP_EXT)
                    headers['Content-Encoding'] = 'gzip'
                    if self.verbosity > 1:
                        print("\tgzipped: %dk to %dk" % (file_size / 1024, len(filedata) / 1024))
            if self.do_expires:
                # HTTP/1.0
                # BUGFIX: email.utils is the canonical module name;
                # email.Utils was a Python-2-only alias.
                headers['Expires'] = '%s GMT' % (email.utils.formatdate(time.mktime((datetime.datetime.now() + datetime.timedelta(days=365 * 2)).timetuple())))
                # HTTP/1.1
                # BUGFIX: the Cache-Control directive is 'max-age=N'; the
                # '=' was missing, producing an invalid header value.
                headers['Cache-Control'] = 'max-age=%d' % (3600 * 24 * 365 * 2)
                if self.verbosity > 1:
                    print("\texpires: %s" % headers['Expires'])
                    print("\tcache-control: %s" % headers['Cache-Control'])

            try:
                key.name = file_key
                key.set_contents_from_string(filedata, headers, replace=True)
                key.set_acl('public-read')
            except boto.exception.S3CreateError as e:
                print("Failed: %s" % e)
            except Exception as e:
                print(e)
                raise
            else:
                self.upload_count += 1
                self.uploaded_files.append(file_key)
        finally:
            # BUGFIX: close even when an upload raises — the old code leaked
            # the file handle on any exception path.
            file_obj.close()
|
||||
|
||||
# Backwards compatibility for Django r9110
# Older Django versions did not define a --verbosity option on BaseCommand;
# add one here only when it is missing so option parsing keeps working.
if not [opt for opt in Command.option_list if opt.dest == 'verbosity']:
    Command.option_list += (
        make_option('-v', '--verbosity',
            dest='verbosity', default=1, action='count',
            help="Verbose mode. Multiple -v options increase the verbosity."),
    )
|
|
@ -1,210 +0,0 @@
|
|||
"""
|
||||
SyncData
|
||||
========
|
||||
|
||||
Django command similar to 'loaddata' but also deletes.
|
||||
After 'syncdata' has run, the database will have the same data as the fixture - anything
missing will have been added, anything different will have been updated,
and anything extra will have been deleted.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
import six
|
||||
from django.core.management.base import BaseCommand
|
||||
from django.core.management.color import no_style
|
||||
from optparse import make_option
|
||||
|
||||
|
||||
class Command(BaseCommand):
    """ syncdata command """

    # Shown by `manage.py help syncdata`; takes one or more fixture labels,
    # optionally suffixed with a serialization format ('name.json').
    help = 'Makes the current database have the same data as the fixture(s), no more, no less.'
    args = "fixture [fixture ...]"
||||
def remove_objects_not_in(self, objects_to_keep, verbosity):
    """
    Deletes all the objects in the database that are not in objects_to_keep.
    - objects_to_keep: A map where the keys are classes, and the values are a
     set of the objects of that class we should keep.
    """
    for model_class in objects_to_keep.keys():
        queryset = model_class.objects.all()
        existing_ids = set(item.id for item in queryset)
        wanted_ids = set(item.id for item in objects_to_keep[model_class])
        stale_ids = existing_ids - wanted_ids

        if stale_ids:
            # Delete row-by-row so per-model delete() behavior still runs.
            for item in queryset:
                if item.id in stale_ids:
                    item.delete()
                    if verbosity >= 2:
                        print("Deleted object: %s" % six.u(item))

            if verbosity > 0:
                deleted = len(stale_ids)
                if deleted > 1:
                    noun = six.u(model_class._meta.verbose_name_plural)
                else:
                    noun = six.u(model_class._meta.verbose_name)
                print("Deleted %s %s" % (str(deleted), noun))
|
||||
|
||||
def handle(self, *fixture_labels, **options):
    """ Main method of a Django command """
    # Deferred imports: these touch Django settings/DB machinery, so they
    # run only when the command is actually invoked.
    from django.db.models import get_apps
    from django.core import serializers
    from django.db import connection, transaction
    from django.conf import settings

    self.style = no_style()

    verbosity = int(options.get('verbosity', 1))
    show_traceback = options.get('traceback', False)

    # Keep a count of the installed objects and fixtures
    fixture_count = 0
    object_count = 0
    objects_per_fixture = []
    models = set()

    # Pretty-print a fixture directory for messages ('' -> 'absolute path').
    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'

    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()

    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    # NOTE(review): this transaction API (commit_unless_managed /
    # enter_transaction_management / managed) belongs to old Django releases.
    transaction.commit_unless_managed()
    transaction.enter_transaction_management()
    transaction.managed(True)

    # Candidate fixture locations: every installed app's fixtures/ directory.
    app_fixtures = [os.path.join(os.path.dirname(app.__file__), 'fixtures') for app in get_apps()]
    for fixture_label in fixture_labels:
        parts = fixture_label.split('.')
        if len(parts) == 1:
            # Bare name: try every known serialization format.
            fixture_name = fixture_label
            formats = serializers.get_public_serializer_formats()
        else:
            # 'name.format': restrict to that single format if it is known.
            fixture_name, format = '.'.join(parts[:-1]), parts[-1]
            if format in serializers.get_public_serializer_formats():
                formats = [format]
            else:
                formats = []

        if formats:
            if verbosity > 1:
                print("Loading '%s' fixtures..." % fixture_name)
        else:
            sys.stderr.write(self.style.ERROR("Problem installing fixture '%s': %s is not a known serialization format." % (fixture_name, format)))
            transaction.rollback()
            transaction.leave_transaction_management()
            return

        if os.path.isabs(fixture_name):
            fixture_dirs = [fixture_name]
        else:
            # The trailing '' lets relative names resolve against the cwd.
            fixture_dirs = app_fixtures + list(settings.FIXTURE_DIRS) + ['']

        for fixture_dir in fixture_dirs:
            if verbosity > 1:
                print("Checking %s for fixtures..." % humanize(fixture_dir))

            label_found = False
            for format in formats:
                #serializer = serializers.get_serializer(format)
                if verbosity > 1:
                    print("Trying %s for %s fixture '%s'..." % (humanize(fixture_dir), format, fixture_name))
                try:
                    full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
                    fixture = open(full_path, 'r')
                    if label_found:
                        # Same fixture name matched twice in one directory:
                        # ambiguous, so abort the whole run.
                        fixture.close()
                        print(self.style.ERROR("Multiple fixtures named '%s' in %s. Aborting." % (fixture_name, humanize(fixture_dir))))
                        transaction.rollback()
                        transaction.leave_transaction_management()
                        return
                    else:
                        fixture_count += 1
                        objects_per_fixture.append(0)
                        if verbosity > 0:
                            print("Installing %s fixture '%s' from %s." % (format, fixture_name, humanize(fixture_dir)))
                        try:
                            # Group deserialized objects by model class so the
                            # 'sync' deletion pass below knows what to keep.
                            objects_to_keep = {}
                            objects = serializers.deserialize(format, fixture)
                            for obj in objects:
                                object_count += 1
                                objects_per_fixture[-1] += 1

                                class_ = obj.object.__class__
                                if not class_ in objects_to_keep:
                                    objects_to_keep[class_] = set()
                                objects_to_keep[class_].add(obj.object)

                                models.add(class_)
                                obj.save()

                            # The part that differs from loaddata: delete every
                            # row that is not present in the fixture.
                            self.remove_objects_not_in(objects_to_keep, verbosity)

                            label_found = True
                        except (SystemExit, KeyboardInterrupt):
                            raise
                        except Exception:
                            import traceback
                            fixture.close()
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            if show_traceback:
                                traceback.print_exc()
                            else:
                                sys.stderr.write(self.style.ERROR("Problem installing fixture '%s': %s\n" % (full_path, traceback.format_exc())))
                            return
                        fixture.close()
                except:
                    # NOTE(review): bare except — any failure to open the
                    # candidate path is treated as "fixture not present here".
                    if verbosity > 1:
                        print("No %s fixture '%s' in %s." % (format, fixture_name, humanize(fixture_dir)))

    # If any of the fixtures we loaded contain 0 objects, assume that an
    # error was encountered during fixture loading.
    if 0 in objects_per_fixture:
        sys.stderr.write(
            self.style.ERROR("No fixture data found for '%s'. (File format may be invalid.)" % (fixture_name)))
        transaction.rollback()
        transaction.leave_transaction_management()
        return

    # If we found even one object in a fixture, we need to reset the
    # database sequences.
    if object_count > 0:
        sequence_sql = connection.ops.sequence_reset_sql(self.style, models)
        if sequence_sql:
            if verbosity > 1:
                print("Resetting sequences")
            for line in sequence_sql:
                cursor.execute(line)

    transaction.commit()
    transaction.leave_transaction_management()

    if object_count == 0:
        if verbosity > 1:
            print("No fixtures found.")
    else:
        if verbosity > 0:
            print("Installed %d object(s) from %d fixture(s)" % (object_count, fixture_count))

    # Close the DB connection. This is required as a workaround for an
    # edge case in MySQL: if the same connection is used to
    # create tables, load data, and query, the query can return
    # incorrect results. See Django #7572, MySQL #37735.
    connection.close()
|
||||
|
||||
# Backwards compatibility for Django r9110
# Older Django versions did not define a --verbosity option on BaseCommand;
# add one here only when it is missing so option parsing keeps working.
if not [opt for opt in Command.option_list if opt.dest == 'verbosity']:
    Command.option_list += (
        make_option('--verbosity', '-v', action="store", dest="verbosity",
            default='1', type='choice', choices=['0', '1', '2'],
            help="Verbosity level; 0=minimal output, 1=normal output, 2=all output"),
    )
|
|
@ -1,47 +0,0 @@
|
|||
from collections import defaultdict
|
||||
import os
|
||||
from django.conf import settings
|
||||
from django.core.management.base import NoArgsCommand
|
||||
from django.db import models
|
||||
from django.db.models.loading import cache
|
||||
|
||||
|
||||
class Command(NoArgsCommand):
    help = "Prints a list of all files in MEDIA_ROOT that are not referenced in the database."

    def handle_noargs(self, **options):
        """Walk MEDIA_ROOT, collect every file path referenced by a
        FileField in the database, and print the unreferenced ones."""
        if settings.MEDIA_ROOT == '':
            print("MEDIA_ROOT is not set, nothing to do")
            return

        # Get a list of all files under MEDIA_ROOT (absolute paths).
        media = []
        for root, dirs, files in os.walk(settings.MEDIA_ROOT):
            for f in files:
                media.append(os.path.abspath(os.path.join(root, f)))

        # Get list of all fields (value) for each model (key)
        # that is a FileField or subclass of a FileField
        model_dict = defaultdict(list)
        for app in cache.get_apps():
            for model in cache.get_models(app):
                for field in model._meta.fields:
                    if issubclass(field.__class__, models.FileField):
                        model_dict[model].append(field)

        # Get the set of all files referenced in the database.
        # BUGFIX/perf: a set makes the membership test below O(1) — the old
        # list made the final loop O(len(media) * len(referenced)).
        # Also: plain iteration replaces Py2-only iterkeys(), and the locals
        # no longer shadow the builtins 'all' and 'object'.
        referenced = set()
        for model, fields in model_dict.items():
            for instance in model.objects.all().iterator():
                for field in fields:
                    target_file = getattr(instance, field.name)
                    if target_file:
                        referenced.add(os.path.abspath(target_file.path))

        # Print each file in MEDIA_ROOT that is not referenced in the database
        for m in media:
            if m not in referenced:
                print(m)
|
|
@ -1,21 +0,0 @@
|
|||
from django.core.management.base import BaseCommand
|
||||
from django.db.models import get_models, get_app
|
||||
from django.contrib.auth.management import create_permissions
|
||||
|
||||
|
||||
class Command(BaseCommand):
    args = '<app app ...>'
    help = 'reloads permissions for specified apps, or all apps if no args are specified'

    def handle(self, *args, **options):
        """Re-run Django's create_permissions for the requested apps
        (or every app that has at least one installed model)."""
        if not args:
            # Derive the app list from every installed model.
            # BUGFIX: apps with several models used to appear once per model,
            # so create_permissions ran repeatedly for the same app; dedupe
            # while preserving first-seen order.
            apps = []
            seen = set()
            for model in get_models():
                app = get_app(model._meta.app_label)
                if app not in seen:
                    seen.add(app)
                    apps.append(app)
        else:
            apps = [get_app(arg) for arg in args]
        for app in apps:
            create_permissions(app, get_models(), options.get('verbosity', 0))
|
||||
|
|
@ -1,71 +0,0 @@
|
|||
import os
|
||||
from optparse import make_option
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from django.core.management.color import color_style
|
||||
from django.template.base import add_to_builtins
|
||||
from django.template.loaders.filesystem import Loader
|
||||
from django_extensions.utils import validatingtemplatetags
|
||||
|
||||
#
|
||||
# TODO: Render the template with fake request object ?
|
||||
#
|
||||
|
||||
|
||||
class Command(BaseCommand):
    args = ''
    help = "Validate templates on syntax and compile errors"
    option_list = BaseCommand.option_list + (
        make_option('--break', '-b', action='store_true', dest='break',
                    default=False, help="Break on first error."),
        make_option('--check-urls', '-u', action='store_true', dest='check_urls',
                    default=False, help="Check url tag view names are quoted appropriately"),
        make_option('--force-new-urls', '-n', action='store_true', dest='force_new_urls',
                    default=False, help="Error on usage of old style url tags (without {% load urls from future %}"),
        make_option('--include', '-i', action='append', dest='includes',
                    default=[], help="Append these paths to TEMPLATE_DIRS")
    )

    def handle(self, *args, **options):
        """Walk every template directory, try to compile each template, and
        report (optionally stopping at) syntax/compile errors."""
        from django.conf import settings
        style = color_style()
        # Build the template search path: configured dirs, --include dirs,
        # and VALIDATE_TEMPLATES_EXTRA_TEMPLATE_DIRS from settings.
        template_dirs = set(settings.TEMPLATE_DIRS)
        template_dirs |= set(options.get('includes', []))
        template_dirs |= set(getattr(settings, 'VALIDATE_TEMPLATES_EXTRA_TEMPLATE_DIRS', []))
        settings.TEMPLATE_DIRS = list(template_dirs)
        # TEMPLATE_DEBUG makes compile errors carry origin/line details.
        settings.TEMPLATE_DEBUG = True
        verbosity = int(options.get('verbosity', 1))
        errors = 0

        template_loader = Loader()

        # Replace built in template tags with our own validating versions
        if options.get('check_urls', False):
            add_to_builtins('django_extensions.utils.validatingtemplatetags')

        for template_dir in template_dirs:
            for root, dirs, filenames in os.walk(template_dir):
                for filename in filenames:
                    # Skip editor swap/backup files.
                    if filename.endswith(".swp"):
                        continue
                    if filename.endswith("~"):
                        continue
                    filepath = os.path.join(root, filename)
                    if verbosity > 1:
                        print(filepath)
                    # Reset the validating tag library's per-template state.
                    validatingtemplatetags.before_new_template(options.get('force_new_urls', False))
                    try:
                        # NOTE(review): loads by bare filename with [root] as
                        # the search path — each walked directory is treated
                        # as its own template root.
                        template_loader.load_template(filename, [root])
                    except Exception as e:
                        errors += 1
                        print("%s: %s" % (filepath, style.ERROR("%s %s" % (e.__class__.__name__, str(e)))))
                    # Collect url-tag problems recorded by the validating
                    # tag library while this template compiled.
                    template_errors = validatingtemplatetags.get_template_errors()
                    for origin, line, message in template_errors:
                        errors += 1
                        print("%s(%s): %s" % (origin, line, style.ERROR(message)))
                    if errors and options.get('break', False):
                        raise CommandError("Errors found")

        if errors:
            raise CommandError("%s errors found" % errors)
        # Reached only when errors == 0.
        print("%s errors found" % errors)
|
||||
|
|
@ -1,177 +0,0 @@
|
|||
"""
|
||||
django_extensions.management.jobs
|
||||
"""
|
||||
|
||||
import os
|
||||
from imp import find_module
|
||||
|
||||
# Placeholder for a module-level job cache.
# NOTE(review): get_jobs() assigns a *local* _jobs, so this global is never
# actually populated.
_jobs = None
|
||||
|
||||
|
||||
def noneimplementation(meth):
    # Helper that discards its argument and yields None.
    # NOTE(review): appears unused within this module.
    return None
|
||||
|
||||
|
||||
class JobError(Exception):
    """Raised for malformed job modules or schedule mismatches."""
    pass
|
||||
|
||||
|
||||
class BaseJob(object):
    # Human-readable description shown by print_jobs().
    help = "undefined job description."
    # Schedule bucket name ('minutely', 'hourly', ...); None = unscheduled.
    when = None

    def execute(self):
        # Subclasses must override this with the job's actual work.
        raise NotImplementedError("Job needs to implement the execute method")
|
||||
|
||||
|
||||
# Convenience base classes: subclass one of these and implement execute()
# to register a job in the corresponding schedule bucket.
class MinutelyJob(BaseJob):
    when = "minutely"


class QuarterHourlyJob(BaseJob):
    when = "quarter_hourly"


class HourlyJob(BaseJob):
    when = "hourly"


class DailyJob(BaseJob):
    when = "daily"


class WeeklyJob(BaseJob):
    when = "weekly"


class MonthlyJob(BaseJob):
    when = "monthly"


class YearlyJob(BaseJob):
    when = "yearly"
|
||||
|
||||
|
||||
def my_import(name):
    """Import a dotted module path and return the leaf module.

    __import__('a.b.c') returns package 'a', so walk the remaining
    components with getattr to reach the actual submodule.
    """
    module = __import__(name)
    for part in name.split('.')[1:]:
        module = getattr(module, part)
    return module
|
||||
|
||||
|
||||
def find_jobs(jobs_dir):
    """Return candidate job module names (``.py`` suffix stripped) found in
    jobs_dir; a missing or unreadable directory yields an empty list."""
    try:
        entries = os.listdir(jobs_dir)
    except OSError:
        return []
    return [entry[:-3] for entry in entries
            if entry.endswith(".py") and not entry.startswith('_')]
|
||||
|
||||
|
||||
def find_job_module(app_name, when=None):
    """Return the filesystem path of app_name's jobs (or jobs.<when>) package.

    Propagates ImportError from imp.find_module when the package does not
    exist; callers rely on that to skip apps without jobs.
    """
    parts = app_name.split('.')
    parts.append('jobs')
    if when:
        parts.append(when)
    parts.reverse()
    path = None
    while parts:
        part = parts.pop()
        # Walk the dotted path one component at a time, constraining each
        # find_module() lookup to the package path found so far.
        # NOTE(review): the imp module is deprecated on Python 3 in favour
        # of importlib.
        f, path, descr = find_module(part, path and [path] or None)
    return path
|
||||
|
||||
|
||||
def import_job(app_name, name, when=None):
    """Import and return the Job class from <app_name>.jobs[.<when>].<name>.

    Raises JobError when the module has no Job attribute or the job's
    schedule does not match the requested one.
    """
    jobmodule = "%s.jobs.%s%s" % (app_name, when and "%s." % when or "", name)
    job_mod = my_import(jobmodule)
    try:
        job = job_mod.Job
    except AttributeError:
        # BUGFIX: this was a bare except, which also swallowed unrelated
        # errors (even SystemExit/KeyboardInterrupt). Only the attribute
        # lookup failure means "no Job class here".
        raise JobError("Job module %s does not contain class instance named 'Job'" % jobmodule)
    if when and not (job.when == when or job.when is None):
        raise JobError("Job %s is not a %s job." % (jobmodule, when))
    return job
|
||||
|
||||
|
||||
def get_jobs(when=None, only_scheduled=False):
    """
    Returns a dictionary mapping of job names together with their respective
    application class.
    """
    # FIXME: HACK: make sure the project dir is on the path when executed as ./manage.py
    import sys
    try:
        cpath = os.path.dirname(os.path.realpath(sys.argv[0]))
        ppath = os.path.dirname(cpath)
        if ppath not in sys.path:
            sys.path.append(ppath)
    except Exception:
        # Best effort only: a failed path fix-up must never break job
        # discovery. (BUGFIX: was a bare except, which also swallowed
        # SystemExit/KeyboardInterrupt.)
        pass
    _jobs = {}
    # BUGFIX: removed a dead 'if True:' wrapper that only added indentation.
    from django.conf import settings
    for app_name in settings.INSTALLED_APPS:
        # Scan the generic jobs/ package plus every schedule subpackage,
        # or only the requested schedule when 'when' is given.
        scandirs = (None, 'minutely', 'quarter_hourly', 'hourly', 'daily', 'weekly', 'monthly', 'yearly')
        if when:
            scandirs = None, when
        for subdir in scandirs:
            try:
                path = find_job_module(app_name, subdir)
                for name in find_jobs(path):
                    if (app_name, name) in _jobs:
                        raise JobError("Duplicate job %s" % name)
                    job = import_job(app_name, name, subdir)
                    if only_scheduled and job.when is None:
                        # only include jobs which are scheduled
                        continue
                    if when and job.when != when:
                        # generic job not in same schedule
                        continue
                    _jobs[(app_name, name)] = job
            except ImportError:
                # No job module -- continue scanning
                pass
    return _jobs
|
||||
|
||||
|
||||
def get_job(app_name, job_name):
    """Look up a job by (app_name, job_name); a falsy app_name searches every
    app for the first job with a matching name."""
    jobs = get_jobs()
    if app_name:
        return jobs[(app_name, job_name)]
    for candidate_app, candidate_name in jobs.keys():
        if candidate_name == job_name:
            return jobs[(candidate_app, candidate_name)]
    raise KeyError("Job not found: %s" % job_name)
|
||||
|
||||
|
||||
def print_jobs(when=None, only_scheduled=False, show_when=True, show_appname=False, show_header=True):
    """Print a formatted table of discovered jobs."""
    jobmap = get_jobs(when, only_scheduled=only_scheduled)
    print("Job List: %i jobs" % len(jobmap))
    if not jobmap:
        # BUGFIX: the max() calls below raise ValueError on an empty job map.
        return
    # BUGFIX: dict.keys() has no .sort() on Python 3; sorted() works on both.
    jlist = sorted(jobmap.keys())
    appname_spacer = "%%-%is" % max(len(e[0]) for e in jlist)
    name_spacer = "%%-%is" % max(len(e[1]) for e in jlist)
    when_lengths = [len(e.when) for e in jobmap.values() if e.when]
    # BUGFIX: guard against an all-unscheduled job set (empty max() crashed).
    when_spacer = ("%%-%is" % max(when_lengths)) if when_lengths else "%-4s"
    if show_header:
        line = " "
        if show_appname:
            line += appname_spacer % "appname" + " - "
        line += name_spacer % "jobname"
        if show_when:
            line += " - " + when_spacer % "when"
        line += " - help"
        print(line)
        print("-" * 80)

    for app_name, job_name in jlist:
        job = jobmap[(app_name, job_name)]
        line = " "
        if show_appname:
            line += appname_spacer % app_name + " - "
        line += name_spacer % job_name
        if show_when:
            line += " - " + when_spacer % (job.when and job.when or "")
        line += " - " + job.help
        print(line)
|
|
@ -1,274 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
"""
|
||||
Django model to DOT (Graphviz) converter
|
||||
by Antonio Cavedoni <antonio@cavedoni.org>
|
||||
|
||||
Adapted to be used with django-extensions
|
||||
"""
|
||||
|
||||
# Metadata from the original standalone modelviz script.
__version__ = "0.9"
__license__ = "Python"
__author__ = "Antonio Cavedoni <http://cavedoni.com/>"
__contributors__ = [
    "Stefano J. Attardi <http://attardi.org/>",
    "limodou <http://www.donews.net/limodou/>",
    "Carlo C8E Miron",
    "Andre Campos <cahenan@gmail.com>",
    "Justin Findlay <jfindlay@gmail.com>",
    "Alexander Houben <alexander@houben.ch>",
    "Bas van Oostveen <v.oostveen@gmail.com>",
    "Joern Hees <gitdev@joernhees.de>",
]
|
||||
|
||||
import os
|
||||
|
||||
from django.utils.translation import activate as activate_language
|
||||
from django.utils.safestring import mark_safe
|
||||
from django.template import Context, loader
|
||||
from django.db import models
|
||||
from django.db.models import get_models
|
||||
from django.db.models.fields.related import \
|
||||
ForeignKey, OneToOneField, ManyToManyField, RelatedField
|
||||
|
||||
try:
|
||||
from django.db.models.fields.generic import GenericRelation
|
||||
assert GenericRelation
|
||||
except ImportError:
|
||||
from django.contrib.contenttypes.generic import GenericRelation
|
||||
|
||||
|
||||
def parse_file_or_list(arg):
    """Normalize an option value into a list of strings.

    arg may be falsy (-> []), a path to an existing file with one entry per
    line, or a comma separated string.
    """
    if not arg:
        return []
    # Idiom fix: ',' not in arg (was "not ',' in arg").
    if ',' not in arg and os.path.isfile(arg):
        # BUGFIX: the file handle was previously leaked; close it
        # deterministically with a context manager.
        with open(arg) as fh:
            return [line.strip() for line in fh.readlines()]
    return arg.split(',')
|
||||
|
||||
|
||||
def generate_dot(app_labels, **kwargs):
    """Render a Graphviz DOT description of the models in the given apps.

    ``app_labels`` is an iterable of Django app labels to graph.  Recognized
    keyword options:

    disable_fields    -- omit field rows from each model node
    include_models    -- comma-separated string or file of model names to keep
    exclude_models    -- model names to drop (same format)
    exclude_columns   -- field names (or verbose names) to skip
    all_applications  -- graph every installed app, not only ``app_labels``
    group_models      -- wrap each app in a DOT subgraph cluster
    verbose_names     -- label nodes/edges with verbose_name instead of name
    inheritance       -- also draw model-inheritance edges
    language          -- locale to activate before rendering labels

    Returns the complete DOT source as a string, assembled from the
    ``django_extensions/graph_models/{head,body,rel,tail}.html`` templates.
    """
    disable_fields = kwargs.get('disable_fields', False)
    include_models = parse_file_or_list(kwargs.get('include_models', ""))
    all_applications = kwargs.get('all_applications', False)
    use_subgraph = kwargs.get('group_models', False)
    verbose_names = kwargs.get('verbose_names', False)
    inheritance = kwargs.get('inheritance', False)
    language = kwargs.get('language', None)
    if language is not None:
        activate_language(language)
    exclude_columns = parse_file_or_list(kwargs.get('exclude_columns', ""))
    exclude_models = parse_file_or_list(kwargs.get('exclude_models', ""))

    # True if `field` is excluded by name or (when enabled) verbose name.
    def skip_field(field):
        if exclude_columns:
            if verbose_names and field.verbose_name:
                if field.verbose_name in exclude_columns:
                    return True
            if field.name in exclude_columns:
                return True
        return False

    # DOT preamble comes from the "head" template.
    t = loader.get_template('django_extensions/graph_models/head.html')
    c = Context({})
    dot = t.render(c)

    apps = []
    if all_applications:
        apps = models.get_apps()

    # Explicit labels are appended after (without duplicating) the full list.
    for app_label in app_labels:
        app = models.get_app(app_label)
        if not app in apps:
            apps.append(app)

    graphs = []
    for app in apps:
        # One template context per app; rendered by the "body" template below.
        graph = Context({
            'name': '"%s"' % app.__name__,
            'app_name': "%s" % '.'.join(app.__name__.split('.')[:-1]),
            'cluster_app_name': "cluster_%s" % app.__name__.replace(".", "_"),
            'disable_fields': disable_fields,
            'use_subgraph': use_subgraph,
            'models': []
        })

        appmodels = get_models(app)
        abstract_models = []
        # Abstract bases don't appear in get_models(); collect them from the
        # concrete models' bases so they get their own nodes too.
        for appmodel in appmodels:
            abstract_models = abstract_models + [abstract_model for abstract_model in appmodel.__bases__ if hasattr(abstract_model, '_meta') and abstract_model._meta.abstract]
        abstract_models = list(set(abstract_models))  # remove duplicates
        appmodels = abstract_models + appmodels

        for appmodel in appmodels:
            appmodel_abstracts = [abstract_model.__name__ for abstract_model in appmodel.__bases__ if hasattr(abstract_model, '_meta') and abstract_model._meta.abstract]

            # collect all attribs of abstract superclasses
            def getBasesAbstractFields(c):
                _abstract_fields = []
                for e in c.__bases__:
                    if hasattr(e, '_meta') and e._meta.abstract:
                        _abstract_fields.extend(e._meta.fields)
                        _abstract_fields.extend(getBasesAbstractFields(e))
                return _abstract_fields
            abstract_fields = getBasesAbstractFields(appmodel)

            model = {
                'app_name': appmodel.__module__.replace(".", "_"),
                'name': appmodel.__name__,
                'abstracts': appmodel_abstracts,
                'fields': [],
                'relations': []
            }

            # consider given model name? (include/exclude filtering)
            def consider(model_name):
                if exclude_models and model_name in exclude_models:
                    return False
                return not include_models or model_name in include_models

            if not consider(appmodel._meta.object_name):
                continue

            if verbose_names and appmodel._meta.verbose_name:
                model['label'] = appmodel._meta.verbose_name
            else:
                model['label'] = model['name']

            # model attributes: append one row dict per non-relational field
            def add_attributes(field):
                if verbose_names and field.verbose_name:
                    label = field.verbose_name
                else:
                    label = field.name

                t = type(field).__name__
                if isinstance(field, (OneToOneField, ForeignKey)):
                    t += " ({0})".format(field.rel.field_name)
                # TODO: ManyToManyField, GenericRelation

                model['fields'].append({
                    'name': field.name,
                    'label': label,
                    'type': t,
                    'blank': field.blank,
                    'abstract': field in abstract_fields,
                })

            # Find all the real attributes. Relations are depicted as graph edges instead of attributes
            attributes = [field for field in appmodel._meta.local_fields if not isinstance(field, RelatedField)]

            # find primary key and print it first, ignoring implicit id if other pk exists
            pk = appmodel._meta.pk
            if not appmodel._meta.abstract and pk in attributes:
                add_attributes(pk)
            for field in attributes:
                if skip_field(field):
                    continue
                if not field.primary_key:
                    add_attributes(field)

            # FIXME: actually many_to_many fields aren't saved in this model's db table, so why should we add an attribute-line for them in the resulting graph?
            #if appmodel._meta.many_to_many:
            #    for field in appmodel._meta.many_to_many:
            #        if skip_field(field):
            #            continue
            #        add_attributes(field)

            # relations: append one edge dict per relational field
            def add_relation(field, extras=""):
                if verbose_names and field.verbose_name:
                    label = field.verbose_name
                else:
                    label = field.name

                # show related field name
                if hasattr(field, 'related_query_name'):
                    label += ' (%s)' % field.related_query_name()

                # handle self-relationships
                if field.rel.to == 'self':
                    target_model = field.model
                else:
                    target_model = field.rel.to

                _rel = {
                    'target_app': target_model.__module__.replace('.', '_'),
                    'target': target_model.__name__,
                    'type': type(field).__name__,
                    'name': field.name,
                    'label': label,
                    'arrows': extras,
                    'needs_node': True
                }
                if _rel not in model['relations'] and consider(_rel['target']):
                    model['relations'].append(_rel)

            for field in appmodel._meta.local_fields:
                if field.attname.endswith('_ptr_id'):  # excluding field redundant with inheritance relation
                    continue
                if field in abstract_fields:  # excluding fields inherited from abstract classes. they too show as local_fields
                    continue
                if skip_field(field):
                    continue
                if isinstance(field, OneToOneField):
                    add_relation(field, '[arrowhead=none, arrowtail=none, dir=both]')
                elif isinstance(field, ForeignKey):
                    add_relation(field, '[arrowhead=none, arrowtail=dot, dir=both]')

            for field in appmodel._meta.local_many_to_many:
                if skip_field(field):
                    continue
                if isinstance(field, ManyToManyField):
                    # Only draw auto-created through tables; explicit through
                    # models get their own node with two FK edges instead.
                    if (getattr(field, 'creates_table', False) or  # django 1.1.
                        (hasattr(field.rel.through, '_meta') and field.rel.through._meta.auto_created)):  # django 1.2
                        add_relation(field, '[arrowhead=dot arrowtail=dot, dir=both]')
                elif isinstance(field, GenericRelation):
                    add_relation(field, mark_safe('[style="dotted", arrowhead=normal, arrowtail=normal, dir=both]'))

            if inheritance:
                # add inheritance arrows
                for parent in appmodel.__bases__:
                    if hasattr(parent, "_meta"):  # parent is a model
                        l = "multi-table"
                        if parent._meta.abstract:
                            l = "abstract"
                        if appmodel._meta.proxy:
                            l = "proxy"
                        l += r"\ninheritance"
                        _rel = {
                            'target_app': parent.__module__.replace(".", "_"),
                            'target': parent.__name__,
                            'type': "inheritance",
                            'name': "inheritance",
                            'label': l,
                            'arrows': '[arrowhead=empty, arrowtail=none, dir=both]',
                            'needs_node': True
                        }
                        # TODO: seems as if abstract models aren't part of models.getModels, which is why they are printed by this without any attributes.
                        if _rel not in model['relations'] and consider(_rel['target']):
                            model['relations'].append(_rel)

            graph['models'].append(model)
        graphs.append(graph)

    # Every node name that will actually be drawn, across all apps.
    nodes = []
    for graph in graphs:
        nodes.extend([e['name'] for e in graph['models']])

    for graph in graphs:
        # don't draw duplication nodes because of relations
        for model in graph['models']:
            for relation in model['relations']:
                if relation['target'] in nodes:
                    relation['needs_node'] = False
        # render templates
        t = loader.get_template('django_extensions/graph_models/body.html')
        dot += '\n' + t.render(graph)

    # Edges are rendered in a second pass so all nodes exist first.
    for graph in graphs:
        t = loader.get_template('django_extensions/graph_models/rel.html')
        dot += '\n' + t.render(graph)

    t = loader.get_template('django_extensions/graph_models/tail.html')
    c = Context({})
    dot += '\n' + t.render(c)
    return dot
|
|
@ -1,6 +0,0 @@
|
|||
def load_ipython_extension(ipython):
    """IPython extension hook: inject Django models/settings into the shell.

    Delegates the discovery work to ``import_objects`` and pushes the
    resulting mapping into the interactive namespace.
    """
    from django.core.management.color import no_style
    from django_extensions.management.shells import import_objects

    namespace = import_objects(options={'dont_load': []}, style=no_style())
    ipython.push(namespace)
|
|
@ -1,58 +0,0 @@
|
|||
|
||||
|
||||
class ObjectImportError(Exception):
    """Raised when an object cannot be imported into the shell namespace."""
|
||||
|
||||
|
||||
def import_objects(options, style):
    """Build a dict of auto-imported objects for an enhanced Django shell.

    ``options`` is a mapping with ``dont_load`` (list of ``app`` or
    ``app.Model`` names to skip) and optional ``quiet_load`` (suppress the
    per-app status output).  ``style`` is a Django management color style
    used to format the printed messages.

    Honors the ``SHELL_PLUS_DONT_LOAD`` and ``SHELL_PLUS_MODEL_ALIASES``
    settings.  Returns ``{'settings': settings, alias: model, ...}``.
    """
    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
    # models from installed apps. (this is fixed by now, but leaving it here
    # for people using 0.96 or older trunk (pre [5919]) versions.
    from django.db.models.loading import get_models, get_apps
    loaded_models = get_models()  # NOQA

    from django.conf import settings
    imported_objects = {'settings': settings}

    dont_load_cli = options.get('dont_load')  # optparse will set this to [] if it doesn't exist
    dont_load_conf = getattr(settings, 'SHELL_PLUS_DONT_LOAD', [])
    dont_load = dont_load_cli + dont_load_conf
    quiet_load = options.get('quiet_load')

    model_aliases = getattr(settings, 'SHELL_PLUS_MODEL_ALIASES', {})

    for app_mod in get_apps():
        app_models = get_models(app_mod)
        if not app_models:
            continue

        # get_apps() yields the "<app>.models" modules; [-2] is the app name.
        app_name = app_mod.__name__.split('.')[-2]
        if app_name in dont_load:
            continue

        app_aliases = model_aliases.get(app_name, {})
        model_labels = []

        for model in app_models:
            try:
                imported_object = getattr(__import__(app_mod.__name__, {}, {}, model.__name__), model.__name__)
                model_name = model.__name__

                # Per-model exclusion uses the "app.Model" form.
                if "%s.%s" % (app_name, model_name) in dont_load:
                    continue

                alias = app_aliases.get(model_name, model_name)
                imported_objects[alias] = imported_object
                if model_name == alias:
                    model_labels.append(model_name)
                else:
                    model_labels.append("%s (as %s)" % (model_name, alias))

            except AttributeError as e:
                # Best-effort: report the failure (unless quiet) and keep going.
                if not quiet_load:
                    print(style.ERROR("Failed to import '%s' from '%s' reason: %s" % (model.__name__, app_name, str(e))))
                continue
        if not quiet_load:
            print(style.SQL_COLTYPE("From '%s' autoload: %s" % (app_mod.__name__.split('.')[-2], ", ".join(model_labels))))

    return imported_objects
|
|
@ -1,12 +0,0 @@
|
|||
"""
|
||||
signals we use to trigger regular batch jobs
|
||||
"""
|
||||
from django.dispatch import Signal
|
||||
|
||||
run_minutely_jobs = Signal()
|
||||
run_quarter_hourly_jobs = Signal()
|
||||
run_hourly_jobs = Signal()
|
||||
run_daily_jobs = Signal()
|
||||
run_weekly_jobs = Signal()
|
||||
run_monthly_jobs = Signal()
|
||||
run_yearly_jobs = Signal()
|
|
@ -1,6 +0,0 @@
|
|||
import six
|
||||
|
||||
|
||||
def null_technical_500_response(request, exc_type, exc_value, tb):
    """Re-raise the original exception with its traceback.

    ``six.reraise`` papers over the Python 2/3 raise-syntax difference.
    The ``request`` argument is accepted but unused, matching the signature
    of the handler this function stands in for.
    """
    six.reraise(exc_type, exc_value, tb)
|
||||
|
|
@ -1,55 +0,0 @@
|
|||
from django.conf import settings
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
|
||||
|
||||
def get_project_root():
    """Return the absolute directory containing the project settings module."""
    module = __import__(settings.SETTINGS_MODULE, {}, {}, [''])
    return os.path.dirname(os.path.abspath(module.__file__))
|
||||
|
||||
|
||||
def _make_writeable(filename):
|
||||
"""
|
||||
Make sure that the file is writeable. Useful if our source is
|
||||
read-only.
|
||||
|
||||
"""
|
||||
import stat
|
||||
if sys.platform.startswith('java'):
|
||||
# On Jython there is no os.access()
|
||||
return
|
||||
if not os.access(filename, os.W_OK):
|
||||
st = os.stat(filename)
|
||||
new_permissions = stat.S_IMODE(st.st_mode) | stat.S_IWUSR
|
||||
os.chmod(filename, new_permissions)
|
||||
|
||||
|
||||
def setup_logger(logger, stream, filename=None, fmt=None):
    """Configure *logger* for console output, plus optional file 'tee' output.

    Does nothing if the logger already has handlers attached, so repeated
    calls don't stack duplicate handlers.
    """
    if logger.handlers:
        return

    console = logging.StreamHandler(stream)
    console.setLevel(logging.DEBUG)
    console.setFormatter(logging.Formatter(fmt))
    logger.addHandler(console)
    logger.setLevel(logging.DEBUG)

    if filename:
        # File handler only records INFO and above, prefixed with a timestamp.
        file_handler = logging.FileHandler(filename)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(
            logging.Formatter("%(asctime)s " + (fmt or '%(message)s')))
        logger.addHandler(file_handler)
|
||||
|
||||
|
||||
class RedirectHandler(logging.Handler):
    """Forward every record sent to this handler to the logger named *name*."""

    def __init__(self, name, level=logging.DEBUG):
        # Contemplate feasibility of copying a destination (allow original handler) and redirecting.
        super(RedirectHandler, self).__init__(level)
        self.name = name
        self.logger = logging.getLogger(name)

    def emit(self, record):
        # Hand the record to the target logger's own handler chain.
        self.logger.handle(record)
|
|
@ -1,17 +0,0 @@
|
|||
# -*- coding: utf-8 -*-
|
||||
from south.v2 import SchemaMigration
|
||||
|
||||
|
||||
class Migration(SchemaMigration):
    """No-op placeholder migration: neither direction changes the schema and
    no models are frozen."""

    def forwards(self, orm):
        # Intentionally empty: nothing to apply.
        pass

    def backwards(self, orm):
        # Intentionally empty: nothing to revert.
        pass

    # Empty frozen-ORM definition (South requires the attribute to exist).
    models = {

    }

    complete_apps = ['django_extensions']
|
|
@ -1,248 +0,0 @@
|
|||
"""
|
||||
MongoDB model fields emulating Django Extensions' additional model fields
|
||||
|
||||
These fields are essentially identical to existing Extensions fields, but South hooks have been removed (since mongo requires no schema migration)
|
||||
|
||||
"""
|
||||
|
||||
import six
|
||||
from django.template.defaultfilters import slugify
|
||||
from django import forms
|
||||
from mongoengine.fields import StringField, DateTimeField
|
||||
import datetime
|
||||
import re
|
||||
from django.utils.translation import ugettext_lazy as _
|
||||
|
||||
try:
|
||||
import uuid
|
||||
assert uuid
|
||||
except ImportError:
|
||||
from django_extensions.utils import uuid
|
||||
|
||||
|
||||
class SlugField(StringField):
    """Mongo string field mirroring Django's SlugField: max_length defaults
    to 50 and the field is indexed unless the caller opts out."""

    description = _("String (up to %(max_length)s)")

    def __init__(self, *args, **kwargs):
        kwargs['max_length'] = kwargs.get('max_length', 50)
        # Set db_index=True unless it's been set manually.
        kwargs.setdefault('db_index', True)
        super(SlugField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return "SlugField"

    def formfield(self, **kwargs):
        # Default to a slug form field; explicit kwargs take precedence.
        defaults = dict({'form_class': forms.SlugField}, **kwargs)
        return super(SlugField, self).formfield(**defaults)
|
||||
|
||||
|
||||
class AutoSlugField(SlugField):
    """ AutoSlugField, adapted for MongoDB

    By default, sets editable=False, blank=True.

    Required arguments:

    populate_from
        Specifies which field or list of fields the slug is populated from.

    Optional arguments:

    separator
        Defines the used separator (default: '-')

    overwrite
        If set to True, overwrites the slug on every save (default: False)

    Inspired by SmileyChris' Unique Slugify snippet:
    http://www.djangosnippets.org/snippets/690/
    """
    def __init__(self, *args, **kwargs):
        kwargs.setdefault('blank', True)
        kwargs.setdefault('editable', False)

        populate_from = kwargs.pop('populate_from', None)
        if populate_from is None:
            raise ValueError("missing 'populate_from' argument")
        else:
            self._populate_from = populate_from
        self.separator = kwargs.pop('separator', six.u('-'))
        self.overwrite = kwargs.pop('overwrite', False)
        super(AutoSlugField, self).__init__(*args, **kwargs)

    def _slug_strip(self, value):
        """
        Cleans up a slug by removing slug separator characters that occur at
        the beginning or end of a slug.

        If an alternate separator is used, it will also replace any instances
        of the default '-' separator with the new separator.
        """
        re_sep = '(?:-|%s)' % re.escape(self.separator)
        value = re.sub('%s+' % re_sep, self.separator, value)
        return re.sub(r'^%s+|%s+$' % (re_sep, re_sep), '', value)

    def slugify_func(self, content):
        # Hook point: subclasses can override to customize slug generation.
        return slugify(content)

    def create_slug(self, model_instance, add):
        """Compute a slug for *model_instance* that is unique within its
        collection, appending '<separator><n>' suffixes on collision."""
        # get fields to populate from and slug field to set
        if not isinstance(self._populate_from, (list, tuple)):
            self._populate_from = (self._populate_from, )
        slug_field = model_instance._meta.get_field(self.attname)

        if add or self.overwrite:
            # slugify the original field content and set next step to 2
            slug_for_field = lambda field: self.slugify_func(getattr(model_instance, field))
            slug = self.separator.join(map(slug_for_field, self._populate_from))
            next = 2
        else:
            # get slug from the current model instance and calculate next
            # step from its number, clean-up
            slug = self._slug_strip(getattr(model_instance, self.attname))
            next = slug.split(self.separator)[-1]
            if next.isdigit():
                slug = self.separator.join(slug.split(self.separator)[:-1])
                next = int(next)
            else:
                next = 2

        # strip slug depending on max_length attribute of the slug field
        # and clean-up
        slug_len = slug_field.max_length
        if slug_len:
            slug = slug[:slug_len]
        slug = self._slug_strip(slug)
        original_slug = slug

        # exclude the current model instance from the queryset used in finding
        # the next valid slug
        queryset = model_instance.__class__._default_manager.all()
        if model_instance.pk:
            queryset = queryset.exclude(pk=model_instance.pk)

        # form a kwarg dict used to implement any unique_together constraints
        kwargs = {}
        for params in model_instance._meta.unique_together:
            if self.attname in params:
                for param in params:
                    kwargs[param] = getattr(model_instance, param, None)
        kwargs[self.attname] = slug

        # increases the number while searching for the next valid slug
        # depending on the given slug, clean-up
        while not slug or queryset.filter(**kwargs):
            slug = original_slug
            end = '%s%s' % (self.separator, next)
            end_len = len(end)
            # Truncate so slug + numeric suffix still fits in max_length.
            if slug_len and len(slug) + end_len > slug_len:
                slug = slug[:slug_len - end_len]
                slug = self._slug_strip(slug)
            slug = '%s%s' % (slug, end)
            kwargs[self.attname] = slug
            next += 1
        return slug

    def pre_save(self, model_instance, add):
        # Compute (or recompute) the slug and store it on the instance.
        value = six.u(self.create_slug(model_instance, add))
        setattr(model_instance, self.attname, value)
        return value

    def get_internal_type(self):
        return "SlugField"
|
||||
|
||||
|
||||
class CreationDateTimeField(DateTimeField):
    """Mongo DateTimeField whose default is ``datetime.datetime.now``, so a
    freshly created document records its creation time automatically."""

    def __init__(self, *args, **kwargs):
        # The callable (not its result) is stored, so "now" is evaluated
        # when the document is created rather than at class-definition time.
        kwargs.setdefault('default', datetime.datetime.now)
        DateTimeField.__init__(self, *args, **kwargs)

    def get_internal_type(self):
        return "DateTimeField"
|
||||
|
||||
|
||||
class ModificationDateTimeField(CreationDateTimeField):
    """CreationDateTimeField variant that refreshes its value to the current
    time on every save via ``pre_save``."""

    def pre_save(self, model, add):
        now = datetime.datetime.now()
        setattr(model, self.attname, now)
        return now

    def get_internal_type(self):
        return "DateTimeField"
|
||||
|
||||
|
||||
class UUIDVersionError(Exception):
    """Raised when an unsupported or invalid UUID version is requested."""
|
||||
|
||||
|
||||
class UUIDField(StringField):
    """ UUIDField

    By default uses UUID version 1 (generate from host ID, sequence number and current time)

    The field support all uuid versions which are natively supported by the uuid python module.
    For more information see: http://docs.python.org/lib/module-uuid.html
    """

    def __init__(self, verbose_name=None, name=None, auto=True, version=1, node=None, clock_seq=None, namespace=None, **kwargs):
        # Canonical UUID string form is 36 characters (32 hex + 4 dashes).
        kwargs['max_length'] = 36
        self.auto = auto
        self.version = version
        if version == 1:
            self.node, self.clock_seq = node, clock_seq
        elif version == 3 or version == 5:
            # NOTE(review): this overwrites self.name with the namespace name
            # argument, shadowing the field name — verify intent for v3/v5.
            self.namespace, self.name = namespace, name
        StringField.__init__(self, verbose_name, name, **kwargs)

    def get_internal_type(self):
        return StringField.__name__

    def contribute_to_class(self, cls, name):
        # When used as primary key, register this field as the model's
        # auto field so the framework treats it like an AutoField.
        if self.primary_key:
            assert not cls._meta.has_auto_field, "A model can't have more than one AutoField: %s %s %s; have %s" % (self, cls, name, cls._meta.auto_field)
            super(UUIDField, self).contribute_to_class(cls, name)
            cls._meta.has_auto_field = True
            cls._meta.auto_field = self
        else:
            super(UUIDField, self).contribute_to_class(cls, name)

    def create_uuid(self):
        """Generate a ``uuid.UUID`` according to the configured version."""
        if not self.version or self.version == 4:
            return uuid.uuid4()
        elif self.version == 1:
            return uuid.uuid1(self.node, self.clock_seq)
        elif self.version == 2:
            raise UUIDVersionError("UUID version 2 is not supported.")
        elif self.version == 3:
            return uuid.uuid3(self.namespace, self.name)
        elif self.version == 5:
            return uuid.uuid5(self.namespace, self.name)
        else:
            raise UUIDVersionError("UUID version %s is not valid." % self.version)

    def pre_save(self, model_instance, add):
        # NOTE(review): six.u() is given a uuid.UUID instance, not a str —
        # confirm this behaves as intended on Python 3.
        if self.auto and add:
            value = six.u(self.create_uuid())
            setattr(model_instance, self.attname, value)
            return value
        else:
            value = super(UUIDField, self).pre_save(model_instance, add)
            # Fill in a UUID if auto mode is on and no value was provided.
            if self.auto and not value:
                value = six.u(self.create_uuid())
                setattr(model_instance, self.attname, value)
            return value
|
Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше
Загрузка…
Ссылка в новой задаче