2010-09-03 22:40:49 +04:00
|
|
|
import contextlib
|
|
|
|
import threading
|
|
|
|
|
2009-10-22 04:33:00 +04:00
|
|
|
from django.db import models
|
2010-06-03 05:10:50 +04:00
|
|
|
from django.utils import translation
|
2009-10-22 04:33:00 +04:00
|
|
|
|
2010-04-02 20:01:09 +04:00
|
|
|
|
2010-01-30 10:13:39 +03:00
|
|
|
import caching.base
|
2010-09-03 22:40:49 +04:00
|
|
|
import multidb.pinning
|
|
|
|
import queryset_transform
|
2010-04-02 20:01:09 +04:00
|
|
|
|
2010-07-23 01:18:37 +04:00
|
|
|
from . import signals
|
|
|
|
|
2010-04-02 20:01:09 +04:00
|
|
|
|
2010-09-03 22:40:49 +04:00
|
|
|
# Thread-local flag storage used by skip_cache() and ManagerBase below.
# Note: this assignment only initializes the flag on the thread that
# imports this module; every reader therefore uses
# getattr(_locals, 'skip_cache', False) to cope with other threads.
_locals = threading.local()
_locals.skip_cache = False
|
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def use_master():
    """Within this context, all queries go to the master."""
    # Remember the prior pin state so nested use_master() blocks (or an
    # already-pinned thread) are restored correctly on exit.
    previous = getattr(multidb.pinning._locals, 'pinned', False)
    multidb.pinning.pin_this_thread()
    try:
        yield
    finally:
        # Restore rather than unconditionally unpin.
        multidb.pinning._locals.pinned = previous
|
2010-09-03 22:40:49 +04:00
|
|
|
|
|
|
|
|
|
|
|
@contextlib.contextmanager
def skip_cache():
    """Within this context, no queries come from cache."""
    # Save the current flag so nested skip_cache() blocks restore the
    # outer state instead of clobbering it.
    previous = getattr(_locals, 'skip_cache', False)
    _locals.skip_cache = True
    try:
        yield
    finally:
        _locals.skip_cache = previous
|
2010-09-03 22:40:49 +04:00
|
|
|
|
|
|
|
|
2010-11-17 03:45:22 +03:00
|
|
|
# This is sadly a copy and paste of annotate to get around this
# ticket http://code.djangoproject.com/ticket/14707
def annotate(self, *args, **kwargs):
    """Copy of Django's QuerySet.annotate (workaround for ticket 14707).

    Positional aggregate expressions are keyed under their
    ``default_alias``; keyword arguments give explicit aliases.  Raises
    ValueError when a positional aggregate's default alias collides
    with a name already in ``kwargs``.

    NOTE(review): presumably identical to the stock annotate of the
    Django version in use apart from whatever 14707 required — confirm
    against that version before upgrading Django.
    """
    for arg in args:
        if arg.default_alias in kwargs:
            raise ValueError("The %s named annotation conflicts with the "
                             "default name for another annotation."
                             % arg.default_alias)
        kwargs[arg.default_alias] = arg

    # Work on a clone so the original queryset is left untouched.
    obj = self._clone()

    obj._setup_aggregate_query(kwargs.keys())

    # Add the aggregates to the query
    for (alias, aggregate_expr) in kwargs.items():
        obj.query.add_aggregate(aggregate_expr, self.model, alias,
                                is_summary=False)

    return obj


# Monkeypatch: replaces QuerySet.annotate globally for every queryset
# in the process, not just the classes defined in this module.
models.query.QuerySet.annotate = annotate
|
|
|
|
|
|
|
|
|
2010-04-17 18:53:31 +04:00
|
|
|
class TransformQuerySet(queryset_transform.TransformQuerySet):
    """TransformQuerySet with helpers to strip or limit transforms."""

    def pop_transforms(self):
        """Return ``(transform_fns, clone)`` where the clone has none."""
        clone = self._clone()
        fns, clone._transform_fns = clone._transform_fns, []
        return fns, clone

    def no_transforms(self):
        """Return a clone of this queryset with every transform removed."""
        _, clone = self.pop_transforms()
        return clone

    def only_translations(self):
        """Remove all transforms except translations."""
        from translations import transformer
        # Add an extra select so these are cached separately.
        stripped = self.no_transforms().extra(select={'_only_trans': 1})
        return stripped.transform(transformer.get_trans)

    def transform(self, fn):
        from . import decorators
        # Run the transform with the cache bypassed (see decorators).
        wrapped = decorators.skip_cache(fn)
        return super(TransformQuerySet, self).transform(wrapped)
|
|
|
|
|
2010-04-17 18:53:31 +04:00
|
|
|
|
2010-05-05 08:34:38 +04:00
|
|
|
class RawQuerySet(models.query.RawQuerySet):
    """A RawQuerySet with __len__.

    The raw query is executed at most once; the rows are kept in
    ``_result_cache`` and reused by both iteration and ``len()``.
    """

    def __init__(self, *args, **kw):
        super(RawQuerySet, self).__init__(*args, **kw)
        # Lazily-populated list of result objects; None means "not run yet".
        self._result_cache = None

    def _fetch_all(self):
        # Run the raw query exactly once and memoize the rows.
        if self._result_cache is None:
            self._result_cache = list(super(RawQuerySet, self).__iter__())
        return self._result_cache

    def __iter__(self):
        return iter(self._fetch_all())

    def __len__(self):
        # Reuse the cached rows directly instead of copying them into a
        # throwaway list (the old code did ``len(list(self.__iter__()))``).
        return len(self._fetch_all())
|
|
|
|
|
|
|
|
|
|
|
|
# Mixes our __len__/result-caching RawQuerySet with cache-machine's
# CachingRawQuerySet (presumably the memcached layer — behavior lives
# in caching.base, not visible here).
class CachingRawQuerySet(RawQuerySet, caching.base.CachingRawQuerySet):
    """A RawQuerySet with __len__ and caching."""
|
|
|
|
|
2010-04-02 20:01:09 +04:00
|
|
|
# Make TransformQuerySet one of CachingQuerySet's parents so that we can do
# transforms on objects and then get them cached.
# NOTE: this mutates caching.base.CachingQuerySet's __bases__ in place,
# so the change is global to the process, affecting every user of
# cache-machine — not just the managers defined in this module.
CachingQuerySet = caching.base.CachingQuerySet
CachingQuerySet.__bases__ = (TransformQuerySet,) + CachingQuerySet.__bases__
|
|
|
|
|
|
|
|
|
2010-05-05 08:34:38 +04:00
|
|
|
class UncachedManagerBase(models.Manager):
    """Manager returning TransformQuerySets with no caching layer."""

    def get_query_set(self):
        # Plain TransformQuerySet (no cache), plus the translation
        # transform when the model has translated fields.
        qs = self._with_translations(TransformQuerySet(self.model))
        return qs

    def _with_translations(self, qs):
        from translations import transformer
        # Since we're attaching translations to the object, we need to stick
        # the locale in the query so objects aren't shared across locales.
        if hasattr(self.model._meta, 'translated_fields'):
            lang = translation.get_language()
            qs = qs.transform(transformer.get_trans)
            # Tautological WHERE clause (e.g. "en"="en") whose text varies
            # by locale: it changes the generated SQL, and therefore any
            # cache key derived from it, without changing the result set.
            qs = qs.extra(where=['"%s"="%s"' % (lang, lang)])
        return qs

    def transform(self, fn):
        # Convenience: apply a transform to the full queryset.
        return self.all().transform(fn)

    def raw(self, raw_query, params=None, *args, **kwargs):
        # Use our RawQuerySet subclass so raw results support len().
        return RawQuerySet(raw_query, self.model, params=params,
                           using=self._db, *args, **kwargs)
|
|
|
|
|
|
|
|
|
|
|
|
class ManagerBase(caching.base.CachingManager, UncachedManagerBase):
    """
    Base for all managers in AMO.

    Returns TransformQuerySets from the queryset_transform project.

    If a model has translated fields, they'll be attached through a transform
    function.
    """

    def get_query_set(self):
        queryset = super(ManagerBase, self).get_query_set()
        # Honor the skip_cache() context manager for this thread.
        if getattr(_locals, 'skip_cache', False):
            queryset = queryset.no_cache()
        return self._with_translations(queryset)

    def raw(self, raw_query, params=None, *args, **kwargs):
        # Caching flavor of UncachedManagerBase.raw.
        return CachingRawQuerySet(raw_query, self.model, params=params,
                                  using=self._db, *args, **kwargs)
|
2010-01-31 08:45:11 +03:00
|
|
|
|
|
|
|
|
2010-04-02 20:01:09 +04:00
|
|
|
class ModelBase(caching.base.CachingMixin, models.Model):
    """
    Base class for AMO models to abstract some common features.

    * Adds automatic created and modified fields to the model.
    * Fetches all translations in one subsequent query during initialization.
    """

    # Timestamps maintained automatically by Django.
    created = models.DateTimeField(auto_now_add=True)
    modified = models.DateTimeField(auto_now=True)

    # Default (cached) manager and an explicit cache-bypassing one.
    objects = ManagerBase()
    uncached = UncachedManagerBase()

    class Meta:
        abstract = True
        get_latest_by = 'created'

    def get_absolute_url(self, *args, **kwargs):
        return self.get_url_path(*args, **kwargs)

    def update(self, **kw):
        """
        Shortcut for doing an UPDATE on this object.

        If _signal=False is in ``kw`` the post_save signal won't be sent.
        """
        signal = kw.pop('_signal', True)
        cls = self.__class__
        for k, v in kw.items():
            setattr(self, k, v)
        if signal:
            # Detect any attribute changes during pre_save and add those to
            # the update kwargs.
            attrs = dict(self.__dict__)
            models.signals.pre_save.send(sender=cls, instance=self)
            _missing = object()
            for k, v in self.__dict__.items():
                # Compare via a sentinel so attributes *added* by a
                # pre_save handler are picked up as changes instead of
                # raising KeyError (the old code indexed attrs[k]).
                if attrs.get(k, _missing) != v:
                    kw[k] = v
                    setattr(self, k, v)
        cls.objects.filter(pk=self.pk).update(**kw)
        if signal:
            models.signals.post_save.send(sender=cls, instance=self,
                                          created=False)
|
|
|
|
|
2010-03-30 01:06:02 +04:00
|
|
|
|
|
|
|
def manual_order(qs, pks, pk_name='id'):
    """
    Given a query set and a list of primary keys, return a set of objects from
    the query set in that exact order.

    Ordering relies on MySQL's FIELD() function, so this is
    MySQL-specific.  ``pks`` are interpolated into raw SQL as strings;
    they must be trusted values (integer primary keys), never raw user
    input.
    """
    # Nothing to order; skip the query entirely.
    if not pks:
        return []

    # Filter on the same column FIELD() orders by.  (The old code always
    # filtered on id__in, silently ignoring a non-default pk_name.)
    return qs.filter(**{'%s__in' % pk_name: pks}).extra(
        select={'_manual': 'FIELD(%s, %s)'
                % (pk_name, ','.join(map(str, pks)))},
        order_by=['_manual'])
|