Migrate to Django 1.6
[osqa.git] / forum / models / base.py
index c5a80f993208a79866b9fe23b778e48fc3a74842..9c328c597a27712632d7067eaff029381512338c 100644 (file)
@@ -1,9 +1,14 @@
 import datetime
 import re
+# md5 moved to hashlib in Python 2.5; fall back to the old md5 module otherwise
+try:
+    from hashlib import md5
+except ImportError:
+    from md5 import new as md5
 from urllib import quote_plus, urlencode
 from django.db import models, IntegrityError, connection, transaction
 from django.utils.http import urlquote  as django_urlquote
 from django.utils.html import strip_tags
+from django.conf import settings as django_settings
 from django.core.urlresolvers import reverse
 from django.contrib.contenttypes import generic
 from django.contrib.contenttypes.models import ContentType
@@ -12,12 +17,17 @@ from django.template.defaultfilters import slugify
 from django.db.models.signals import post_delete, post_save, pre_save, pre_delete
 from django.utils.translation import ugettext as _
 from django.utils.safestring import mark_safe
+from django.utils.encoding import force_unicode
 from django.contrib.sitemaps import ping_google
 import django.dispatch
 from forum import settings
 import logging
 
 
+if not hasattr(cache, 'get_many'):
+    # placeholder: a fallback would be needed here for cache backends lacking get_many
+    pass
+
 class LazyQueryList(object):
     def __init__(self, model, items):
         self.items = items
@@ -33,6 +43,9 @@ class LazyQueryList(object):
     def __len__(self):
         return len(self.items)
 
+# Marker for a primary key whose row was missed in the cache and still has to
+# be fetched from the database.
+class ToFetch(unicode):
+    pass
+
 class CachedQuerySet(models.query.QuerySet):
 
     def lazy(self):
@@ -45,15 +58,20 @@ class CachedQuerySet(models.query.QuerySet):
 
             return LazyQueryList(self.model, list(self.values_list(*values_list)))
         else:
-            if len(self.query.extra):
-                print self.query.extra
             return self
 
     def obj_from_datadict(self, datadict):
         obj = self.model()
         obj.__dict__.update(datadict)
+
+        if hasattr(obj, '_state'):
+            # instances rebuilt from cached dicts need their db alias restored
+            obj._state.db = 'default'
+
         return obj
 
+    def _base_clone(self):
+        # plain QuerySet clone: lets get()/iterator() hit the database without
+        # going back through the caching layer
+        return self._clone(klass=models.query.QuerySet)
+
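+    # get() first consults the per-object cache entry derived via infer_cache_key;
+    # on a miss it queries through a plain QuerySet and caches the fetched object.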
     def get(self, *args, **kwargs):
         key = self.model.infer_cache_key(kwargs)
 
@@ -61,20 +79,118 @@ class CachedQuerySet(models.query.QuerySet):
             obj = cache.get(key)
 
             if obj is None:
-                obj = super(CachedQuerySet, self).get(*args, **kwargs)
+                obj = self._base_clone().get(*args, **kwargs)
                 obj.cache()
             else:
                 obj = self.obj_from_datadict(obj)
-                obj.reset_original_state()
+
+            obj.reset_original_state()
 
             return obj
 
-        return super(CachedQuerySet, self).get(*args, **kwargs)
+        return self._base_clone().get(*args, **kwargs)
+
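+    # Query-level cache entries are (timestamp, payload) tuples; they are only
+    # honoured while they are newer than the model's invalidation timestamp,
+    # which save()/delete() refresh via _set_query_cache_invalidation_timestamp().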
+    def _fetch_from_query_cache(self, key):
+        invalidation_key = self.model._get_cache_query_invalidation_key()
+        cached_result = cache.get_many([invalidation_key, key])
+
+        if invalidation_key not in cached_result:
+            self.model._set_query_cache_invalidation_timestamp()
+            return None
+
+        if (key in cached_result) and (cached_result[invalidation_key] < cached_result[key][0]):
+            return cached_result[key][1]
+
+        return None
+
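+    # count() results are cached for an hour under a hash of this query's SQL.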
+    def count(self):
+        cache_key = self.model._generate_cache_key("CNT:%s" % self._get_query_hash())
+        result = self._fetch_from_query_cache(cache_key)
+
+        if result is not None:
+            return result
+
+        result = super(CachedQuerySet, self).count()
+        cache.set(cache_key, (datetime.datetime.now(), result), 60 * 60)
+        return result
+
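+    # iterator() caches the list of row keys (plus any aggregate values) for this
+    # query, then assembles the rows from their per-object cache entries, falling
+    # back to a single database query for any rows that are not cached.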
+    def iterator(self):
+        cache_key = self.model._generate_cache_key("QUERY:%s" % self._get_query_hash())
+        on_cache_query_attr = self.model.value_to_list_on_cache_query()
+
+        to_return = None
+        to_cache = {}
+
+        with_aggregates = len(self.query.aggregates) > 0
+        key_list = self._fetch_from_query_cache(cache_key)
+
+        if key_list is None:
+            if not with_aggregates:
+                values_list = [on_cache_query_attr]
+
+                if len(self.query.extra):
+                    values_list += self.query.extra.keys()
+
+                key_list = [v[0] for v in self.values_list(*values_list)]
+                to_cache[cache_key] = (datetime.datetime.now(), key_list)
+            else:
+                to_return = list(super(CachedQuerySet, self).iterator())
+                to_cache[cache_key] = (datetime.datetime.now(), [
+                    (row.__dict__[on_cache_query_attr], dict([(k, row.__dict__[k]) for k in self.query.aggregates.keys()]))
+                    for row in to_return])
+        elif with_aggregates:
+            # cached entries are (key, {aggregate_name: value}) pairs; split them
+            # into parallel lists
+            tmp = key_list
+            key_list = [k[0] for k in tmp]
+            with_aggregates = [k[1] for k in tmp]
+            del tmp
+
+        if (not to_return) and key_list:
+            row_keys = [self.model.infer_cache_key({on_cache_query_attr: attr}) for attr in key_list]
+            cached = cache.get_many(row_keys)
+
+            to_return = [
+                self.obj_from_datadict(cached[ck]) if ck in cached
+                else ToFetch(force_unicode(key_list[i]))
+                for i, ck in enumerate(row_keys)
+            ]
+
+            if len(cached) != len(row_keys):
+                to_fetch = [unicode(tr) for tr in to_return if isinstance(tr, ToFetch)]
+
+                fetched = dict([(force_unicode(r.__dict__[on_cache_query_attr]), r) for r in
+                              models.query.QuerySet(self.model).filter(**{"%s__in" % on_cache_query_attr: to_fetch})])
+
+                to_return = [fetched[unicode(tr)] if isinstance(tr, ToFetch) else tr for tr in to_return]
+                to_cache.update(dict([(self.model.infer_cache_key({on_cache_query_attr: attr}), r._as_dict()) for attr, r in fetched.items()]))
+
+            if with_aggregates:
+                for i, r in enumerate(to_return):
+                    r.__dict__.update(with_aggregates[i])
+
+        if len(to_cache):
+            cache.set_many(to_cache, 60 * 60)
+
+        if to_return:
+            for row in to_return:
+                if hasattr(row, 'leaf'):
+                    # prefer the most specific (leaf) instance when the model exposes one
+                    row = row.leaf
+
+                row.reset_original_state()
+                yield row
+
+    def _get_query_hash(self):
+        # hash the SQL of this queryset so it can be used as part of a cache key
+        try:
+            return md5(unicode(self.query).encode("utf-8")).hexdigest()
+        except Exception:
+            try:
+                return md5(self.query).hexdigest()
+            except Exception:
+                return md5(str(self.query)).hexdigest()
+
 
 class CachedManager(models.Manager):
     use_for_related_fields = True
 
-    def get_query_set(self):
+    def get_queryset(self):
         return CachedQuerySet(self.model)
 
     def get_or_create(self, *args, **kwargs):
@@ -110,7 +226,7 @@ class DenormalizedField(object):
         cls.add_to_class("reset_%s_cache" % name, reset_cache)
 
 
-class BaseMetaClass(models.Model.__metaclass__):
+class BaseMetaClass(type(models.Model)):
     to_denormalize = []
 
     def __new__(cls, *args, **kwargs):
@@ -163,6 +279,9 @@ class BaseModel(models.Model):
     def save(self, full_save=False, *args, **kwargs):
         put_back = [k for k, v in self.__dict__.items() if isinstance(v, models.expressions.ExpressionNode)]
 
+        if hasattr(self, '_state'):
+            # make sure instances rebuilt from the cache are routed back to the default database
+            self._state.db = 'default'
+
         if self.id and not full_save:
             self.__class__.objects.filter(id=self.id).update(**self._get_update_kwargs())
         else:
@@ -178,8 +297,20 @@ class BaseModel(models.Model):
                 self.uncache()
 
         self.reset_original_state()
+        self._set_query_cache_invalidation_timestamp()
         self.cache()
 
+    @classmethod
+    def _get_cache_query_invalidation_key(cls):
+        return cls._generate_cache_key("INV_TS")
+
+    @classmethod
+    def _set_query_cache_invalidation_timestamp(cls):
+        # bump the invalidation timestamp for this model and every BaseModel
+        # ancestor, so cached query results for parent models expire as well
+        cache.set(cls._get_cache_query_invalidation_key(), datetime.datetime.now(), 60 * 60 * 24)
+
+        for base in [c for c in cls.__bases__ if issubclass(c, BaseModel) and c is not BaseModel]:
+            base._set_query_cache_invalidation_timestamp()
+
     @classmethod
     def _generate_cache_key(cls, key, group=None):
         if group is None:
@@ -190,13 +321,22 @@ class BaseModel(models.Model):
     def cache_key(self):
         return self._generate_cache_key(self.id)
 
+    @classmethod
+    def value_to_list_on_cache_query(cls):
+        # attribute whose values CachedQuerySet.iterator() stores in the
+        # query-level cache; subclasses may override this
+        return 'id'
+
     @classmethod
     def infer_cache_key(cls, querydict):
         try:
             pk = [v for (k,v) in querydict.items() if k in ('pk', 'pk__exact', 'id', 'id__exact'
                             ) or k.endswith('_ptr__pk') or k.endswith('_ptr__id')][0]
 
-            return cls._generate_cache_key(pk)
+            cache_key = cls._generate_cache_key(pk)
+
+            # keep the key within the configured maximum key length
+            if len(cache_key) > django_settings.CACHE_MAX_KEY_LENGTH:
+                cache_key = cache_key[:django_settings.CACHE_MAX_KEY_LENGTH]
+
+            return cache_key
         except:
             return None
 
@@ -208,6 +348,7 @@ class BaseModel(models.Model):
 
     def delete(self):
         self.uncache()
+        self._set_query_cache_invalidation_timestamp()
         super(BaseModel, self).delete()