git.openstreetmap.org Git - osqa.git/blobdiff - forum/models/base.py
Fixed problem in data cache with international strings
[osqa.git] / forum / models / base.py
index 41f448191d57fa14da285e6d7c2f231eb2d21e5c..a5f74eb39821c17213094a40ff668e90240941fd 100644 (file)
@@ -1,9 +1,14 @@
 import datetime
 import re
+try:
+    from hashlib import md5
+except:
+    from md5 import new as md5
 from urllib import quote_plus, urlencode
 from django.db import models, IntegrityError, connection, transaction
 from django.utils.http import urlquote  as django_urlquote
 from django.utils.html import strip_tags
+from django.conf import settings as django_settings
 from django.core.urlresolvers import reverse
 from django.contrib.contenttypes import generic
 from django.contrib.contenttypes.models import ContentType
@@ -12,46 +17,179 @@ from django.template.defaultfilters import slugify
 from django.db.models.signals import post_delete, post_save, pre_save, pre_delete
 from django.utils.translation import ugettext as _
 from django.utils.safestring import mark_safe
+from django.utils.encoding import force_unicode
 from django.contrib.sitemaps import ping_google
 import django.dispatch
-from django.conf import settings
-from forum import const
+from forum import settings
 import logging
 
-from forum.const import *
 
-class CachedManager(models.Manager):
-    use_for_related_fields = True
-    int_cache_re = re.compile('^_[\w_]+cache$')
+if not hasattr(cache, 'get_many'):
+    #put django 1.2 code here
+    pass
+
+class LazyQueryList(object):
+    def __init__(self, model, items):
+        self.items = items
+        self.model = model
+
+    def __getitem__(self, k):
+        return self.model.objects.get(id=self.items[k][0])
 
-    def cache_obj(self, obj):
-        int_cache_keys = [k for k in obj.__dict__.keys() if self.int_cache_re.match(k)]
+    def __iter__(self):
+        for id in self.items:
+            yield self.model.objects.get(id=id[0])
 
-        for k in int_cache_keys:
-            del obj.__dict__[k]
+    def __len__(self):
+        return len(self.items)
 
-        cache.set(self.model.cache_key(obj.id), obj, 60 * 60)
+class ToFetch(unicode):
+    pass
+
+class CachedQuerySet(models.query.QuerySet):
+
+    def lazy(self):
+        if not len(self.query.aggregates):
+            values_list = ['id']
+
+            if len(self.query.extra):
+                extra_keys = self.query.extra.keys()
+                values_list += extra_keys
+
+            return LazyQueryList(self.model, list(self.values_list(*values_list)))
+        else:
+            return self
+
+    def obj_from_datadict(self, datadict):
+        obj = self.model()
+        obj.__dict__.update(datadict)
+
+        if hasattr(obj, '_state'):
+            obj._state.db = 'default'
+
+        return obj
+
+    def _base_clone(self):
+        return self._clone(klass=models.query.QuerySet)
 
     def get(self, *args, **kwargs):
-        try:
-            pk = [v for (k,v) in kwargs.items() if k in ('pk', 'pk__exact', 'id', 'id__exact'
-                            ) or k.endswith('_ptr__pk') or k.endswith('_ptr__id')][0]
-        except:
-            pk = None
+        key = self.model.infer_cache_key(kwargs)
 
-        if pk is not None:
-            key = self.model.cache_key(pk)
+        if key is not None:
             obj = cache.get(key)
 
             if obj is None:
-                obj = super(CachedManager, self).get(*args, **kwargs)
-                self.cache_obj(obj)
+                obj = self._base_clone().get(*args, **kwargs)
+                obj.cache()
             else:
-                d = obj.__dict__
+                obj = self.obj_from_datadict(obj)
+
+            obj.reset_original_state()
 
             return obj
-        
-        return super(CachedManager, self).get(*args, **kwargs)
+
+        return self._base_clone().get(*args, **kwargs)
+
+    def _fetch_from_query_cache(self, key):
+        invalidation_key = self.model._get_cache_query_invalidation_key()
+        cached_result = cache.get_many([invalidation_key, key])
+
+        if not invalidation_key in cached_result:
+            self.model._set_query_cache_invalidation_timestamp()
+            return None
+
+        if (key in cached_result) and(cached_result[invalidation_key] < cached_result[key][0]):
+            return cached_result[key][1]
+
+        return None
+
+    def count(self):
+        cache_key = self.model._generate_cache_key("CNT:%s" % self._get_query_hash())
+        result = self._fetch_from_query_cache(cache_key)
+
+        if result is not None:
+            return result
+
+        result = super(CachedQuerySet, self).count()
+        cache.set(cache_key, (datetime.datetime.now(), result), 60 * 60)
+        return result
+
+    def iterator(self):
+        cache_key = self.model._generate_cache_key("QUERY:%s" % self._get_query_hash())
+        on_cache_query_attr = self.model.value_to_list_on_cache_query()
+
+        to_return = None
+        to_cache = {}
+
+        with_aggregates = len(self.query.aggregates) > 0
+        key_list = self._fetch_from_query_cache(cache_key)
+
+        if key_list is None:
+            if not with_aggregates:
+                values_list = [on_cache_query_attr]
+
+                if len(self.query.extra):
+                    values_list += self.query.extra.keys()
+
+                key_list = [v[0] for v in self.values_list(*values_list)]
+                to_cache[cache_key] = (datetime.datetime.now(), key_list)
+            else:
+                to_return = list(super(CachedQuerySet, self).iterator())
+                to_cache[cache_key] = (datetime.datetime.now(), [
+                    (row.__dict__[on_cache_query_attr], dict([(k, row.__dict__[k]) for k in self.query.aggregates.keys()]))
+                    for row in to_return])
+        elif with_aggregates:
+            tmp = key_list
+            key_list = [k[0] for k in tmp]
+            with_aggregates = [k[1] for k in tmp]
+            del tmp
+
+        if (not to_return) and key_list:
+            row_keys = [self.model.infer_cache_key({on_cache_query_attr: attr}) for attr in key_list]
+            cached = cache.get_many(row_keys)
+
+            to_return = [
+                (ck in cached) and self.obj_from_datadict(cached[ck]) or ToFetch(force_unicode(key_list[i])) for i, ck in enumerate(row_keys)
+            ]
+
+            if len(cached) != len(row_keys):
+                to_fetch = [unicode(tr) for tr in to_return if isinstance(tr, ToFetch)]
+
+                fetched = dict([(force_unicode(r.__dict__[on_cache_query_attr]), r) for r in
+                              models.query.QuerySet(self.model).filter(**{"%s__in" % on_cache_query_attr: to_fetch})])
+
+                to_return = [(isinstance(tr, ToFetch) and fetched[unicode(tr)] or tr) for tr in to_return]
+                to_cache.update(dict([(self.model.infer_cache_key({on_cache_query_attr: attr}), r._as_dict()) for attr, r in fetched.items()]))
+
+            if with_aggregates:
+                for i, r in enumerate(to_return):
+                    r.__dict__.update(with_aggregates[i])
+
+
+        if len(to_cache):
+            cache.set_many(to_cache, 60 * 60)
+
+        if to_return:
+            for row in to_return:
+                if hasattr(row, 'leaf'):
+                    row = row.leaf
+
+                row.reset_original_state()
+                yield row
+
+    def _get_query_hash(self):
+        try:
+            return md5(unicode(self.query).encode("utf-8")).hexdigest()
+        except:
+            return md5(self.query).hexdigest()        
+
+
+
+class CachedManager(models.Manager):
+    use_for_related_fields = True
+
+    def get_query_set(self):
+        return CachedQuerySet(self.model)
 
     def get_or_create(self, *args, **kwargs):
         try:
@@ -59,19 +197,54 @@ class CachedManager(models.Manager):
         except:
             return super(CachedManager, self).get_or_create(*args, **kwargs)
 
-denorm_update = django.dispatch.Signal(providing_args=["instance", "field", "old", "new"])
 
-class DenormalizedField(models.PositiveIntegerField):
-    __metaclass__ = models.SubfieldBase
+class DenormalizedField(object):
+    def __init__(self, manager, *args, **kwargs):
+        self.manager = manager
+        self.filter = (args, kwargs)
+
+    def setup_class(self, cls, name):
+        dict_name = '_%s_dencache_' % name
+
+        def getter(inst):
+            val = inst.__dict__.get(dict_name, None)
+
+            if val is None:
+                val = getattr(inst, self.manager).filter(*self.filter[0], **self.filter[1]).count()
+                inst.__dict__[dict_name] = val
+                inst.cache()
+
+            return val
+
+        def reset_cache(inst):
+            inst.__dict__.pop(dict_name, None)
+            inst.uncache()
+
+        cls.add_to_class(name, property(getter))
+        cls.add_to_class("reset_%s_cache" % name, reset_cache)
+
 
-    def contribute_to_class(self, cls, name):
-        super (DenormalizedField, self).contribute_to_class(cls, name)
-        if not hasattr(cls, '_denormalizad_fields'):
-            cls._denormalizad_fields = []
+class BaseMetaClass(models.Model.__metaclass__):
+    to_denormalize = []
+
+    def __new__(cls, *args, **kwargs):
+        new_cls = super(BaseMetaClass, cls).__new__(cls, *args, **kwargs)
+
+        BaseMetaClass.to_denormalize.extend(
+            [(new_cls, name, field) for name, field in new_cls.__dict__.items() if isinstance(field, DenormalizedField)]
+        )
+
+        return new_cls
+
+    @classmethod
+    def setup_denormalizes(cls):
+        for new_cls, name, field in BaseMetaClass.to_denormalize:
+            field.setup_class(new_cls, name)
 
-        cls._denormalizad_fields.append(name)
 
 class BaseModel(models.Model):
+    __metaclass__ = BaseMetaClass
+
     objects = CachedManager()
 
     class Meta:
@@ -80,162 +253,107 @@ class BaseModel(models.Model):
 
     def __init__(self, *args, **kwargs):
         super(BaseModel, self).__init__(*args, **kwargs)
-        self._original_state = dict([(k, v) for k,v in self.__dict__.items() if not k in kwargs])
+        self.reset_original_state(kwargs.keys())
 
-    @classmethod
-    def cache_key(cls, pk):
-        return '%s.%s:%s' % (settings.APP_URL, cls.__name__, pk)
+    def reset_original_state(self, reset_fields=None):
+        self._original_state = self._as_dict()
+        
+        if reset_fields:
+            self._original_state.update(dict([(f, None) for f in reset_fields]))
 
     def get_dirty_fields(self):
-        missing = object()
-        return dict([(k, self._original_state.get(k, None)) for k,v in self.__dict__.items()
-                 if self._original_state.get(k, missing) == missing or self._original_state[k] != v])
+        return [f.name for f in self._meta.fields if self._original_state[f.attname] != self.__dict__[f.attname]]
 
-    def save(self, *args, **kwargs):
-        put_back = None
+    def _as_dict(self):
+        return dict([(name, getattr(self, name)) for name in
+                     ([f.attname for f in self._meta.fields] + [k for k in self.__dict__.keys() if k.endswith('_dencache_')])
+        ])
 
-        if hasattr(self.__class__, '_denormalizad_fields'):
-            dirty = self.get_dirty_fields()
-            put_back = [f for f in self.__class__._denormalizad_fields if f in dirty]
+    def _get_update_kwargs(self):
+        return dict([
+            (f.name, getattr(self, f.name)) for f in self._meta.fields if self._original_state[f.attname] != self.__dict__[f.attname]
+        ])
 
-            if put_back:
-                for n in put_back:
-                    self.__dict__[n] = models.F(n) + (self.__dict__[n] - dirty[n])
+    def save(self, full_save=False, *args, **kwargs):
+        put_back = [k for k, v in self.__dict__.items() if isinstance(v, models.expressions.ExpressionNode)]
 
-        super(BaseModel, self).save(*args, **kwargs)
+        if hasattr(self, '_state'):
+            self._state.db = 'default'
+
+        if self.id and not full_save:
+            self.__class__.objects.filter(id=self.id).update(**self._get_update_kwargs())
+        else:
+            super(BaseModel, self).save()
 
         if put_back:
             try:
                 self.__dict__.update(
                     self.__class__.objects.filter(id=self.id).values(*put_back)[0]
                 )
-                for f in put_back:
-                    denorm_update.send(sender=self.__class__, instance=self, field=f,
-                                       old=self._original_state[f], new=self.__dict__[f])
             except:
-                #todo: log this properly
-                pass
-
-        self._original_state = dict(self.__dict__)
-        self.__class__.objects.cache_obj(self)
-
-    def delete(self):
-        cache.delete(self.cache_key(self.pk))
-        super(BaseModel, self).delete()
-
-
-class ActiveObjectManager(models.Manager):
-    use_for_related_fields = True
-    def get_query_set(self):
-        return super(ActiveObjectManager, self).get_query_set().filter(canceled=False)
-
-class UndeletedObjectManager(models.Manager):
-    def get_query_set(self):
-        return super(UndeletedObjectManager, self).get_query_set().filter(deleted=False)
-
-class GenericContent(models.Model):
-    content_type   = models.ForeignKey(ContentType)
-    object_id      = models.PositiveIntegerField()
-    content_object = generic.GenericForeignKey('content_type', 'object_id')
-
-    class Meta:
-        abstract = True
-        app_label = 'forum'
-
-class MetaContent(BaseModel):
-    node = models.ForeignKey('Node', null=True, related_name='%(class)ss')
-
-    def __init__(self, *args, **kwargs):
-        if 'content_object' in kwargs:
-            kwargs['node'] = kwargs['content_object']
-            del kwargs['content_object']
-
-        super (MetaContent, self).__init__(*args, **kwargs)
-    
-    @property
-    def content_object(self):
-        return self.node.leaf
-
-    class Meta:
-        abstract = True
-        app_label = 'forum'
-
-from user import User
+                logging.error("Unable to read %s from %s" % (", ".join(put_back), self.__class__.__name__))
+                self.uncache()
 
-class UserContent(models.Model):
-    user = models.ForeignKey(User, related_name='%(class)ss')
+        self.reset_original_state()
+        self._set_query_cache_invalidation_timestamp()
+        self.cache()
 
-    class Meta:
-        abstract = True
-        app_label = 'forum'
+    @classmethod
+    def _get_cache_query_invalidation_key(cls):
+        return cls._generate_cache_key("INV_TS")
 
+    @classmethod
+    def _set_query_cache_invalidation_timestamp(cls):
+        cache.set(cls._get_cache_query_invalidation_key(), datetime.datetime.now(), 60 * 60 * 24)
 
-marked_deleted = django.dispatch.Signal(providing_args=["instance", "deleted_by"])
+        for base in filter(lambda c: issubclass(c, BaseModel) and (not c is BaseModel), cls.__bases__):
+            base._set_query_cache_invalidation_timestamp()
 
-class DeletableContent(models.Model):
-    deleted     = models.BooleanField(default=False)
-    deleted_at  = models.DateTimeField(null=True, blank=True)
-    deleted_by  = models.ForeignKey(User, null=True, blank=True, related_name='deleted_%(class)ss')
+    @classmethod
+    def _generate_cache_key(cls, key, group=None):
+        if group is None:
+            group = cls.__name__
 
-    active = UndeletedObjectManager()
+        return '%s:%s:%s' % (settings.APP_URL, group, key)
 
-    class Meta:
-        abstract = True
-        app_label = 'forum'
+    def cache_key(self):
+        return self._generate_cache_key(self.id)
 
-    def mark_deleted(self, user):
-        if not self.deleted:
-            self.deleted = True
-            self.deleted_at = datetime.datetime.now()
-            self.deleted_by = user
-            self.save()
-            marked_deleted.send(sender=self.__class__, instance=self, deleted_by=user)
-            return True
-        else:
-            return False
+    @classmethod
+    def value_to_list_on_cache_query(cls):
+        return 'id'
 
-    def unmark_deleted(self):
-        if self.deleted:
-            self.deleted = False
-            self.save()
-            return True
-        else:
-            return False
+    @classmethod
+    def infer_cache_key(cls, querydict):
+        try:
+            pk = [v for (k,v) in querydict.items() if k in ('pk', 'pk__exact', 'id', 'id__exact'
+                            ) or k.endswith('_ptr__pk') or k.endswith('_ptr__id')][0]
 
-mark_canceled = django.dispatch.Signal(providing_args=['instance'])
+            cache_key = cls._generate_cache_key(pk)
 
-class CancelableContent(models.Model):
-    canceled = models.BooleanField(default=False)
+            if len(cache_key) > django_settings.CACHE_MAX_KEY_LENGTH:
+                cache_key = cache_key[:django_settings.CACHE_MAX_KEY_LENGTH]
 
-    def cancel(self):
-        if not self.canceled:
-            self.canceled = True
-            self.save()
-            mark_canceled.send(sender=self.__class__, instance=self)
-            return True
-            
-        return False
+            return cache_key
+        except:
+            return None
 
-    class Meta:
-        abstract = True
-        app_label = 'forum'
+    def cache(self):
+        cache.set(self.cache_key(), self._as_dict(), 60 * 60)
 
+    def uncache(self):
+        cache.delete(self.cache_key())
 
-from node import Node, NodeRevision
+    def delete(self):
+        self.uncache()
+        self._set_query_cache_invalidation_timestamp()
+        super(BaseModel, self).delete()
 
-class QandA(Node):
-    wiki                 = models.BooleanField(default=False)
-    wikified_at          = models.DateTimeField(null=True, blank=True)
 
-    class Meta:
-        abstract = True
-        app_label = 'forum'
+from user import User
+from node import Node, NodeRevision, NodeManager
+from action import Action
 
-    def wikify(self):
-        if not self.wiki:
-            self.wiki = True
-            self.wikified_at = datetime.datetime.now()
-            self.save()