+
+ return self._base_clone().get(*args, **kwargs)
+
+ def _fetch_from_query_cache(self, key):
+ invalidation_key = self.model._get_cache_query_invalidation_key()
+ cached_result = cache.get_many([invalidation_key, key])
+
+ if not invalidation_key in cached_result:
+ self.model._set_query_cache_invalidation_timestamp()
+ return None
+
+ if (key in cached_result) and(cached_result[invalidation_key] < cached_result[key][0]):
+ return cached_result[key][1]
+
+ return None
+
+ def count(self):
+ cache_key = self.model._generate_cache_key("CNT:%s" % self._get_query_hash())
+ result = self._fetch_from_query_cache(cache_key)
+
+ if result is not None:
+ return result
+
+ result = super(CachedQuerySet, self).count()
+ cache.set(cache_key, (datetime.datetime.now(), result), 60 * 60)
+ return result
+
+ def iterator(self):
+ cache_key = self.model._generate_cache_key("QUERY:%s" % self._get_query_hash())
+ on_cache_query_attr = self.model.value_to_list_on_cache_query()
+
+ to_return = None
+ to_cache = {}
+
+ with_aggregates = len(self.query.aggregates) > 0
+ key_list = self._fetch_from_query_cache(cache_key)
+
+ if key_list is None:
+ if not with_aggregates:
+ values_list = [on_cache_query_attr]
+
+ if len(self.query.extra):
+ values_list += self.query.extra.keys()
+
+ key_list = [v[0] for v in self.values_list(*values_list)]
+ to_cache[cache_key] = (datetime.datetime.now(), key_list)
+ else:
+ to_return = list(super(CachedQuerySet, self).iterator())
+ to_cache[cache_key] = (datetime.datetime.now(), [
+ (row.__dict__[on_cache_query_attr], dict([(k, row.__dict__[k]) for k in self.query.aggregates.keys()]))
+ for row in to_return])
+ elif with_aggregates:
+ tmp = key_list
+ key_list = [k[0] for k in tmp]
+ with_aggregates = [k[1] for k in tmp]
+ del tmp
+
+ if (not to_return) and key_list:
+ row_keys = [self.model.infer_cache_key({on_cache_query_attr: attr}) for attr in key_list]
+ cached = cache.get_many(row_keys)
+
+ to_return = [
+ (ck in cached) and self.obj_from_datadict(cached[ck]) or ToFetch(force_unicode(key_list[i])) for i, ck in enumerate(row_keys)
+ ]
+
+ if len(cached) != len(row_keys):
+ to_fetch = [unicode(tr) for tr in to_return if isinstance(tr, ToFetch)]
+
+ fetched = dict([(force_unicode(r.__dict__[on_cache_query_attr]), r) for r in
+ models.query.QuerySet(self.model).filter(**{"%s__in" % on_cache_query_attr: to_fetch})])
+
+ to_return = [(isinstance(tr, ToFetch) and fetched[unicode(tr)] or tr) for tr in to_return]
+ to_cache.update(dict([(self.model.infer_cache_key({on_cache_query_attr: attr}), r._as_dict()) for attr, r in fetched.items()]))
+
+ if with_aggregates:
+ for i, r in enumerate(to_return):
+ r.__dict__.update(with_aggregates[i])
+
+
+ if len(to_cache):
+ cache.set_many(to_cache, 60 * 60)
+
+ if to_return:
+ for row in to_return:
+ if hasattr(row, 'leaf'):
+ row = row.leaf
+
+ row.reset_original_state()
+ yield row
+
+ def _get_query_hash(self):
+ try:
+ return md5(unicode(self.query).encode("utf-8")).hexdigest()
+ except:
+ try:
+ return md5(self.query).hexdigest()
+ except:
+ return md5(str(self.query)).hexdigest()
+
+
class CachedManager(models.Manager):
    """Default manager that hands out cache-backed querysets."""

    # Make related-field descriptors use this manager too (pre-Django-2.0
    # convention; superseded by Meta.base_manager_name in modern Django).
    use_for_related_fields = True

    def get_queryset(self):
        # Propagate the database alias selected by routing / ``using()`` so
        # the cached queryset hits the same database a plain manager would
        # (``self._db`` is None by default, so this is backward-compatible).
        return CachedQuerySet(self.model, using=self._db)