-import os, tarfile, datetime, logging, re
+from __future__ import with_statement
+
+import os, tarfile, datetime, logging, re, ConfigParser, shutil, zipfile
from django.core.cache import cache
from django.utils.translation import ugettext as _
from forum.templatetags.extra_tags import diff_date
import xml.etree.ElementTree
from xml.etree import ElementTree as ET
-from xml.etree.ElementTree import Comment, _encode, ProcessingInstruction, QName, fixtag, _escape_attrib, _escape_cdata
+from xml.etree.ElementTree import Comment, _encode, ProcessingInstruction, QName, _escape_attrib, _escape_cdata, _namespace_map
+from forum import settings
+from django.conf import settings as djsettings
+import settings as selfsettings
+import string
+
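+# ElementTree 1.3 (bundled with Python 2.7) no longer provides fixtag, so fall
+# back to a local copy of the 1.2 implementation when it is missing.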
+try:
+ from xml.etree.ElementTree import fixtag
+except ImportError:
+ def fixtag(tag, namespaces):
+ # given a decorated tag (of the form {uri}tag), return prefixed
+ # tag and namespace declaration, if any
+ if isinstance(tag, QName):
+ tag = tag.text
+ namespace_uri, tag = string.split(tag[1:], "}", 1)
+ prefix = namespaces.get(namespace_uri)
+ if prefix is None:
+ prefix = _namespace_map.get(namespace_uri)
+ if prefix is None:
+ prefix = "ns%d" % len(namespaces)
+ namespaces[namespace_uri] = prefix
+ if prefix == "xml":
+ xmlns = None
+ else:
+ xmlns = ("xmlns:%s" % prefix, namespace_uri)
+ else:
+ xmlns = None
+ return "%s:%s" % (prefix, tag), xmlns
CACHE_KEY = "%s_exporter_state" % APP_URL
EXPORT_STEPS = []
TMP_FOLDER = os.path.join(os.path.dirname(__file__), 'tmp')
-LAST_BACKUP = os.path.join(TMP_FOLDER, 'backup.tar.gz')
+
+DATE_AND_AUTHOR_INF_SECTION = 'DateAndAuthor'
+OPTIONS_INF_SECTION = 'Options'
+META_INF_SECTION = 'Meta'
+
+DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
+DATE_FORMAT = "%Y-%m-%d"
def Etree_pretty__write(self, file, node, encoding, namespaces,
level=0, identator=" "):
if xmlns: xmlns_items.append(xmlns)
except TypeError:
raise #_raise_serialization_error(v)
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
+ file.write(u" %s=\"%s\"" % (_encode(k, encoding),
_escape_attrib(v, encoding)))
for k, v in xmlns_items:
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
+ file.write(u" %s=\"%s\"" % (_encode(k, encoding),
_escape_attrib(v, encoding)))
if node.text or len(node):
file.write(">")
if node.text:
- file.write(_escape_cdata(node.text.replace("\n", (level + 1) * identator + "\n"), encoding))
+ file.write(_escape_cdata(node.text, encoding))
for n in node:
self._write(file, n, encoding, namespaces, level + 1, identator)
if node.text and len(node.text) < 125:
for k, v in xmlns_items:
del namespaces[v]
if node.tail:
- file.write(_escape_cdata(node.tail.replace("\n", level * identator + "\n"), encoding))
+        file.write(_escape_cdata(node.tail.replace("\n", (level * identator) + "\n"), encoding))
+
+def make_date(date, with_time=True):
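+    # strftime() on Python 2 raises ValueError for years before 1900; map such
+    # dates onto 1900 instead of failing the whole export.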
+ try:
+ return date.strftime(with_time and DATETIME_FORMAT or DATE_FORMAT)
+ except ValueError, e:
+ return date.replace(year=1900).strftime(with_time and DATETIME_FORMAT or DATE_FORMAT)
-def _add_tag(el, name, content = None):
- tag = ET.SubElement(el, name)
- if content:
- tag.text = content
- return tag
def ET_Element_add_tag(el, tag_name, content = None, **attrs):
tag = ET.SubElement(el, tag_name)
if content:
- tag.text = unicode(content)
+        try:
+            tag.text = unicode(content)
+        except Exception:
+            # Content that cannot be converted to unicode as-is (e.g. byte
+            # strings with undecodable characters) is reduced to its printable
+            # ASCII subset rather than aborting the export.
+            tag.text = unicode("".join([c for c in content if c in string.printable]))
for k, v in attrs.items():
tag.set(k, unicode(v))
    return tag
- if isinstance(v, (int, long, str, float, bool, dict, list, tuple)):
+ if isinstance(v, (int, long, str, unicode, float, bool, dict, list, tuple)):
if isinstance(v, tuple):
t = 'list'
else:
tree = ET.ElementTree(root)
tree.write(os.path.join(tmp, filename), encoding='UTF-8')
-def create_targz(tmp, files):
- if os.path.exists(LAST_BACKUP):
- os.remove(LAST_BACKUP)
-
- t = tarfile.open(name=LAST_BACKUP, mode = 'w:gz')
+def create_targz(tmp, files, start_time, options, user, state, set_state, file_format):
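+    # Build the final backup archive (.tar.gz or .zip) from the dumped xml
+    # files, optionally bundling uploaded files and the skins folder, and
+    # include a backup.inf metadata file. Returns the archive file name.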
+ now = datetime.datetime.now()
+    domain = re.search(r'[\w-]+\.[\w-]+(\.[\w-]+)*', djsettings.APP_URL)
+ if domain:
+        domain = '_'.join(domain.group(0).split('.'))
+ else:
+ domain = 'localhost'
+
+ fname = "%s-%s" % (domain, now.strftime('%Y%m%d%H%M'))
+ if file_format == 'zip':
+ full_fname = "%s.zip" % fname
+ else:
+ full_fname = "%s.tar.gz" % fname
+
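+    # tarfile and zipfile expose different APIs for adding members, so wrap
+    # whichever one is chosen behind a small add_to_file() helper.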
+ if file_format == 'zip':
+ t = zipfile.ZipFile(os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE, full_fname), 'w')
+
+ def add_to_file(f, a):
+ t.write(f, a)
+ else:
+ t = tarfile.open(os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE, full_fname), mode = 'w:gz')
+
+ def add_to_file(f, a):
+ t.add(f, a)
+
+ state['overall']['status'] = _('Compressing xml files')
+ set_state()
for f in files:
- t.add(os.path.join(tmp, f), arcname=f)
+ add_to_file(os.path.join(tmp, f), "/%s" % f)
+
+ if options.get('uplodaded_files', False):
+        state['overall']['status'] = _('Exporting uploaded files')
+ set_state()
+ export_upfiles(t)
+
+ if options.get('import_skins_folder', False):
+        state['overall']['status'] = _('Exporting skins folder')
+ set_state()
+ export_skinsfolder(t)
+
+    state['overall']['status'] = _('Writing inf file')
+ set_state()
+
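+    # Describe the backup in an ini-style .inf file: author, dates, the
+    # selected options and the per-step item counts.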
+ inf = ConfigParser.SafeConfigParser()
+
+ inf.add_section(DATE_AND_AUTHOR_INF_SECTION)
+
+ inf.set(DATE_AND_AUTHOR_INF_SECTION, 'file-name', full_fname)
+ inf.set(DATE_AND_AUTHOR_INF_SECTION, 'author', unicode(user.id))
+ inf.set(DATE_AND_AUTHOR_INF_SECTION, 'site', djsettings.APP_URL)
+ inf.set(DATE_AND_AUTHOR_INF_SECTION, 'started', start_time.strftime(DATETIME_FORMAT))
+ inf.set(DATE_AND_AUTHOR_INF_SECTION, 'finished', now.strftime(DATETIME_FORMAT))
+
+ inf.add_section(OPTIONS_INF_SECTION)
+ inf.set(OPTIONS_INF_SECTION, 'anon-data', str(options.get('anon_data', False)))
+ inf.set(OPTIONS_INF_SECTION, 'with-upfiles', str(options.get('uplodaded_files', False)))
+ inf.set(OPTIONS_INF_SECTION, 'with-skins', str(options.get('import_skins_folder', False)))
+
+ inf.add_section(META_INF_SECTION)
+
+ for id, s in state.items():
+ inf.set(META_INF_SECTION, id, str(s['count']))
+ with open(os.path.join(tmp, '%s.backup.inf' % fname), 'wb') as inffile:
+ inf.write(inffile)
+
+ add_to_file(os.path.join(tmp, '%s.backup.inf' % fname), '/backup.inf')
+ state['overall']['status'] = _('Saving backup file')
+ set_state()
t.close()
+ return full_fname
+
+
+def export_upfiles(tf):
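+    # Bundle the site's uploaded files folder into the backup archive.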
+ folder = str(settings.UPFILES_FOLDER)
+
+ if os.path.exists(folder):
+ tf.add(folder, arcname='/upfiles')
+
+
+def export_skinsfolder(tf):
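+    # Bundle the first configured template (skins) directory into the backup archive.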
+ folder = djsettings.TEMPLATE_DIRS[0]
+
+ if os.path.exists(folder):
+ tf.add(folder, arcname='/skins')
-def export(options):
- original__write = xml.etree.ElementTree.ElementTree._write
+def export(options, user):
+    original__write = xml.etree.ElementTree.ElementTree._write
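+    # Temporarily monkey-patch ElementTree with the pretty-printing writer and
+    # the Element.add() helper defined above; both are restored on error.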
xml.etree.ElementTree.ElementTree._write = Etree_pretty__write
xml.etree.ElementTree._ElementInterface.add = ET_Element_add_tag
state['overall']['status'] = _('Compressing files')
set_state()
- create_targz(tmp, dump_files)
+ fname = create_targz(tmp, dump_files, start_time, options, user, state, set_state, options['file_format'])
full_state['running'] = False
full_state['errors'] = False
state['overall']['status'] = _('Done')
+ state['overall']['fname'] = fname
set_state()
except Exception, e:
xml.etree.ElementTree.ElementTree._write = original__write
del xml.etree.ElementTree._ElementInterface.add
+
def exporter_step(queryset, root_tag_name, el_tag_name, name, date_lock=None, user_data=False):
def decorator(fn):
el.add('password', u.password)
el.add('email', u.email, validated=u.email_isvalid and 'true' or 'false')
el.add('reputation', u.reputation)
- el.add('joindate', u.date_joined)
+ el.add('badges', bronze=u.bronze, silver=u.silver, gold=u.gold)
+ el.add('joindate', make_date(u.date_joined))
+ el.add('active', u.is_active and 'true' or 'false')
- el.add('firstname', u.first_name)
- el.add('lastname', u.last_name)
+ el.add('realname', u.real_name)
el.add('bio', u.about)
el.add('location', u.location)
el.add('website', u.website)
- el.add('birthdate', u.date_of_birth)
+ el.add('birthdate', u.date_of_birth and make_date(u.date_of_birth, with_time=False) or "")
roles = el.add('roles')
key.add('provider', a.provider)
key.add('key', a.key)
+ try:
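+        # Users without a subscription settings record simply get no
+        # notifications block in the dump.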
+ ss = u.subscription_settings
- ss = u.subscription_settings
-
- notify = el.add('notifications', enabled=ss.enable_notifications and 'true' or 'false')
+ notify = el.add('notifications', enabled=ss.enable_notifications and 'true' or 'false')
- notify.add('notify', **dict([(t, ss.__dict__.get(t, 'n') == 'i' and 'true' or 'false') for t in ['member_joins', 'new_question', 'new_question_watched_tags', 'subscribed_questions']]))
+ notify.add('notify', **dict([(t, ss.__dict__.get(t, 'n') == 'i' and 'true' or 'false') for t in ['member_joins', 'new_question', 'new_question_watched_tags', 'subscribed_questions']]))
- notify.add('autoSubscribe', **dict([(t, ss.__dict__.get(t, False) and 'true' or 'false') for t in [
- 'all_questions', 'all_questions_watched_tags', 'questions_asked', 'questions_answered', 'questions_commented', 'questions_viewed']]))
+ notify.add('autoSubscribe', **dict([(t, ss.__dict__.get(t, False) and 'true' or 'false') for t in [
+ 'all_questions', 'all_questions_watched_tags', 'questions_asked', 'questions_answered', 'questions_commented', 'questions_viewed']]))
- notify.add('notifyOnSubscribed', **dict([(t, ss.__dict__.get("notify_%s" % t, False) and 'true' or 'false') for t in [
- 'answers', 'reply_to_comments', 'comments_own_post', 'comments', 'accepted']]))
+ notify.add('notifyOnSubscribed', **dict([(t, ss.__dict__.get("notify_%s" % t, False) and 'true' or 'false') for t in [
+ 'answers', 'reply_to_comments', 'comments_own_post', 'comments', 'accepted']]))
- notify.add('digest', ss.send_digest and 'on' or 'off')
+ notify.add('digest', ss.send_digest and 'on' or 'off')
+ except SubscriptionSettings.DoesNotExist:
+ pass
watched = el.add('watchedTags')
rejected = el.add('rejectedTags')
if not anon_data:
el.add('author', n.author.id)
- el.add('date', n.added_at)
+ el.add('date', make_date(n.added_at))
el.add('parent', n.parent and n.parent.id or "")
+ el.add('absparent', n.abs_parent and n.abs_parent.id or "")
+
+ act = el.add('lastactivity')
+ act.add('by', n.last_activity_by and n.last_activity_by.id or "")
+ act.add('at', n.last_activity_at and make_date(n.last_activity_at) or "")
el.add('title', n.title)
el.add('body', n.body)
+ el.add('score', n.score)
+
tags = el.add('tags')
for t in n.tagname_list():
tags.add('tag', t)
- revs = el.add('revisions', active=n.active_revision and n.active_revision or n.revisions.order_by('revision')[0])
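+    # Work out which revision is active; nodes without any stored revisions
+    # fall back to revision 0.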
+ try:
+ active = n.active_revision and n.active_revision.revision or n.revisions.order_by('revision')[0].revision
+ except IndexError:
+ active = 0
+
+ revs = el.add('revisions', active=active)
for r in n.revisions.order_by('revision'):
- rev = _add_tag(revs, 'revision')
+ rev = revs.add('revision')
rev.add('number', r.revision)
rev.add('summary', r.summary)
if not anon_data:
rev.add('author', r.author.id)
- rev.add('date', r.revised_at)
+ rev.add('date', make_date(r.revised_at))
rev.add('title', r.title)
rev.add('body', r.body)
rev.add('tags', ", ".join(r.tagname_list()))
+ el.add('marked', n.marked and 'true' or 'false')
+ el.add('wiki', n.nis.wiki and 'true' or 'false')
el.add('extraRef', n.extra_ref and n.extra_ref.id or "")
- make_extra(el.add('exraData'), n.extra)
+ make_extra(el.add('extraData'), n.extra)
+ el.add('extraCount', n.extra_count and n.extra_count or "")
@exporter_step(Action.objects.all(), 'actions', 'action', _('Actions'), 'action_date')
def export_actions(a, el, anon_data):
el.add('id', a.id)
el.add('type', a.action_type)
- el.add('date', a.action_date)
+ el.add('date', make_date(a.action_date))
if not anon_data:
el.add('user', a.user.id)
canceled.add('user', a.canceled_by.id)
canceled.add('ip', a.canceled_ip)
- canceled.add('date', a.canceled_at)
+ canceled.add('date', make_date(a.canceled_at))
if not anon_data:
reputes = el.add('reputes')
repute.add('value', r.value)
-@exporter_step(NodeState.objects.all(), 'states', 'state', _('Node states'), 'action__action_date')
-def export_states(s, el, anon_data):
- el.add('type', s.state_type)
- el.add('node', s.node.id)
- el.add('trigger', s.action.id)
+#@exporter_step(NodeState.objects.all(), 'states', 'state', _('Node states'), 'action__action_date')
+#def export_states(s, el, anon_data):
+# el.add('type', s.state_type)
+# el.add('node', s.node.id)
+# el.add('trigger', s.action.id)
-@exporter_step(Badge.objects.all(), 'badges', 'badge', _('Badges'), user_data=True)
-def export_badges(b, el, anon_data):
- el.add('type', ["", 'gold', 'silver', 'bronze'][b.type])
- el.add('name', b.cls)
- el.add('count', b.awarded_count)
+#@exporter_step(Badge.objects.all(), 'badges', 'badge', _('Badges'), user_data=True)
+#def export_badges(b, el, anon_data):
+# el.add('type', ["", 'gold', 'silver', 'bronze'][b.type])
+# el.add('name', b.cls)
+# el.add('count', b.awarded_count)
@exporter_step(Award.objects.all(), 'awards', 'award', _('Awards'), 'awarded_at', True)
def export_awards(a, el, anon_data):
el.add('badge', a.badge.cls)
- el.add('user', a.user)
+ el.add('user', a.user.id)
el.add('node', a.node and a.node.id or "")
el.add('trigger', a.trigger and a.trigger.id or "")
el.add('action', a.action.id)
def export_settings(s, el, anon_data):
el.add('key', s.key)
make_extra(el.add('value'), s.value)
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-