import os, tarfile, datetime, logging, re, ConfigParser, shutil

from django.core.cache import cache
from django.utils.translation import ugettext as _
from forum.models import *
from forum.settings import APP_URL
from forum.templatetags.extra_tags import diff_date
import xml.etree.ElementTree
from xml.etree import ElementTree as ET
from xml.etree.ElementTree import Comment, _encode, ProcessingInstruction, QName, fixtag, _escape_attrib, _escape_cdata
from forum import settings
from django.conf import settings as djsettings
import settings as selfsettings
CACHE_KEY = "%s_exporter_state" % APP_URL
EXPORT_STEPS = [] # registry of export steps, filled by the @exporter_step decorator below

TMP_FOLDER = os.path.join(os.path.dirname(__file__), 'tmp')
LAST_BACKUP = os.path.join(TMP_FOLDER, 'backup.tar.gz')

DATE_AND_AUTHOR_INF_SECTION = 'DateAndAuthor'
OPTIONS_INF_SECTION = 'Options'
META_INF_SECTION = 'Meta'

DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
DATE_FORMAT = "%Y-%m-%d"
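# Pretty-printing replacement for ElementTree's internal ElementTree._write
# (Python 2.x). It mirrors the stdlib writer but indents nested elements with
# `identator` so the exported XML stays human-readable. It is monkey-patched
# in (and restored) by export() below.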
def Etree_pretty__write(self, file, node, encoding, namespaces,
                        level=0, identator=" "):
    tag = node.tag
    if tag is Comment:
        file.write(level * identator + "<!-- %s -->" % _escape_cdata(node.text, encoding))
    elif tag is ProcessingInstruction:
        file.write("<?%s?>" % _escape_cdata(node.text, encoding))
    else:
        items = node.items()
        xmlns_items = [] # new namespaces in this scope
        try:
            if isinstance(tag, QName) or tag[:1] == "{":
                tag, xmlns = fixtag(tag, namespaces)
                if xmlns: xmlns_items.append(xmlns)
        except TypeError:
            raise #_raise_serialization_error(tag)
        file.write("\n" + level * identator + "<" + _encode(tag, encoding))
        if items or xmlns_items:
            items.sort() # lexical order
            for k, v in items:
                try:
                    if isinstance(k, QName) or k[:1] == "{":
                        k, xmlns = fixtag(k, namespaces)
                        if xmlns: xmlns_items.append(xmlns)
                except TypeError:
                    raise #_raise_serialization_error(k)
                try:
                    if isinstance(v, QName):
                        v, xmlns = fixtag(v, namespaces)
                        if xmlns: xmlns_items.append(xmlns)
                except TypeError:
                    raise #_raise_serialization_error(v)
                file.write(u" %s=\"%s\"" % (_encode(k, encoding),
                                            _escape_attrib(v, encoding)))
            for k, v in xmlns_items:
                file.write(u" %s=\"%s\"" % (_encode(k, encoding),
                                            _escape_attrib(v, encoding)))
        if node.text or len(node):
            file.write(">")
            if node.text:
                file.write(_escape_cdata(node.text, encoding))
            for n in node:
                self._write(file, n, encoding, namespaces, level + 1, identator)
            if node.text and len(node.text) < 125:
                file.write("</" + _encode(tag, encoding) + ">")
            else:
                file.write("\n" + level * identator + "</" + _encode(tag, encoding) + ">")
        else:
            file.write(" />")
        for k, v in xmlns_items:
            del namespaces[v]
    if node.tail:
        file.write(_escape_cdata(node.tail.replace("\n", (level * identator) + "\n"), encoding))
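# Format a datetime (or date) for the XML dump; falls back to a 1900 stand-in
# year because strftime() on Python 2 rejects years before 1900.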
def make_date(date, with_time=True):
    try:
        return date.strftime(with_time and DATETIME_FORMAT or DATE_FORMAT)
    except ValueError: # pre-1900 dates cannot be formatted directly
        return date.replace(year=1900).strftime(with_time and DATETIME_FORMAT or DATE_FORMAT)
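# Helper attached to ElementTree elements as `.add()` by export(): creates a
# child element, sets its text and attributes, and returns it for chaining.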
def ET_Element_add_tag(el, tag_name, content=None, **attrs):
    tag = ET.SubElement(el, tag_name)

    if content:
        tag.text = unicode(content)

    for k, v in attrs.items():
        tag.set(k, unicode(v))

    return tag
GOOD_TAG_NAME = re.compile("^\w+$")
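# Serialize an arbitrary "extra data" value (numbers, strings, booleans,
# dicts, lists, tuples) into nested <value>/<item> elements.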
def make_extra(el, v):
    if v is None:
        return

    if isinstance(v, (int, long, str, unicode, float, bool, dict, list, tuple)):
        if isinstance(v, tuple):
            t = 'list' # tuples are exported as plain lists
        else:
            t = v.__class__.__name__
    else:
        t = 'unknown'

    value = el.add('value', type=t)

    if isinstance(v, (list, tuple)):
        for i in v:
            item = value.add('item')
            make_extra(item, i)
    elif isinstance(v, dict):
        for k, i in v.items():
            item = value.add('item', key=k)
            make_extra(item, i)
    else:
        value.text = unicode(v)
def write_to_file(root, tmp, filename):
    tree = ET.ElementTree(root)
    tree.write(os.path.join(tmp, filename), encoding='UTF-8')
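# Bundle the generated XML dumps (plus optional uploaded files and skins)
# into backup.tar.gz, write a backup.inf metadata file describing the run,
# and copy both into EXPORTER_BACKUP_STORAGE under a site/timestamp name.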
def create_targz(tmp, files, start_time, options, user, state, set_state):
    if os.path.exists(LAST_BACKUP):
        os.remove(LAST_BACKUP)

    t = tarfile.open(name=LAST_BACKUP, mode='w:gz')

    state['overall']['status'] = _('Compressing xml files')
    set_state()

    for f in files:
        t.add(os.path.join(tmp, f), arcname="/%s" % f)

    if options.get('uplodaded_files', False):
        state['overall']['status'] = _('Exporting uploaded files')
        set_state()
        export_upfiles(t)

    if options.get('import_skins_folder', False):
        state['overall']['status'] = _('Exporting skins folder')
        set_state()
        export_skinsfolder(t)

    state['overall']['status'] = _('Writing inf file.')
    set_state()

    now = datetime.datetime.now()
    domain = re.match('[\w-]+\.[\w-]+(\.[\w-]+)*', djsettings.APP_URL)

    if domain:
        domain = '_'.join(domain.group(0).split('.'))
    else:
        domain = 'backup' # assumed fallback when APP_URL has no recognizable domain

    fname = "%s-%s" % (domain, now.strftime('%Y%m%d%H%M'))

    inf = ConfigParser.SafeConfigParser()

    inf.add_section(DATE_AND_AUTHOR_INF_SECTION)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'file-name', "%s.tar.gz" % fname)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'author', unicode(user.id))
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'site', djsettings.APP_URL)
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'started', start_time.strftime(DATETIME_FORMAT))
    inf.set(DATE_AND_AUTHOR_INF_SECTION, 'finished', now.strftime(DATETIME_FORMAT))

    inf.add_section(OPTIONS_INF_SECTION)
    inf.set(OPTIONS_INF_SECTION, 'anon-data', str(options.get('anon_data', False)))
    inf.set(OPTIONS_INF_SECTION, 'with-upfiles', str(options.get('uplodaded_files', False)))
    inf.set(OPTIONS_INF_SECTION, 'with-skins', str(options.get('import_skins_folder', False)))

    inf.add_section(META_INF_SECTION)

    for id, s in state.items():
        inf.set(META_INF_SECTION, id, str(s['count']))

    with open(os.path.join(tmp, 'backup.inf'), 'wb') as inffile:
        inf.write(inffile)

    t.add(os.path.join(tmp, 'backup.inf'), arcname='backup.inf')

    state['overall']['status'] = _('Saving backup file')
    set_state()

    t.close()

    shutil.copyfile(LAST_BACKUP, os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE, "%s.tar.gz" % fname))
    shutil.copyfile(os.path.join(tmp, 'backup.inf'), os.path.join(selfsettings.EXPORTER_BACKUP_STORAGE, "%s.backup.inf" % fname))
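# Add the site's uploaded-files folder (settings.UPFILES_FOLDER) to the archive, if it exists.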
def export_upfiles(tf):
    folder = str(settings.UPFILES_FOLDER)

    if os.path.exists(folder):
        tf.add(folder, arcname='/upfiles')
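# Add the first configured templates/skins directory to the archive, if it exists.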
def export_skinsfolder(tf):
    folder = djsettings.TEMPLATE_DIRS[0]

    if os.path.exists(folder):
        tf.add(folder, arcname='/skins')
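# Entry point for a backup run. Temporarily monkey-patches ElementTree with
# the pretty writer and the .add() helper, runs every registered export step
# while publishing progress to the cache, packs the results with
# create_targz(), and always restores the patched ElementTree internals.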
def export(options, user):
    original__write = xml.etree.ElementTree.ElementTree._write
    xml.etree.ElementTree.ElementTree._write = Etree_pretty__write
    xml.etree.ElementTree._ElementInterface.add = ET_Element_add_tag

    start_time = datetime.datetime.now()
    tmp = TMP_FOLDER # scratch folder for the intermediate xml files
    anon_data = options.get('anon_data', False)

    steps = [s for s in EXPORT_STEPS if not (anon_data and s['fn'].is_user_data())]

    state = dict([(s['id'], {
        'status': _('Queued'), 'count': s['fn'].count(start_time), 'parsed': 0
    }) for s in steps] + [
        ('overall', {
            'status': _('Starting'), 'count': sum([s['fn'].count(start_time) for s in steps]), 'parsed': 0
        })
    ])

    full_state = dict(running=True, state=state, time_started="")

    def set_state():
        full_state['time_started'] = diff_date(start_time)
        cache.set(CACHE_KEY, full_state)

    set_state()

    def ping_state(name):
        state[name]['parsed'] += 1
        state['overall']['parsed'] += 1
        set_state()

    def run(fn, name):
        def ping():
            ping_state(name)

        # `s` is the step currently bound by the loop in the try block below
        state['overall']['status'] = _('Exporting %s') % s['name']
        state[name]['status'] = _('Exporting')

        root, fname = fn(ping, start_time, anon_data)
        state[name]['status'] = _('Writing temp file')
        state['overall']['status'] = _('Writing %s temp file') % s['name']
        set_state()

        write_to_file(root, tmp, fname)
        state[name]['status'] = _('Done')
        set_state()
        return fname

    try:
        dump_files = []
        for s in steps:
            dump_files.append(run(s['fn'], s['id']))

        state['overall']['status'] = _('Compressing files')
        set_state()

        create_targz(tmp, dump_files, start_time, options, user, state, set_state)
        full_state['running'] = False
        full_state['errors'] = False
        state['overall']['status'] = _('Done')
        set_state()
    except Exception, e:
        full_state['running'] = False
        full_state['errors'] = "%s: %s" % (e.__class__.__name__, unicode(e))
        set_state()

        import traceback
        logging.error("Error executing xml backup: \n %s" % (traceback.format_exc()))
    finally:
        xml.etree.ElementTree.ElementTree._write = original__write
        del xml.etree.ElementTree._ElementInterface.add
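# Decorator factory that registers an export step. It wraps a per-object
# serializer into a callable that iterates the queryset (optionally capped at
# the export start time via `date_lock`), builds the XML tree, reports
# progress through `ping`, and exposes count()/is_user_data() metadata.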
def exporter_step(queryset, root_tag_name, el_tag_name, name, date_lock=None, user_data=False):
    def decorator(fn):
        def qs(lock):
            if date_lock:
                return queryset.filter(**{"%s__lte" % date_lock: lock})
            return queryset

        def decorated(ping, lock, anon_data):
            root = ET.Element(root_tag_name)
            for item in qs(lock).order_by('id').select_related():
                el = root.add(el_tag_name)
                fn(item, el, anon_data)
                ping()
            return root, "%s.xml" % root_tag_name

        def count(lock):
            return qs(lock).count()

        def is_user_data():
            return user_data

        decorated.count = count
        decorated.is_user_data = is_user_data

        EXPORT_STEPS.append(dict(id=root_tag_name, name=name, fn=decorated))
        return decorated

    return decorator
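# Export steps. Each function below serializes one model instance into the
# element `el`; `anon_data` marks an anonymized export, where identifying
# fields are skipped and user-data steps are excluded entirely.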
@exporter_step(Tag.objects.all(), 'tags', 'tag', _('Tags'))
def export_tags(t, el, anon_data):
    el.add('name', t.name)

    if not anon_data:
        el.add('author', t.created_by.id)

    el.add('used', t.used_count)
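# Full user profiles, including credentials hash, notification subscriptions
# and tag preferences; only runs when the export is not anonymized.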
@exporter_step(User.objects.all(), 'users', 'user', _('Users'), 'date_joined', True)
def export_users(u, el, anon_data):
    el.add('id', u.id)
    el.add('username', u.username)
    el.add('password', u.password)
    el.add('email', u.email, validated=u.email_isvalid and 'true' or 'false')
    el.add('reputation', u.reputation)
    el.add('badges', bronze=u.bronze, silver=u.silver, gold=u.gold)
    el.add('joindate', make_date(u.date_joined))
    el.add('active', u.is_active and 'true' or 'false')

    el.add('realname', u.real_name)
    el.add('bio', u.about)
    el.add('location', u.location)
    el.add('website', u.website)
    el.add('birthdate', u.date_of_birth and make_date(u.date_of_birth, with_time=False) or "")

    roles = el.add('roles')

    if u.is_superuser:
        roles.add('role', 'superuser')

    if u.is_staff:
        roles.add('role', 'moderator')

    auth = el.add('authKeys')

    for a in u.auth_keys.all():
        key = auth.add('key')
        key.add('provider', a.provider)
        key.add('key', a.key)

    ss = u.subscription_settings

    notify = el.add('notifications', enabled=ss.enable_notifications and 'true' or 'false')

    notify.add('notify', **dict([(t, ss.__dict__.get(t, 'n') == 'i' and 'true' or 'false') for t in ['member_joins', 'new_question', 'new_question_watched_tags', 'subscribed_questions']]))

    notify.add('autoSubscribe', **dict([(t, ss.__dict__.get(t, False) and 'true' or 'false') for t in [
        'all_questions', 'all_questions_watched_tags', 'questions_asked', 'questions_answered', 'questions_commented', 'questions_viewed']]))

    notify.add('notifyOnSubscribed', **dict([(t, ss.__dict__.get("notify_%s" % t, False) and 'true' or 'false') for t in [
        'answers', 'reply_to_comments', 'comments_own_post', 'comments', 'accepted']]))

    notify.add('digest', ss.send_digest and 'on' or 'off')

    watched = el.add('watchedTags')
    rejected = el.add('rejectedTags')

    for m in u.tag_selections.all():
        if m.reason == 'good':
            watched.add('tag', m.tag.name)
        else:
            rejected.add('tag', m.tag.name)
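# Questions, answers and comments (all Node types) with their revision
# history; author references are dropped from anonymized exports.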
@exporter_step(Node.objects.all(), 'nodes', 'node', _('Nodes'), 'added_at')
def export_nodes(n, el, anon_data):
    el.add('id', n.id)
    el.add('type', n.node_type)

    if not anon_data:
        el.add('author', n.author.id)

    el.add('date', make_date(n.added_at))
    el.add('parent', n.parent and n.parent.id or "")
    el.add('absparent', n.abs_parent and n.abs_parent.id or "")

    act = el.add('lastactivity')
    act.add('by', n.last_activity_by and n.last_activity_by.id or "")
    act.add('at', n.last_activity_at and make_date(n.last_activity_at) or "")

    el.add('title', n.title)
    el.add('body', n.body)

    el.add('score', n.score)

    tags = el.add('tags')

    for t in n.tagname_list():
        tags.add('tag', t)

    active = n.active_revision and n.active_revision.revision or n.revisions.order_by('revision')[0].revision

    revs = el.add('revisions', active=active)

    for r in n.revisions.order_by('revision'):
        rev = revs.add('revision')
        rev.add('number', r.revision)
        rev.add('summary', r.summary)

        if not anon_data:
            rev.add('author', r.author.id)

        rev.add('date', make_date(r.revised_at))
        rev.add('title', r.title)
        rev.add('body', r.body)
        rev.add('tags', ", ".join(r.tagname_list()))

    el.add('marked', n.marked and 'true' or 'false')
    el.add('extraRef', n.extra_ref and n.extra_ref.id or "")
    make_extra(el.add('extraData'), n.extra)
    el.add('extraCount', n.extra_count and n.extra_count or "")
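# Moderation/voting actions with their cancellation info and reputation
# changes; user references and IPs are dropped from anonymized exports.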
@exporter_step(Action.objects.all(), 'actions', 'action', _('Actions'), 'action_date')
def export_actions(a, el, anon_data):
    el.add('id', a.id)
    el.add('type', a.action_type)
    el.add('date', make_date(a.action_date))

    if not anon_data:
        el.add('user', a.user.id)
        el.add('realUser', a.real_user and a.real_user.id or "")
        el.add('ip', a.ip)

    el.add('node', a.node and a.node.id or "")

    make_extra(el.add('extraData'), a.extra)

    canceled = el.add('canceled', state=a.canceled and 'true' or 'false')

    if a.canceled:
        if not anon_data:
            canceled.add('user', a.canceled_by.id)
            canceled.add('ip', a.canceled_ip)

        canceled.add('date', make_date(a.canceled_at))

    if not anon_data:
        reputes = el.add('reputes')

        for r in a.reputes.all():
            repute = reputes.add('repute', byCanceled=r.by_canceled and 'true' or 'false')
            repute.add('user', r.user.id)
            repute.add('value', r.value)
#@exporter_step(NodeState.objects.all(), 'states', 'state', _('Node states'), 'action__action_date')
#def export_states(s, el, anon_data):
#    el.add('type', s.state_type)
#    el.add('node', s.node.id)
#    el.add('trigger', s.action.id)


#@exporter_step(Badge.objects.all(), 'badges', 'badge', _('Badges'), user_data=True)
#def export_badges(b, el, anon_data):
#    el.add('type', ["", 'gold', 'silver', 'bronze'][b.type])
#    el.add('name', b.cls)
#    el.add('count', b.awarded_count)
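# Badge awards; skipped entirely when the export is anonymized.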
@exporter_step(Award.objects.all(), 'awards', 'award', _('Awards'), 'awarded_at', True)
def export_awards(a, el, anon_data):
    el.add('badge', a.badge.cls)
    el.add('user', a.user.id)
    el.add('node', a.node and a.node.id or "")
    el.add('trigger', a.trigger and a.trigger.id or "")
    el.add('action', a.action.id)
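# Site settings stored as KeyValue pairs; values are serialized through make_extra().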
@exporter_step(KeyValue.objects.all(), 'settings', 'setting', _('Settings'))
def export_settings(s, el, anon_data):
    el.add('key', s.key)
    make_extra(el.add('value'), s.value)