--- /dev/null
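+# Raw SQL helpers for the PostgreSQL backend. After a bulk import that keeps
+# the original primary keys, every serial sequence must be moved past the
+# highest imported id, or the next INSERT would collide with imported rows.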
+PG_SEQUENCE_RESETS = """
+SELECT setval('"auth_user_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user";
+SELECT setval('"auth_user_groups_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_groups";
+SELECT setval('"auth_user_user_permissions_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_user_permissions";
+SELECT setval('"forum_keyvalue_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_keyvalue";
+SELECT setval('"forum_action_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_action";
+SELECT setval('"forum_actionrepute_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_actionrepute";
+SELECT setval('"forum_subscriptionsettings_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_subscriptionsettings";
+SELECT setval('"forum_validationhash_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_validationhash";
+SELECT setval('"forum_authkeyuserassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_authkeyuserassociation";
+SELECT setval('"forum_tag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_tag";
+SELECT setval('"forum_markedtag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_markedtag";
+SELECT setval('"forum_node_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node";
+SELECT setval('"forum_nodestate_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_nodestate";
+SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";
+SELECT setval('"forum_noderevision_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_noderevision";
+SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";
+SELECT setval('"forum_questionsubscription_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_questionsubscription";
+SELECT setval('"forum_vote_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_vote";
+SELECT setval('"forum_flag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_flag";
+SELECT setval('"forum_badge_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_badge";
+SELECT setval('"forum_award_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_award";
+SELECT setval('"forum_openidnonce_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidnonce";
+SELECT setval('"forum_openidassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidassociation";
+"""
+
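+# PostgreSQL implements foreign key checks as system triggers, so disabling
+# ALL triggers while the dump is loaded removes any dependency on the order
+# in which mutually referencing rows arrive (requires superuser rights).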
+PG_DISABLE_TRIGGERS = """
+ALTER TABLE auth_user DISABLE TRIGGER ALL;
+ALTER TABLE auth_user_groups DISABLE TRIGGER ALL;
+ALTER TABLE auth_user_user_permissions DISABLE TRIGGER ALL;
+ALTER TABLE forum_keyvalue DISABLE TRIGGER ALL;
+ALTER TABLE forum_action DISABLE TRIGGER ALL;
+ALTER TABLE forum_actionrepute DISABLE TRIGGER ALL;
+ALTER TABLE forum_subscriptionsettings DISABLE TRIGGER ALL;
+ALTER TABLE forum_validationhash DISABLE TRIGGER ALL;
+ALTER TABLE forum_authkeyuserassociation DISABLE TRIGGER ALL;
+ALTER TABLE forum_tag DISABLE TRIGGER ALL;
+ALTER TABLE forum_markedtag DISABLE TRIGGER ALL;
+ALTER TABLE forum_node DISABLE TRIGGER ALL;
+ALTER TABLE forum_nodestate DISABLE TRIGGER ALL;
+ALTER TABLE forum_node_tags DISABLE TRIGGER ALL;
+ALTER TABLE forum_noderevision DISABLE TRIGGER ALL;
+ALTER TABLE forum_questionsubscription DISABLE TRIGGER ALL;
+ALTER TABLE forum_vote DISABLE TRIGGER ALL;
+ALTER TABLE forum_flag DISABLE TRIGGER ALL;
+ALTER TABLE forum_badge DISABLE TRIGGER ALL;
+ALTER TABLE forum_award DISABLE TRIGGER ALL;
+ALTER TABLE forum_openidnonce DISABLE TRIGGER ALL;
+ALTER TABLE forum_openidassociation DISABLE TRIGGER ALL;
+"""
+
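+# Mirror list to turn the triggers back on once the import has finished.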
+PG_ENABLE_TRIGGERS = """
+ALTER TABLE auth_user ENABLE TRIGGER ALL;
+ALTER TABLE auth_user_groups ENABLE TRIGGER ALL;
+ALTER TABLE auth_user_user_permissions ENABLE TRIGGER ALL;
+ALTER TABLE forum_keyvalue ENABLE TRIGGER ALL;
+ALTER TABLE forum_action ENABLE TRIGGER ALL;
+ALTER TABLE forum_actionrepute ENABLE TRIGGER ALL;
+ALTER TABLE forum_subscriptionsettings ENABLE TRIGGER ALL;
+ALTER TABLE forum_validationhash ENABLE TRIGGER ALL;
+ALTER TABLE forum_authkeyuserassociation ENABLE TRIGGER ALL;
+ALTER TABLE forum_tag ENABLE TRIGGER ALL;
+ALTER TABLE forum_markedtag ENABLE TRIGGER ALL;
+ALTER TABLE forum_node ENABLE TRIGGER ALL;
+ALTER TABLE forum_nodestate ENABLE TRIGGER ALL;
+ALTER TABLE forum_node_tags ENABLE TRIGGER ALL;
+ALTER TABLE forum_noderevision ENABLE TRIGGER ALL;
+ALTER TABLE forum_questionsubscription ENABLE TRIGGER ALL;
+ALTER TABLE forum_vote ENABLE TRIGGER ALL;
+ALTER TABLE forum_flag ENABLE TRIGGER ALL;
+ALTER TABLE forum_badge ENABLE TRIGGER ALL;
+ALTER TABLE forum_award ENABLE TRIGGER ALL;
+ALTER TABLE forum_openidnonce ENABLE TRIGGER ALL;
+ALTER TABLE forum_openidassociation ENABLE TRIGGER ALL;
+"""
\ No newline at end of file
DATE_AND_AUTHOR_INF_SECTION = 'DateAndAuthor'
OPTIONS_INF_SECTION = 'Options'
+META_INF_SECTION = 'Meta'
DATETIME_FORMAT = "%Y-%m-%d %H:%M:%S"
DATE_FORMAT = "%Y-%m-%d"
if xmlns: xmlns_items.append(xmlns)
except TypeError:
raise #_raise_serialization_error(v)
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
+ file.write(u" %s=\"%s\"" % (_encode(k, encoding),
_escape_attrib(v, encoding)))
for k, v in xmlns_items:
- file.write(" %s=\"%s\"" % (_encode(k, encoding),
+ file.write(u" %s=\"%s\"" % (_encode(k, encoding),
_escape_attrib(v, encoding)))
if node.text or len(node):
file.write(">")
if node.tail:
file.write(_escape_cdata(node.tail.replace("\n", level * identator + "\n"), encoding))
-def _add_tag(el, name, content = None):
- tag = ET.SubElement(el, name)
- if content:
- tag.text = content
- return tag
-
def make_date(date, with_time=True):
try:
return date.strftime(with_time and DATETIME_FORMAT or DATE_FORMAT)
tag = ET.SubElement(el, tag_name)
if content:
- tag.text = unicode(content).encode('utf-8')
+ tag.text = unicode(content)
for k, v in attrs.items():
tag.set(k, unicode(v))
inf.set(OPTIONS_INF_SECTION, 'with-upfiles', str(options.get('uplodaded_files', False)))
inf.set(OPTIONS_INF_SECTION, 'with-skins', str(options.get('import_skins_folder', False)))
+ inf.add_section(META_INF_SECTION)
+
+    for section_id, s in state.items():
+        inf.set(META_INF_SECTION, section_id, str(s['count']))
+
with open(os.path.join(tmp, 'backup.inf'), 'wb') as inffile:
inf.write(inffile)
el.add('title', n.title)
el.add('body', n.body)
+ el.add('score', n.score)
+
tags = el.add('tags')
for t in n.tagname_list():
revs = el.add('revisions', active=n.active_revision and n.active_revision.revision or n.revisions.order_by('revision')[0].revision)
for r in n.revisions.order_by('revision'):
- rev = _add_tag(revs, 'revision')
+ rev = revs.add('revision')
rev.add('number', r.revision)
rev.add('summary', r.summary)
if not anon_data:
def export_actions(a, el, anon_data):
el.add('id', a.id)
el.add('type', a.action_type)
- el.add('date', a.action_date)
+ el.add('date', make_date(a.action_date))
if not anon_data:
el.add('user', a.user.id)
canceled.add('user', a.canceled_by.id)
canceled.add('ip', a.canceled_ip)
- canceled.add('date', a.canceled_at)
+ canceled.add('date', make_date(a.canceled_at))
if not anon_data:
reputes = el.add('reputes')
-import os, tarfile, datetime
+import os, tarfile, datetime, ConfigParser, logging
+
+from django.utils.translation import ugettext as _
+from django.core.cache import cache
+
+from south.db import db
from xml.sax import make_parser
from xml.sax.handler import ContentHandler, ErrorHandler
-from exporter import TMP_FOLDER, DATETIME_FORMAT, DATE_FORMAT
+from forum.templatetags.extra_tags import diff_date
+
+from exporter import TMP_FOLDER, DATETIME_FORMAT, DATE_FORMAT, META_INF_SECTION, CACHE_KEY
from orm import orm
+import commands
NO_DEFAULT = object()
self.name = name.lower()
self.parent = parent
self.attrs = dict([(k.lower(), ContentElement(v)) for k, v in attrs.items()])
- self._content = ''
+ self._content = u''
self.sub_elements = {}
if parent:
parent.add(self)
def add_to_content(self, ch):
- self._content += ch
+ self._content += unicode(ch)
def add(self, sub):
curr = self.sub_elements.get(sub.name, None)
class TableHandler(ContentHandler):
- def __init__(self, root_name, row_name, callback, callback_args = []):
+ def __init__(self, root_name, row_name, callback, callback_args = [], ping = None):
self.root_name = root_name.lower()
self.row_name = row_name.lower()
self.callback = callback
self.callback_args = callback_args
+ self.ping = ping
self._reset()
pass
elif name == self.row_name:
self.callback(self.curr_element, *self.callback_args)
+ if self.ping:
+ self.ping()
+
self._reset()
else:
self.curr_element = self.curr_element.parent
def warning(self, e):
raise e
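+# The trigger and sequence statements above are PostgreSQL specific; on any
+# other backend these helpers simply do nothing.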
+def disable_triggers():
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many(commands.PG_DISABLE_TRIGGERS)
+ db.commit_transaction()
+
+def enable_triggers():
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many(commands.PG_ENABLE_TRIGGERS)
+ db.commit_transaction()
+
+def reset_sequences():
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many(commands.PG_SEQUENCE_RESETS)
+ db.commit_transaction()
+
FILE_HANDLERS = []
def start_import(fname, user):
+    start_time = datetime.datetime.now()
+    steps = list(FILE_HANDLERS)
+
+ with open(os.path.join(TMP_FOLDER, 'backup.inf'), 'r') as inffile:
+ inf = ConfigParser.SafeConfigParser()
+ inf.readfp(inffile)
+
+ state = dict([(s['id'], {
+ 'status': _('Queued'), 'count': int(inf.get(META_INF_SECTION, s['id'])), 'parsed': 0
+ }) for s in steps] + [
+ ('overall', {
+ 'status': _('Starting'), 'count': int(inf.get(META_INF_SECTION, 'overall')), 'parsed': 0
+ })
+ ])
+
+    full_state = dict(running=True, state=state, time_started="", errors=False)
+
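+    # Progress is published through the cache under CACHE_KEY so the
+    # "running" admin page can poll it via the exporter_state view.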
+ def set_state():
+ full_state['time_started'] = diff_date(start_time)
+ cache.set(CACHE_KEY, full_state)
+
+ set_state()
+
+ def ping_state(name):
+ state[name]['parsed'] += 1
+ state['overall']['parsed'] += 1
+ set_state()
+
+    def run(fn, name, title):
+        def ping():
+            ping_state(name)
+
+        state['overall']['status'] = _('Importing %s') % title
+        state[name]['status'] = _('Importing')
+        set_state()
+
+        fn(TMP_FOLDER, user, ping)
+
+        state[name]['status'] = _('Done')
+
+        set_state()
+
#dump = tarfile.open(fname, 'r')
#dump.extractall(TMP_FOLDER)
- for h in FILE_HANDLERS:
- h(TMP_FOLDER, user)
+ try:
+
+ disable_triggers()
+ db.start_transaction()
+
+ for h in FILE_HANDLERS:
+            run(h['fn'], h['id'], h['name'])
+
+        db.commit_transaction()
+        enable_triggers()
+
+        reset_sequences()
+
+        full_state['running'] = False
+        set_state()
+    except Exception, e:
+        # Roll the whole load back and restore the triggers before reporting.
+        db.rollback_transaction()
+        enable_triggers()
+
+        full_state['running'] = False
+        full_state['errors'] = "%s: %s" % (e.__class__.__name__, unicode(e))
+        set_state()
+
+        import traceback
+        logging.error("Error executing xml import:\n%s" % traceback.format_exc())
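+# Registers a SAX-driven handler for one XML file of the backup; root_tag
+# doubles as the step id in the progress state and `name` is the label shown
+# on the progress page.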
-def file_handler(file_name, root_tag, el_tag, args_handler=None, pre_callback=None, post_callback=None):
+def file_handler(file_name, root_tag, el_tag, name, args_handler=None, pre_callback=None, post_callback=None):
def decorator(fn):
- def decorated(location, current_user):
+ def decorated(location, current_user, ping):
if pre_callback:
pre_callback(current_user)
args = []
parser = make_parser()
- handler = TableHandler(root_tag, el_tag, fn, args)
+ handler = TableHandler(root_tag, el_tag, fn, args, ping)
parser.setContentHandler(handler)
#parser.setErrorHandler(SaxErrorHandler())
if post_callback:
post_callback()
- FILE_HANDLERS.append(decorated)
+ FILE_HANDLERS.append(dict(id=root_tag, name=name, fn=decorated))
return decorated
return decorator
-@file_handler('users.xml', 'users', 'user', args_handler=lambda u: [u])
+@file_handler('users.xml', 'users', 'user', _('Users'), args_handler=lambda u: [u])
def user_import(row, current_user):
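+    # The admin running the import is already in the database; mark him as
+    # existent so his settings row is updated in place instead of duplicated.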
+ existent = False
+
if str(current_user.id) == row.getc('id'):
- return
+ existent = True
roles = row.get('roles').get_listc('role')
valid_email = row.get('email').get_attr('validated').as_bool()
attributes.update(dict([(str(k), v.as_bool()) for k, v in notifications.get('autoSubscribe').attrs.items()]))
attributes.update(dict([(str("notify_%s" % k), v.as_bool()) for k, v in notifications.get('notifyOnSubscribed').attrs.items()]))
- orm.SubscriptionSettings(user=user, enable_notifications=notifications.get_attr('enabled').as_bool(), **attributes).save()
+ ss = orm.SubscriptionSettings(user=user, enable_notifications=notifications.get_attr('enabled').as_bool(), **attributes)
+
+ if existent:
+ ss.id = current_user.subscription_settings.id
+
+ ss.save()
+
def pre_tag_import(user):
tag_import.tag_mappings={}
-@file_handler('tags.xml', 'tags', 'tag', pre_callback=pre_tag_import)
+@file_handler('tags.xml', 'tags', 'tag', _('Tags'), pre_callback=pre_tag_import)
def tag_import(row):
tag = orm.Tag(name=row.getc('name'), used_count=row.get('used').as_int(), created_by_id=row.get('author').as_int())
tag.save()
def post_node_import():
tag_import.tag_mappings = None
-@file_handler('nodes.xml', 'nodes', 'node', args_handler=lambda u: [tag_import.tag_mappings], post_callback=post_node_import)
+@file_handler('nodes.xml', 'nodes', 'node', _('Nodes'), args_handler=lambda u: [tag_import.tag_mappings], post_callback=post_node_import)
def node_import(row, tags):
ntags = []
added_at = row.get('date').as_datetime(),
parent_id = row.get('parent').as_int(None),
abs_parent_id = row.get('absparent').as_int(None),
+ score = row.get('score').as_int(0),
last_activity_by_id = last_act.get('by').as_int(None),
last_activity_at = last_act.get('at').as_datetime(None),
n.state_string = "".join(["(%s)" % s for s in n.states.values_list('state_type')])
n.save()
-@file_handler('actions.xml', 'actions', 'action', post_callback=post_action_import_callback)
+@file_handler('actions.xml', 'actions', 'action', _('Actions'), post_callback=post_action_import_callback)
def actions_import(row):
action = orm.Action(
id = row.get('id').as_int(),
canceled = row.get('canceled')
if canceled.get_attr('state').as_bool():
+ action.canceled = True
action.canceled_by_id = canceled.get('user').as_int()
- action.canceled_at = canceled.get('date').as_datetime(),
+        action.canceled_at = canceled.get('date').as_datetime()
action.canceled_ip = canceled.getc('ip')
action.save()
by_canceled = by_canceled
).save()
- if (not action.canceled) and action.action_type in POST_ACTION:
+ if (not action.canceled) and (action.action_type in POST_ACTION):
POST_ACTION[action.action_type](row, action)
return [ dict([ (b.cls, b) for b in orm.Badge.objects.all() ]) ]
-@file_handler('awards.xml', 'awards', 'award', args_handler=award_import_args)
+@file_handler('awards.xml', 'awards', 'award', _('Awards'), args_handler=award_import_args)
def awards_import(row, badges):
- award = orm.Award(
- user_id = row.get('user').as_int(),
- badge = badges[row.getc('badge')],
- node_id = row.get('node').as_int(None),
- action_id = row.get('action').as_int(None),
- trigger_id = row.get('trigger').as_int(None)
- ).save()
+    badge_type = badges.get(row.getc('badge'), None)
+
+    # Badges that no longer exist in this instance are skipped.
+    if not badge_type:
+        return
+
+    try:
+        orm.Award(
+            user_id = row.get('user').as_int(),
+            badge = badge_type,
+            node_id = row.get('node').as_int(None),
+            action_id = row.get('action').as_int(None),
+            trigger_id = row.get('trigger').as_int(None)
+        ).save()
+    except Exception, e:
+        logging.warning("Skipping award row: %s" % unicode(e))
+
+
+@file_handler('settings.xml', 'settings', 'setting', _('Settings'))
+def settings_import(row):
+    orm.KeyValue(key=row.getc('key'), value=row.get('value').as_pickled()).save()
{% load i18n %}
{% block subtitle %}
- {% trans "XML data exporter" %}
+ {% ifequal mode "exporter" %}{% trans "XML data exporter" %}{% else %}{% trans "XML data importer" %}{% endifequal %}
{% endblock %}
{% block description %}
- {% trans "Export database to XML format" %}
+ {% ifequal mode "exporter" %}{% trans "Export database to XML format" %}{% else %}{% trans "Restore data from a previouus export" %}{% endifequal %}
{% endblock %}
{% block adminjs %}
<script type="text/javascript">
$(function() {
+ var exporting = {% ifequal mode "exporter" %}true{% else %}false{% endifequal %};
+
var state = null;
function set_state(name, data) {
}
function check_state() {
- $.getJSON('{% url exporter_state %}', function(data) {
- set_state('overall', data.state)
- {% for s in steps %}
- set_state('{{ s.id }}', data.state);
- {% endfor %}
-
- $('#time_started').html(data.time_started);
-
- state = data;
-
- if (data.running) {
- check_state();
- } else {
- if (data.errors == false) {
- $('#wait_message').html('{% trans "Your backup is ready to be downloaded."%}')
- $('#download_link').slideDown();
+ var done = false;
+
+ function callback(data) {
+ if (done) {
+ return;
+ }
+ set_state('overall', data.state);
+ {% for s in steps %}
+ set_state('{{ s.id }}', data.state);
+ {% endfor %}
+
+ $('#time_started').html(data.time_started);
+
+ state = data;
+
+ if (data.running) {
+ check_state();
} else {
- $('#wait_message').html('{% trans "An error has occurred during de export proccess: "%}' + data.errors + '<br />' +
- '{% trans "Please check the log file for a full stack trace." %}')
+ if (data.errors == false) {
+ if (exporting) {
+ $('#wait_message').html('{% trans "Your backup is ready to be downloaded."%}');
+ $('#download_link').slideDown();
+ } else {
+ $('#wait_message').html('{% trans "All data sucessfully imported."%}')
+ }
+ } else {
+ var msg;
+ if (exporting) {
+ msg = '{% trans "An error has occurred during de export proccess"%}';
+ } else {
+ msg = '{% trans "An error has occurred during de import proccess"%}';
+ }
+ $('#wait_message').html(msg + ': ' + data.errors + '<br />' +
+ '{% trans "Please check the log file for a full stack trace." %}')
+ }
}
- }
- });
+
+ done = true;
+ }
+
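+            // Poll again after one second if the state request has not
+            // answered yet; the `done` flag makes a late response a no-op so
+            // only one polling chain stays alive.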
+ window.setTimeout(function() {
+ if (!done) {
+ done = true;
+ check_state();
+ }
+ }, 1000);
+
+ $.getJSON('{% url exporter_state %}', callback);
}
check_state();
{% block admincontent %}
<p id="wait_message">
- {% trans "Please wait while we prepare your backup file to download." %} -
+ {% ifequal mode "exporter" %}
+ {% trans "Please wait while we prepare your backup file to download." %}
+ {% else %}
+ {% trans "Please wait while we import your data." %}
+ {% endifequal %} -
{% blocktrans %}
Started <span id="time_started"></span>
{% endblocktrans %}
urlpatterns = patterns('',
url(r'^%s%s%s$' % (_('admin/'), _('exporter/'), _('state/')), state, name='exporter_state'),
- url(r'^%s%s%s$' % (_('admin/'), _('exporter/'), _('running/')), running, name='exporter_running'),
+ url(r'^%s(?P<mode>\w+)/%s$' % (_('admin/'), _('running/')), running, name='exporter_running'),
url(r'^%s%s%s$' % (_('admin/'), _('exporter/'), _('download/')), download, name='exporter_download'),
url(r'^%s%s%s$' % (_('admin/'), _('exporter/'), _('import/')), importer, name='exporter_import'),
thread.setDaemon(True)
thread.start()
- return HttpResponseRedirect(reverse('exporter_running'))
+ return HttpResponseRedirect(reverse('exporter_running', kwargs=dict(mode='exporter')))
else:
form = ExporterForm()
})
@admin_page
-def running(request):
+def running(request, mode):
state = cache.get(CACHE_KEY)
if state is None:
return HttpResponseRedirect(reverse('admin_tools', args=[_('exporter')]))
return ('modules/exporter/running.html', {
+ 'mode': mode,
'steps': EXPORT_STEPS
})
@admin_page
def importer(request):
- start_import('/Users/admin/dev/pyenv/osqa/maintain/forum_modules/exporter/backups/localhost-201010121118.tar.gz', request.user)
+ thread = Thread(target=start_import, args=['/Users/admin/dev/pyenv/osqa/maintain/forum_modules/exporter/backups/localhost-201010121118.tar.gz', request.user])
+ thread.setDaemon(True)
+ thread.start()
- return ('modules/exporter/importer.html', {
+ return HttpResponseRedirect(reverse('exporter_running', kwargs=dict(mode='importer')))
- })