-# -*- coding: utf-8 -*-\r
-\r
-from xml.dom import minidom\r
-from datetime import datetime, timedelta\r
-import time\r
-import re\r
-from django.utils.translation import ugettext as _\r
-from django.template.defaultfilters import slugify\r
-from forum.models.utils import dbsafe_encode\r
-from orm import orm\r
-\r
-def getText(el):\r
- rc = ""\r
- for node in el.childNodes:\r
- if node.nodeType == node.TEXT_NODE:\r
- rc = rc + node.data\r
- return rc.strip()\r
-\r
-msstrip = re.compile(r'^(.*)\.\d+')\r
-def readTime(ts):\r
- noms = msstrip.match(ts)\r
- if noms:\r
- ts = noms.group(1)\r
-\r
- return datetime(*time.strptime(ts, '%Y-%m-%dT%H:%M:%S')[0:6])\r
-\r
-def readEl(el):\r
- return dict([(n.tagName.lower(), getText(n)) for n in el.childNodes if n.nodeType == el.ELEMENT_NODE])\r
-\r
-def readTable(dump, name):\r
- return [readEl(e) for e in minidom.parseString(dump.read("%s.xml" % name)).getElementsByTagName('row')]\r
-\r
-google_accounts_lookup = re.compile(r'^https?://www.google.com/accounts/')\r
-yahoo_accounts_lookup = re.compile(r'^https?://me.yahoo.com/a/')\r
-\r
-openid_lookups = [\r
- re.compile(r'^https?://www.google.com/profiles/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://me.yahoo.com/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://openid.aol.com/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).myopenid.com/?$'),\r
- re.compile(r'^https?://flickr.com/(\w+/)*(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://technorati.com/people/technorati/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).wordpress.com/?$'),\r
- re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).blogspot.com/?$'),\r
- re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).livejournal.com/?$'),\r
- re.compile(r'^https?://claimid.com/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).pip.verisignlabs.com/?$'),\r
- re.compile(r'^https?://getopenid.com/(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://[\w\.]+/(\w+/)*(?P<uname>\w+(\.\w+)*)/?$'),\r
- re.compile(r'^https?://(?P<uname>[\w\.]+)/?$'),\r
-]\r
-\r
-def final_username_attempt(sxu):\r
- openid = sxu.get('openid', None)\r
-\r
- if openid:\r
- if google_accounts_lookup.search(openid):\r
- return UnknownGoogleUser(sxu.get('id'))\r
- if yahoo_accounts_lookup.search(openid):\r
- return UnknownYahooUser(sxu.get('id'))\r
-\r
- for lookup in openid_lookups:\r
- if lookup.search(openid):\r
- return lookup.search(openid).group('uname')\r
-\r
- return UnknownUser(sxu.get('id'))\r
-\r
-class UnknownUser(object):\r
- def __init__(self, id):\r
- self._id = id\r
-\r
- def __str__(self):\r
- return _("user-%(id)s") % {'id': self._id}\r
-\r
- def __unicode__(self):\r
- return self.__str__()\r
-\r
- def encode(self, *args):\r
- return self.__str__()\r
-\r
-class UnknownGoogleUser(UnknownUser):\r
- def __str__(self):\r
- return _("user-%(id)s (google)") % {'id': self._id}\r
-\r
-class UnknownYahooUser(UnknownUser):\r
- def __str__(self):\r
- return _("user-%(id)s (yahoo)") % {'id': self._id}\r
-\r
-\r
-class IdMapper(dict):\r
- def __getitem__(self, key):\r
- key = int(key)\r
- return super(IdMapper, self).get(key, 1)\r
-\r
- def __setitem__(self, key, value):\r
- super(IdMapper, self).__setitem__(int(key), int(value))\r
-\r
-openidre = re.compile('^https?\:\/\/')\r
-def userimport(dump, options):\r
- users = readTable(dump, "Users")\r
-\r
- user_by_name = {}\r
- uidmapper = IdMapper()\r
- merged_users = []\r
-\r
- owneruid = options.get('owneruid', None)\r
- #check for empty values\r
- if not owneruid:\r
- owneruid = None\r
-\r
- for sxu in users:\r
- create = True\r
-\r
- if sxu.get('id') == '-1':\r
- continue\r
-\r
- if int(sxu.get('id')) == int(owneruid):\r
- osqau = orm.User.objects.get(id=1)\r
- uidmapper[owneruid] = 1\r
- uidmapper[-1] = 1\r
- create = False\r
- else:\r
- username = sxu.get('displayname', sxu.get('displaynamecleaned', sxu.get('realname', final_username_attempt(sxu))))\r
-\r
- if not isinstance(username, UnknownUser) and username in user_by_name:\r
- #if options.get('mergesimilar', False) and sxu.get('email', 'INVALID') == user_by_name[username].email:\r
- # osqau = user_by_name[username]\r
- # create = False\r
- # uidmapper[sxu.get('id')] = osqau.id\r
- #else:\r
- inc = 1\r
- while ("%s %d" % (username, inc)) in user_by_name:\r
- inc += 1\r
-\r
- username = "%s %d" % (username, inc)\r
-\r
- sxbadges = sxu.get('badgesummary', None)\r
- badges = {'1':'0','2':'0','3':'0'}\r
-\r
- if sxbadges:\r
- badges.update(dict([b.split('=') for b in sxbadges.split()]))\r
-\r
- if create:\r
- osqau = orm.User(\r
- id = sxu.get('id'),\r
- username = unicode(username),\r
- password = '!',\r
- email = sxu.get('email', ''),\r
- is_superuser = sxu.get('usertypeid') == '5',\r
- is_staff = sxu.get('usertypeid') == '4',\r
- is_active = True,\r
- date_joined = readTime(sxu.get('creationdate')),\r
- last_seen = readTime(sxu.get('lastaccessdate')),\r
- about = sxu.get('aboutme', ''),\r
- date_of_birth = sxu.get('birthday', None) and readTime(sxu['birthday']) or None,\r
- email_isvalid = int(sxu.get('usertypeid')) > 2,\r
- website = sxu.get('websiteurl', ''),\r
- reputation = int(sxu.get('reputation')),\r
- gold = int(badges['1']),\r
- silver = int(badges['2']),\r
- bronze = int(badges['3']),\r
- real_name = sxu.get('realname', ''),\r
- location = sxu.get('location', ''),\r
- )\r
-\r
- osqau.save()\r
-\r
- user_joins = orm.Action(\r
- action_type = "userjoins",\r
- action_date = osqau.date_joined,\r
- user = osqau\r
- )\r
- user_joins.save()\r
-\r
- rep = orm.ActionRepute(\r
- value = 1,\r
- user = osqau,\r
- date = osqau.date_joined,\r
- action = user_joins\r
- )\r
- rep.save() \r
-\r
- try:\r
- orm.SubscriptionSettings.objects.get(user=osqau)\r
- except:\r
- s = orm.SubscriptionSettings(user=osqau)\r
- s.save()\r
-\r
- uidmapper[osqau.id] = osqau.id\r
- else:\r
- new_about = sxu.get('aboutme', None)\r
- if new_about and osqau.about != new_about:\r
- if osqau.about:\r
- osqau.about = "%s\n|\n%s" % (osqau.about, new_about)\r
- else:\r
- osqau.about = new_about\r
-\r
- osqau.username = sxu.get('displayname', sxu.get('displaynamecleaned', sxu.get('realname', final_username_attempt(sxu))))\r
- osqau.email = sxu.get('email', '')\r
- osqau.reputation += int(sxu.get('reputation'))\r
- osqau.gold += int(badges['1'])\r
- osqau.silver += int(badges['2'])\r
- osqau.bronze += int(badges['3'])\r
-\r
- osqau.date_joined = readTime(sxu.get('creationdate'))\r
- osqau.website = sxu.get('websiteurl', '')\r
- osqau.date_of_birth = sxu.get('birthday', None) and readTime(sxu['birthday']) or None\r
- osqau.location = sxu.get('location', '')\r
- osqau.real_name = sxu.get('realname', '')\r
-\r
- merged_users.append(osqau.id)\r
- osqau.save()\r
-\r
- user_by_name[osqau.username] = osqau\r
-\r
- openid = sxu.get('openid', None)\r
- if openid and openidre.match(openid):\r
- assoc = orm.AuthKeyUserAssociation(user=osqau, key=openid, provider="openidurl")\r
- assoc.save()\r
-\r
- if uidmapper[-1] == -1:\r
- uidmapper[-1] = 1\r
-\r
- return (uidmapper, merged_users)\r
-\r
-def tagsimport(dump, uidmap):\r
- tags = readTable(dump, "Tags")\r
-\r
- tagmap = {}\r
-\r
- for sxtag in tags:\r
- otag = orm.Tag(\r
- id = int(sxtag['id']),\r
- name = sxtag['name'],\r
- used_count = int(sxtag['count']),\r
- created_by_id = uidmap[sxtag.get('userid', 1)],\r
- )\r
- otag.save()\r
-\r
- tagmap[otag.name] = otag\r
-\r
- return tagmap\r
-\r
-def postimport(dump, uidmap, tagmap):\r
- history = {}\r
- accepted = {}\r
- all = {}\r
-\r
- for h in readTable(dump, "PostHistory"):\r
- if not history.get(h.get('postid'), None):\r
- history[h.get('postid')] = []\r
-\r
- history[h.get('postid')].append(h)\r
-\r
- posts = readTable(dump, "Posts")\r
-\r
- for sxpost in posts:\r
- nodetype = (sxpost.get('posttypeid') == '1') and "nodetype" or "answer"\r
-\r
- post = orm.Node(\r
- node_type = nodetype,\r
- id = sxpost['id'],\r
- added_at = readTime(sxpost['creationdate']),\r
- body = sxpost['body'],\r
- score = sxpost.get('score', 0),\r
- author_id = sxpost.get('deletiondate', None) and 1 or uidmap[sxpost['owneruserid']]\r
- )\r
-\r
- post.save()\r
-\r
- create_action = orm.Action(\r
- action_type = (nodetype == "nodetype") and "ask" or "answer",\r
- user_id = post.author_id,\r
- node = post,\r
- action_date = post.added_at\r
- )\r
-\r
- create_action.save()\r
-\r
- #if sxpost.get('deletiondate', None):\r
- # delete_action = orm.Action(\r
- # action_type = "delete",\r
- # user_id = 1,\r
- # node = post,\r
- # action_date = readTime(sxpost['deletiondate'])\r
- # )\r
-\r
- # delete_action.save()\r
- # post.deleted = delete_action\r
-\r
- if sxpost.get('lasteditoruserid', None):\r
- revise_action = orm.Action(\r
- action_type = "revise",\r
- user_id = uidmap[sxpost.get('lasteditoruserid')],\r
- node = post,\r
- action_date = readTime(sxpost['lasteditdate']),\r
- )\r
-\r
- revise_action.save()\r
- post.last_edited = revise_action\r
-\r
- if sxpost.get('communityowneddate', None):\r
- post.wiki = True\r
-\r
- wikify_action = orm.Action(\r
- action_type = "wikify",\r
- user_id = 1,\r
- node = post,\r
- action_date = readTime(sxpost['communityowneddate'])\r
- )\r
-\r
- wikify_action.save()\r
-\r
-\r
- if sxpost.get('lastactivityuserid', None):\r
- post.last_activity_by_id = uidmap[sxpost['lastactivityuserid']]\r
- post.last_activity_at = readTime(sxpost['lastactivitydate'])\r
-\r
- \r
- if sxpost.get('posttypeid') == '1': #question\r
- post.node_type = "question"\r
- post.title = sxpost['title']\r
-\r
- tagnames = sxpost['tags'].replace(u'ö', '-').replace(u'é', '').replace(u'à', '')\r
- post.tagnames = tagnames\r
-\r
- post.extra_count = sxpost.get('viewcount', 0)\r
-\r
- #if sxpost.get('closeddate', None):\r
- # post.marked = True\r
- #\r
- # close_action = orm.Action(\r
- # action_type = "close",\r
- # user_id = 1,\r
- # node = post,\r
- # action_date = datetime.now() - timedelta(days=7)\r
- # )\r
- #\r
- # close_action.save()\r
- # post.extra_action = close_action\r
-\r
- #if sxpost.get('acceptedanswerid', None):\r
- # accepted[int(sxpost.get('acceptedanswerid'))] = post\r
-\r
- #post.save()\r
-\r
- else:\r
- post.parent_id = sxpost['parentid']\r
-\r
- #if int(post.id) in accepted:\r
- #post.marked = True\r
-\r
- #accept_action = orm.Action(\r
- # action_type = "acceptanswer",\r
- # user_id = accepted[int(post.id)].author_id,\r
- # node = post,\r
- # action_date = datetime.now() - timedelta(days=7)\r
- #)\r
-\r
- #accept_action.save()\r
-\r
-\r
- #post.accepted_at = datetime.now()\r
- #post.accepted_by_id = accepted[int(post.id)].author_id\r
-\r
- #accepted[int(post.id)].extra_ref = post\r
- #accepted[int(post.id)].save()\r
-\r
- post.save()\r
-\r
- all[int(post.id)] = post\r
-\r
- return all\r
-\r
-def comment_import(dump, uidmap, posts):\r
- comments = readTable(dump, "PostComments")\r
- currid = max(posts.keys())\r
- mapping = {}\r
-\r
- for sxc in comments:\r
- currid += 1\r
- oc = orm.Node(\r
- id = currid,\r
- node_type = "comment",\r
- added_at = readTime(sxc['creationdate']),\r
- author_id = uidmap[sxc.get('userid', 1)],\r
- body = sxc['text'],\r
- parent_id = sxc.get('postid'),\r
- )\r
-\r
- if sxc.get('deletiondate', None):\r
- delete_action = orm.Action(\r
- action_type = "delete",\r
- user_id = uidmap[sxc['deletionuserid']],\r
- action_date = readTime(sxc['deletiondate'])\r
- )\r
-\r
- oc.author_id = uidmap[sxc['deletionuserid']]\r
- oc.save()\r
-\r
- delete_action.node = oc\r
- delete_action.save()\r
-\r
- oc.deleted = delete_action\r
- else:\r
- oc.author_id = uidmap[sxc.get('userid', 1)]\r
- oc.save()\r
-\r
- create_action = orm.Action(\r
- action_type = "comment",\r
- user_id = oc.author_id,\r
- node = oc,\r
- action_date = oc.added_at\r
- )\r
-\r
- create_action.save()\r
- oc.save()\r
-\r
- posts[oc.id] = oc\r
- mapping[int(sxc['id'])] = int(oc.id)\r
-\r
- return posts, mapping\r
-\r
-\r
-def add_tags_to_posts(posts, tagmap):\r
- for post in posts.values():\r
- if post.node_type == "question":\r
- tags = [tag for tag in [tagmap.get(name.strip()) for name in post.tagnames.split(u' ') if name] if tag]\r
- post.tagnames = " ".join([t.name for t in tags]).strip()\r
- post.tags = tags\r
-\r
- create_and_activate_revision(post)\r
-\r
-\r
-def create_and_activate_revision(post):\r
- rev = orm.NodeRevision(\r
- author_id = post.author_id,\r
- body = post.body,\r
- node_id = post.id,\r
- revised_at = post.added_at,\r
- revision = 1,\r
- summary = 'Initial revision',\r
- tagnames = post.tagnames,\r
- title = post.title,\r
- )\r
-\r
- rev.save()\r
- post.active_revision_id = rev.id\r
- post.save()\r
-\r
-def post_vote_import(dump, uidmap, posts):\r
- votes = readTable(dump, "Posts2Votes")\r
- close_reasons = dict([(r['id'], r['name']) for r in readTable(dump, "CloseReasons")])\r
-\r
- user2vote = []\r
-\r
- for sxv in votes:\r
- action = orm.Action(\r
- user_id=uidmap[sxv['userid']],\r
- action_date = readTime(sxv['creationdate']),\r
- )\r
-\r
- node = posts.get(int(sxv['postid']), None)\r
- if not node: continue\r
- action.node = node\r
-\r
- if sxv['votetypeid'] == '1':\r
- answer = node\r
- question = posts.get(int(answer.parent_id), None)\r
-\r
- action.action_type = "acceptanswer"\r
- action.save()\r
-\r
- answer.marked = True\r
- answer.extra_action = action\r
-\r
- question.extra_ref_id = answer.id\r
-\r
- answer.save()\r
- question.save()\r
-\r
- elif sxv['votetypeid'] in ('2', '3'):\r
- if not (action.node.id, action.user_id) in user2vote:\r
- user2vote.append((action.node.id, action.user_id))\r
-\r
- action.action_type = (sxv['votetypeid'] == '2') and "voteup" or "votedown"\r
- action.save()\r
-\r
- ov = orm.Vote(\r
- node_id = action.node.id,\r
- user_id = action.user_id,\r
- voted_at = action.action_date,\r
- value = sxv['votetypeid'] == '2' and 1 or -1,\r
- action = action\r
- )\r
- ov.save()\r
- else:\r
- action.action_type = "unknown"\r
- action.save()\r
-\r
- elif sxv['votetypeid'] in ('4', '12', '13'):\r
- action.action_type = "flag"\r
- action.save()\r
-\r
- of = orm.Flag(\r
- node = action.node,\r
- user_id = action.user_id,\r
- flagged_at = action.action_date,\r
- reason = '',\r
- action = action\r
- )\r
-\r
- of.save()\r
-\r
- elif sxv['votetypeid'] == '5':\r
- action.action_type = "favorite"\r
- action.save()\r
-\r
- elif sxv['votetypeid'] == '6':\r
- action.action_type = "close"\r
- action.extra = dbsafe_encode(close_reasons[sxv['comment']])\r
- action.save()\r
-\r
- node.marked = True\r
- node.extra_action = action\r
- node.save()\r
-\r
- elif sxv['votetypeid'] == '7':\r
- action.action_type = "unknown"\r
- action.save()\r
- \r
- node.marked = False\r
- node.extra_action = None\r
- node.save()\r
-\r
- elif sxv['votetypeid'] == '10':\r
- action.action_type = "delete"\r
- action.save()\r
-\r
- node.deleted = action\r
- node.save()\r
-\r
- elif sxv['votetypeid'] == '11':\r
- action.action_type = "unknown"\r
- action.save()\r
-\r
- node.deleted = None\r
- node.save()\r
-\r
- else:\r
- action.action_type = "unknown"\r
- action.save()\r
-\r
-\r
- if sxv.get('targetrepchange', None):\r
- rep = orm.ActionRepute(\r
- action = action,\r
- date = action.action_date,\r
- user_id = uidmap[sxv['targetuserid']],\r
- value = int(sxv['targetrepchange'])\r
- )\r
-\r
- rep.save()\r
-\r
- if sxv.get('voterrepchange', None):\r
- rep = orm.ActionRepute(\r
- action = action,\r
- date = action.action_date,\r
- user_id = uidmap[sxv['userid']],\r
- value = int(sxv['voterrepchange'])\r
- )\r
-\r
- rep.save()\r
-\r
-\r
-def comment_vote_import(dump, uidmap, comments, posts):\r
- votes = readTable(dump, "Comments2Votes")\r
- user2vote = []\r
-\r
- for sxv in votes:\r
- if sxv['votetypeid'] == "2":\r
- comment_id = comments[int(sxv['postcommentid'])]\r
- user_id = uidmap[sxv['userid']]\r
-\r
- if not (comment_id, user_id) in user2vote:\r
- user2vote.append((comment_id, user_id))\r
-\r
- action = orm.Action(\r
- action_type = "voteupcomment",\r
- user_id = user_id,\r
- action_date = readTime(sxv['creationdate']),\r
- node_id = comment_id\r
- )\r
- action.save()\r
-\r
- ov = orm.Vote(\r
- node_id = comment_id,\r
- user_id = user_id,\r
- voted_at = action.action_date,\r
- value = 1,\r
- action = action\r
- )\r
-\r
- ov.save()\r
-\r
- posts[int(action.node_id)].score += 1\r
- posts[int(action.node_id)].save()\r
-\r
-\r
-\r
-def badges_import(dump, uidmap, post_list):\r
- node_ctype = orm['contenttypes.contenttype'].objects.get(name='node')\r
- obadges = dict([(b.cls, b) for b in orm.Badge.objects.all()])\r
- sxbadges = dict([(int(b['id']), b) for b in readTable(dump, "Badges")])\r
- user_badge_count = {}\r
-\r
- sx_to_osqa = {}\r
-\r
- for id, sxb in sxbadges.items():\r
- cls = "".join(sxb['name'].replace('&', 'And').split(' '))\r
-\r
- if cls in obadges:\r
- sx_to_osqa[id] = obadges[cls]\r
- else:\r
- osqab = orm.Badge(\r
- cls = cls,\r
- awarded_count = 0,\r
- type = sxb['class'] \r
- )\r
- osqab.save()\r
- sx_to_osqa[id] = osqab\r
-\r
- sxawards = readTable(dump, "Users2Badges")\r
- osqaawards = []\r
-\r
- for sxa in sxawards:\r
- badge = sx_to_osqa[int(sxa['badgeid'])]\r
-\r
- user_id = uidmap[sxa['userid']]\r
- if not user_badge_count.get(user_id, None):\r
- user_badge_count[user_id] = 0\r
-\r
- action = orm.Action(\r
- action_type = "award",\r
- user_id = user_id,\r
- action_date = readTime(sxa['date'])\r
- )\r
-\r
- action.save()\r
-\r
- osqaa = orm.Award(\r
- user_id = uidmap[sxa['userid']],\r
- badge = badge,\r
- node = post_list[user_badge_count[user_id]],\r
- awarded_at = action.action_date,\r
- action = action\r
- )\r
-\r
- osqaa.save()\r
- badge.awarded_count += 1\r
- user_badge_count[user_id] += 1\r
-\r
- for badge in obadges.values():\r
- badge.save()\r
-\r
-\r
-def reset_sequences():\r
- from south.db import db\r
- if db.backend_name == "postgres":\r
- db.start_transaction()\r
- db.execute_many(PG_SEQUENCE_RESETS)\r
- db.commit_transaction()\r
-\r
-def sximport(dump, options):\r
- uidmap, merged_users = userimport(dump, options)\r
- tagmap = tagsimport(dump, uidmap)\r
- posts = postimport(dump, uidmap, tagmap)\r
- posts, comments = comment_import(dump, uidmap, posts)\r
- add_tags_to_posts(posts, tagmap)\r
- post_vote_import(dump, uidmap, posts)\r
- comment_vote_import(dump, uidmap, comments, posts)\r
- badges_import(dump, uidmap, posts.values())\r
-\r
- from south.db import db\r
- db.commit_transaction()\r
-\r
- reset_sequences()\r
-\r
- \r
- \r
-PG_SEQUENCE_RESETS = """\r
-SELECT setval('"auth_user_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user";\r
-SELECT setval('"auth_user_groups_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_groups";\r
-SELECT setval('"auth_user_user_permissions_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_user_permissions";\r
-SELECT setval('"forum_keyvalue_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_keyvalue";\r
-SELECT setval('"forum_action_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_action";\r
-SELECT setval('"forum_actionrepute_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_actionrepute";\r
-SELECT setval('"forum_subscriptionsettings_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_subscriptionsettings";\r
-SELECT setval('"forum_validationhash_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_validationhash";\r
-SELECT setval('"forum_authkeyuserassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_authkeyuserassociation";\r
-SELECT setval('"forum_tag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_tag";\r
-SELECT setval('"forum_markedtag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_markedtag";\r
-SELECT setval('"forum_node_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node";\r
-SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";\r
-SELECT setval('"forum_noderevision_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_noderevision";\r
-SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";\r
-SELECT setval('"forum_questionsubscription_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_questionsubscription";\r
-SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";\r
-SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";\r
-SELECT setval('"forum_vote_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_vote";\r
-SELECT setval('"forum_flag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_flag";\r
-SELECT setval('"forum_badge_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_badge";\r
-SELECT setval('"forum_award_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_award";\r
-SELECT setval('"forum_openidnonce_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidnonce";\r
-SELECT setval('"forum_openidassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidassociation";\r
-"""\r
-\r
-\r
- \r
-
\ No newline at end of file
+# -*- coding: utf-8 -*-
+
+from xml.dom import minidom
+from datetime import datetime, timedelta
+import time
+import re
+import os
+import gc
+from django.utils.translation import ugettext as _
+from django.template.defaultfilters import slugify
+from forum.models.utils import dbsafe_encode
+from orm import orm
+
+from django.utils.encoding import force_unicode
+
+try:
+ from cPickle import loads, dumps
+except ImportError:
+ from pickle import loads, dumps
+
+from copy import deepcopy
+from base64 import b64encode, b64decode
+from zlib import compress, decompress
+
+from xml.sax import make_parser
+from xml.sax.handler import ContentHandler
+
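+# SAX handler that streams a dump table one <row> at a time: each row's child
+# elements are collected into a dict and handed to a callback, so large
+# StackExchange dumps no longer have to be loaded whole through minidom.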
+class SXTableHandler(ContentHandler):
+ def __init__(self, fname, callback):
+ self.in_row = False
+ self.el_data = {}
+ self.ch_data = ''
+
+ self.fname = fname.lower()
+ self.callback = callback
+
+ def startElement(self, name, attrs):
+ if name.lower() == self.fname:
+ pass
+ elif name.lower() == "row":
+ self.in_row = True
+
+ def characters(self, ch):
+ self.ch_data += ch
+
+ def endElement(self, name):
+ if name.lower() == self.fname:
+ pass
+ elif name.lower() == "row":
+ self.callback(self.el_data)
+
+ self.in_row = False
+ del self.el_data
+ self.el_data = {}
+ elif self.in_row:
+ self.el_data[name.lower()] = self.ch_data.strip()
+ del self.ch_data
+ self.ch_data = ''
+
+
+def readTable(path, name, callback):
+ parser = make_parser()
+ handler = SXTableHandler(name, callback)
+ parser.setContentHandler(handler)
+
+ f = os.path.join(path, "%s.xml" % name)
+ parser.parse(f)
+
+
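+# Local pickle -> zlib -> base64 encoder. Note that it shadows the
+# dbsafe_encode imported from forum.models.utils above.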
+def dbsafe_encode(value):
+ return force_unicode(b64encode(compress(dumps(deepcopy(value)))))
+
+def getText(el):
+ rc = ""
+ for node in el.childNodes:
+ if node.nodeType == node.TEXT_NODE:
+ rc = rc + node.data
+ return rc.strip()
+
+msstrip = re.compile(r'^(.*)\.\d+')
+def readTime(ts):
+ noms = msstrip.match(ts)
+ if noms:
+ ts = noms.group(1)
+
+ return datetime(*time.strptime(ts, '%Y-%m-%dT%H:%M:%S')[0:6])
+
+#def readEl(el):
+# return dict([(n.tagName.lower(), getText(n)) for n in el.childNodes if n.nodeType == el.ELEMENT_NODE])
+
+#def readTable(dump, name):
+# for e in minidom.parseString(dump.read("%s.xml" % name)).getElementsByTagName('row'):
+# yield readEl(e)
+#return [readEl(e) for e in minidom.parseString(dump.read("%s.xml" % name)).getElementsByTagName('row')]
+
+google_accounts_lookup = re.compile(r'^https?://www.google.com/accounts/')
+yahoo_accounts_lookup = re.compile(r'^https?://me.yahoo.com/a/')
+
+openid_lookups = [
+ re.compile(r'^https?://www.google.com/profiles/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://me.yahoo.com/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://openid.aol.com/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).myopenid.com/?$'),
+ re.compile(r'^https?://flickr.com/(\w+/)*(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://technorati.com/people/technorati/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).wordpress.com/?$'),
+ re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).blogspot.com/?$'),
+ re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).livejournal.com/?$'),
+ re.compile(r'^https?://claimid.com/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://(?P<uname>\w+(\.\w+)*).pip.verisignlabs.com/?$'),
+ re.compile(r'^https?://getopenid.com/(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://[\w\.]+/(\w+/)*(?P<uname>\w+(\.\w+)*)/?$'),
+ re.compile(r'^https?://(?P<uname>[\w\.]+)/?$'),
+ ]
+
+def final_username_attempt(sxu):
+ openid = sxu.get('openid', None)
+
+ if openid:
+ if google_accounts_lookup.search(openid):
+ return UnknownGoogleUser(sxu.get('id'))
+ if yahoo_accounts_lookup.search(openid):
+ return UnknownYahooUser(sxu.get('id'))
+
+ for lookup in openid_lookups:
+ if lookup.search(openid):
+ return lookup.search(openid).group('uname')
+
+ return UnknownUser(sxu.get('id'))
+
+class UnknownUser(object):
+ def __init__(self, id):
+ self._id = id
+
+ def __str__(self):
+ return _("user-%(id)s") % {'id': self._id}
+
+ def __unicode__(self):
+ return self.__str__()
+
+ def encode(self, *args):
+ return self.__str__()
+
+class UnknownGoogleUser(UnknownUser):
+ def __str__(self):
+ return _("user-%(id)s (google)") % {'id': self._id}
+
+class UnknownYahooUser(UnknownUser):
+ def __str__(self):
+ return _("user-%(id)s (yahoo)") % {'id': self._id}
+
+
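+# Maps StackExchange user ids to OSQA user ids; ids that were never imported
+# fall back to 1, the pre-existing OSQA account the dump owner is merged into.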
+class IdMapper(dict):
+ def __getitem__(self, key):
+ key = int(key)
+ return super(IdMapper, self).get(key, 1)
+
+ def __setitem__(self, key, value):
+ super(IdMapper, self).__setitem__(int(key), int(value))
+
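+# Minimal mutable counter, presumably because a plain integer could not be
+# rebound from inside the nested SAX callbacks in Python 2.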
+class IdIncrementer():
+ def __init__(self, initial):
+ self.value = initial
+
+ def inc(self):
+ self.value += 1
+
+openidre = re.compile('^https?\:\/\/')
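+# Imports the Users table. The row matching owneruid is merged into the
+# existing OSQA account with id 1; everyone else gets a fresh account, and
+# OpenID claims that look like URLs are carried over as AuthKeyUserAssociations.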
+def userimport(path, options):
+    #users = readTable(dump, "Users")
+
+ usernames = []
+ openids = set()
+ uidmapper = IdMapper()
+ #merged_users = []
+
+ owneruid = options.get('owneruid', None)
+ #check for empty values
+ if not owneruid:
+ owneruid = None
+
+ def callback(sxu):
+ create = True
+
+ if sxu.get('id') == '-1':
+ return
+ #print "\n".join(["%s : %s" % i for i in sxu.items()])
+        if owneruid and int(sxu.get('id')) == int(owneruid):
+ osqau = orm.User.objects.get(id=1)
+ for assoc in orm.AuthKeyUserAssociation.objects.filter(user=osqau):
+ openids.add(assoc.key)
+ uidmapper[owneruid] = 1
+ uidmapper[-1] = 1
+ create = False
+ else:
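+            # Truncated to 30 characters to fit Django's auth User.username column.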
+ username = unicode(sxu.get('displayname',
+ sxu.get('displaynamecleaned', sxu.get('realname', final_username_attempt(sxu)))))[:30]
+
+ if username in usernames:
+ #if options.get('mergesimilar', False) and sxu.get('email', 'INVALID') == user_by_name[username].email:
+ # osqau = user_by_name[username]
+ # create = False
+ # uidmapper[sxu.get('id')] = osqau.id
+ #else:
+ inc = 0
+
+ while True:
+ inc += 1
+ totest = "%s %d" % (username[:29 - len(str(inc))], inc)
+
+ if not totest in usernames:
+ username = totest
+ break
+
+ sxbadges = sxu.get('badgesummary', None)
+ badges = {'1':'0', '2':'0', '3':'0'}
+
+ if sxbadges:
+ badges.update(dict([b.split('=') for b in sxbadges.split()]))
+
+ if create:
+ osqau = orm.User(
+ id = sxu.get('id'),
+ username = username,
+ password = '!',
+ email = sxu.get('email', ''),
+ is_superuser = sxu.get('usertypeid') == '5',
+ is_staff = sxu.get('usertypeid') == '4',
+ is_active = True,
+ date_joined = readTime(sxu.get('creationdate')),
+ last_seen = readTime(sxu.get('lastaccessdate')),
+ about = sxu.get('aboutme', ''),
+ date_of_birth = sxu.get('birthday', None) and readTime(sxu['birthday']) or None,
+ email_isvalid = int(sxu.get('usertypeid')) > 2,
+ website = sxu.get('websiteurl', ''),
+ reputation = int(sxu.get('reputation')),
+ gold = int(badges['1']),
+ silver = int(badges['2']),
+ bronze = int(badges['3']),
+ real_name = sxu.get('realname', '')[:30],
+ location = sxu.get('location', ''),
+ )
+
+ osqau.save()
+
+ user_joins = orm.Action(
+ action_type = "userjoins",
+ action_date = osqau.date_joined,
+ user = osqau
+ )
+ user_joins.save()
+
+ rep = orm.ActionRepute(
+ value = 1,
+ user = osqau,
+ date = osqau.date_joined,
+ action = user_joins
+ )
+ rep.save()
+
+ try:
+ orm.SubscriptionSettings.objects.get(user=osqau)
+ except:
+ s = orm.SubscriptionSettings(user=osqau)
+ s.save()
+
+ uidmapper[osqau.id] = osqau.id
+ else:
+ new_about = sxu.get('aboutme', None)
+ if new_about and osqau.about != new_about:
+ if osqau.about:
+ osqau.about = "%s\n|\n%s" % (osqau.about, new_about)
+ else:
+ osqau.about = new_about
+
+ osqau.username = sxu.get('displayname',
+ sxu.get('displaynamecleaned', sxu.get('realname', final_username_attempt(sxu))))
+ osqau.email = sxu.get('email', '')
+ osqau.reputation += int(sxu.get('reputation'))
+ osqau.gold += int(badges['1'])
+ osqau.silver += int(badges['2'])
+ osqau.bronze += int(badges['3'])
+
+ osqau.date_joined = readTime(sxu.get('creationdate'))
+ osqau.website = sxu.get('websiteurl', '')
+ osqau.date_of_birth = sxu.get('birthday', None) and readTime(sxu['birthday']) or None
+ osqau.location = sxu.get('location', '')
+ osqau.real_name = sxu.get('realname', '')
+
+ #merged_users.append(osqau.id)
+ osqau.save()
+
+ usernames.append(osqau.username)
+
+ openid = sxu.get('openid', None)
+ if openid and openidre.match(openid) and (not openid in openids):
+ assoc = orm.AuthKeyUserAssociation(user=osqau, key=openid, provider="openidurl")
+ assoc.save()
+ openids.add(openid)
+
+ openidalt = sxu.get('openidalt', None)
+ if openidalt and openidre.match(openidalt) and (not openidalt in openids):
+ assoc = orm.AuthKeyUserAssociation(user=osqau, key=openidalt, provider="openidurl")
+ assoc.save()
+ openids.add(openidalt)
+
+ readTable(path, "Users", callback)
+
+ if uidmapper[-1] == -1:
+ uidmapper[-1] = 1
+
+ return uidmapper
+
+def tagsimport(dump, uidmap):
+    #tags = readTable(dump, "Tags")
+
+ tagmap = {}
+
+ def callback(sxtag):
+ otag = orm.Tag(
+ id = int(sxtag['id']),
+ name = sxtag['name'],
+ used_count = int(sxtag['count']),
+ created_by_id = uidmap[sxtag.get('userid', 1)],
+ )
+ otag.save()
+
+ tagmap[otag.name] = otag
+
+ readTable(dump, "Tags", callback)
+
+ return tagmap
+
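+# OSQA records node states ("wiki", "deleted", "closed", "accepted", ...) both
+# as NodeState rows and as "(state)" markers concatenated into the node's
+# state_string; these two helpers keep the representations in sync.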
+def add_post_state(name, post, action):
+ if not "(%s)" % name in post.state_string:
+ post.state_string = "%s(%s)" % (post.state_string, name)
+ post.save()
+
+ try:
+ state = orm.NodeState.objects.get(node=post, state_type=name)
+ state.action = action
+ state.save()
+ except:
+ state = orm.NodeState(node=post, state_type=name, action=action)
+ state.save()
+
+def remove_post_state(name, post):
+ if "(%s)" % name in post.state_string:
+ try:
+            state = orm.NodeState.objects.get(state_type=name, node=post)
+ state.delete()
+ except:
+ pass
+    post.state_string = "".join("(%s)" % s for s in re.findall(r'\w+', post.state_string) if s != name)
+    post.save()
+
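+# Questions and answers from the Posts table become Nodes; each also gets an
+# "ask" or "answer" action and an initial revision, and questions additionally
+# get their tags attached.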
+def postimport(dump, uidmap, tagmap):
+    #history = {}
+    #accepted = {}
+ all = []
+
+ #for h in readTable(dump, "PostHistory"):
+ # if not history.get(h.get('postid'), None):
+ # history[h.get('postid')] = []
+ #
+ # history[h.get('postid')].append(h)
+
+ #posts = readTable(dump, "Posts")
+
+ def callback(sxpost):
+ nodetype = (sxpost.get('posttypeid') == '1') and "nodetype" or "answer"
+
+ post = orm.Node(
+ node_type = nodetype,
+ id = sxpost['id'],
+ added_at = readTime(sxpost['creationdate']),
+ body = sxpost['body'],
+ score = sxpost.get('score', 0),
+ author_id = sxpost.get('deletiondate', None) and 1 or uidmap[sxpost.get('owneruserid', 1)]
+ )
+
+ post.save()
+
+ create_action = orm.Action(
+ action_type = (nodetype == "nodetype") and "ask" or "answer",
+ user_id = post.author_id,
+ node = post,
+ action_date = post.added_at
+ )
+
+ create_action.save()
+
+ if sxpost.get('lasteditoruserid', None):
+ revise_action = orm.Action(
+ action_type = "revise",
+ user_id = uidmap[sxpost.get('lasteditoruserid')],
+ node = post,
+ action_date = readTime(sxpost['lasteditdate']),
+ )
+
+ revise_action.save()
+ post.last_edited = revise_action
+
+ if sxpost.get('communityowneddate', None):
+ wikify_action = orm.Action(
+ action_type = "wikify",
+ user_id = 1,
+ node = post,
+ action_date = readTime(sxpost['communityowneddate'])
+ )
+
+ wikify_action.save()
+ add_post_state("wiki", post, wikify_action)
+
+ if sxpost.get('lastactivityuserid', None):
+ post.last_activity_by_id = uidmap[sxpost['lastactivityuserid']]
+ post.last_activity_at = readTime(sxpost['lastactivitydate'])
+
+ if sxpost.get('posttypeid') == '1': #question
+ post.node_type = "question"
+ post.title = sxpost['title']
+
+ tagnames = sxpost['tags'].replace(u'ö', '-').replace(u'é', '').replace(u'à', '')
+ post.tagnames = tagnames
+
+ post.extra_count = sxpost.get('viewcount', 0)
+
+ add_tags_to_post(post, tagmap)
+
+ else:
+ post.parent_id = sxpost['parentid']
+
+ post.save()
+
+ all.append(int(post.id))
+ create_and_activate_revision(post)
+
+ del post
+
+ readTable(dump, "Posts", callback)
+
+ return all
+
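+# Comments become "comment" nodes with ids allocated above the highest imported
+# post id; the returned mapping ties each SX comment id to its new node id.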
+def comment_import(dump, uidmap, posts):
+    #comments = readTable(dump, "PostComments")
+ currid = IdIncrementer(max(posts))
+ mapping = {}
+
+ def callback(sxc):
+ currid.inc()
+ oc = orm.Node(
+ id = currid.value,
+ node_type = "comment",
+ added_at = readTime(sxc['creationdate']),
+ author_id = uidmap[sxc.get('userid', 1)],
+ body = sxc['text'],
+ parent_id = sxc.get('postid'),
+ )
+
+ if sxc.get('deletiondate', None):
+ delete_action = orm.Action(
+ action_type = "delete",
+ user_id = uidmap[sxc['deletionuserid']],
+ action_date = readTime(sxc['deletiondate'])
+ )
+
+ oc.author_id = uidmap[sxc['deletionuserid']]
+ oc.save()
+
+ delete_action.node = oc
+ delete_action.save()
+
+ add_post_state("deleted", oc, delete_action)
+ else:
+ oc.author_id = uidmap[sxc.get('userid', 1)]
+ oc.save()
+
+ create_action = orm.Action(
+ action_type = "comment",
+ user_id = oc.author_id,
+ node = oc,
+ action_date = oc.added_at
+ )
+
+ create_and_activate_revision(oc)
+
+ create_action.save()
+ oc.save()
+
+ posts.append(int(oc.id))
+ mapping[int(sxc['id'])] = int(oc.id)
+
+ readTable(dump, "PostComments", callback)
+ return posts, mapping
+
+
+def add_tags_to_post(post, tagmap):
+ tags = [tag for tag in [tagmap.get(name.strip()) for name in post.tagnames.split(u' ') if name] if tag]
+ post.tagnames = " ".join([t.name for t in tags]).strip()
+ post.tags = tags
+
+
+def create_and_activate_revision(post):
+ rev = orm.NodeRevision(
+ author_id = post.author_id,
+ body = post.body,
+ node_id = post.id,
+ revised_at = post.added_at,
+ revision = 1,
+ summary = 'Initial revision',
+ tagnames = post.tagnames,
+ title = post.title,
+ )
+
+ rev.save()
+ post.active_revision_id = rev.id
+ post.save()
+
+def post_vote_import(dump, uidmap, posts):
+    #votes = readTable(dump, "Posts2Votes")
+ close_reasons = {}
+
+ def close_callback(r):
+ close_reasons[r['id']] = r['name']
+
+ readTable(dump, "CloseReasons", close_callback)
+
+ user2vote = []
+
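+    # VoteTypeId handling below: 1 accepts an answer, 2/3 are up/down votes,
+    # 4/12/13 become flags, 5 favorite, 6 close, 10 delete; 7 and 11 undo a
+    # close/delete, and everything else is stored as an "unknown" action.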
+ def callback(sxv):
+ action = orm.Action(
+ user_id=uidmap[sxv['userid']],
+ action_date = readTime(sxv['creationdate']),
+ )
+
+ if not int(sxv['postid']) in posts: return
+ node = orm.Node.objects.get(id=sxv['postid'])
+ action.node = node
+
+ if sxv['votetypeid'] == '1':
+ answer = node
+ question = orm.Node.objects.get(id=answer.parent_id)
+
+ action.action_type = "acceptanswer"
+ action.save()
+
+ answer.marked = True
+
+ question.extra_ref_id = answer.id
+
+ answer.save()
+ question.save()
+
+ elif sxv['votetypeid'] in ('2', '3'):
+ if not (action.node.id, action.user_id) in user2vote:
+ user2vote.append((action.node.id, action.user_id))
+
+ action.action_type = (sxv['votetypeid'] == '2') and "voteup" or "votedown"
+ action.save()
+
+ ov = orm.Vote(
+ node_id = action.node.id,
+ user_id = action.user_id,
+ voted_at = action.action_date,
+ value = sxv['votetypeid'] == '2' and 1 or -1,
+ action = action
+ )
+ ov.save()
+ else:
+ action.action_type = "unknown"
+ action.save()
+
+ elif sxv['votetypeid'] in ('4', '12', '13'):
+ action.action_type = "flag"
+ action.save()
+
+ of = orm.Flag(
+ node = action.node,
+ user_id = action.user_id,
+ flagged_at = action.action_date,
+ reason = '',
+ action = action
+ )
+
+ of.save()
+
+ elif sxv['votetypeid'] == '5':
+ action.action_type = "favorite"
+ action.save()
+
+ elif sxv['votetypeid'] == '6':
+ action.action_type = "close"
+ action.extra = dbsafe_encode(close_reasons[sxv['comment']])
+ action.save()
+
+ node.marked = True
+ node.save()
+
+ elif sxv['votetypeid'] == '7':
+ action.action_type = "unknown"
+ action.save()
+
+ node.marked = False
+ node.save()
+
+ remove_post_state("closed", node)
+
+ elif sxv['votetypeid'] == '10':
+ action.action_type = "delete"
+ action.save()
+
+ elif sxv['votetypeid'] == '11':
+ action.action_type = "unknown"
+ action.save()
+
+ remove_post_state("deleted", node)
+
+ else:
+ action.action_type = "unknown"
+ action.save()
+
+ if sxv.get('targetrepchange', None):
+ rep = orm.ActionRepute(
+ action = action,
+ date = action.action_date,
+ user_id = uidmap[sxv['targetuserid']],
+ value = int(sxv['targetrepchange'])
+ )
+
+ rep.save()
+
+ if sxv.get('voterrepchange', None):
+ rep = orm.ActionRepute(
+ action = action,
+ date = action.action_date,
+ user_id = uidmap[sxv['userid']],
+ value = int(sxv['voterrepchange'])
+ )
+
+ rep.save()
+
+ if action.action_type in ("acceptanswer", "delete", "close"):
+ state = {"acceptanswer": "accepted", "delete": "deleted", "close": "closed"}[action.action_type]
+ add_post_state(state, node, action)
+
+ readTable(dump, "Posts2Votes", callback)
+
+
+def comment_vote_import(dump, uidmap, comments):
+    #votes = readTable(dump, "Comments2Votes")
+ user2vote = []
+ comments2score = {}
+
+ def callback(sxv):
+ if sxv['votetypeid'] == "2":
+ comment_id = comments[int(sxv['postcommentid'])]
+ user_id = uidmap[sxv['userid']]
+
+ if not (comment_id, user_id) in user2vote:
+ user2vote.append((comment_id, user_id))
+
+ action = orm.Action(
+ action_type = "voteupcomment",
+ user_id = user_id,
+ action_date = readTime(sxv['creationdate']),
+ node_id = comment_id
+ )
+ action.save()
+
+ ov = orm.Vote(
+ node_id = comment_id,
+ user_id = user_id,
+ voted_at = action.action_date,
+ value = 1,
+ action = action
+ )
+
+ ov.save()
+
+ if not comment_id in comments2score:
+ comments2score[comment_id] = 1
+ else:
+ comments2score[comment_id] += 1
+
+ readTable(dump, "Comments2Votes", callback)
+
+ for cid, score in comments2score.items():
+ orm.Node.objects.filter(id=cid).update(score=score)
+
+
+def badges_import(dump, uidmap, post_list):
+    #node_ctype = orm['contenttypes.contenttype'].objects.get(name='node')
+
+ sxbadges = {}
+
+ def sxcallback(b):
+ sxbadges[int(b['id'])] = b
+
+ readTable(dump, "Badges", sxcallback)
+
+ obadges = dict([(b.cls, b) for b in orm.Badge.objects.all()])
+ user_badge_count = {}
+
+ sx_to_osqa = {}
+
+ for id, sxb in sxbadges.items():
+ cls = "".join(sxb['name'].replace('&', 'And').split(' '))
+
+ if cls in obadges:
+ sx_to_osqa[id] = obadges[cls]
+ else:
+ osqab = orm.Badge(
+ cls = cls,
+ awarded_count = 0,
+ type = sxb['class']
+ )
+ osqab.save()
+ sx_to_osqa[id] = osqab
+
+ osqaawards = []
+
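+    # orm.Award needs a node reference, but nothing in the Users2Badges row is
+    # used to pick one; an arbitrary imported post (indexed by the user's award
+    # count so far) is attached purely to satisfy that field.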
+ def callback(sxa):
+ badge = sx_to_osqa[int(sxa['badgeid'])]
+
+ user_id = uidmap[sxa['userid']]
+ if not user_badge_count.get(user_id, None):
+ user_badge_count[user_id] = 0
+
+ action = orm.Action(
+ action_type = "award",
+ user_id = user_id,
+ action_date = readTime(sxa['date'])
+ )
+
+ action.save()
+
+ osqaa = orm.Award(
+ user_id = uidmap[sxa['userid']],
+ badge = badge,
+ node_id = post_list[user_badge_count[user_id]],
+ awarded_at = action.action_date,
+ action = action
+ )
+
+ osqaa.save()
+ badge.awarded_count += 1
+ user_badge_count[user_id] += 1
+
+ readTable(dump, "Users2Badges", callback)
+
+ for badge in obadges.values():
+ badge.save()
+
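+# StackExchange FlatPages become OSQA "page" nodes; their URLs are collected in
+# the STATIC_PAGE_REGISTRY key/value entry.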
+def pages_import(dump, currid):
+ currid = IdIncrementer(currid)
+ registry = {}
+ #sx_pages = readTable(dump, "FlatPages")
+
+ def callback(sxp):
+ currid.inc()
+ page = orm.Node(
+ id = currid.value,
+ node_type = "page",
+ title = sxp['name'],
+ body = b64decode(sxp['value']),
+ extra = dbsafe_encode({
+ 'path': sxp['url'][1:],
+ 'mimetype': sxp['contenttype'],
+ 'template': (sxp['usemaster'] == "true") and "default" or "none",
+ 'render': "html",
+ 'sidebar': "",
+ 'sidebar_wrap': True,
+ 'sidebar_render': "html",
+ 'comments': False
+ }),
+ author_id = 1
+ )
+
+        page.save()
+        create_and_activate_revision(page)
+
+ registry[sxp['url'][1:]] = page.id
+
+ create_action = orm.Action(
+ action_type = "newpage",
+ user_id = page.author_id,
+ node = page
+ )
+
+ create_action.save()
+
+ if sxp['active'] == "true" and sxp['contenttype'] == "text/html":
+ pub_action = orm.Action(
+ action_type = "publish",
+ user_id = page.author_id,
+ node = page
+ )
+
+ pub_action.save()
+ add_post_state("published", page, pub_action)
+
+ readTable(dump, "FlatPages", callback)
+
+ kv = orm.KeyValue(key='STATIC_PAGE_REGISTRY', value=dbsafe_encode(registry))
+ kv.save()
+
+sx2osqa_set_map = {
+    u'theme.html.name': 'APP_TITLE',
+    u'theme.html.footer': 'CUSTOM_FOOTER',
+    u'theme.html.sidebar': 'SIDEBAR_UPPER_TEXT',
+    u'theme.html.sidebar-low': 'SIDEBAR_LOWER_TEXT',
+    u'theme.html.welcome': 'APP_INTRO',
+    u'theme.html.head': 'CUSTOM_HEAD',
+    u'theme.html.header': 'CUSTOM_HEADER',
+    u'theme.css': 'CUSTOM_CSS',
+}
+
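+# Entity -> character pairs used by html_decode below; the theme text values in
+# the dump appear to be stored HTML-escaped.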
+html_codes = (
+    ('&amp;', '&'),
+    ('&lt;', '<'),
+    ('&gt;', '>'),
+    ('&quot;', '"'),
+    ('&#39;', "'"),
+)
+
+def html_decode(html):
+ html = force_unicode(html)
+
+ for args in html_codes:
+ html = html.replace(*args)
+
+ return html
+
+
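+# Theme text resources are mapped onto OSQA settings via sx2osqa_set_map; any
+# unrecognised entries are stashed under the SXIMPORT_UNKNOWN_SETS key instead.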
+def static_import(dump):
+    #sx_sets = readTable(dump, "ThemeTextResources")
+ sx_unknown = {}
+
+ def callback(set):
+ if unicode(set['name']) in sx2osqa_set_map:
+ try:
+ kv = orm.KeyValue.objects.get(key=sx2osqa_set_map[set['name']])
+ kv.value = dbsafe_encode(html_decode(set['value']))
+ except:
+ kv = orm.KeyValue(
+ key = sx2osqa_set_map[set['name']],
+ value = dbsafe_encode(html_decode(set['value']))
+ )
+
+ kv.save()
+ else:
+ sx_unknown[set['name']] = html_decode(set['value'])
+
+ readTable(dump, "ThemeTextResources", callback)
+
+ unknown = orm.KeyValue(key='SXIMPORT_UNKNOWN_SETS', value=dbsafe_encode(sx_unknown))
+ unknown.save()
+
+def disable_triggers():
+ from south.db import db
+ if db.backend_name == "postgres":
+ db.execute_many(PG_DISABLE_TRIGGERS)
+ db.commit_transaction()
+ db.start_transaction()
+
+def enable_triggers():
+ from south.db import db
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many(PG_ENABLE_TRIGGERS)
+ db.commit_transaction()
+
+def reset_sequences():
+ from south.db import db
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many(PG_SEQUENCE_RESETS)
+ db.commit_transaction()
+
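+# No-op update touching every revision row; on PostgreSQL this presumably makes
+# the full-text-search trigger recompute its vectors after the bulk import.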
+def reindex_fts():
+ from south.db import db
+ if db.backend_name == "postgres":
+ db.start_transaction()
+ db.execute_many("UPDATE forum_noderevision set id = id WHERE TRUE;")
+ db.commit_transaction()
+
+
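+# Entry point: run the individual importers in dependency order, collecting
+# garbage between steps, with triggers disabled and sequences reset afterwards
+# when running on PostgreSQL.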
+def sximport(dump, options):
+ try:
+ disable_triggers()
+ triggers_disabled = True
+ except:
+ triggers_disabled = False
+
+ uidmap = userimport(dump, options)
+ tagmap = tagsimport(dump, uidmap)
+ gc.collect()
+
+ posts = postimport(dump, uidmap, tagmap)
+ gc.collect()
+
+ posts, comments = comment_import(dump, uidmap, posts)
+ gc.collect()
+
+ post_vote_import(dump, uidmap, posts)
+ gc.collect()
+
+ comment_vote_import(dump, uidmap, comments)
+ gc.collect()
+
+ badges_import(dump, uidmap, posts)
+
+ pages_import(dump, max(posts))
+ static_import(dump)
+ gc.collect()
+
+ from south.db import db
+ db.commit_transaction()
+
+ reset_sequences()
+
+ if triggers_disabled:
+ enable_triggers()
+ reindex_fts()
+
+
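+# PostgreSQL-only housekeeping SQL: DISABLE/ENABLE TRIGGER ALL also turns
+# foreign-key checks off and on around the bulk insert, and the setval() calls
+# push each sequence past the highest imported id.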
+PG_DISABLE_TRIGGERS = """
+ALTER table auth_user DISABLE TRIGGER ALL;
+ALTER table auth_user_groups DISABLE TRIGGER ALL;
+ALTER table auth_user_user_permissions DISABLE TRIGGER ALL;
+ALTER table forum_keyvalue DISABLE TRIGGER ALL;
+ALTER table forum_action DISABLE TRIGGER ALL;
+ALTER table forum_actionrepute DISABLE TRIGGER ALL;
+ALTER table forum_subscriptionsettings DISABLE TRIGGER ALL;
+ALTER table forum_validationhash DISABLE TRIGGER ALL;
+ALTER table forum_authkeyuserassociation DISABLE TRIGGER ALL;
+ALTER table forum_tag DISABLE TRIGGER ALL;
+ALTER table forum_markedtag DISABLE TRIGGER ALL;
+ALTER table forum_node DISABLE TRIGGER ALL;
+ALTER table forum_nodestate DISABLE TRIGGER ALL;
+ALTER table forum_node_tags DISABLE TRIGGER ALL;
+ALTER table forum_noderevision DISABLE TRIGGER ALL;
+ALTER table forum_questionsubscription DISABLE TRIGGER ALL;
+ALTER table forum_vote DISABLE TRIGGER ALL;
+ALTER table forum_flag DISABLE TRIGGER ALL;
+ALTER table forum_badge DISABLE TRIGGER ALL;
+ALTER table forum_award DISABLE TRIGGER ALL;
+ALTER table forum_openidnonce DISABLE TRIGGER ALL;
+ALTER table forum_openidassociation DISABLE TRIGGER ALL;
+"""
+
+PG_ENABLE_TRIGGERS = """
+ALTER table auth_user ENABLE TRIGGER ALL;
+ALTER table auth_user_groups ENABLE TRIGGER ALL;
+ALTER table auth_user_user_permissions ENABLE TRIGGER ALL;
+ALTER table forum_keyvalue ENABLE TRIGGER ALL;
+ALTER table forum_action ENABLE TRIGGER ALL;
+ALTER table forum_actionrepute ENABLE TRIGGER ALL;
+ALTER table forum_subscriptionsettings ENABLE TRIGGER ALL;
+ALTER table forum_validationhash ENABLE TRIGGER ALL;
+ALTER table forum_authkeyuserassociation ENABLE TRIGGER ALL;
+ALTER table forum_tag ENABLE TRIGGER ALL;
+ALTER table forum_markedtag ENABLE TRIGGER ALL;
+ALTER table forum_node ENABLE TRIGGER ALL;
+ALTER table forum_nodestate ENABLE TRIGGER ALL;
+ALTER table forum_node_tags ENABLE TRIGGER ALL;
+ALTER table forum_noderevision ENABLE TRIGGER ALL;
+ALTER table forum_questionsubscription ENABLE TRIGGER ALL;
+ALTER table forum_vote ENABLE TRIGGER ALL;
+ALTER table forum_flag ENABLE TRIGGER ALL;
+ALTER table forum_badge ENABLE TRIGGER ALL;
+ALTER table forum_award ENABLE TRIGGER ALL;
+ALTER table forum_openidnonce ENABLE TRIGGER ALL;
+ALTER table forum_openidassociation ENABLE TRIGGER ALL;
+"""
+
+PG_SEQUENCE_RESETS = """
+SELECT setval('"auth_user_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user";
+SELECT setval('"auth_user_groups_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_groups";
+SELECT setval('"auth_user_user_permissions_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "auth_user_user_permissions";
+SELECT setval('"forum_keyvalue_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_keyvalue";
+SELECT setval('"forum_action_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_action";
+SELECT setval('"forum_actionrepute_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_actionrepute";
+SELECT setval('"forum_subscriptionsettings_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_subscriptionsettings";
+SELECT setval('"forum_validationhash_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_validationhash";
+SELECT setval('"forum_authkeyuserassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_authkeyuserassociation";
+SELECT setval('"forum_tag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_tag";
+SELECT setval('"forum_markedtag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_markedtag";
+SELECT setval('"forum_node_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node";
+SELECT setval('"forum_nodestate_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_nodestate";
+SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";
+SELECT setval('"forum_noderevision_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_noderevision";
+SELECT setval('"forum_node_tags_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_node_tags";
+SELECT setval('"forum_questionsubscription_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_questionsubscription";
+SELECT setval('"forum_vote_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_vote";
+SELECT setval('"forum_flag_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_flag";
+SELECT setval('"forum_badge_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_badge";
+SELECT setval('"forum_award_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_award";
+SELECT setval('"forum_openidnonce_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidnonce";
+SELECT setval('"forum_openidassociation_id_seq"', coalesce(max("id"), 1) + 2, max("id") IS NOT null) FROM "forum_openidassociation";
+"""
+
+
+
+