require "fileutils"
require "xml/libxml"
require "zlib"
+require "set"
# after this many changes, a changeset will be closed
CHANGES_LIMIT = 50000
# this is the scale factor for lat/lon values stored as integers in the database
GEO_SCALE = 10000000
+##
+# replace characters which cannot be represented in XML 1.0.
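+# tab, newline and carriage return are valid in XML 1.0, so the ranges
+# below deliberately skip \x09, \x0a and \x0d.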
+def xml_sanitize(str)
+ str.gsub(/[\x00-\x08\x0b\x0c\x0e-\x1f]/, "?")
+end
+
##
# changeset class keeps some information about changesets downloaded from the
# database - enough to let us know which changesets are closed/open & recently
end
end
+##
+# builds an XML representation of a changeset from the database
+class ChangesetBuilder
+ def initialize(now, conn)
+ @now = now
+ @conn = conn
+ end
+
+ def changeset_xml(cs)
+ xml = XML::Node.new("changeset")
+ xml["id"] = cs.id.to_s
+ xml["created_at"] = cs.created_at.getutc.xmlschema
+ xml["closed_at"] = cs.closed_at.getutc.xmlschema if cs.closed?(@now)
+ xml["open"] = cs.open?(@now).to_s
+ xml["num_changes"] = cs.num_changes.to_s
+
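+ # cs.id comes from the changesets table itself, not from user input,
+ # so interpolating it into the query is acceptable here.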
+ res = @conn.exec("select u.id, u.display_name, c.min_lat, c.max_lat, c.min_lon, c.max_lon from users u join changesets c on u.id=c.user_id where c.id=#{cs.id}")
+ xml["user"] = xml_sanitize(res[0]["display_name"])
+ xml["uid"] = res[0]["id"]
+
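+ # empty changesets have no bounding box, so only emit one when all
+ # four corners are present.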
+ unless res[0]["min_lat"].nil? ||
+ res[0]["max_lat"].nil? ||
+ res[0]["min_lon"].nil? ||
+ res[0]["max_lon"].nil?
+ xml["min_lat"] = (res[0]["min_lat"].to_f / GEO_SCALE).to_s
+ xml["max_lat"] = (res[0]["max_lat"].to_f / GEO_SCALE).to_s
+ xml["min_lon"] = (res[0]["min_lon"].to_f / GEO_SCALE).to_s
+ xml["max_lon"] = (res[0]["max_lon"].to_f / GEO_SCALE).to_s
+ end
+
+ add_tags(xml, cs)
+ add_comments(xml, cs)
+
+ xml
+ end
+
+ def add_tags(xml, cs)
+ res = @conn.exec("select k, v from changeset_tags where changeset_id=#{cs.id}")
+ res.each do |row|
+ tag = XML::Node.new("tag")
+ tag["k"] = xml_sanitize(row["k"])
+ tag["v"] = xml_sanitize(row["v"])
+ xml << tag
+ end
+ end
+
+ def add_comments(xml, cs)
+ # grab the visible changeset comments as well
+ res = @conn.exec("select cc.author_id, u.display_name as author, cc.body, cc.created_at from changeset_comments cc join users u on cc.author_id=u.id where cc.changeset_id=#{cs.id} and cc.visible order by cc.created_at asc")
+ xml["comments_count"] = res.num_tuples.to_s
+
+ # early return if there aren't any comments
+ return unless res.num_tuples > 0
+
+ discussion = XML::Node.new("discussion")
+ res.each do |row|
+ comment = XML::Node.new("comment")
+ comment["uid"] = row["author_id"]
+ comment["user"] = xml_sanitize(row["author"])
+ comment["date"] = Time.parse(row["created_at"]).getutc.xmlschema
+ text = XML::Node.new("text")
+ text.content = xml_sanitize(row["body"])
+ comment << text
+ discussion << comment
+ end
+ xml << discussion
+ end
+end
+
+##
+# sync a file to guarantee it's on disk
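+# (opening read-only is enough: fsync flushes the file's data and
+# metadata regardless of the open mode.)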
+def fsync(f)
+ File.open(f) do |fh|
+ fh.fsync
+ end
+end
+
+##
+# sync a directory to guarantee it's on disk. we walk all the way up to
+# the root so that newly created parent directories are synced too.
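+# ruby's Dir has no fsync method, so the directory's file descriptor is
+# wrapped in an IO object instead. the path is assumed to be absolute,
+# otherwise the loop below would never reach "/".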
+def fdirsync(d)
+ while d != "/"
+ Dir.open(d) do |dh|
+ io = IO.for_fd(dh.fileno)
+ io.fsync
+ end
+ d = File.dirname(d)
+ end
+end
+
##
# state and connections associated with getting changeset data
# replicated to a file.
# time (see rails_port's changeset model). so it is probably enough
# for us to look at anything that was closed recently, and filter from
# there.
- @conn
- .exec("select id, created_at, closed_at, num_changes from changesets where closed_at > ((now() at time zone 'utc') - '1 hour'::interval)")
- .map { |row| Changeset.new(row) }
- .select { |cs| cs.activity_between?(last_run, @now) }
+ changesets = @conn
+ .exec("select id, created_at, closed_at, num_changes from changesets where closed_at > ((now() at time zone 'utc') - '1 hour'::interval)")
+ .map { |row| Changeset.new(row) }
+ .select { |cs| cs.activity_between?(last_run, @now) }
+
+ # set for faster presence lookups by ID
+ cs_ids = Set.new(changesets.map(&:id))
+
+ # but also add any changesets which have new comments
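+ # (last_run and @now come from this script's own state, not from user input.)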
+ new_ids = @conn
+ .exec("select distinct changeset_id from changeset_comments where created_at >= '#{last_run}' and created_at < '#{@now}' and visible")
+ .map { |row| row["changeset_id"].to_i }
+ .reject { |c_id| cs_ids.include?(c_id) }
+
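+ # these may be long-closed changesets, so fetch each one by id instead
+ # of relying on the recent-closure window above.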
+ new_ids.each do |id|
+ @conn
+ .exec("select id, created_at, closed_at, num_changes from changesets where id=#{id}")
+ .map { |row| Changeset.new(row) }
+ .each { |cs| changesets << cs }
+ end
+
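+ # return the batch sorted by id so the output order is deterministic.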
+ changesets.sort_by(&:id)
end
# creates an XML file containing the changeset information from the
"license" => "http://opendatacommons.org/licenses/odbl/1-0/" }
.each { |k, v| doc.root[k] = v }
+ builder = ChangesetBuilder.new(@now, @conn)
changesets.each do |cs|
- xml = XML::Node.new("changeset")
- xml["id"] = cs.id.to_s
- xml["created_at"] = cs.created_at.getutc.xmlschema
- xml["closed_at"] = cs.closed_at.getutc.xmlschema if cs.closed?(@now)
- xml["open"] = cs.open?(@now).to_s
- xml["num_changes"] = cs.num_changes.to_s
-
- res = @conn.exec("select u.id, u.display_name, c.min_lat, c.max_lat, c.min_lon, c.max_lon from users u join changesets c on u.id=c.user_id where c.id=#{cs.id}")
- xml["user"] = res[0]["display_name"]
- xml["uid"] = res[0]["id"]
-
- unless res[0]["min_lat"].nil? ||
- res[0]["max_lat"].nil? ||
- res[0]["min_lon"].nil? ||
- res[0]["max_lon"].nil?
- xml["min_lat"] = (res[0]["min_lat"].to_f / GEO_SCALE).to_s
- xml["max_lat"] = (res[0]["max_lat"].to_f / GEO_SCALE).to_s
- xml["min_lon"] = (res[0]["min_lon"].to_f / GEO_SCALE).to_s
- xml["max_lon"] = (res[0]["max_lon"].to_f / GEO_SCALE).to_s
- end
-
- res = @conn.exec("select k, v from changeset_tags where changeset_id=#{cs.id}")
- res.each do |row|
- tag = XML::Node.new("tag")
- tag["k"] = row["k"]
- tag["v"] = row["v"]
- xml << tag
- end
-
- doc.root << xml
+ doc.root << builder.changeset_xml(cs)
end
doc.to_s
fl.flock(File::LOCK_EX)
sequence = (@state.key?("sequence") ? @state["sequence"] + 1 : 0)
- data_file = @config["data_dir"] + format("/%03d/%03d/%03d.osm.gz", sequence / 1000000, (sequence / 1000) % 1000, (sequence % 1000))
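+ # e.g. sequence 1234567 becomes <data_dir>/001/234/567.osm.gz with a
+ # matching 001/234/567.state.txt alongside it.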
+ data_stem = @config["data_dir"] + format("/%03d/%03d/%03d", sequence / 1000000, (sequence / 1000) % 1000, (sequence % 1000))
+ data_file = data_stem + ".osm.gz"
+ data_state_file = data_stem + ".state.txt"
tmp_state = @config["state_file"] + ".tmp"
- tmp_data = "/tmp/changeset_data.osm.tmp"
+ tmp_data = data_file + ".tmp"
# try and write the files to tmp locations and then
# move them into place later, to avoid in-progress
# clashes, or people seeing incomplete files.
File.open(tmp_state, "w") do |fh|
fh.write(YAML.dump(@state))
end
+
+ # fsync the files in their old locations.
+ fsync(tmp_data)
+ fsync(tmp_state)
+
+ # sync the directory as well, to ensure that the file is reachable
+ # from the dirent and has been updated to account for any allocations.
+ fdirsync(File.dirname(tmp_data))
+ fdirsync(File.dirname(tmp_state))
+
+ # sanity check: the files we're moving into place
+ # should exist and be non-empty.
+ raise "Temporary gzip file should exist, but doesn't." unless File.exist?(tmp_data)
+ raise "Temporary state file should exist, but doesn't." unless File.exist?(tmp_state)
+ raise "Temporary gzip file should be non-empty, but isn't." if File.zero?(tmp_data)
+ raise "Temporary state file should be non-empty, but isn't." if File.zero?(tmp_state)
+
FileUtils.mv(tmp_data, data_file)
- FileUtils.mv(tmp_state, @config["state_file"])
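+ # the state is written twice: copied to the global state file and moved
+ # next to the data file, so the state for any past sequence can be
+ # fetched later.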
+ FileUtils.cp(tmp_state, @config["state_file"])
+ FileUtils.mv(tmp_state, data_state_file)
+
+ # fsync the files in their new locations, in case the inodes have
+ # changed in the move / copy.
+ fsync(data_file)
+ fsync(@config["state_file"])
+ fsync(data_state_file)
+
+ # sync the directory as well, to ensure that the file is reachable
+ # from the dirent and has been updated to account for any allocations.
+ fdirsync(File.dirname(data_file))
+ fdirsync(File.dirname(@config["state_file"]))
+
fl.flock(File::LOCK_UN)
rescue
end
end
-rep = Replicator.new(ARGV[0])
-rep.save!
+begin
+ rep = Replicator.new(ARGV[0])
+ rep.save!
+rescue StandardError => e
+ STDERR.puts "ERROR: #{e.message}"
+ exit 1
+end