X-Git-Url: https://git.openstreetmap.org./chef.git/blobdiff_plain/468dde2d236ea2cbcfcecef05cb17d2261f117ea..79c6636acc892cc8fd336a0506948fcd16e8654f:/cookbooks/tile/files/default/ruby/expire.rb

diff --git a/cookbooks/tile/files/default/ruby/expire.rb b/cookbooks/tile/files/default/ruby/expire.rb
index ec0a59860..8fb0033cd 100755
--- a/cookbooks/tile/files/default/ruby/expire.rb
+++ b/cookbooks/tile/files/default/ruby/expire.rb
@@ -4,8 +4,8 @@ require 'rubygems'
 require 'proj4'
 require 'xml/libxml'
 require 'set'
-require 'pg'
 require 'time'
+require 'mmap'
 
 module Expire
   # projection object to go from latlon -> spherical mercator
@@ -20,17 +20,8 @@ module Expire
   METATILE = 8
   # the directory root for meta tiles
   HASH_ROOT = "/tiles/default/"
-  # lowest zoom that we want to expire
-  # MIN_ZOOM=12
-  MIN_ZOOM=13
-  # highest zoom that we want to expire
-  MAX_ZOOM=18
-  # database parameters
-  DBNAME="gis"
-  DBHOST=""
-  #DBPORT=5432
-  DBPORT=5432
-  DBTABLE="planet_osm_nodes"
+  # node cache file
+  NODE_CACHE_FILE="/store/database/nodes"
 
   # turns a spherical mercator coord into a tile coord
   def Expire.tile_from_merc(point, zoom)
@@ -72,8 +63,8 @@ module Expire
     File.utime(EXPIRY_TIME, EXPIRY_TIME, meta)
   end
 
-  def Expire.expire(change_file, tile_dirs)
-    do_expire(change_file) do |set|
+  def Expire.expire(change_file, min_zoom, max_zoom, tile_dirs)
+    do_expire(change_file, min_zoom, max_zoom) do |set|
       new_set = Set.new
       meta_set = Set.new
 
@@ -81,7 +72,7 @@ module Expire
       # so that we don't expire things multiple times
      set.each do |xy|
         # this has to match the routine in mod_tile
-        meta = xyz_to_meta(HASH_ROOT, xy[0], xy[1], xy[2])
+        meta = xyz_to_meta(xy[0], xy[1], xy[2])
 
         # check each style working out what needs expiring
         tile_dirs.each do |tile_dir|
@@ -102,7 +93,7 @@ module Expire
     end
   end
 
-  def Expire.do_expire(change_file, &block)
+  def Expire.do_expire(change_file, min_zoom, max_zoom, &block)
     # read in the osm change file
     doc = XML::Document.file(change_file)
 
@@ -121,7 +112,7 @@ module Expire
       end
 
       point = Proj4::Point.new(Math::PI * node['lon'].to_f / 180, Math::PI * lat / 180)
-      nodes[node['id'].to_i] = tile_from_latlon(point, MAX_ZOOM)
+      nodes[node['id'].to_i] = tile_from_latlon(point, max_zoom)
     end
 
     # now we look for all the ways that have changed and put all of their nodes into
@@ -132,22 +123,16 @@ module Expire
     # also, we miss cases where nodes are deleted from ways where that node is not
     # itself deleted and the coverage of the point set isn't enough to encompass the
     # change.
-    conn = PG::Connection.new(:host => DBHOST, :port => DBPORT, :dbname => DBNAME)
+    node_cache = NodeCache.new(NODE_CACHE_FILE)
     doc.find('//way/nd').each do |node|
       node_id = node['ref'].to_i
       unless nodes.include? node_id
         # this is a node referenced but not added, modified or deleted, so it should
-        # still be in the postgis DB.
-        res = conn.query("select lon, lat from #{DBTABLE} where id=#{node_id};")
-
-        # loop over results, adding tiles to the change set
-        res.each do |row|
-          point = Proj4::Point.new(row[0].to_f / 100.0, row[1].to_f / 100.0)
-          nodes[node_id] = tile_from_merc(point, MAX_ZOOM)
+        # still be in the node cache.
+        if entry = node_cache[node_id]
+          point = Proj4::Point.new(entry.lon, entry.lat)
+          nodes[node_id] = tile_from_merc(point, max_zoom)
         end
-
-        # Discard results
-        res.clear
       end
     end
 
@@ -157,10 +142,48 @@ module Expire
     set = Set.new nodes.values
 
     # expire tiles and shrink to the set of parents
-    (MAX_ZOOM).downto(MIN_ZOOM) do |z|
+    (max_zoom).downto(min_zoom) do |z|
       # allow the block to work on the set, returning the set at the next
       # zoom level
       set = yield set
     end
   end
+
+  # wrapper to access the osm2pgsql node cache
+  class NodeCache
+    # node cache entry
+    class Node
+      attr_reader :lon, :lat
+
+      def initialize(lon, lat)
+        @lat = lat.to_f / 100.0
+        @lon = lon.to_f / 100.0
+      end
+    end
+
+    # open the cache
+    def initialize(filename)
+      @cache = Mmap.new(filename)
+
+      throw "Unexpected format" unless @cache[0..3].unpack("l").first == 1
+      throw "Unexpected ID size" unless @cache[4..7].unpack("l").first == 8
+
+      @max_id = @cache[8..15].unpack("q").first
+    end
+
+    # lookup a node
+    def [](id)
+      if id <= @max_id
+        offset = 16 + id * 8
+
+        lon, lat = @cache[offset .. offset+7].unpack("ll")
+
+        if lon != -2147483648 && lat != -2147483648
+          node = Node.new(lon, lat)
+        end
+      end
+
+      node
+    end
+  end
 end
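Note: the sketch below is illustrative only and is not part of the cookbook. It reads the same node cache layout that the new NodeCache wrapper above assumes (a 16-byte header of format, ID size and maximum node ID, followed by one 8-byte lon/lat record per node ID) using plain IO#pread instead of the mmap gem. The file path reuses the NODE_CACHE_FILE value from the diff, and the node ID is a placeholder.

# Illustrative sketch: look up one node in the flat node cache file
# assumed by the NodeCache class in the diff above.

NODE_FILE = "/store/database/nodes".freeze # same value as NODE_CACHE_FILE

def lookup(io, id)
  # 16-byte header assumed by NodeCache: int32 format, int32 ID size,
  # int64 maximum node ID (native-endian, matching the "l"/"q" unpack codes)
  format, id_size = io.pread(8, 0).unpack("ll")
  max_id = io.pread(8, 8).unpack("q").first

  raise "Unexpected format" unless format == 1
  raise "Unexpected ID size" unless id_size == 8
  return nil if id > max_id

  # one 8-byte record per node ID after the header: int32 lon, int32 lat
  lon, lat = io.pread(8, 16 + id * 8).unpack("ll")

  # -2147483648 marks an empty slot, as in NodeCache#[]
  return nil if lon == -2147483648 || lat == -2147483648

  # scale back to coordinates the same way NodeCache::Node does
  [lon.to_f / 100.0, lat.to_f / 100.0]
end

File.open(NODE_FILE, "rb") do |io|
  p lookup(io, 123_456_789) # placeholder node ID
end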