  # projection object to go from latlon -> spherical mercator
  PROJ = Proj4::Projection.new(["+proj=merc", "+a=6378137", "+b=6378137",
                                "+lat_ts=0.0", "+lon_0=0.0", "+x_0=0.0",
                                "+y_0=0", "+k=1.0", "+units=m",
                                "+nadgrids=@null", "+no_defs +over"])
  # width/height of the spherical mercator projection
  SIZE = 40075016.6855784
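  # SIZE is 2 * PI * 6378137 - the circumference of that sphere - so mercator
  # coordinates run from -SIZE/2 to +SIZE/2 on each axis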
  # the size of the meta tile blocks
  METATILE = 8
  # the directory root for meta tiles
  HASH_ROOT = "/tiles/default/"
  NODE_CACHE_FILE = "/store/database/nodes"
  # turns a spherical mercator coord into a tile coord
  def Expire.tile_from_merc(point, zoom)
    # renormalise into unit space [0,1]
    point.x = 0.5 + point.x / SIZE
    point.y = 0.5 - point.y / SIZE
    # transform into tile space
    point.x = point.x * 2**zoom
    point.y = point.y * 2**zoom
    # chop off the fractional parts
    [point.x.to_int, point.y.to_int, zoom]
  end
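  # worked example: the mercator origin (0, 0) normalises to (0.5, 0.5), so at
  # zoom 3 it falls in tile [4, 4, 3]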
  # turns a latlon -> tile x,y given a zoom level
  def Expire.tile_from_latlon(latlon, zoom)
    # first convert to spherical mercator
    point = PROJ.forward(latlon)
    tile_from_merc(point, zoom)
  end
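  # note that the latlon point must already be in radians, longitude first; the
  # callers in do_expire below build it as
  # Proj4::Point.new(lon * Math::PI / 180, lat * Math::PI / 180)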
  # this must match the definition of xyz_to_meta in mod_tile
  def Expire.xyz_to_meta(x, y, z)
    # mask off the final few bits
    x &= ~(METATILE - 1)
    y &= ~(METATILE - 1)
    # generate the path
    hash_path = (0..4).collect { |i|
      (((x >> 4 * i) & 0xf) << 4) | ((y >> 4 * i) & 0xf)
    }.reverse.join('/')
    z.to_s + '/' + hash_path + ".meta"
  end
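  # the result looks like "z/a/b/c/d/e.meta": five path components, each packing
  # four bits of the metatile-aligned x and y coordinates, most significant
  # component first, matching the hashed directory layout mod_tile uses on disk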
  # time to reset to, some very stupidly early time, before OSM started
  EXPIRY_TIME = Time.parse("2000-01-01 00:00:00")

  # expire the meta tile by setting the modified time back
  def Expire.expire_meta(meta)
    puts "Expiring #{meta}"
    File.utime(EXPIRY_TIME, EXPIRY_TIME, meta)
  end
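  # winding the mtime back in expire_meta is enough because mod_tile treats a
  # meta tile whose mtime is older than the data it was rendered from (the
  # planet import timestamp) as dirty and re-renders it on the next request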
  def Expire.expire(change_file, min_zoom, max_zoom, tile_dirs)
    do_expire(change_file, min_zoom, max_zoom) do |set|
      new_set = Set.new
      meta_set = Set.new

      # turn all the tiles into expires, putting them in the set
      # so that we don't expire things multiple times
      set.each do |xy|
        # this has to match the routine in mod_tile
        meta = xyz_to_meta(xy[0], xy[1], xy[2])

        # check each style working out what needs expiring
        tile_dirs.each do |tile_dir|
          meta_set.add(tile_dir + "/" + meta) if File.exist?(tile_dir + "/" + meta)
        end

        # add the parent into the set for the next round
        new_set.add([xy[0] / 2, xy[1] / 2, xy[2] - 1])
      end

      # expire all meta tiles
      meta_set.each do |meta|
        expire_meta(meta)
      end

      # return the new set, consisting of all the parents
      new_set
    end
  end
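  # a typical call to Expire.expire, with hypothetical paths and zoom range:
  #
  #   Expire.expire("changes.osc", 13, 18, ["/srv/tiles/default"])
  #
  # because each round halves the tile coordinates, every touched tile also
  # expires its ancestor meta tiles (where they exist on disk) at every zoom
  # level from max_zoom down to min_zoom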
  def Expire.do_expire(change_file, min_zoom, max_zoom, &_)
    # read in the osm change file
    doc = XML::Document.file(change_file)

    # hash map to contain all the nodes
    nodes = {}

    # we put all the nodes into the hash, as it doesn't matter whether the node was
    # added, deleted or modified - the tile will need updating anyway.
    doc.find('//node').each do |node|
      lat = node['lat'].to_f
      # clamp the latitude so the mercator projection stays finite near the poles
      if lat < -85
        lat = -85
      elsif lat > 85
        lat = 85
      end
      point = Proj4::Point.new(Math::PI * node['lon'].to_f / 180,
                               Math::PI * lat / 180)
      nodes[node['id'].to_i] = tile_from_latlon(point, max_zoom)
    end
    # now we look for all the ways that have changed and put all of their nodes into
    # the hash too. this will add too many nodes, as it is possible a long way will be
    # changed at only a portion of its length. however, due to the non-local way that
    # mapnik does text placement, it may still not be enough.
    #
    # also, we miss cases where nodes are deleted from ways where that node is not
    # itself deleted and the coverage of the point set isn't enough to encompass the
    # change.
    node_cache = NodeCache.new(NODE_CACHE_FILE)
    doc.find('//way/nd').each do |node|
      node_id = node['ref'].to_i
      unless nodes.include? node_id
        # this is a node referenced but not added, modified or deleted, so it should
        # still be in the node cache.
        if entry = node_cache[node_id]
          point = Proj4::Point.new(entry.lon, entry.lat)
          nodes[node_id] = tile_from_merc(point, max_zoom)
        end
      end
    end
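    # a way node which is neither in the change file nor found in the node cache
    # is silently skipped, so it contributes no tiles to the expiry set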
    # create a set of all the tiles at the maximum zoom level which are touched by
    # any of the nodes we've collected. we'll create the tiles at other zoom levels
    # by a simple recursion.
    set = Set.new nodes.values

    # expire tiles and shrink to the set of parents
    (max_zoom).downto(min_zoom) do |_|
      # allow the block to work on the set, returning the set at the next
      # zoom level up
      set = yield set
    end
  end
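  # do_expire therefore hands the tile set to its block once per zoom level,
  # starting at max_zoom; the block supplied by Expire.expire expires those
  # tiles and returns the set of parent tiles for the next, lower zoom level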
  # wrapper to access the osm2pgsql node cache
  class NodeCache
    # a single node position read out of the cache
    class Node
      attr_reader :lon, :lat

      def initialize(lon, lat)
        @lat = lat.to_f / 100.0
        @lon = lon.to_f / 100.0
      end
    end
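    # coordinates are stored as fixed-point integers scaled by 100, and the
    # values recovered here are fed to tile_from_merc in do_expire, so they are
    # spherical mercator metres rather than degrees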
    # open the cache file and sanity-check its header
    def initialize(filename)
      @cache = Mmap.new(filename)

      raise "Unexpected format" unless @cache[0..3].unpack("l").first == 1
      raise "Unexpected ID size" unless @cache[4..7].unpack("l").first == 8

      @max_id = @cache[8..15].unpack("q").first
    end
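    # header layout implied by the reads in initialize: bytes 0-3 hold a format
    # word, bytes 4-7 the ID size, bytes 8-15 the highest node ID, and the rest
    # of the file is fixed-width 8-byte (lon, lat) records indexed by node ID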
    # look up a node by ID
    def [](id)
      if id <= @max_id
        offset = 16 + id * 8

        lon, lat = @cache[offset..offset + 7].unpack("ll")
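        # -2147483648 is INT32_MIN, which the cache appears to use as a
        # "no such position" sentinel, so no Node is built for those entries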
        if lon != -2147483648 && lat != -2147483648
          node = Node.new(lon, lat)