#!/bin/bash

# DO NOT EDIT - This file is being maintained by Chef

# Exit on error
set -e

# Get the name of the file and the expected pattern
file="$1"
pattern="^osm-([0-9]{4})-([0-9]{2})-([0-9]{2})\.dmp$"

# Give up now if the file isn't a database dump
[[ $file =~ $pattern ]] || exit 0

# Save the year and date from the file name
year="${BASH_REMATCH[1]}"
date="${year:2:2}${BASH_REMATCH[2]}${BASH_REMATCH[3]}"
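# "date" is the two-digit year plus month and day, e.g. osm-2025-01-06.dmp
# gives 250106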

# Check the lock; if the recorded PID is no longer running, ps prints only
# its header line and the stale lock file is removed
if [ -f /tmp/planetdump.lock ]; then
    if [ "$(ps -p "$(cat /tmp/planetdump.lock)" | wc -l)" -gt 1 ]; then
        echo "Error: Another planetdump is running"
        exit 1
    else
        rm /tmp/planetdump.lock
    fi
fi

# Create lock file
echo $$ > /tmp/planetdump.lock

# Define cleanup function
function cleanup {
    # Remove the lock file
    rm /tmp/planetdump.lock
}

# Remove lock on exit
trap cleanup EXIT

# Change to working directory
cd /store/planetdump

# Clean up leftovers from any previous run
rm -rf users
rm -rf changesets changeset_tags changeset_comments
rm -rf nodes node_tags
rm -rf ways way_tags way_nodes
rm -rf relations relation_tags relation_members

# Run the dump
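# planet-dump-ng produces every product from a single invocation: the
# changeset and discussion dumps, the current and full-history XML planets
# (compressed with pbzip2) and the current and full-history PBF planets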
time nice -n 19 /opt/planet-dump-ng/planet-dump-ng \
     --max-concurrency=4 \
     -c "pbzip2 -c" -f "/store/backup/${file}" --dense-nodes=1 \
     -C "changesets-${date}.osm.bz2" \
     -D "discussions-${date}.osm.bz2" \
     -x "planet-${date}.osm.bz2" -X "history-${date}.osm.bz2" \
     -p "planet-${date}.osm.pbf" -P "history-${date}.osm.pbf"

# Function to create bittorrent files
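# Arguments: dump type, file format, web directory, S3 key prefix, and an
# optional "/YYYY" suffix for dumps that are published in per-year
# subdirectories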
function mk_torrent {
  local type="$1"
  local format="$2"
  local dir="$3"
  local s3path="$4"
  local s_year="$5"
  local web_dir="${dir}${s_year}"
  local name="${type}-${date}.osm.${format}"
  local web_path="${web_dir}/${name}"
  local s3_web_path="${s3path}/${name}"
  local rss_web_dir="https://planet.openstreetmap.org/${dir}"
  local rss_file="${type}-${format}-rss.xml"
  local torrent_file="${name}.torrent"
  local torrent_url="${rss_web_dir}${s_year}/${torrent_file}"

  # create .torrent file
  mktorrent -l 22 "${name}" \
     -a udp://tracker.opentrackr.org:1337 \
     -a udp://tracker.datacenterlight.ch:6969/announce,http://tracker.datacenterlight.ch:6969/announce \
     -a udp://tracker.torrent.eu.org:451 \
     -a udp://tracker-udp.gbitt.info:80/announce,http://tracker.gbitt.info/announce,https://tracker.gbitt.info/announce \
     -a http://retracker.local/announce \
     -w "https://planet.openstreetmap.org/${web_path}" \
     -w "https://osm-planet-eu-central-1.s3.dualstack.eu-central-1.amazonaws.com/${s3_web_path}" \
     -w "https://osm-planet-us-west-2.s3.dualstack.us-west-2.amazonaws.com/${s3_web_path}" \
     -w "https://ftp5.gwdg.de/pub/misc/openstreetmap/planet.openstreetmap.org/${web_path}" \
     -w "https://ftpmirror.your.org/pub/openstreetmap/${web_path}" \
     -w "https://mirror.init7.net/openstreetmap/${web_path}" \
     -w "https://ftp.fau.de/osm-planet/${web_path}" \
     -w "https://ftp.spline.de/pub/openstreetmap/${web_path}" \
     -w "https://downloads.opencagedata.com/planet/${name}" \
     -w "https://planet.osm-hr.org/${web_path}" \
     -w "https://planet.maps.mail.ru/${web_path}" \
     -c "OpenStreetMap ${type} data export, licensed under https://opendatacommons.org/licenses/odbl/ by OpenStreetMap contributors" \
     -o "${torrent_file}" > /dev/null

  # create .xml global RSS headers if missing; the dummy atom: attribute
  # forces the atom namespace declaration and is deleted again below
  torrent_time_rfc="$(date -R -r "${torrent_file}")"
  test -f "${rss_file}" || echo "<x/>" | xmlstarlet select --xml-decl --indent \
        -N "atom=http://www.w3.org/2005/Atom" \
        -N "dcterms=http://purl.org/dc/terms/" \
        -N "content=http://purl.org/rss/1.0/modules/content/" \
        --encode "UTF-8" \
        --template \
        --match / \
        --elem "rss" \
                --attr "version" --output "2.0" --break \
                --attr "atom:DUMMY" --break \
        --elem "channel" \
        --elem "title" --output "OpenStreetMap ${type} ${format} torrent RSS" --break \
        --elem "link" --output "${rss_web_dir}" --break \
        --elem "atom:link" \
                --attr "href" --output "${rss_web_dir}/${rss_file}" --break \
                --attr "rel" --output "self" --break \
                --attr "type" --output "application/rss+xml" --break \
                --break \
        --elem "description" --output "${type}.osm.${format}.torrent RSS feed" --break \
        --elem "copyright" --output "Source: OpenStreetMap contributors, under ODbL 1.0 licence" --break \
        --elem "generator" --output "OpenStreetMap xmlstarlet powered shell script v1.0" --break \
        --elem "language" --output "en" --break \
        --elem "lastBuildDate" --output "${torrent_time_rfc}" \
        > "${rss_file}"

  # add the new .torrent file as the first entry in the .xml RSS feed,
  # keeping only the five newest entries
  torrent_size="$(stat --format="%s" "${torrent_file}")"
  xmlstarlet edit --inplace \
        -a "//lastBuildDate" -t elem -n item -v "" \
        -s "//item[1]" -t elem -n "title" -v "${torrent_file}" \
        -s "//item[1]" -t elem -n "guid" -v "${torrent_url}" \
        -s "//item[1]" -t elem -n "link" -v "${torrent_url}" \
        -s "//item[1]" -t elem -n "pubDate" -v "${torrent_time_rfc}" \
        -s "//item[1]" -t elem -n "category" -v "OpenStreetMap data" \
        -s "//item[1]" -t elem -n "enclosure" \
                -s "//item[1]/enclosure" -t attr -n "type" -v "application/x-bittorrent" \
                -s "//item[1]/enclosure" -t attr -n "length" -v "${torrent_size}" \
                -s "//item[1]/enclosure" -t attr -n "url" -v "${torrent_url}" \
        -s "//item[1]" -t elem -n "description" -v "OpenStreetMap torrent ${torrent_file}" \
        -u /rss/channel/lastBuildDate -v "${torrent_time_rfc}" \
        -d /rss/@atom:DUMMY \
        -d "//item[position()>5]" \
        "${rss_file}"
}

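# Poll the x-amz-replication-status header of an S3 object until replication
# reports COMPLETED, checking about once a second for up to 3600 attempts;
# on timeout a warning is printed and the script carries on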
function replication_status_wait {
  local s3_url="$1"
  for i in {1..3600}; do
    local replication_status=$(curl -sI --location "${s3_url}" | grep -F 'x-amz-replication-status' | awk '{print $2}' | tr -d '\r')

    if [[ "${replication_status}" == "COMPLETED" ]]; then
      return 0 # success
    fi

    sleep 1
  done
  echo "Timed out waiting for replication of ${s3_url} to complete (last status: ${replication_status})"
}

# Function to install a dump in place
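# Arguments: dump type, file format, destination directory, S3 key prefix,
# and an optional year subdirectory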
function install_dump {
  local type="$1"
  local format="$2"
  local dir="$3"
  local s3dir="$4"
  local year="$5"
  local name="${type}-${date}.osm.${format}"
  local latest="${type}-latest.osm.${format}"
  local rss_file="${type}-${format}-rss.xml"

  md5sum "${name}" > "${name}.md5"

  # Upload all files to S3
  /opt/awscli/v2/current/bin/aws --profile=osm-pds-upload s3 cp --storage-class=INTELLIGENT_TIERING --no-progress "${name}.md5" "s3://osm-planet-eu-central-1/${s3dir}/${name}.md5"
  /opt/awscli/v2/current/bin/aws --profile=osm-pds-upload s3 cp --storage-class=INTELLIGENT_TIERING --no-progress "${name}.torrent" "s3://osm-planet-eu-central-1/${s3dir}/${name}.torrent"
  /opt/awscli/v2/current/bin/aws --profile=osm-pds-upload s3 cp --storage-class=INTELLIGENT_TIERING --no-progress "${name}" "s3://osm-planet-eu-central-1/${s3dir}/${name}"

  # Wait for S3 replication to complete
  replication_status_wait "https://osm-planet-eu-central-1.s3.dualstack.eu-central-1.amazonaws.com/${s3dir}/${name}.md5"
  replication_status_wait "https://osm-planet-eu-central-1.s3.dualstack.eu-central-1.amazonaws.com/${s3dir}/${name}.torrent"
  replication_status_wait "https://osm-planet-eu-central-1.s3.dualstack.eu-central-1.amazonaws.com/${s3dir}/${name}"

  # Move the dump into place and refresh the "latest" symlinks and checksum
  mkdir -p "${dir}/${year}"
  mv "${name}" "${name}.md5" "${dir}/${year}"
  ln -sf "${year:-.}/${name}" "${dir}/${latest}"
  test -f "${name}.torrent" && mv "${name}.torrent" "${dir}/${year}" && ln -sf "${year:-.}/${name}.torrent" "${dir}/${latest}.torrent"
  test -f "${rss_file}" && xmllint --noout "${rss_file}" && cp -f "${rss_file}" "${dir}"
  rm -f "${dir}/${latest}.md5"
  sed -e "s/${name}/${latest}/" "${dir}/${year}/${name}.md5" > "${dir}/${latest}.md5"
}

# Create *.torrent files
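# The PBF dumps are published at the top level of the pbf directories, so no
# "/YYYY" suffix is passed for them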
mk_torrent "changesets" "bz2" "planet" "changesets/osm/${year}" "/${year}"
mk_torrent "discussions" "bz2" "planet" "discussions/osm/${year}" "/${year}"
mk_torrent "planet" "bz2" "planet" "planet/osm/${year}" "/${year}"
mk_torrent "history" "bz2" "planet/full-history" "planet-full-history/osm/${year}" "/${year}"
mk_torrent "planet" "pbf" "pbf" "planet/pbf/${year}"
mk_torrent "history" "pbf" "pbf/full-history" "planet-full-history/pbf/${year}"

# Move dumps into place
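# The destination directories are filled in from the Chef node attributes when
# this template is rendered; the PBF dumps omit the year argument, so they are
# installed at the top level of their directories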
install_dump "changesets" "bz2" "<%= node[:planet][:dump][:xml_directory] %>" "changesets/osm/${year}" "${year}"
install_dump "discussions" "bz2" "<%= node[:planet][:dump][:xml_directory] %>" "discussions/osm/${year}" "${year}"
install_dump "planet" "bz2" "<%= node[:planet][:dump][:xml_directory] %>" "planet/osm/${year}" "${year}"
install_dump "history" "bz2" "<%= node[:planet][:dump][:xml_history_directory] %>" "planet-full-history/osm/${year}" "${year}"
install_dump "planet" "pbf" "<%= node[:planet][:dump][:pbf_directory] %>" "planet/pbf/${year}"
install_dump "history" "pbf" "<%= node[:planet][:dump][:pbf_history_directory] %>" "planet-full-history/pbf/${year}"