X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/0ad1b28497b49bf8baa5cfa2ea4438c4824617a1..cb70a2fde33e9c2455ea1cfb37ee430b00058b82:/nominatim/tools/refresh.py?ds=sidebyside

diff --git a/nominatim/tools/refresh.py b/nominatim/tools/refresh.py
index 5fbb07f8..6720465f 100644
--- a/nominatim/tools/refresh.py
+++ b/nominatim/tools/refresh.py
@@ -2,23 +2,28 @@
 Functions for bringing auxiliary data in the database up-to-date.
 """
 import json
-import re
+import logging
+from textwrap import dedent
 
 from psycopg2.extras import execute_values
 
-from ..db.utils import execute_file
+from nominatim.db.utils import execute_file
+from nominatim.db.sql_preprocessor import SQLPreprocessor
+from nominatim.version import NOMINATIM_VERSION
 
-def update_postcodes(conn, datadir):
+LOG = logging.getLogger()
+
+def update_postcodes(dsn, sql_dir):
     """ Recalculate postcode centroids and add, remove and update entries in
         the location_postcode table. `conn` is an open connection to the
         database.
     """
-    execute_file(conn, datadir / 'sql' / 'update-postcodes.sql')
+    execute_file(dsn, sql_dir / 'update-postcodes.sql')
 
 
-def recompute_word_counts(conn, datadir):
+def recompute_word_counts(dsn, sql_dir):
     """ Compute the frequency of full-word search terms.
     """
-    execute_file(conn, datadir / 'sql' / 'words_from_search_name.sql')
+    execute_file(dsn, sql_dir / 'words_from_search_name.sql')
 
 
 def _add_address_level_rows_from_entry(rows, entry):
@@ -71,99 +76,122 @@ def load_address_levels_from_file(conn, config_file):
     with config_file.open('r') as fdesc:
         load_address_levels(conn, 'address_levels', json.load(fdesc))
 
-PLPGSQL_BASE_MODULES = (
-    'utils.sql',
-    'normalization.sql',
-    'ranking.sql',
-    'importance.sql',
-    'address_lookup.sql',
-    'interpolation.sql'
-)
-PLPGSQL_TABLE_MODULES = (
-    ('place', 'place_triggers.sql'),
-    ('placex', 'placex_triggers.sql'),
-    ('location_postcode', 'postcode_triggers.sql')
-)
-
-def _get_standard_function_sql(conn, config, sql_dir, enable_diff_updates, enable_debug):
-    """ Read all applicable SQLs containing PL/pgSQL functions, replace
-        placefolders and execute them.
+def create_functions(conn, config, enable_diff_updates=True, enable_debug=False):
+    """ (Re)create the PL/pgSQL functions.
     """
-    sql_func_dir = sql_dir / 'functions'
-    sql = ''
+    sql = SQLPreprocessor(conn, config)
 
-    # Get the basic set of functions that is always imported.
-    for sql_file in PLPGSQL_BASE_MODULES:
-        with (sql_func_dir / sql_file).open('r') as fdesc:
-            sql += fdesc.read()
+    sql.run_sql_file(conn, 'functions.sql',
+                     disable_diff_updates=not enable_diff_updates,
+                     debug=enable_debug)
 
-    # Some files require the presence of a certain table
-    for table, fname in PLPGSQL_TABLE_MODULES:
-        if conn.table_exists(table):
-            with (sql_func_dir / fname).open('r') as fdesc:
-                sql += fdesc.read()
 
-    # Replace placeholders.
-    sql = sql.replace('{modulepath}',
-                      config.DATABASE_MODULE_PATH or str((config.project_dir / 'module').resolve()))
 
-    if enable_diff_updates:
-        sql = sql.replace('RETURN NEW; -- %DIFFUPDATES%', '--')
+WEBSITE_SCRIPTS = (
+    'deletable.php',
+    'details.php',
+    'lookup.php',
+    'polygons.php',
+    'reverse.php',
+    'search.php',
+    'status.php'
+)
+
+# constants needed by PHP scripts: PHP name, config name, type
+PHP_CONST_DEFS = (
+    ('Database_DSN', 'DATABASE_DSN', str),
+    ('Default_Language', 'DEFAULT_LANGUAGE', str),
+    ('Log_DB', 'LOG_DB', bool),
+    ('Log_File', 'LOG_FILE', str),
+    ('NoAccessControl', 'CORS_NOACCESSCONTROL', bool),
+    ('Places_Max_ID_count', 'LOOKUP_MAX_COUNT', int),
+    ('PolygonOutput_MaximumTypes', 'POLYGON_OUTPUT_MAX_TYPES', int),
+    ('Search_BatchMode', 'SEARCH_BATCH_MODE', bool),
+    ('Search_NameOnlySearchFrequencyThreshold', 'SEARCH_NAME_ONLY_THRESHOLD', str),
+    ('Use_US_Tiger_Data', 'USE_US_TIGER_DATA', bool),
+    ('MapIcon_URL', 'MAPICON_URL', str),
+)
 
-    if enable_debug:
-        sql = sql.replace('--DEBUG:', '')
 
-    if config.get_bool('LIMIT_REINDEXING'):
-        sql = sql.replace('--LIMIT INDEXING:', '')
+def import_wikipedia_articles(dsn, data_path, ignore_errors=False):
+    """ Replaces the wikipedia importance tables with new data.
+        The import is run in a single transaction so that the new data
+        is replaced seamlessly.
 
-    if not config.get_bool('USE_US_TIGER_DATA'):
-        sql = sql.replace('-- %NOTIGERDATA% ', '')
+        Returns 0 if all was well and 1 if the importance file could not
+        be found. Throws an exception if there was an error reading the file.
+    """
+    datafile = data_path / 'wikimedia-importance.sql.gz'
 
-    if not config.get_bool('USE_AUX_LOCATION_DATA'):
-        sql = sql.replace('-- %NOAUXDATA% ', '')
+    if not datafile.exists():
+        return 1
 
-    reverse_only = 'false' if conn.table_exists('search_name') else 'true'
+    pre_code = """BEGIN;
+                  DROP TABLE IF EXISTS "wikipedia_article";
+                  DROP TABLE IF EXISTS "wikipedia_redirect"
+               """
+    post_code = "COMMIT"
+    execute_file(dsn, datafile, ignore_errors=ignore_errors,
+                 pre_code=pre_code, post_code=post_code)
 
-    return sql.replace('%REVERSE-ONLY%', reverse_only)
+    return 0
 
 
-def replace_partition_string(sql, partitions):
-    """ Replace a partition template with the actual partition code.
+def recompute_importance(conn):
+    """ Recompute wikipedia links and importance for all entries in placex.
+        This is a long-running operation that must not be executed in
+        parallel with updates.
     """
-    for match in re.findall('^-- start(.*?)^-- end', sql, re.M | re.S):
-        repl = ''
-        for part in partitions:
-            repl += match.replace('-partition-', str(part))
-        sql = sql.replace(match, repl)
+    with conn.cursor() as cur:
+        cur.execute('ALTER TABLE placex DISABLE TRIGGER ALL')
+        cur.execute("""
+            UPDATE placex SET (wikipedia, importance) =
+               (SELECT wikipedia, importance
+                FROM compute_importance(extratags, country_code, osm_type, osm_id))
+            """)
+        cur.execute("""
+            UPDATE placex s SET wikipedia = d.wikipedia, importance = d.importance
+             FROM placex d
+             WHERE s.place_id = d.linked_place_id and d.wikipedia is not null
+                   and (s.wikipedia is null or s.importance < d.importance);
+            """)
+
+        cur.execute('ALTER TABLE placex ENABLE TRIGGER ALL')
+    conn.commit()
 
-    return sql
 
-def _get_partition_function_sql(conn, sql_dir):
-    """ Create functions that work on partition tables.
+def setup_website(basedir, config):
+    """ Create the website script stubs.
""" - with conn.cursor() as cur: - cur.execute('SELECT distinct partition FROM country_name') - partitions = set([0]) - for row in cur: - partitions.add(row[0]) + if not basedir.exists(): + LOG.info('Creating website directory.') + basedir.mkdir() - with (sql_dir / 'partition-functions.src.sql').open('r') as fdesc: - sql = fdesc.read() + template = dedent("""\ +