+# SPDX-License-Identifier: GPL-2.0-only
+#
+# This file is part of Nominatim. (https://nominatim.org)
+#
+# Copyright (C) 2022 by the Nominatim developer community.
+# For a full list of authors see the git log.
"""
Functions for importing and managing static country information.
"""
+import json
+from io import StringIO
import psycopg2.extras
from nominatim.db import utils as db_utils
def __init__(self):
    # Cached country properties keyed by two-letter country code.
    # Starts empty; populated lazily by load().
    self._info = {}
+
def load(self, config):
    """ Load the country properties from the configuration files,
        if they are not loaded yet.

        Parameters:
            config: project configuration object providing
                    load_sub_configuration() for reading YAML settings.

        After loading, every country entry is normalised so that
        'languages' is always a list and 'names' always contains a
        'name' dictionary (consumers index these without checks).
    """
    if not self._info:
        self._info = config.load_sub_configuration('country_settings.yaml')
        # Convert languages into a list for simpler handling.
        for prop in self._info.values():
            if 'languages' not in prop:
                prop['languages'] = []
            elif not isinstance(prop['languages'], list):
                # Comma-separated string in the YAML; split and trim.
                prop['languages'] = [x.strip()
                                     for x in prop['languages'].split(',')]
            # Bug fix: the original assigned prop['names']['name'] when
            # 'names' was absent, which raises KeyError. Create the
            # nested structure instead, and also guarantee the 'name'
            # sub-key when 'names' exists but lacks it.
            if 'names' not in prop:
                prop['names'] = {'name': {}}
            elif 'name' not in prop['names']:
                prop['names']['name'] = {}
def items(self):
""" Return tuples of (country_code, property dict) as iterable.
_COUNTRY_INFO.load(config)
def iterate():
    """ Return an iterable of (country_code, properties) tuples from the
        module-global country-information cache.
    """
    return _COUNTRY_INFO.items()
+
+
def setup_country_tables(dsn, sql_dir, ignore_partitions=False):
""" Create and populate the tables with basic static data that provides
the background for geocoding. Data is assumed to not yet exist.
"""
- db_utils.execute_file(dsn, sql_dir / 'country_name.sql')
db_utils.execute_file(dsn, sql_dir / 'country_osm_grid.sql.gz')
- params = []
+ def prepend_name_to_keys(name):
+ return {'name:'+k: v for k, v in name.items()}
+
+ params, country_names_data = [], ''
for ccode, props in _COUNTRY_INFO.items():
if ccode is not None and props is not None:
if ignore_partitions:
partition = 0
else:
partition = props.get('partition')
- if ',' in (props.get('languages', ',') or ','):
- lang = None
- else:
- lang = props['languages']
+ lang = props['languages'][0] if len(props['languages']) == 1 else None
params.append((ccode, partition, lang))
+ name = prepend_name_to_keys(props.get('names').get('name'))
+ name = json.dumps(name , ensure_ascii=False, separators=(', ', '=>'))
+ country_names_data += ccode + '\t' + name[1:-1] + '\n'
+
with connect(dsn) as conn:
with conn.cursor() as cur:
+ cur.execute(
+ """ CREATE TABLE public.country_name (
+ country_code character varying(2),
+ name public.hstore,
+ derived_name public.hstore,
+ country_default_language_code text,
+ partition integer
+ ); """)
+ data = StringIO(country_names_data)
+ cur.copy_from(data, 'country_name', columns=('country_code', 'name'))
cur.execute_values(
""" UPDATE country_name
SET partition = part, country_default_language_code = lang
languages = languages.split(',')
def _include_key(key):
- return key == 'name' or \
- (key.startswith('name:') and (not languages or key[5:] in languages))
+ return key.startswith('name:') and \
+ key[5:] in languages or key[5:] == 'default'
with conn.cursor() as cur:
psycopg2.extras.register_hstore(cur)