 async with QueryPool(dsn, place_threads, autocommit=True) as pool:
     with tokenizer.name_analyzer() as analyzer:
-        lines = 0
-        for row in tar:
+        for lineno, row in enumerate(tar, 1):
             try:
                 address = dict(street=row['street'], postcode=row['postcode'])
                 args = ('SRID=4326;' + row['geometry'],
                                             %s::INT, %s::TEXT, %s::JSONB, %s::TEXT)""",
                 args)
-            lines += 1
-            if lines == 1000:
+            if not lineno % 1000:
                 print('.', end='', flush=True)
-                lines = 0
 print('', flush=True)
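
The refactor drops the manually maintained `lines` counter and its reset, and instead derives progress from `enumerate`: `lineno % 1000` is zero exactly on every 1000th row, so one dot is printed per thousand rows. A minimal, self-contained sketch of the same pattern, assuming a made-up `load_rows` generator standing in for the tar reader (not part of the patch):

```python
from typing import Iterable, Iterator


def load_rows() -> Iterator[dict]:
    """Hypothetical stand-in for the tar-file reader in the real code."""
    for i in range(3500):
        yield {'street': f'Example St {i}', 'postcode': '12345'}


def import_rows(rows: Iterable[dict]) -> int:
    """Consume rows, printing a progress dot every 1000 rows."""
    lineno = 0
    for lineno, row in enumerate(rows, 1):
        # ... process `row` here ...
        if not lineno % 1000:       # true exactly on multiples of 1000
            print('.', end='', flush=True)
    print('', flush=True)           # finish the progress line with a newline
    return lineno


if __name__ == '__main__':
    total = import_rows(load_rows())
    print(f'{total} rows processed')
```

Starting `enumerate` at 1 keeps the very first row from matching `lineno % 1000 == 0`, so the dot cadence is the same as with the old counter.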