X-Git-Url: https://git.openstreetmap.org./nominatim.git/blobdiff_plain/921cc233890705d6a916fa415b51fdd88fa8fe11..1a02f182bb6af0a2034fd7e1c1c938fab61e413f:/utils/cron_banip.py

diff --git a/utils/cron_banip.py b/utils/cron_banip.py
index d716affb..55a7b295 100755
--- a/utils/cron_banip.py
+++ b/utils/cron_banip.py
@@ -61,8 +61,10 @@ BULKCOOLOFF_PERIOD='15 min'
 BULKLONG_LIMIT=8000
 BULKSHORT_LIMIT=2000
 BLOCK_UPPER=19000
-BLOCK_LOADFAC=300
-BULK_LOADFAC=100
+BLOCK_LOWER=4000
+BLOCK_LOADFAC=380
+BULK_LOADFAC=160
+BULK_LOWER=1500
 #
 # END OF DEFAULT SETTINGS
 #
@@ -73,11 +75,6 @@ try:
 except IOError:
     pass

-# determine current load
-fd = open("/proc/loadavg")
-avgload = int(float(fd.readline().split()[1]))
-fd.close()
-
 # read the previous blocklist
 WHITELIST = set(WHITELIST.split()) if WHITELIST else set()
 prevblocks = []
@@ -99,25 +96,29 @@ try:
 except IOError:
     pass #ignore non-existing file

-# current number of bulks
-numbulks = len(prevbulks)
-
-BLOCK_LIMIT = BLOCK_UPPER - BLOCK_LOADFAC * (numbulks - 30)
-BULKLONG_LIMIT = BULKLONG_LIMIT - BULK_LOADFAC * (avgload - 16)
-
+# determine current load
+fd = open("/proc/loadavg")
+avgload = int(float(fd.readline().split()[2]))
+fd.close()
+# DB load
 conn = psycopg2.connect('dbname=nominatim')
 cur = conn.cursor()
+cur.execute("select count(*)/60 from new_query_log where starttime > now() - interval '1min'")
+dbload = int(cur.fetchone()[0])
+
+BLOCK_LIMIT = max(BLOCK_LOWER, BLOCK_UPPER - BLOCK_LOADFAC * (dbload - 70))
+BULKLONG_LIMIT = max(BULK_LOWER, BULKLONG_LIMIT - BULK_LOADFAC * (avgload - 14))

 # get the new block candidates
 cur.execute("""
   SELECT ipaddress, max(count) FROM
   ((SELECT * FROM
-     (SELECT ipaddress, sum(CASE WHEN type = 'search' THEN 3 ELSE 1 END) as count FROM new_query_log
+     (SELECT ipaddress, sum(case when endtime is null then 1 else 1+date_part('epoch',endtime-starttime) end) as count FROM new_query_log
       WHERE starttime > now() - interval '1 hour' GROUP BY ipaddress) as i
    WHERE count > %s)
   UNION
   (SELECT ipaddress, count * 4 FROM
-     (SELECT ipaddress, sum(CASE WHEN type = 'search' THEN 2 ELSE 1 END) as count FROM new_query_log
+     (SELECT ipaddress, sum(case when endtime is null then 1 else 1+date_part('epoch',endtime-starttime) end) as count FROM new_query_log
       WHERE starttime > now() - interval '10 min' GROUP BY ipaddress) as i
   WHERE count > %s)) as o
   GROUP BY ipaddress
@@ -182,6 +183,7 @@ if debulkcandidates:
     cur.execute("""
        SELECT DISTINCT ipaddress FROM new_query_log
        WHERE ipaddress IN ('%s') AND starttime > now() - interval '%s'
+       AND starttime > date_trunc('day', now())
       """ % ("','".join(debulkcandidates), BULKCOOLOFF_PERIOD))

     for c in cur:
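
The central change in this patch is that the block limits are no longer derived from the number of previously bulk-listed clients but from current load: database load (queries per second in the last minute of new_query_log) drives BLOCK_LIMIT, and the 15-minute system load average from /proc/loadavg drives BULKLONG_LIMIT, each clamped to a new lower bound. The following standalone sketch (not part of the patch; the function name and example numbers are illustrative only) restates that calculation:

# Sketch of the load-adaptive limit computation introduced by the diff.
# Constants mirror the defaults added in the first hunk.

BLOCK_UPPER = 19000
BLOCK_LOWER = 4000
BLOCK_LOADFAC = 380
BULKLONG_LIMIT = 8000
BULK_LOWER = 1500
BULK_LOADFAC = 160

def adaptive_limits(dbload, avgload):
    """Return (block_limit, bulklong_limit) for the given DB and system load.

    dbload  -- queries per second against new_query_log over the last minute
    avgload -- 15-minute load average from /proc/loadavg
    """
    block_limit = max(BLOCK_LOWER, BLOCK_UPPER - BLOCK_LOADFAC * (dbload - 70))
    bulklong_limit = max(BULK_LOWER, BULKLONG_LIMIT - BULK_LOADFAC * (avgload - 14))
    return block_limit, bulklong_limit

if __name__ == '__main__':
    # e.g. 90 queries/s hitting the DB and a load average of 20
    print(adaptive_limits(90, 20))   # -> (11400, 7040)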