3 # Search apache logs for high-bandwidth users and create a list of suspicious IPs.
4 # There are three states: bulk, block, ban. The first are bulk requesters
5 # that need throttling, the second bulk requesters that have overdone it
6 # and the last manually banned IPs.
12 from datetime import datetime, timedelta
13 from collections import defaultdict
18 # Copy into settings/ip_blocks.conf and adapt as required.
# Project base directory, resolved two levels up from this script's real path.
20 BASEDIR = os.path.normpath(os.path.join(os.path.realpath(__file__), '../..'))
# State file of currently blocked/bulked IPs ("ip state" per line); rewritten each run.
21 BLOCKEDFILE= BASEDIR + '/settings/ip_blocks.map'
# Append-only event log written at the end of each run.
22 LOGFILE= BASEDIR + '/log/restricted_ip.log'
24 # space-separated list of IPs that are never banned
26 # space-separated list of IPs manually blocked
28 # user-agents that should be blocked from bulk mode
29 # (matched with startswith)
32 # time before an automatically blocked IP is allowed back
33 BLOCKCOOLOFF_DELTA=timedelta(hours=1)
34 # quiet time before an IP is released from the bulk pool
35 BULKCOOLOFF_DELTA=timedelta(minutes=15)
47 # END OF DEFAULT SETTINGS
# Site-local overrides: ip_blocks.conf is plain Python compiled here and
# (presumably) exec'd into this module's globals.
# NOTE(review): this excerpt is missing lines (e.g. the exec of `code` and the
# surrounding try/except, if any) — do not assume the full logic is shown.
51 with open(BASEDIR + "/settings/ip_blocks.conf") as f:
52     code = compile(f.read(), BASEDIR + "/settings/ip_blocks.conf", 'exec')
# Effective block threshold; starts at the configured lower bound and is
# raised/adjusted later according to server load (see the __main__ section).
57 BLOCK_LIMIT = BLOCK_LOWER
# Apache log timestamp, e.g. "01/Jan/2020:12:34:56 +0000". The numeric
# timezone offset is matched but not captured in a named group.
59 time_regex = r'(?P<t_day>\d\d)/(?P<t_month>[A-Za-z]+)/(?P<t_year>\d\d\d\d):(?P<t_hour>\d\d):(?P<t_min>\d\d):(?P<t_sec>\d\d) [+-]\d\d\d\d'
# Full "combined" log line: ip, time, request, status, byte count, referer, UA.
# NOTE(review): the ip class `[(\d\.)]+` also admits literal parentheses;
# harmless on well-formed logs but worth confirming it is intentional.
61 format_pat= re.compile(r'(?P<ip>[(\d\.)]+) - - \['+ time_regex + r'] "(?P<query>.*?)" (?P<return>\d+) (?P<bytes>\d+) "(?P<referer>.*?)" "(?P<ua>.*?)"')
# Prefix-only pattern for when just the timestamp part is needed.
# NOTE(review): the trailing '\] ' is a non-raw string — '\]' is an invalid
# escape that raises a DeprecationWarning on modern Python; should be r'\] '.
62 time_pat= re.compile(r'[(\d\.)]+ - - \[' + time_regex + '\] ')
# strptime equivalent of time_regex; apparently only referenced from a
# commented-out line below — verify before removing.
64 logtime_pat = "%d/%b/%Y:%H:%M:%S %z"
# Month abbreviation -> number; avoids locale-dependent strptime parsing.
66 MONTHS = { 'Jan' : 1, 'Feb' : 2, 'Mar' : 3, 'Apr' : 4, 'May' : 5, 'Jun' : 6,
67            'Jul' : 7, 'Aug' : 8, 'Sep' : 9, 'Oct' : 10, 'Nov' : 11, 'Dec' : 12 }
# Parse one raw apache log line into attributes: date, query, retcode,
# referer, ua (and, per the branches below, apparently a request type).
# Raises ValueError when the line does not match format_pat.
# NOTE(review): several original lines are missing from this excerpt — the
# `if e is None:` guard before the raise, the assignments inside the
# OPTIONS/search/reverse/details branches, and the fallback branch. The
# visible code is incomplete; do not infer the full classification logic.
70     def __init__(self, logline):
71         e = format_pat.match(logline)
73             raise ValueError("Invalid log line:", logline)
# Timestamp rebuilt manually (naive datetime, timezone offset discarded).
76         self.date = datetime(int(e['t_year']), MONTHS[e['t_month']], int(e['t_day']),
77                              int(e['t_hour']), int(e['t_min']), int(e['t_sec']))
# Split "METHOD /path HTTP/x.y" into at most three parts.
78         qp = e['query'].split(' ', 2)
84         if qp[0] == 'OPTIONS':
# Classify by endpoint prefix (assignments not visible in this excerpt).
87             if qp[1].startswith('/search'):
89             elif qp[1].startswith('/reverse'):
91             elif qp[1].startswith('/details'):
95         self.query = e['query']
96         self.retcode = int(e['return'])
# '-' in the log means "not present"; normalise to None.
97         self.referer = e['referer'] if e['referer'] != '-' else None
98         self.ua = e['ua'] if e['ua'] != '-' else None
# Extract only the timestamp of a log line as a naive datetime.
# NOTE(review): lines are missing from this excerpt between the match and the
# return — presumably an `if e is None:` guard; confirm against the full file.
100     def get_log_time(logline):
101         e = format_pat.match(logline)
# Manual construction kept instead of strptime (see commented-out variant).
105         #return datetime.strptime(e['time'], logtime_pat).replace(tzinfo=None)
106         return datetime(int(e['t_year']), MONTHS[e['t_month']], int(e['t_day']),
107                         int(e['t_hour']), int(e['t_min']), int(e['t_sec']))
111 """ An apache log file, unpacked. """
# Open the log file and remember its size for the binary seek in
# seek_to_date(). NOTE(review): the handle is kept open for the object's
# lifetime and no close()/context manager is visible in this excerpt.
113     def __init__(self, filename):
114         self.fd = open(filename)
115         self.len = os.path.getsize(filename)
# Jump to an absolute byte offset and return the timestamp of the next
# complete log line (None at end of file).
# NOTE(review): a line is missing between seek and readline in this excerpt —
# presumably a readline() that skips the partial line landed in; confirm.
# NOTE(review): potential bug — file.readline() returns '' at EOF, never
# None, so `l is not None` is always true and get_log_time would be called
# on an empty string; the guard should probably be `if l else None`.
120     def seek_next(self, abstime):
121         self.fd.seek(abstime)
123         l = self.fd.readline()
124         return LogEntry.get_log_time(l) if l is not None else None
# Position the file roughly at the first line with timestamp >= target by
# interpolation search: estimate bytes-per-second from the file start and
# iteratively re-seek until within one second of the target.
# NOTE(review): this excerpt is missing many interior lines (the guards at
# original lines 128-129, 132, 137-138, 142, and everything after 143), so
# the loop's termination/clamping logic shown here is incomplete.
126     def seek_to_date(self, target):
# Timestamp at the very beginning of the file.
127         date1 = self.seek_next(0)
# Initial probe 2 MiB in.
130         curseek = 2*1024*1024
131         curdate = self.seek_next(curseek)
133             raise RuntimeError("Cannot seek to %d" % curseek)
134         while target < curdate or (target - curdate).total_seconds() > 1:
# Estimated log throughput in bytes per second since file start.
135             bps = curseek / ((curdate - date1).total_seconds())
136             curseek += (target - curdate).total_seconds() * bps
# Clamp the estimate inside the file.
139             elif curseek > self.len:
140                 curseek = self.len - bps
141             curdate = self.seek_next(curseek)
# NOTE(review): message typo — "Cannot see to" should read "Cannot seek to"
# (cannot be fixed here without touching the runtime string).
143                 raise RuntimeError("Cannot see to %d" % curseek)
# Load the white/black lists from settings and the previous run's state from
# BLOCKEDFILE into prevblocks (blocked IPs) and prevbulks (bulk pool).
# NOTE(review): this excerpt is missing the enclosing `def __init__` line,
# the `try:`/`for line in fd:` headers, the branch conditions on `typ`, and
# the `except` line before the pass — structure inferred, confirm upstream.
153         self.whitelist = set(WHITELIST.split()) if WHITELIST else set()
154         self.blacklist = set(BLACKLIST.split()) if BLACKLIST else set()
155         self.prevblocks = set()
156         self.prevbulks = set()
159             fd = open(BLOCKEDFILE)
# Each state line is "<ip> <state>".
161                 ip, typ = line.strip().split(' ')
# Manually blacklisted IPs are handled separately, skip them here.
162                 if ip not in self.blacklist:
164                         self.prevblocks.add(ip)
166                         self.prevbulks.add(ip)
169             pass #ignore non-existing file
# Account one log entry in the long-window counters.
# NOTE(review): the counter increments and branch bodies (original lines
# 182, 184-185, 187-188) are missing from this excerpt.
181     def add_long(self, logentry):
# API requests (entries with a recognised request type) counted separately.
183         if logentry.request is not None:
# Entries without a user agent are tracked as well (body not visible).
186         if logentry.ua is None:
# Account one log entry in the short-window counters, then also in the
# long-window counters (the short window is a suffix of the long window).
# NOTE(review): original line 192 (presumably incrementing short_api inside
# the if) is missing from this excerpt.
189     def add_short(self, logentry):
190         self.short_total += 1
191         if logentry.request is not None:
193         self.add_long(logentry)
# Decide the next state for this IP given its previous state.
# Returns one of None (no restriction), 'block', 'emblock', 'uablock',
# 'bulk' — per the write-out section below. NOTE(review): this excerpt is
# missing the branch headers (e.g. the `if was_blocked:` before line 199,
# the returns at 202-203, 206, 209-211, 213) — conditions are incomplete.
195     def new_state(self, was_blocked, was_bulked):
197             # deblock only if the IP has been really quiet
198             # (properly catches the ones that simply ignore the HTTP error)
199             return None if self.long_total < 5 else 'block'
# The short-window threshold is scaled down (window is shorter than 1/3).
200         if self.long_api > BLOCK_UPPER or self.short_api > BLOCK_UPPER / 3:
201             # client totally overdoing it
204         if self.short_total < 5:
205             # client has stopped, debulk
207         if self.long_api > BLOCK_LIMIT or self.short_api > BLOCK_LIMIT / 3:
208             # client is still hammering us, block
212         if self.long_api > BULKLONG_LIMIT or self.short_api > BULKSHORT_LIMIT:
214                 return 'uablock' # bad useragent
# Script entry point: parse args, scan the log window, adapt limits to load,
# compute per-IP states and rewrite the state file.
# NOTE(review): this excerpt is missing many interior lines (sys.exit calls,
# else branches, the second seek, counter increments, the `uablocked`/
# `emblocked`/... bookkeeping between the branches) — flow is incomplete.
221 if __name__ == '__main__':
222     if len(sys.argv) < 2:
223         print("Usage: %s logfile startdate" % sys.argv[0])
# Default window start: one block-cooloff period back from now.
226     if len(sys.argv) == 2:
227         dt = datetime.now() - BLOCKCOOLOFF_DELTA
229         dt = datetime.strptime(sys.argv[2], "%Y-%m-%d %H:%M:%S")
# Skip tiny logs. NOTE(review): `2*1030*1024` looks like a typo for
# 2*1024*1024 (cf. the 2 MiB probe in seek_to_date) — confirm intent.
231     if os.path.getsize(sys.argv[1]) < 2*1030*1024:
232         sys.exit(0) # not enough data
234     lf = LogFile(sys.argv[1])
235     if not lf.seek_to_date(dt):
# Start of the short (bulk) window inside the long (block) window.
240     shortstart = dt + BLOCKCOOLOFF_DELTA - BULKCOOLOFF_DELTA
241     notlogged = bl.whitelist | bl.blacklist
# Per-IP counters, created on first sight.
243     stats = defaultdict(IPstats)
# First pass: long window, up to shortstart.
245     for l in lf.loglines():
246         if l.ip not in notlogged:
247             stats[l.ip].add_long(l)
248         if l.date > shortstart:
# Second pass: short window; successful API requests feed the DB-load metric.
252     for l in lf.loglines():
253         if l.ip not in notlogged:
254             stats[l.ip].add_short(l)
255         if l.request is not None and l.retcode == 200:
258     # adapt limits according to CPU and DB load
# NOTE(review): handle is never closed; field [2] is presumably the 15-min
# load average — confirm against proc(5).
259     fd = open("/proc/loadavg")
260     cpuload = int(float(fd.readline().split()[2]))
# Successful requests per second over the short window.
262     dbload = total200 / BULKCOOLOFF_DELTA.total_seconds()
264     numbulks = len(bl.prevbulks)
# Loosen/tighten the module-level limits according to measured load.
265     BLOCK_LIMIT = max(BLOCK_LIMIT, BLOCK_UPPER - BLOCK_LOADFAC * (dbload - 75))
266     BULKLONG_LIMIT = max(BULK_LOWER, BULKLONG_LIMIT - BULK_LOADFAC * (cpuload - 14))
# Bulk pool overflowing: progressively lower the block threshold (floor 3600).
267     if numbulks > MAX_BULK_IPS:
268         BLOCK_LIMIT = max(3600, BLOCK_LOWER - (numbulks - MAX_BULK_IPS)*10)
269     # if the bulk pool is still empty, clients will be faster, avoid having
270     # them blocked in this case
272         BLOCK_LIMIT = 2*BLOCK_UPPER
275     # collecting statistics
282     # write out new state file
283     fd = open(BLOCKEDFILE, 'w')
284     for k,v in stats.items():
285         wasblocked = k in bl.prevblocks
286         wasbulked = k in bl.prevbulks
287         state = v.new_state(wasblocked, wasbulked)
# Branch bodies (statistics lists appended per state) not visible here.
288         if state is not None:
289             if state == 'uablock':
292             elif state == 'emblock':
295             elif state == 'block':
298             elif state == 'bulk':
# 'uablock' and 'emblock' are persisted as plain 'block'? NOTE(review): the
# normalisation lines are missing from this excerpt — confirm what `state`
# holds by the time it is written.
301             fd.write("%s %s\n" % (k, state))
307 for i in bl.blacklist:
308 fd.write("%s ban\n" % k)
# Append a summary of this run's transitions to the restricted-IP log.
# NOTE(review): the `if <list>:` guards before each write (original lines
# 314, 316, 318, 320, 322, 324) are missing from this excerpt, as is any
# close() of the handle — confirm against the full file.
311     # TODO write logs (need to collect some statistics)
# Shared "<timestamp> <label> <ips>" template for every line below.
312     logstr = datetime.now().strftime('%Y-%m-%d %H:%M') + ' %s %s\n'
313     fd = open(LOGFILE, 'a')
315         fd.write(logstr % ('unblocked:', ', '.join(unblocked)))
317         fd.write(logstr % (' debulked:', ', '.join(debulked)))
319         fd.write(logstr % ('new bulks:', ', '.join(bulked)))
321         fd.write(logstr % ('dir.block:', ', '.join(emblocked)))
323         fd.write(logstr % (' ua block:', ', '.join(uablocked)))
325         fd.write(logstr % ('new block:', ', '.join(blocked)))