mirror of https://github.com/fail2ban/fail2ban
code review, fix simplest TZ issue - avoid adjusting a date without a year to last year when it only appears to be in the future because of a wrong time zone (tolerate offsets of up to +24 hours)
parent 3ca69c8c0a
commit 7e8d98c4ed
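In short: a timestamp parsed from a log line that carries no year (and often no time zone) can land slightly in the future relative to the server clock. The old heuristic then assumed the entry belonged to last year, so a line stamped e.g. 'Aug 14 12:51:04' written by a host whose clock or zone runs a couple of hours ahead was pushed back a whole year. After this change the last-year assumption only triggers when the parsed date is more than one day ahead, and any remaining future time is clamped to "now" before a ticket is created. The hunks below touch the filter itself, the polling backend, the strptime helper, a courier-smtp sample log and the corresponding test cases.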
@@ -633,6 +633,9 @@ class Filter(JailThread):
 				fail = element[3]
 				logSys.debug("Processing line with time:%s and ip:%s",
 						unixTime, ip)
+				# ensure the time is not in the future, e. g. by some estimated (assumed) time:
+				if self.checkFindTime and unixTime > MyTime.time():
+					unixTime = MyTime.time()
 				tick = FailTicket(ip, unixTime, data=fail)
 				if self._inIgnoreIPList(ip, tick):
 					continue
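The guard added above keeps an estimated (assumed) timestamp from lying in the future, which would otherwise distort the findtime window. A minimal standalone sketch of the same idea, using time.time() in place of fail2ban's MyTime helper (the names here are illustrative, not fail2ban's API):

    import time

    def clamp_future_time(unix_time, check_find_time=True):
        # A parsed timestamp may be an estimate (e.g. a date without a year);
        # if it ends up ahead of the wall clock, fall back to "now".
        now = time.time()
        if check_find_time and unix_time > now:
            return now
        return unix_time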
@@ -936,7 +939,7 @@ class FileFilter(Filter):
 			log.setPos(lastpos)
 		self.__logs[path] = log
 		logSys.info("Added logfile: %r (pos = %s, hash = %s)" , path, log.getPos(), log.getHash())
-		if autoSeek:
+		if autoSeek and not tail:
 			self.__autoSeek[path] = autoSeek
 		self._addLogPath(path) # backend specific

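The addLogPath change above skips the stored auto-seek target when the log is opened in tail mode: a tailed FileContainer starts reading at the end of the file (cf. the tail parameter of FileContainer.__init__ in the next hunk), so additionally seeking to a date or position would be contradictory.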
@@ -1206,7 +1209,7 @@ except ImportError: # pragma: no cover

 class FileContainer:

-	def __init__(self, filename, encoding, tail = False):
+	def __init__(self, filename, encoding, tail=False):
 		self.__filename = filename
 		self.setEncoding(encoding)
 		self.__tail = tail
@@ -111,6 +111,8 @@ class FilterPoll(FileFilter):
 			modlst = []
 			Utils.wait_for(lambda: not self.active or self.getModified(modlst),
 				self.sleeptime)
+			if not self.active: # pragma: no cover - timing
+				break
 			for filename in modlst:
 				self.getFailures(filename)
 				self.__modified = True
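The added early break lets the polling loop stop as soon as the filter is deactivated instead of still processing the list of modified files; it is excluded from coverage ('# pragma: no cover - timing') because whether it is reached depends on shutdown timing.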
@@ -140,7 +142,7 @@ class FilterPoll(FileFilter):
 		try:
 			logStats = os.stat(filename)
 			stats = logStats.st_mtime, logStats.st_ino, logStats.st_size
-			pstats = self.__prevStats.get(filename, (0))
+			pstats = self.__prevStats.get(filename, (0,))
 			if logSys.getEffectiveLevel() <= 4:
 				# we do not want to waste time on strftime etc if not necessary
 				dt = logStats.st_mtime - pstats[0]
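The one-character fix above corrects a classic Python pitfall: (0) is just the integer 0 in parentheses, not a tuple, so indexing the default with pstats[0] would raise a TypeError. A quick illustration:

    prev = (0)
    print(type(prev))   # <class 'int'> -- prev[0] raises TypeError
    prev = (0,)
    print(type(prev))   # <class 'tuple'>
    print(prev[0])      # 0 -- a safe default when no previous stats exist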
@@ -291,9 +291,8 @@ def reGroupDictStrptime(found_dict, msec=False, default_tz=None):
 			date_result -= datetime.timedelta(days=1)
 	if assume_year:
 		if not now: now = MyTime.now()
-		if date_result > now:
-			# Could be last year?
-			# also reset month and day as it's not yesterday...
+		if date_result > now + datetime.timedelta(days=1): # ignore by timezone issues (+24h)
+			# assume last year - also reset month and day as it's not yesterday...
 			date_result = date_result.replace(
 				year=year-1, month=month, day=day)

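A self-contained sketch of the adjusted year heuristic (simplified: the real reGroupDictStrptime also resets month and day that may have been shifted by the "yesterday" handling above; the names here are illustrative):

    from datetime import datetime, timedelta

    def assume_year(parsed, now=None):
        # A date parsed without a year that appears to lie in the future is
        # assumed to belong to last year -- but only if it is more than one
        # day ahead, so that a timestamp that merely looks "future" because
        # of a wrong or unknown time zone (an offset of at most +24 hours)
        # is left untouched.
        if now is None:
            now = datetime.now()
        if parsed > now + timedelta(days=1):
            parsed = parsed.replace(year=parsed.year - 1)
        return parsed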
@@ -8,9 +8,9 @@ Jul 4 18:39:39 mail courieresmtpd: error,relay=::ffff:1.2.3.4,from=<picaro@astr
 Jul 6 03:42:28 whistler courieresmtpd: error,relay=::ffff:1.2.3.4,from=<>,to=<admin at memcpy>: 550 User unknown.
 # failJSON: { "time": "2004-11-21T23:16:17", "match": true , "host": "1.2.3.4" }
 Nov 21 23:16:17 server courieresmtpd: error,relay=::ffff:1.2.3.4,from=<>,to=<>: 550 User unknown.
-# failJSON: { "time": "2004-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
+# failJSON: { "time": "2005-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
 Aug 14 12:51:04 HOSTNAME courieresmtpd: error,relay=::ffff:1.2.3.4,from=<firozquarl@aclunc.org>,to=<BOGUSUSER@HOSTEDDOMAIN.org>: 550 User unknown.
-# failJSON: { "time": "2004-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
+# failJSON: { "time": "2005-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
 Aug 14 12:51:04 mail.server courieresmtpd[26762]: error,relay=::ffff:1.2.3.4,msg="535 Authentication failed.",cmd: AUTH PLAIN AAAAABBBBCCCCWxlZA== admin
-# failJSON: { "time": "2004-08-14T12:51:05", "match": true , "host": "192.0.2.3" }
+# failJSON: { "time": "2005-08-14T12:51:05", "match": true , "host": "192.0.2.3" }
 Aug 14 12:51:05 mail.server courieresmtpd[425070]: error,relay=::ffff:192.0.2.3,port=43632,msg="535 Authentication failed.",cmd: AUTH LOGIN PlcmSpIp@example.com
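The unix timestamps expected for this log in the tests below (e.g. 1124013600.0) fall on 2005-08-14, so the year in these failJSON annotations is corrected from 2004 to 2005 to keep the annotated times consistent with what the tests check.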
@@ -1606,7 +1606,7 @@ class GetFailures(LogCaptureTestCase):
 		_assert_correct_last_attempt(self, self.filter, output)

 	def testGetFailures03(self):
-		output = ('203.162.223.135', 7, 1124013544.0)
+		output = ('203.162.223.135', 9, 1124013600.0)

 		self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=0)
 		self.filter.addFailRegex(r"error,relay=<HOST>,.*550 User unknown")
@@ -1615,7 +1615,7 @@ class GetFailures(LogCaptureTestCase):

 	def testGetFailures03_Seek1(self):
 		# same test as above but with seek to 'Aug 14 11:55:04' - so other output ...
-		output = ('203.162.223.135', 5, 1124013544.0)
+		output = ('203.162.223.135', 3, 1124013600.0)

 		self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2] - 4*60)
 		self.filter.addFailRegex(r"error,relay=<HOST>,.*550 User unknown")
@@ -1624,7 +1624,7 @@ class GetFailures(LogCaptureTestCase):

 	def testGetFailures03_Seek2(self):
 		# same test as above but with seek to 'Aug 14 11:59:04' - so other output ...
-		output = ('203.162.223.135', 1, 1124013544.0)
+		output = ('203.162.223.135', 2, 1124013600.0)
 		self.filter.setMaxRetry(1)

 		self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2])
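The expected results of the three testGetFailures03 variants change accordingly: with the sample log now annotated for 2005 and the revised time handling, both the number of counted attempts and the time of the last attempt (1124013600.0 instead of 1124013544.0) differ from the previous expectations.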
@@ -1652,6 +1652,7 @@ class GetFailures(LogCaptureTestCase):
 		_assert_correct_last_attempt(self, self.filter, output)

 	def testGetFailuresWrongChar(self):
+		self.filter.checkFindTime = False
 		# write wrong utf-8 char:
 		fname = tempfile.mktemp(prefix='tmp_fail2ban', suffix='crlf')
 		fout = fopen(fname, 'wb')
@@ -1672,6 +1673,7 @@ class GetFailures(LogCaptureTestCase):
 		for enc in (None, 'utf-8', 'ascii'):
 			if enc is not None:
 				self.tearDown();self.setUp();
+			self.filter.checkFindTime = False;
 			self.filter.setLogEncoding(enc);
 			# speedup search using exact date pattern:
 			self.filter.setDatePattern(r'^%ExY-%Exm-%Exd %ExH:%ExM:%ExS')
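Both test tweaks above set checkFindTime to False so that time-window checks tied to the real clock (such as the new future-time guard in the first hunk) cannot influence these tests, which work with fixed timestamps from prepared log data.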