From 35efca594136695f537760e417ab4255a00692c3 Mon Sep 17 00:00:00 2001 From: sebres Date: Wed, 22 Feb 2017 18:39:44 +0100 Subject: [PATCH] Better multi-line handling introduced: single-line parsing with caching of needed failure information to process in further lines. Many times faster and fewer CPU-hungry because of parsing with `maxlines=1`, so without line buffering (scrolling of the buffer-window). Combination of tags `` and `` can be used now to process multi-line logs using single-line expressions: - tag ``: used to identify resp. store failure info for groups of log-lines with the same identifier (e. g. combined failure-info for the same conn-id by `(?:conn-id)`, see sshd.conf for example) - tag ``: used as mark for no-failure (helper to accumulate common failure-info); filter.d/sshd.conf: [sshd], [sshd-ddos], [sshd-aggressive] optimized with pre-filtering using new option `prefregex` and new multi-line handling. --- config/filter.d/sshd.conf | 58 ++++---- fail2ban/server/failregex.py | 8 +- fail2ban/server/filter.py | 222 ++++++++++++++++++------------ fail2ban/server/ticket.py | 9 +- fail2ban/server/utils.py | 6 + fail2ban/tests/filtertestcase.py | 1 + fail2ban/tests/samplestestcase.py | 52 ++++--- 7 files changed, 214 insertions(+), 142 deletions(-) diff --git a/config/filter.d/sshd.conf b/config/filter.d/sshd.conf index 872a73e4..922ea193 100644 --- a/config/filter.d/sshd.conf +++ b/config/filter.d/sshd.conf @@ -24,37 +24,37 @@ __pref = (?:(?:error|fatal): (?:PAM: )?)? __suff = (?: \[preauth\])?\s* __on_port_opt = (?: port \d+)?(?: on \S+(?: port \d+)?)? 
-# single line prefix: -__prefix_line_sl = %(__prefix_line)s%(__pref)s -# multi line prefixes (for first and second lines): -__prefix_line_ml1 = (?P<__prefix>%(__prefix_line)s)%(__pref)s -__prefix_line_ml2 = %(__suff)s$^(?P=__prefix)%(__pref)s +prefregex = ^%(__prefix_line)s%(__pref)s.+$ mode = %(normal)s -normal = ^%(__prefix_line_sl)s[aA]uthentication (?:failure|error|failed) for .* from ( via \S+)?\s*%(__suff)s$ - ^%(__prefix_line_sl)sUser not known to the underlying authentication module for .* from \s*%(__suff)s$ - ^%(__prefix_line_sl)sFailed \S+ for (?Pinvalid user )?(?P\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+) from %(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$) - ^%(__prefix_line_sl)sROOT LOGIN REFUSED.* FROM \s*%(__suff)s$ - ^%(__prefix_line_sl)s[iI](?:llegal|nvalid) user .*? from %(__on_port_opt)s\s*$ - ^%(__prefix_line_sl)sUser .+ from not allowed because not listed in AllowUsers\s*%(__suff)s$ - ^%(__prefix_line_sl)sUser .+ from not allowed because listed in DenyUsers\s*%(__suff)s$ - ^%(__prefix_line_sl)sUser .+ from not allowed because not in any group\s*%(__suff)s$ - ^%(__prefix_line_sl)srefused connect from \S+ \(\)\s*%(__suff)s$ - ^%(__prefix_line_sl)sReceived disconnect from %(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$ - ^%(__prefix_line_sl)sUser .+ from not allowed because a group is listed in DenyGroups\s*%(__suff)s$ - ^%(__prefix_line_sl)sUser .+ from not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$ - ^%(__prefix_line_sl)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=\s.*%(__suff)s$ - ^%(__prefix_line_sl)s(error: )?maximum authentication attempts exceeded for .* from %(__on_port_opt)s(?: ssh\d*)? 
\[preauth\]$ - ^%(__prefix_line_ml1)sUser .+ not allowed because account is locked%(__prefix_line_ml2)sReceived disconnect from : 11: .+%(__suff)s$ - ^%(__prefix_line_ml1)sDisconnecting: Too many authentication failures for .+?%(__prefix_line_ml2)sConnection closed by %(__suff)s$ - ^%(__prefix_line_ml1)sConnection from %(__on_port_opt)s%(__prefix_line_ml2)sDisconnecting: Too many authentication failures for .+%(__suff)s$ +normal = ^[aA]uthentication (?:failure|error|failed) for .* from ( via \S+)?\s*%(__suff)s$ + ^User not known to the underlying authentication module for .* from \s*%(__suff)s$ + ^Failed \S+ for (?Pinvalid user )?(?P\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+) from %(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$) + ^ROOT LOGIN REFUSED.* FROM \s*%(__suff)s$ + ^[iI](?:llegal|nvalid) user .*? from %(__on_port_opt)s\s*$ + ^User .+ from not allowed because not listed in AllowUsers\s*%(__suff)s$ + ^User .+ from not allowed because listed in DenyUsers\s*%(__suff)s$ + ^User .+ from not allowed because not in any group\s*%(__suff)s$ + ^refused connect from \S+ \(\)\s*%(__suff)s$ + ^Received disconnect from %(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$ + ^User .+ from not allowed because a group is listed in DenyGroups\s*%(__suff)s$ + ^User .+ from not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$ + ^pam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=\s.*%(__suff)s$ + ^(error: )?maximum authentication attempts exceeded for .* from %(__on_port_opt)s(?: ssh\d*)? 
\[preauth\]$ + ^User .+ not allowed because account is locked%(__suff)s + ^Disconnecting: Too many authentication failures for .+?%(__suff)s + ^Received disconnect from : 11: + ^Connection closed by %(__suff)s$ -ddos = ^%(__prefix_line_sl)sDid not receive identification string from %(__suff)s$ - ^%(__prefix_line_sl)sReceived disconnect from %(__on_port_opt)s:\s*14: No supported authentication methods available%(__suff)s$ - ^%(__prefix_line_sl)sUnable to negotiate with %(__on_port_opt)s: no matching (?:cipher|key exchange method) found. - ^%(__prefix_line_ml1)sConnection from %(__on_port_opt)s%(__prefix_line_ml2)sUnable to negotiate a (?:cipher|key exchange method)%(__suff)s$ - ^%(__prefix_line_ml1)sSSH: Server;Ltype: (?:Authname|Version|Kex);Remote: -\d+;[A-Z]\w+:.*%(__prefix_line_ml2)sRead from socket failed: Connection reset by peer%(__suff)s$ +ddos = ^Did not receive identification string from %(__suff)s$ + ^Received disconnect from %(__on_port_opt)s:\s*14: No supported authentication methods available%(__suff)s$ + ^Unable to negotiate with %(__on_port_opt)s: no matching (?:cipher|key exchange method) found. 
+ ^Unable to negotiate a (?:cipher|key exchange method)%(__suff)s$ + ^SSH: Server;Ltype: (?:Authname|Version|Kex);Remote: -\d+;[A-Z]\w+: + ^Read from socket failed: Connection reset by peer \[preauth\] + +common = ^Connection from aggressive = %(normal)s %(ddos)s @@ -62,11 +62,11 @@ aggressive = %(normal)s [Definition] failregex = %(mode)s + %(common)s ignoreregex = -# "maxlines" is number of log lines to buffer for multi-line regex searches -maxlines = 10 +maxlines = 1 journalmatch = _SYSTEMD_UNIT=sshd.service + _COMM=sshd diff --git a/fail2ban/server/failregex.py b/fail2ban/server/failregex.py index 19a89303..59f59978 100644 --- a/fail2ban/server/failregex.py +++ b/fail2ban/server/failregex.py @@ -323,6 +323,10 @@ class RegexException(Exception): # FAILURE_ID_GROPS = ("fid", "ip4", "ip6", "dns") +# Additionally allows multi-line failure-id (used for wrapping e. g. conn-id to host) +# +FAILURE_ID_PRESENTS = FAILURE_ID_GROPS + ("mlfid",) + ## # Regular expression class. # @@ -341,9 +345,9 @@ class FailRegex(Regex): # Initializes the parent. 
Regex.__init__(self, regex, **kwargs) # Check for group "dns", "ip4", "ip6", "fid" - if (not [grp for grp in FAILURE_ID_GROPS if grp in self._regexObj.groupindex] + if (not [grp for grp in FAILURE_ID_PRESENTS if grp in self._regexObj.groupindex] and (prefRegex is None or - not [grp for grp in FAILURE_ID_GROPS if grp in prefRegex._regexObj.groupindex]) + not [grp for grp in FAILURE_ID_PRESENTS if grp in prefRegex._regexObj.groupindex]) ): raise RegexException("No failure-id group in '%s'" % self._regex) diff --git a/fail2ban/server/filter.py b/fail2ban/server/filter.py index 85c07103..6b782fcc 100644 --- a/fail2ban/server/filter.py +++ b/fail2ban/server/filter.py @@ -38,6 +38,7 @@ from .datedetector import DateDetector from .mytime import MyTime from .failregex import FailRegex, Regex, RegexException from .action import CommandAction +from .utils import Utils from ..helpers import getLogger, PREFER_ENC # Gets the instance of the logger. @@ -88,6 +89,8 @@ class Filter(JailThread): self.__ignoreCommand = False ## Default or preferred encoding (to decode bytes from file or journal): self.__encoding = PREFER_ENC + ## Cache temporary holds failures info (used by multi-line for wrapping e. g. 
conn-id to host): + self.__mlfidCache = None ## Error counter (protected, so can be used in filter implementations) ## if it reached 100 (at once), run-cycle will go idle self._errors = 0 @@ -101,7 +104,7 @@ class Filter(JailThread): self.ticks = 0 self.dateDetector = DateDetector() - logSys.debug("Created %s" % self) + logSys.debug("Created %s", self) def __repr__(self): return "%s(%r)" % (self.__class__.__name__, self.jail) @@ -131,6 +134,13 @@ class Filter(JailThread): self.delLogPath(path) delattr(self, '_reload_logs') + @property + def mlfidCache(self): + if self.__mlfidCache: + return self.__mlfidCache + self.__mlfidCache = Utils.Cache(maxCount=100, maxTime=5*60) + return self.__mlfidCache + @property def prefRegex(self): return self.__prefRegex @@ -170,7 +180,7 @@ class Filter(JailThread): del self.__failRegex[index] except IndexError: logSys.error("Cannot remove regular expression. Index %d is not " - "valid" % index) + "valid", index) ## # Get the regular expression which matches the failure. @@ -208,7 +218,7 @@ class Filter(JailThread): del self.__ignoreRegex[index] except IndexError: logSys.error("Cannot remove regular expression. Index %d is not " - "valid" % index) + "valid", index) ## # Get the regular expression which matches the failure. @@ -231,9 +241,9 @@ class Filter(JailThread): value = value.lower() # must be a string by now if not (value in ('yes', 'warn', 'no', 'raw')): logSys.error("Incorrect value %r specified for usedns. 
" - "Using safe 'no'" % (value,)) + "Using safe 'no'", value) value = 'no' - logSys.debug("Setting usedns = %s for %s" % (value, self)) + logSys.debug("Setting usedns = %s for %s", value, self) self.__useDns = value ## @@ -346,7 +356,7 @@ class Filter(JailThread): encoding = PREFER_ENC codecs.lookup(encoding) # Raise LookupError if invalid codec self.__encoding = encoding - logSys.info(" encoding: %s" % encoding) + logSys.info(" encoding: %s", encoding) return encoding ## @@ -391,7 +401,7 @@ class Filter(JailThread): if not isinstance(ip, IPAddr): ip = IPAddr(ip) if self.inIgnoreIPList(ip): - logSys.warning('Requested to manually ban an ignored IP %s. User knows best. Proceeding to ban it.' % ip) + logSys.warning('Requested to manually ban an ignored IP %s. User knows best. Proceeding to ban it.', ip) unixTime = MyTime.time() self.failManager.addFailure(FailTicket(ip, unixTime), self.failManager.getMaxRetry()) @@ -435,7 +445,7 @@ class Filter(JailThread): def logIgnoreIp(self, ip, log_ignore, ignore_source="unknown source"): if log_ignore: - logSys.info("[%s] Ignore %s by %s" % (self.jailName, ip, ignore_source)) + logSys.info("[%s] Ignore %s by %s", self.jailName, ip, ignore_source) def getIgnoreIP(self): return self.__ignoreIpList @@ -459,7 +469,7 @@ class Filter(JailThread): if self.__ignoreCommand: command = CommandAction.replaceTag(self.__ignoreCommand, { 'ip': ip } ) - logSys.debug('ignore command: ' + command) + logSys.debug('ignore command: %s', command) ret, ret_ignore = CommandAction.executeCmd(command, success_codes=(0, 1)) ret_ignore = ret and ret_ignore == 0 self.logIgnoreIp(ip, log_ignore and ret_ignore, ignore_source="command") @@ -498,10 +508,7 @@ class Filter(JailThread): for element in self.processLine(line, date): ip = element[1] unixTime = element[2] - lines = element[3] - fail = {} - if len(element) > 4: - fail = element[4] + fail = element[3] logSys.debug("Processing line with time:%s and ip:%s", unixTime, ip) if self.inIgnoreIPList(ip, 
log_ignore=True): @@ -509,7 +516,7 @@ class Filter(JailThread): logSys.info( "[%s] Found %s - %s", self.jailName, ip, datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S") ) - tick = FailTicket(ip, unixTime, lines, data=fail) + tick = FailTicket(ip, unixTime, data=fail) self.failManager.addFailure(tick) # reset (halve) error counter (successfully processed line): if self._errors: @@ -544,6 +551,29 @@ class Filter(JailThread): return ignoreRegexIndex return None + def _mergeFailure(self, mlfid, fail, failRegex): + mlfidFail = self.mlfidCache.get(mlfid) if self.__mlfidCache else None + if mlfidFail: + mlfidGroups = mlfidFail[1] + # if current line not failure, but previous was failure: + if fail.get('nofail') and not mlfidGroups.get('nofail'): + del fail['nofail'] # remove nofail flag - was already marked as failure + self.mlfidCache.unset(mlfid) # remove cache entry + # if current line is failure, but previous was not: + elif not fail.get('nofail') and mlfidGroups.get('nofail'): + del mlfidGroups['nofail'] # remove nofail flag + self.mlfidCache.unset(mlfid) # remove cache entry + fail2 = mlfidGroups.copy() + fail2.update(fail) + fail2["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines() + fail = fail2 + elif fail.get('nofail'): + fail["matches"] = failRegex.getMatchedTupleLines() + mlfidFail = [self.__lastDate, fail] + self.mlfidCache.set(mlfid, mlfidFail) + return fail + + ## # Finds the failure in a line given split into time and log parts. # @@ -618,76 +648,94 @@ class Filter(JailThread): # Iterates over all the regular expressions. for failRegexIndex, failRegex in enumerate(self.__failRegex): failRegex.search(self.__lineBuffer, orgBuffer) - if failRegex.hasMatched(): - # The failregex matched. - logSys.log(7, "Matched %s", failRegex) - # Checks if we must ignore this match. - if self.ignoreLine(failRegex.getMatchedTupleLines()) \ - is not None: - # The ignoreregex matched. Remove ignored match. 
- self.__lineBuffer = failRegex.getUnmatchedTupleLines() - logSys.log(7, "Matched ignoreregex and was ignored") - if not self.checkAllRegex: - break - else: - continue - if date is None: - logSys.warning( - "Found a match for %r but no valid date/time " - "found for %r. Please try setting a custom " - "date pattern (see man page jail.conf(5)). " - "If format is complex, please " - "file a detailed issue on" - " https://github.com/fail2ban/fail2ban/issues " - "in order to get support for this format." - % ("\n".join(failRegex.getMatchedLines()), timeText)) + if not failRegex.hasMatched(): + continue + # The failregex matched. + logSys.log(7, "Matched %s", failRegex) + # Checks if we must ignore this match. + if self.ignoreLine(failRegex.getMatchedTupleLines()) \ + is not None: + # The ignoreregex matched. Remove ignored match. + self.__lineBuffer = failRegex.getUnmatchedTupleLines() + logSys.log(7, "Matched ignoreregex and was ignored") + if not self.checkAllRegex: + break else: - self.__lineBuffer = failRegex.getUnmatchedTupleLines() - # retrieve failure-id, host, etc from failure match: - raw = returnRawHost - try: - if preGroups: - fail = preGroups.copy() - fail.update(failRegex.getGroups()) - else: - fail = failRegex.getGroups() - # failure-id: - fid = fail.get('fid') - # ip-address or host: - host = fail.get('ip4') - if host is not None: - cidr = IPAddr.FAM_IPv4 - raw = True - else: - host = fail.get('ip6') - if host is not None: - cidr = IPAddr.FAM_IPv6 - raw = True - if host is None: - host = fail.get('dns') - if host is None: + continue + if date is None: + logSys.warning( + "Found a match for %r but no valid date/time " + "found for %r. Please try setting a custom " + "date pattern (see man page jail.conf(5)). 
" + "If format is complex, please " + "file a detailed issue on" + " https://github.com/fail2ban/fail2ban/issues " + "in order to get support for this format.", + "\n".join(failRegex.getMatchedLines()), timeText) + continue + self.__lineBuffer = failRegex.getUnmatchedTupleLines() + # retrieve failure-id, host, etc from failure match: + try: + raw = returnRawHost + if preGroups: + fail = preGroups.copy() + fail.update(failRegex.getGroups()) + else: + fail = failRegex.getGroups() + # first try to check we have mlfid case (caching of connection id by multi-line): + mlfid = fail.get('mlfid') + if mlfid is not None: + fail = self._mergeFailure(mlfid, fail, failRegex) + else: + # matched lines: + fail["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines() + # failure-id: + fid = fail.get('fid') + # ip-address or host: + host = fail.get('ip4') + if host is not None: + cidr = IPAddr.FAM_IPv4 + raw = True + else: + host = fail.get('ip6') + if host is not None: + cidr = IPAddr.FAM_IPv6 + raw = True + if host is None: + host = fail.get('dns') + if host is None: + # first try to check we have mlfid case (cache connection id): + if fid is None: + if mlfid: + fail = self._mergeFailure(mlfid, fail, failRegex) + else: # if no failure-id also (obscure case, wrong regex), throw error inside getFailID: - if fid is None: - fid = failRegex.getFailID() - host = fid - cidr = IPAddr.CIDR_RAW - # if raw - add single ip or failure-id, - # otherwise expand host to multiple ips using dns (or ignore it if not valid): - if raw: - ip = IPAddr(host, cidr) - # check host equal failure-id, if not - failure with complex id: - if fid is not None and fid != host: - ip = IPAddr(fid, IPAddr.CIDR_RAW) - ips = [ip] - else: - ips = DNSUtils.textToIp(host, self.__useDns) - for ip in ips: - failList.append([failRegexIndex, ip, date, - failRegex.getMatchedLines(), fail]) - if not self.checkAllRegex: - break - except RegexException as e: # pragma: no cover - unsure if reachable - 
logSys.error(e) + fid = failRegex.getFailID() + host = fid + cidr = IPAddr.CIDR_RAW + # if mlfid case (not failure): + if host is None: + if not self.checkAllRegex: # or fail.get('nofail'): + return failList + ips = [None] + # if raw - add single ip or failure-id, + # otherwise expand host to multiple ips using dns (or ignore it if not valid): + elif raw: + ip = IPAddr(host, cidr) + # check host equal failure-id, if not - failure with complex id: + if fid is not None and fid != host: + ip = IPAddr(fid, IPAddr.CIDR_RAW) + ips = [ip] + # otherwise, try to use dns conversion: + else: + ips = DNSUtils.textToIp(host, self.__useDns) + # append failure with match to the list: + for ip in ips: + failList.append([failRegexIndex, ip, date, fail]) + if not self.checkAllRegex: + break + except RegexException as e: # pragma: no cover - unsure if reachable + logSys.error(e) return failList def status(self, flavor="basic"): @@ -751,7 +799,7 @@ class FileFilter(Filter): db = self.jail.database if db is not None: db.updateLog(self.jail, log) - logSys.info("Removed logfile: %r" % path) + logSys.info("Removed logfile: %r", path) self._delLogPath(path) return @@ -816,7 +864,7 @@ class FileFilter(Filter): def getFailures(self, filename): log = self.getLog(filename) if log is None: - logSys.error("Unable to get failures in " + filename) + logSys.error("Unable to get failures in %s", filename) return False # We should always close log (file), otherwise may be locked (log-rotate, etc.) 
try: @@ -825,11 +873,11 @@ class FileFilter(Filter): has_content = log.open() # see http://python.org/dev/peps/pep-3151/ except IOError as e: - logSys.error("Unable to open %s" % filename) + logSys.error("Unable to open %s", filename) logSys.exception(e) return False except OSError as e: # pragma: no cover - requires race condition to tigger this - logSys.error("Error opening %s" % filename) + logSys.error("Error opening %s", filename) logSys.exception(e) return False except Exception as e: # pragma: no cover - Requires implemention error in FileContainer to generate @@ -1050,7 +1098,7 @@ class FileContainer: ## sys.stdout.flush() # Compare hash and inode if self.__hash != myHash or self.__ino != stats.st_ino: - logSys.info("Log rotation detected for %s" % self.__filename) + logSys.info("Log rotation detected for %s", self.__filename) self.__hash = myHash self.__ino = stats.st_ino self.__pos = 0 diff --git a/fail2ban/server/ticket.py b/fail2ban/server/ticket.py index a66b64ac..be205303 100644 --- a/fail2ban/server/ticket.py +++ b/fail2ban/server/ticket.py @@ -138,7 +138,8 @@ class Ticket(object): self._data['matches'] = matches or [] def getMatches(self): - return self._data.get('matches', []) + return [(line if isinstance(line, basestring) else "".join(line)) \ + for line in self._data.get('matches', ())] @property def restored(self): @@ -235,7 +236,11 @@ class FailTicket(Ticket): self.__retry += count self._data['failures'] += attempt if matches: - self._data['matches'] += matches + # we should duplicate "matches", because possibly referenced to multiple tickets: + if self._data['matches']: + self._data['matches'] = self._data['matches'] + matches + else: + self._data['matches'] = matches def setLastTime(self, value): if value > self._time: diff --git a/fail2ban/server/utils.py b/fail2ban/server/utils.py index 57da495a..b258ae77 100644 --- a/fail2ban/server/utils.py +++ b/fail2ban/server/utils.py @@ -98,6 +98,12 @@ class Utils(): cache.popitem() cache[k] = (v, t 
+ self.maxTime) + def unset(self, k): + try: + del self._cache[k] + except KeyError: # pragma: no cover + pass + @staticmethod def setFBlockMode(fhandle, value): diff --git a/fail2ban/tests/filtertestcase.py b/fail2ban/tests/filtertestcase.py index 174152b9..8558f8d3 100644 --- a/fail2ban/tests/filtertestcase.py +++ b/fail2ban/tests/filtertestcase.py @@ -1431,6 +1431,7 @@ class GetFailures(LogCaptureTestCase): ('no', output_no), ('warn', output_yes) ): + self.pruneLog("[test-phase useDns=%s]" % useDns) jail = DummyJail() filter_ = FileFilter(jail, useDns=useDns) filter_.active = True diff --git a/fail2ban/tests/samplestestcase.py b/fail2ban/tests/samplestestcase.py index a97e92c3..b59ccd98 100644 --- a/fail2ban/tests/samplestestcase.py +++ b/fail2ban/tests/samplestestcase.py @@ -150,29 +150,34 @@ def testSampleRegexsFactory(name, basedir): else: faildata = {} - ret = self.filter.processLine(line) - if not ret: - # Check line is flagged as none match - self.assertFalse(faildata.get('match', True), - "Line not matched when should have: %s:%i, line:\n%s" % - (logFile.filename(), logFile.filelineno(), line)) - elif ret: - # Check line is flagged to match - self.assertTrue(faildata.get('match', False), - "Line matched when shouldn't have: %s:%i, line:\n%s" % - (logFile.filename(), logFile.filelineno(), line)) - self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" % - (map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno())) + try: + ret = self.filter.processLine(line) + if not ret: + # Check line is flagged as none match + self.assertFalse(faildata.get('match', True), + "Line not matched when should have") + continue - # Verify timestamp and host as expected - failregex, host, fail2banTime, lines, fail = ret[0] - self.assertEqual(host, faildata.get("host", None)) - # Verify other captures: + failregex, fid, fail2banTime, fail = ret[0] + # Bypass no failure helpers-regexp: + if not faildata.get('match', False) and (fid is None or 
fail.get('nofail')): + regexsUsed.add(failregex) + continue + + # Check line is flagged to match + self.assertTrue(faildata.get('match', False), + "Line matched when shouldn't have") + self.assertEqual(len(ret), 1, + "Multiple regexs matched %r" % (map(lambda x: x[0], ret))) + + # Fallback for backwards compatibility (previously no fid, was host only): + if faildata.get("host", None) is not None and fail.get("host", None) is None: + fail["host"] = fid + # Verify match captures (at least fid/host) and timestamp as expected for k, v in faildata.iteritems(): - if k not in ("time", "match", "host", "desc"): + if k not in ("time", "match", "desc"): fv = fail.get(k, None) - self.assertEqual(fv, v, "Value of %s mismatch %r != %r on: %s:%i, line:\n%s" % ( - k, fv, v, logFile.filename(), logFile.filelineno(), line)) + self.assertEqual(fv, v) t = faildata.get("time", None) try: @@ -185,12 +190,15 @@ def testSampleRegexsFactory(name, basedir): jsonTime += jsonTimeLocal.microsecond / 1000000 self.assertEqual(fail2banTime, jsonTime, - "UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds) on: %s:%i, line:\n%s" % + "UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" % (fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)), jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)), - fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) ) + fail2banTime - jsonTime) ) regexsUsed.add(failregex) + except AssertionError as e: # pragma: no cover + raise AssertionError("%s on: %s:%i, line:\n%s" % ( + e, logFile.filename(), logFile.filelineno(), line)) for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()): self.assertTrue(