Better multi-line handling introduced: single-line parsing with caching of the needed failure information, to be processed in subsequent lines.

Many times faster and less CPU-hungry, because parsing occurs with `maxlines=1`, i.e. without line buffering (scrolling of the buffer window).
Combination of tags `<F-MLFID>` and `<F-NOFAIL>` can be used now to process multi-line logs using single-line expressions:
- tag `<F-MLFID>`: used to identify and store failure info for groups of log lines sharing the same identifier (e.g. combined failure info for the same conn-id via `<F-MLFID>(?:conn-id)</F-MLFID>`; see sshd.conf for an example)
- tag `<F-NOFAIL>`: used as a marker for no-failure lines (a helper to accumulate common failure info);
filter.d/sshd.conf: [sshd], [sshd-ddos], [sshd-aggressive] optimized with pre-filtering using new option `prefregex` and new multi-line handling.
pull/1698/head
sebres 2017-02-22 18:39:44 +01:00
parent 8bcaeb9022
commit 35efca5941
7 changed files with 214 additions and 142 deletions

View File

@ -24,37 +24,37 @@ __pref = (?:(?:error|fatal): (?:PAM: )?)?
__suff = (?: \[preauth\])?\s* __suff = (?: \[preauth\])?\s*
__on_port_opt = (?: port \d+)?(?: on \S+(?: port \d+)?)? __on_port_opt = (?: port \d+)?(?: on \S+(?: port \d+)?)?
# single line prefix: prefregex = ^<F-MLFID>%(__prefix_line)s</F-MLFID>%(__pref)s<F-CONTENT>.+</F-CONTENT>$
__prefix_line_sl = %(__prefix_line)s%(__pref)s
# multi line prefixes (for first and second lines):
__prefix_line_ml1 = (?P<__prefix>%(__prefix_line)s)%(__pref)s
__prefix_line_ml2 = %(__suff)s$<SKIPLINES>^(?P=__prefix)%(__pref)s
mode = %(normal)s mode = %(normal)s
normal = ^%(__prefix_line_sl)s[aA]uthentication (?:failure|error|failed) for <F-USER>.*</F-USER> from <HOST>( via \S+)?\s*%(__suff)s$ normal = ^[aA]uthentication (?:failure|error|failed) for <F-USER>.*</F-USER> from <HOST>( via \S+)?\s*%(__suff)s$
^%(__prefix_line_sl)sUser not known to the underlying authentication module for <F-USER>.*</F-USER> from <HOST>\s*%(__suff)s$ ^User not known to the underlying authentication module for <F-USER>.*</F-USER> from <HOST>\s*%(__suff)s$
^%(__prefix_line_sl)sFailed \S+ for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$) ^Failed \S+ for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line_sl)s<F-USER>ROOT</F-USER> LOGIN REFUSED.* FROM <HOST>\s*%(__suff)s$ ^<F-USER>ROOT</F-USER> LOGIN REFUSED.* FROM <HOST>\s*%(__suff)s$
^%(__prefix_line_sl)s[iI](?:llegal|nvalid) user <F-USER>.*?</F-USER> from <HOST>%(__on_port_opt)s\s*$ ^[iI](?:llegal|nvalid) user <F-USER>.*?</F-USER> from <HOST>%(__on_port_opt)s\s*$
^%(__prefix_line_sl)sUser <F-USER>.+</F-USER> from <HOST> not allowed because not listed in AllowUsers\s*%(__suff)s$ ^User <F-USER>.+</F-USER> from <HOST> not allowed because not listed in AllowUsers\s*%(__suff)s$
^%(__prefix_line_sl)sUser <F-USER>.+</F-USER> from <HOST> not allowed because listed in DenyUsers\s*%(__suff)s$ ^User <F-USER>.+</F-USER> from <HOST> not allowed because listed in DenyUsers\s*%(__suff)s$
^%(__prefix_line_sl)sUser <F-USER>.+</F-USER> from <HOST> not allowed because not in any group\s*%(__suff)s$ ^User <F-USER>.+</F-USER> from <HOST> not allowed because not in any group\s*%(__suff)s$
^%(__prefix_line_sl)srefused connect from \S+ \(<HOST>\)\s*%(__suff)s$ ^refused connect from \S+ \(<HOST>\)\s*%(__suff)s$
^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$ ^Received disconnect from <HOST>%(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$
^%(__prefix_line_sl)sUser <F-USER>.+</F-USER> from <HOST> not allowed because a group is listed in DenyGroups\s*%(__suff)s$ ^User <F-USER>.+</F-USER> from <HOST> not allowed because a group is listed in DenyGroups\s*%(__suff)s$
^%(__prefix_line_sl)sUser <F-USER>.+</F-USER> from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$ ^User <F-USER>.+</F-USER> from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$
^%(__prefix_line_sl)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=<F-USER>\S*</F-USER>\s*rhost=<HOST>\s.*%(__suff)s$ ^pam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=<F-USER>\S*</F-USER>\s*rhost=<HOST>\s.*%(__suff)s$
^%(__prefix_line_sl)s(error: )?maximum authentication attempts exceeded for <F-USER>.*</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)? \[preauth\]$ ^(error: )?maximum authentication attempts exceeded for <F-USER>.*</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)? \[preauth\]$
^%(__prefix_line_ml1)sUser <F-USER>.+</F-USER> not allowed because account is locked%(__prefix_line_ml2)sReceived disconnect from <HOST>: 11: .+%(__suff)s$ ^User <F-USER>.+</F-USER> not allowed because account is locked%(__suff)s
^%(__prefix_line_ml1)sDisconnecting: Too many authentication failures for <F-USER>.+?</F-USER>%(__prefix_line_ml2)sConnection closed by <HOST>%(__suff)s$ ^Disconnecting: Too many authentication failures for <F-USER>.+?</F-USER>%(__suff)s
^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sDisconnecting: Too many authentication failures for <F-USER>.+</F-USER>%(__suff)s$ ^<F-NOFAIL>Received disconnect</F-NOFAIL> from <HOST>: 11:
^<F-NOFAIL>Connection closed</F-NOFAIL> by <HOST>%(__suff)s$
ddos = ^%(__prefix_line_sl)sDid not receive identification string from <HOST>%(__suff)s$ ddos = ^Did not receive identification string from <HOST>%(__suff)s$
^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s:\s*14: No supported authentication methods available%(__suff)s$ ^Received disconnect from <HOST>%(__on_port_opt)s:\s*14: No supported authentication methods available%(__suff)s$
^%(__prefix_line_sl)sUnable to negotiate with <HOST>%(__on_port_opt)s: no matching (?:cipher|key exchange method) found. ^Unable to negotiate with <HOST>%(__on_port_opt)s: no matching (?:cipher|key exchange method) found.
^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sUnable to negotiate a (?:cipher|key exchange method)%(__suff)s$ ^Unable to negotiate a (?:cipher|key exchange method)%(__suff)s$
^%(__prefix_line_ml1)sSSH: Server;Ltype: (?:Authname|Version|Kex);Remote: <HOST>-\d+;[A-Z]\w+:.*%(__prefix_line_ml2)sRead from socket failed: Connection reset by peer%(__suff)s$ ^<F-NOFAIL>SSH: Server;Ltype:</F-NOFAIL> (?:Authname|Version|Kex);Remote: <HOST>-\d+;[A-Z]\w+:
^Read from socket failed: Connection reset by peer \[preauth\]
common = ^<F-NOFAIL>Connection from</F-NOFAIL> <HOST>
aggressive = %(normal)s aggressive = %(normal)s
%(ddos)s %(ddos)s
@ -62,11 +62,11 @@ aggressive = %(normal)s
[Definition] [Definition]
failregex = %(mode)s failregex = %(mode)s
%(common)s
ignoreregex = ignoreregex =
# "maxlines" is number of log lines to buffer for multi-line regex searches maxlines = 1
maxlines = 10
journalmatch = _SYSTEMD_UNIT=sshd.service + _COMM=sshd journalmatch = _SYSTEMD_UNIT=sshd.service + _COMM=sshd

View File

@ -323,6 +323,10 @@ class RegexException(Exception):
# #
FAILURE_ID_GROPS = ("fid", "ip4", "ip6", "dns") FAILURE_ID_GROPS = ("fid", "ip4", "ip6", "dns")
# Additionally allows multi-line failure-id (used for wrapping e. g. conn-id to host)
#
FAILURE_ID_PRESENTS = FAILURE_ID_GROPS + ("mlfid",)
## ##
# Regular expression class. # Regular expression class.
# #
@ -341,9 +345,9 @@ class FailRegex(Regex):
# Initializes the parent. # Initializes the parent.
Regex.__init__(self, regex, **kwargs) Regex.__init__(self, regex, **kwargs)
# Check for group "dns", "ip4", "ip6", "fid" # Check for group "dns", "ip4", "ip6", "fid"
if (not [grp for grp in FAILURE_ID_GROPS if grp in self._regexObj.groupindex] if (not [grp for grp in FAILURE_ID_PRESENTS if grp in self._regexObj.groupindex]
and (prefRegex is None or and (prefRegex is None or
not [grp for grp in FAILURE_ID_GROPS if grp in prefRegex._regexObj.groupindex]) not [grp for grp in FAILURE_ID_PRESENTS if grp in prefRegex._regexObj.groupindex])
): ):
raise RegexException("No failure-id group in '%s'" % self._regex) raise RegexException("No failure-id group in '%s'" % self._regex)

View File

@ -38,6 +38,7 @@ from .datedetector import DateDetector
from .mytime import MyTime from .mytime import MyTime
from .failregex import FailRegex, Regex, RegexException from .failregex import FailRegex, Regex, RegexException
from .action import CommandAction from .action import CommandAction
from .utils import Utils
from ..helpers import getLogger, PREFER_ENC from ..helpers import getLogger, PREFER_ENC
# Gets the instance of the logger. # Gets the instance of the logger.
@ -88,6 +89,8 @@ class Filter(JailThread):
self.__ignoreCommand = False self.__ignoreCommand = False
## Default or preferred encoding (to decode bytes from file or journal): ## Default or preferred encoding (to decode bytes from file or journal):
self.__encoding = PREFER_ENC self.__encoding = PREFER_ENC
## Cache temporary holds failures info (used by multi-line for wrapping e. g. conn-id to host):
self.__mlfidCache = None
## Error counter (protected, so can be used in filter implementations) ## Error counter (protected, so can be used in filter implementations)
## if it reached 100 (at once), run-cycle will go idle ## if it reached 100 (at once), run-cycle will go idle
self._errors = 0 self._errors = 0
@ -101,7 +104,7 @@ class Filter(JailThread):
self.ticks = 0 self.ticks = 0
self.dateDetector = DateDetector() self.dateDetector = DateDetector()
logSys.debug("Created %s" % self) logSys.debug("Created %s", self)
def __repr__(self): def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.jail) return "%s(%r)" % (self.__class__.__name__, self.jail)
@ -131,6 +134,13 @@ class Filter(JailThread):
self.delLogPath(path) self.delLogPath(path)
delattr(self, '_reload_logs') delattr(self, '_reload_logs')
@property
def mlfidCache(self):
if self.__mlfidCache:
return self.__mlfidCache
self.__mlfidCache = Utils.Cache(maxCount=100, maxTime=5*60)
return self.__mlfidCache
@property @property
def prefRegex(self): def prefRegex(self):
return self.__prefRegex return self.__prefRegex
@ -170,7 +180,7 @@ class Filter(JailThread):
del self.__failRegex[index] del self.__failRegex[index]
except IndexError: except IndexError:
logSys.error("Cannot remove regular expression. Index %d is not " logSys.error("Cannot remove regular expression. Index %d is not "
"valid" % index) "valid", index)
## ##
# Get the regular expression which matches the failure. # Get the regular expression which matches the failure.
@ -208,7 +218,7 @@ class Filter(JailThread):
del self.__ignoreRegex[index] del self.__ignoreRegex[index]
except IndexError: except IndexError:
logSys.error("Cannot remove regular expression. Index %d is not " logSys.error("Cannot remove regular expression. Index %d is not "
"valid" % index) "valid", index)
## ##
# Get the regular expression which matches the failure. # Get the regular expression which matches the failure.
@ -231,9 +241,9 @@ class Filter(JailThread):
value = value.lower() # must be a string by now value = value.lower() # must be a string by now
if not (value in ('yes', 'warn', 'no', 'raw')): if not (value in ('yes', 'warn', 'no', 'raw')):
logSys.error("Incorrect value %r specified for usedns. " logSys.error("Incorrect value %r specified for usedns. "
"Using safe 'no'" % (value,)) "Using safe 'no'", value)
value = 'no' value = 'no'
logSys.debug("Setting usedns = %s for %s" % (value, self)) logSys.debug("Setting usedns = %s for %s", value, self)
self.__useDns = value self.__useDns = value
## ##
@ -346,7 +356,7 @@ class Filter(JailThread):
encoding = PREFER_ENC encoding = PREFER_ENC
codecs.lookup(encoding) # Raise LookupError if invalid codec codecs.lookup(encoding) # Raise LookupError if invalid codec
self.__encoding = encoding self.__encoding = encoding
logSys.info(" encoding: %s" % encoding) logSys.info(" encoding: %s", encoding)
return encoding return encoding
## ##
@ -391,7 +401,7 @@ class Filter(JailThread):
if not isinstance(ip, IPAddr): if not isinstance(ip, IPAddr):
ip = IPAddr(ip) ip = IPAddr(ip)
if self.inIgnoreIPList(ip): if self.inIgnoreIPList(ip):
logSys.warning('Requested to manually ban an ignored IP %s. User knows best. Proceeding to ban it.' % ip) logSys.warning('Requested to manually ban an ignored IP %s. User knows best. Proceeding to ban it.', ip)
unixTime = MyTime.time() unixTime = MyTime.time()
self.failManager.addFailure(FailTicket(ip, unixTime), self.failManager.getMaxRetry()) self.failManager.addFailure(FailTicket(ip, unixTime), self.failManager.getMaxRetry())
@ -435,7 +445,7 @@ class Filter(JailThread):
def logIgnoreIp(self, ip, log_ignore, ignore_source="unknown source"): def logIgnoreIp(self, ip, log_ignore, ignore_source="unknown source"):
if log_ignore: if log_ignore:
logSys.info("[%s] Ignore %s by %s" % (self.jailName, ip, ignore_source)) logSys.info("[%s] Ignore %s by %s", self.jailName, ip, ignore_source)
def getIgnoreIP(self): def getIgnoreIP(self):
return self.__ignoreIpList return self.__ignoreIpList
@ -459,7 +469,7 @@ class Filter(JailThread):
if self.__ignoreCommand: if self.__ignoreCommand:
command = CommandAction.replaceTag(self.__ignoreCommand, { 'ip': ip } ) command = CommandAction.replaceTag(self.__ignoreCommand, { 'ip': ip } )
logSys.debug('ignore command: ' + command) logSys.debug('ignore command: %s', command)
ret, ret_ignore = CommandAction.executeCmd(command, success_codes=(0, 1)) ret, ret_ignore = CommandAction.executeCmd(command, success_codes=(0, 1))
ret_ignore = ret and ret_ignore == 0 ret_ignore = ret and ret_ignore == 0
self.logIgnoreIp(ip, log_ignore and ret_ignore, ignore_source="command") self.logIgnoreIp(ip, log_ignore and ret_ignore, ignore_source="command")
@ -498,10 +508,7 @@ class Filter(JailThread):
for element in self.processLine(line, date): for element in self.processLine(line, date):
ip = element[1] ip = element[1]
unixTime = element[2] unixTime = element[2]
lines = element[3] fail = element[3]
fail = {}
if len(element) > 4:
fail = element[4]
logSys.debug("Processing line with time:%s and ip:%s", logSys.debug("Processing line with time:%s and ip:%s",
unixTime, ip) unixTime, ip)
if self.inIgnoreIPList(ip, log_ignore=True): if self.inIgnoreIPList(ip, log_ignore=True):
@ -509,7 +516,7 @@ class Filter(JailThread):
logSys.info( logSys.info(
"[%s] Found %s - %s", self.jailName, ip, datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S") "[%s] Found %s - %s", self.jailName, ip, datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S")
) )
tick = FailTicket(ip, unixTime, lines, data=fail) tick = FailTicket(ip, unixTime, data=fail)
self.failManager.addFailure(tick) self.failManager.addFailure(tick)
# reset (halve) error counter (successfully processed line): # reset (halve) error counter (successfully processed line):
if self._errors: if self._errors:
@ -544,6 +551,29 @@ class Filter(JailThread):
return ignoreRegexIndex return ignoreRegexIndex
return None return None
def _mergeFailure(self, mlfid, fail, failRegex):
mlfidFail = self.mlfidCache.get(mlfid) if self.__mlfidCache else None
if mlfidFail:
mlfidGroups = mlfidFail[1]
# if current line not failure, but previous was failure:
if fail.get('nofail') and not mlfidGroups.get('nofail'):
del fail['nofail'] # remove nofail flag - was already market as failure
self.mlfidCache.unset(mlfid) # remove cache entry
# if current line is failure, but previous was not:
elif not fail.get('nofail') and mlfidGroups.get('nofail'):
del mlfidGroups['nofail'] # remove nofail flag
self.mlfidCache.unset(mlfid) # remove cache entry
fail2 = mlfidGroups.copy()
fail2.update(fail)
fail2["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines()
fail = fail2
elif fail.get('nofail'):
fail["matches"] = failRegex.getMatchedTupleLines()
mlfidFail = [self.__lastDate, fail]
self.mlfidCache.set(mlfid, mlfidFail)
return fail
## ##
# Finds the failure in a line given split into time and log parts. # Finds the failure in a line given split into time and log parts.
# #
@ -618,76 +648,94 @@ class Filter(JailThread):
# Iterates over all the regular expressions. # Iterates over all the regular expressions.
for failRegexIndex, failRegex in enumerate(self.__failRegex): for failRegexIndex, failRegex in enumerate(self.__failRegex):
failRegex.search(self.__lineBuffer, orgBuffer) failRegex.search(self.__lineBuffer, orgBuffer)
if failRegex.hasMatched(): if not failRegex.hasMatched():
# The failregex matched. continue
logSys.log(7, "Matched %s", failRegex) # The failregex matched.
# Checks if we must ignore this match. logSys.log(7, "Matched %s", failRegex)
if self.ignoreLine(failRegex.getMatchedTupleLines()) \ # Checks if we must ignore this match.
is not None: if self.ignoreLine(failRegex.getMatchedTupleLines()) \
# The ignoreregex matched. Remove ignored match. is not None:
self.__lineBuffer = failRegex.getUnmatchedTupleLines() # The ignoreregex matched. Remove ignored match.
logSys.log(7, "Matched ignoreregex and was ignored") self.__lineBuffer = failRegex.getUnmatchedTupleLines()
if not self.checkAllRegex: logSys.log(7, "Matched ignoreregex and was ignored")
break if not self.checkAllRegex:
else: break
continue
if date is None:
logSys.warning(
"Found a match for %r but no valid date/time "
"found for %r. Please try setting a custom "
"date pattern (see man page jail.conf(5)). "
"If format is complex, please "
"file a detailed issue on"
" https://github.com/fail2ban/fail2ban/issues "
"in order to get support for this format."
% ("\n".join(failRegex.getMatchedLines()), timeText))
else: else:
self.__lineBuffer = failRegex.getUnmatchedTupleLines() continue
# retrieve failure-id, host, etc from failure match: if date is None:
raw = returnRawHost logSys.warning(
try: "Found a match for %r but no valid date/time "
if preGroups: "found for %r. Please try setting a custom "
fail = preGroups.copy() "date pattern (see man page jail.conf(5)). "
fail.update(failRegex.getGroups()) "If format is complex, please "
else: "file a detailed issue on"
fail = failRegex.getGroups() " https://github.com/fail2ban/fail2ban/issues "
# failure-id: "in order to get support for this format.",
fid = fail.get('fid') "\n".join(failRegex.getMatchedLines()), timeText)
# ip-address or host: continue
host = fail.get('ip4') self.__lineBuffer = failRegex.getUnmatchedTupleLines()
if host is not None: # retrieve failure-id, host, etc from failure match:
cidr = IPAddr.FAM_IPv4 try:
raw = True raw = returnRawHost
else: if preGroups:
host = fail.get('ip6') fail = preGroups.copy()
if host is not None: fail.update(failRegex.getGroups())
cidr = IPAddr.FAM_IPv6 else:
raw = True fail = failRegex.getGroups()
if host is None: # first try to check we have mlfid case (caching of connection id by multi-line):
host = fail.get('dns') mlfid = fail.get('mlfid')
if host is None: if mlfid is not None:
fail = self._mergeFailure(mlfid, fail, failRegex)
else:
# matched lines:
fail["matches"] = fail.get("matches", []) + failRegex.getMatchedTupleLines()
# failure-id:
fid = fail.get('fid')
# ip-address or host:
host = fail.get('ip4')
if host is not None:
cidr = IPAddr.FAM_IPv4
raw = True
else:
host = fail.get('ip6')
if host is not None:
cidr = IPAddr.FAM_IPv6
raw = True
if host is None:
host = fail.get('dns')
if host is None:
# first try to check we have mlfid case (cache connection id):
if fid is None:
if mlfid:
fail = self._mergeFailure(mlfid, fail, failRegex)
else:
# if no failure-id also (obscure case, wrong regex), throw error inside getFailID: # if no failure-id also (obscure case, wrong regex), throw error inside getFailID:
if fid is None: fid = failRegex.getFailID()
fid = failRegex.getFailID() host = fid
host = fid cidr = IPAddr.CIDR_RAW
cidr = IPAddr.CIDR_RAW # if mlfid case (not failure):
# if raw - add single ip or failure-id, if host is None:
# otherwise expand host to multiple ips using dns (or ignore it if not valid): if not self.checkAllRegex: # or fail.get('nofail'):
if raw: return failList
ip = IPAddr(host, cidr) ips = [None]
# check host equal failure-id, if not - failure with complex id: # if raw - add single ip or failure-id,
if fid is not None and fid != host: # otherwise expand host to multiple ips using dns (or ignore it if not valid):
ip = IPAddr(fid, IPAddr.CIDR_RAW) elif raw:
ips = [ip] ip = IPAddr(host, cidr)
else: # check host equal failure-id, if not - failure with complex id:
ips = DNSUtils.textToIp(host, self.__useDns) if fid is not None and fid != host:
for ip in ips: ip = IPAddr(fid, IPAddr.CIDR_RAW)
failList.append([failRegexIndex, ip, date, ips = [ip]
failRegex.getMatchedLines(), fail]) # otherwise, try to use dns conversion:
if not self.checkAllRegex: else:
break ips = DNSUtils.textToIp(host, self.__useDns)
except RegexException as e: # pragma: no cover - unsure if reachable # append failure with match to the list:
logSys.error(e) for ip in ips:
failList.append([failRegexIndex, ip, date, fail])
if not self.checkAllRegex:
break
except RegexException as e: # pragma: no cover - unsure if reachable
logSys.error(e)
return failList return failList
def status(self, flavor="basic"): def status(self, flavor="basic"):
@ -751,7 +799,7 @@ class FileFilter(Filter):
db = self.jail.database db = self.jail.database
if db is not None: if db is not None:
db.updateLog(self.jail, log) db.updateLog(self.jail, log)
logSys.info("Removed logfile: %r" % path) logSys.info("Removed logfile: %r", path)
self._delLogPath(path) self._delLogPath(path)
return return
@ -816,7 +864,7 @@ class FileFilter(Filter):
def getFailures(self, filename): def getFailures(self, filename):
log = self.getLog(filename) log = self.getLog(filename)
if log is None: if log is None:
logSys.error("Unable to get failures in " + filename) logSys.error("Unable to get failures in %s", filename)
return False return False
# We should always close log (file), otherwise may be locked (log-rotate, etc.) # We should always close log (file), otherwise may be locked (log-rotate, etc.)
try: try:
@ -825,11 +873,11 @@ class FileFilter(Filter):
has_content = log.open() has_content = log.open()
# see http://python.org/dev/peps/pep-3151/ # see http://python.org/dev/peps/pep-3151/
except IOError as e: except IOError as e:
logSys.error("Unable to open %s" % filename) logSys.error("Unable to open %s", filename)
logSys.exception(e) logSys.exception(e)
return False return False
except OSError as e: # pragma: no cover - requires race condition to tigger this except OSError as e: # pragma: no cover - requires race condition to tigger this
logSys.error("Error opening %s" % filename) logSys.error("Error opening %s", filename)
logSys.exception(e) logSys.exception(e)
return False return False
except Exception as e: # pragma: no cover - Requires implemention error in FileContainer to generate except Exception as e: # pragma: no cover - Requires implemention error in FileContainer to generate
@ -1050,7 +1098,7 @@ class FileContainer:
## sys.stdout.flush() ## sys.stdout.flush()
# Compare hash and inode # Compare hash and inode
if self.__hash != myHash or self.__ino != stats.st_ino: if self.__hash != myHash or self.__ino != stats.st_ino:
logSys.info("Log rotation detected for %s" % self.__filename) logSys.info("Log rotation detected for %s", self.__filename)
self.__hash = myHash self.__hash = myHash
self.__ino = stats.st_ino self.__ino = stats.st_ino
self.__pos = 0 self.__pos = 0

View File

@ -138,7 +138,8 @@ class Ticket(object):
self._data['matches'] = matches or [] self._data['matches'] = matches or []
def getMatches(self): def getMatches(self):
return self._data.get('matches', []) return [(line if isinstance(line, basestring) else "".join(line)) \
for line in self._data.get('matches', ())]
@property @property
def restored(self): def restored(self):
@ -235,7 +236,11 @@ class FailTicket(Ticket):
self.__retry += count self.__retry += count
self._data['failures'] += attempt self._data['failures'] += attempt
if matches: if matches:
self._data['matches'] += matches # we should duplicate "matches", because possibly referenced to multiple tickets:
if self._data['matches']:
self._data['matches'] = self._data['matches'] + matches
else:
self._data['matches'] = matches
def setLastTime(self, value): def setLastTime(self, value):
if value > self._time: if value > self._time:

View File

@ -98,6 +98,12 @@ class Utils():
cache.popitem() cache.popitem()
cache[k] = (v, t + self.maxTime) cache[k] = (v, t + self.maxTime)
def unset(self, k):
try:
del self._cache[k]
except KeyError: # pragme: no cover
pass
@staticmethod @staticmethod
def setFBlockMode(fhandle, value): def setFBlockMode(fhandle, value):

View File

@ -1431,6 +1431,7 @@ class GetFailures(LogCaptureTestCase):
('no', output_no), ('no', output_no),
('warn', output_yes) ('warn', output_yes)
): ):
self.pruneLog("[test-phase useDns=%s]" % useDns)
jail = DummyJail() jail = DummyJail()
filter_ = FileFilter(jail, useDns=useDns) filter_ = FileFilter(jail, useDns=useDns)
filter_.active = True filter_.active = True

View File

@ -150,29 +150,34 @@ def testSampleRegexsFactory(name, basedir):
else: else:
faildata = {} faildata = {}
ret = self.filter.processLine(line) try:
if not ret: ret = self.filter.processLine(line)
# Check line is flagged as none match if not ret:
self.assertFalse(faildata.get('match', True), # Check line is flagged as none match
"Line not matched when should have: %s:%i, line:\n%s" % self.assertFalse(faildata.get('match', True),
(logFile.filename(), logFile.filelineno(), line)) "Line not matched when should have")
elif ret: continue
failregex, fid, fail2banTime, fail = ret[0]
# Bypass no failure helpers-regexp:
if not faildata.get('match', False) and (fid is None or fail.get('nofail')):
regexsUsed.add(failregex)
continue
# Check line is flagged to match # Check line is flagged to match
self.assertTrue(faildata.get('match', False), self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have: %s:%i, line:\n%s" % "Line matched when shouldn't have")
(logFile.filename(), logFile.filelineno(), line)) self.assertEqual(len(ret), 1,
self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" % "Multiple regexs matched %r" % (map(lambda x: x[0], ret)))
(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))
# Verify timestamp and host as expected # Fallback for backwards compatibility (previously no fid, was host only):
failregex, host, fail2banTime, lines, fail = ret[0] if faildata.get("host", None) is not None and fail.get("host", None) is None:
self.assertEqual(host, faildata.get("host", None)) fail["host"] = fid
# Verify other captures: # Verify match captures (at least fid/host) and timestamp as expected
for k, v in faildata.iteritems(): for k, v in faildata.iteritems():
if k not in ("time", "match", "host", "desc"): if k not in ("time", "match", "desc"):
fv = fail.get(k, None) fv = fail.get(k, None)
self.assertEqual(fv, v, "Value of %s mismatch %r != %r on: %s:%i, line:\n%s" % ( self.assertEqual(fv, v)
k, fv, v, logFile.filename(), logFile.filelineno(), line))
t = faildata.get("time", None) t = faildata.get("time", None)
try: try:
@ -185,12 +190,15 @@ def testSampleRegexsFactory(name, basedir):
jsonTime += jsonTimeLocal.microsecond / 1000000 jsonTime += jsonTimeLocal.microsecond / 1000000
self.assertEqual(fail2banTime, jsonTime, self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds) on: %s:%i, line:\n%s" % "UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)), (fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)), jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) ) fail2banTime - jsonTime) )
regexsUsed.add(failregex) regexsUsed.add(failregex)
except AssertionError as e: # pragma: no cover
raise AssertionError("%s on: %s:%i, line:\n%s" % (
e, logFile.filename(), logFile.filelineno(), line))
for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()): for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()):
self.assertTrue( self.assertTrue(