Merge branch '0.11'

pull/2642/head
sebres 2020-02-14 12:14:51 +01:00
commit 35591db3e8
16 changed files with 425 additions and 284 deletions

View File

@ -25,7 +25,7 @@ matrix:
- python: 3.5
- python: 3.6
- python: 3.7
- python: 3.8-dev
- python: 3.8
- python: pypy3.5
before_install:
- echo "running under $TRAVIS_PYTHON_VERSION"

View File

@ -37,7 +37,7 @@ mdre-rbl = ^RCPT from [^[]*\[<HOST>\]%(_port)s: [45]54 [45]\.7\.1 Service unava
mdpr-more = %(mdpr-normal)s
mdre-more = %(mdre-normal)s
mdpr-ddos = lost connection after(?! DATA) [A-Z]+
mdpr-ddos = (?:lost connection after(?! DATA) [A-Z]+|disconnect(?= from \S+(?: \S+=\d+)* auth=0/(?:[1-9]|\d\d+)))
mdre-ddos = ^from [^[]*\[<HOST>\]%(_port)s:?
mdpr-extra = (?:%(mdpr-auth)s|%(mdpr-normal)s)

View File

@ -40,8 +40,8 @@ prefregex = ^<F-MLFID>%(__prefix_line)s</F-MLFID>%(__pref)s<F-CONTENT>.+</F-CONT
cmnfailre = ^[aA]uthentication (?:failure|error|failed) for <F-USER>.*</F-USER> from <HOST>( via \S+)?%(__suff)s$
^User not known to the underlying authentication module for <F-USER>.*</F-USER> from <HOST>%(__suff)s$
^Failed publickey for invalid user <F-USER>(?P<cond_user>\S+)|(?:(?! from ).)*?</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^Failed \b(?!publickey)\S+ for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
<cmnfailre-failed-pub-<publickey>>
^Failed <cmnfailed> for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^<F-USER>ROOT</F-USER> LOGIN REFUSED FROM <HOST>
^[iI](?:llegal|nvalid) user <F-USER>.*?</F-USER> from <HOST>%(__suff)s$
^User <F-USER>.+</F-USER> from <HOST> not allowed because not listed in AllowUsers%(__suff)s$
@ -55,11 +55,17 @@ cmnfailre = ^[aA]uthentication (?:failure|error|failed) for <F-USER>.*</F-USER>
^(error: )?maximum authentication attempts exceeded for <F-USER>.*</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?%(__suff)s$
^User <F-USER>.+</F-USER> not allowed because account is locked%(__suff)s
^<F-MLFFORGET>Disconnecting</F-MLFFORGET>(?: from)?(?: (?:invalid|authenticating)) user <F-USER>\S+</F-USER> <HOST>%(__on_port_opt)s:\s*Change of username or service not allowed:\s*.*\[preauth\]\s*$
^<F-MLFFORGET>Disconnecting</F-MLFFORGET>: Too many authentication failures(?: for <F-USER>.+?</F-USER>)?%(__suff)s$
^Disconnecting: Too many authentication failures(?: for <F-USER>.+?</F-USER>)?%(__suff)s$
^<F-NOFAIL>Received <F-MLFFORGET>disconnect</F-MLFFORGET></F-NOFAIL> from <HOST>%(__on_port_opt)s:\s*11:
<mdre-<mode>-other>
^<F-MLFFORGET><F-MLFGAINED>Accepted \w+</F-MLFGAINED></F-MLFFORGET> for <F-USER>\S+</F-USER> from <HOST>(?:\s|$)
cmnfailed-any = \S+
cmnfailed-ignore = \b(?!publickey)\S+
cmnfailed-invalid = <cmnfailed-ignore>
cmnfailed-nofail = (?:<F-NOFAIL>publickey</F-NOFAIL>|\S+)
cmnfailed = <cmnfailed-<publickey>>
mdre-normal =
# used to differentiate "connection closed" with and without `[preauth]` (fail/nofail cases in ddos mode)
mdre-normal-other = ^<F-NOFAIL><F-MLFFORGET>(Connection closed|Disconnected)</F-MLFFORGET></F-NOFAIL> (?:by|from)%(__authng_user)s <HOST>(?:%(__suff)s|\s*)$
@ -84,6 +90,17 @@ mdre-aggressive = %(mdre-ddos)s
# mdre-extra-other is fully included within mdre-ddos-other:
mdre-aggressive-other = %(mdre-ddos-other)s
# Parameter "publickey": nofail (default), invalid, any, ignore
publickey = nofail
# consider failed publickey for invalid users only:
cmnfailre-failed-pub-invalid = ^Failed publickey for invalid user <F-USER>(?P<cond_user>\S+)|(?:(?! from ).)*?</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
# consider failed publickey for valid users too (don't need RE, see cmnfailed):
cmnfailre-failed-pub-any =
# same as invalid, but consider failed publickey for valid users too, just as no failure (helper to get IP and user-name only, see cmnfailed):
cmnfailre-failed-pub-nofail = <cmnfailre-failed-pub-invalid>
# don't consider failed publickey as failures (don't need RE, see cmnfailed):
cmnfailre-failed-pub-ignore =
cfooterre = ^<F-NOFAIL>Connection from</F-NOFAIL> <HOST>
failregex = %(cmnfailre)s

View File

@ -240,7 +240,7 @@ action_cf_mwl = cloudflare[cfuser="%(cfemail)s", cftoken="%(cfapikey)s"]
# in your `jail.local` globally (section [DEFAULT]) or per specific jail section (resp. in
# corresponding jail.d/my-jail.local file).
#
action_blocklist_de = blocklist_de[email="%(sender)s", service=%(filter)s, apikey="%(blocklist_de_apikey)s", agent="%(fail2ban_agent)s"]
action_blocklist_de = blocklist_de[email="%(sender)s", service="%(__name__)s", apikey="%(blocklist_de_apikey)s", agent="%(fail2ban_agent)s"]
# Report ban via badips.com, and use as blacklist
#

View File

@ -272,6 +272,10 @@ class Fail2banRegex(object):
self._filter.returnRawHost = opts.raw
self._filter.checkFindTime = False
self._filter.checkAllRegex = opts.checkAllRegex and not opts.out
# ignore pending (without ID/IP), added to matches if it hits later (if ID/IP can be retrieved)
self._filter.ignorePending = opts.out
# callback to increment ignored RE's by index (during process):
self._filter.onIgnoreRegex = self._onIgnoreRegex
self._backend = 'auto'
def output(self, line):
@ -433,26 +437,20 @@ class Fail2banRegex(object):
'add%sRegex' % regextype.title())(regex.getFailRegex())
return True
def testIgnoreRegex(self, line):
found = False
try:
ret = self._filter.ignoreLine([(line, "", "")])
if ret is not None:
found = True
regex = self._ignoreregex[ret].inc()
except RegexException as e: # pragma: no cover
output( 'ERROR: %s' % e )
return False
return found
def _onIgnoreRegex(self, idx, ignoreRegex):
	"""Callback invoked by the filter when an ignoreregex has matched.

	Marks the currently processed line as ignored and increments the
	hit counter of the ignoreregex with the given index (used later
	for the per-regex statistics output).

	Parameters
	----------
	idx : int
		Index of the matched ignoreregex within self._ignoreregex.
	ignoreRegex :
		The matched regex object itself (unused here; provided by the
		filter callback interface).
	"""
	self._lineIgnored = True
	self._ignoreregex[idx].inc()
def testRegex(self, line, date=None):
orgLineBuffer = self._filter._Filter__lineBuffer
# duplicate line buffer (list can be changed inplace during processLine):
if self._filter.getMaxLines() > 1:
orgLineBuffer = orgLineBuffer[:]
fullBuffer = len(orgLineBuffer) >= self._filter.getMaxLines()
is_ignored = False
is_ignored = self._lineIgnored = False
try:
found = self._filter.processLine(line, date)
lines = []
line = self._filter.processedLine()
ret = []
for match in found:
# Append True/False flag depending if line was matched by
@ -468,36 +466,97 @@ class Fail2banRegex(object):
except RegexException as e: # pragma: no cover
output( 'ERROR: %s' % e )
return False
for bufLine in orgLineBuffer[int(fullBuffer):]:
if bufLine not in self._filter._Filter__lineBuffer:
try:
self._line_stats.missed_lines.pop(
self._line_stats.missed_lines.index("".join(bufLine)))
if self._debuggex:
self._line_stats.missed_lines_timeextracted.pop(
self._line_stats.missed_lines_timeextracted.index(
"".join(bufLine[::2])))
except ValueError:
pass
# if buffering - add also another lines from match:
if self._print_all_matched:
if not self._debuggex:
self._line_stats.matched_lines.append("".join(bufLine))
else:
lines.append(bufLine[0] + bufLine[2])
self._line_stats.matched += 1
self._line_stats.missed -= 1
if self._filter.getMaxLines() > 1:
for bufLine in orgLineBuffer[int(fullBuffer):]:
if bufLine not in self._filter._Filter__lineBuffer:
try:
self._line_stats.missed_lines.pop(
self._line_stats.missed_lines.index("".join(bufLine)))
if self._debuggex:
self._line_stats.missed_lines_timeextracted.pop(
self._line_stats.missed_lines_timeextracted.index(
"".join(bufLine[::2])))
except ValueError:
pass
# if buffering - add also another lines from match:
if self._print_all_matched:
if not self._debuggex:
self._line_stats.matched_lines.append("".join(bufLine))
else:
lines.append(bufLine[0] + bufLine[2])
self._line_stats.matched += 1
self._line_stats.missed -= 1
if lines: # pre-lines parsed in multiline mode (buffering)
lines.append(line)
lines.append(self._filter.processedLine())
line = "\n".join(lines)
return line, ret, is_ignored
return line, ret, (is_ignored or self._lineIgnored)
def _prepaireOutput(self):
	"""Prepares output- and fetch-function corresponding given '--out' option (format).

	Returns a function _out(ret) that prints each found failure of *ret*
	(list of [failRegexIndex, ip, date, fail-data]) in the format selected
	by the '--out' option:
	  - 'id'/'ip': failure id resp. IP only,
	  - 'msg':     matched message line(s),
	  - 'row':     python-row representation (without matches),
	  - other without tags: single named datum of the failure,
	  - otherwise: extended format with <tag> substitution via actions.
	"""
	ofmt = self._opts.out
	if ofmt in ('id', 'ip'):
		# output failure-id resp. IP only (2nd element of the row):
		def _out(ret):
			for r in ret:
				output(r[1])
	elif ofmt == 'msg':
		# output matched message(s); tuple-lines are joined to a string:
		def _out(ret):
			for r in ret:
				for r in r[3].get('matches'):
					if not isinstance(r, basestring):
						r = ''.join(r for r in r)
					output(r)
	elif ofmt == 'row':
		# output whole failure as python row (matches excluded):
		def _out(ret):
			for r in ret:
				output('[%r,\t%r,\t%r],' % (r[1],r[2],dict((k,v) for k, v in r[3].iteritems() if k != 'matches')))
	elif '<' not in ofmt:
		# plain named datum of the failure (no tags in format):
		def _out(ret):
			for r in ret:
				output(r[3].get(ofmt))
	else: # extended format with tags substitution:
		from ..server.actions import Actions, CommandAction, BanTicket
		def _escOut(t, v):
			# use safe escape (avoid inject on pseudo tag "\x00msg\x00"):
			if t not in ('msg',):
				return v.replace('\x00', '\\x00')
			return v
		def _out(ret):
			rows = []
			wrap = {'NL':0}
			for r in ret:
				# build a ticket/action-info so CommandAction can interpolate tags:
				ticket = BanTicket(r[1], time=r[2], data=r[3])
				aInfo = Actions.ActionInfo(ticket)
				# if msg tag is used - output if single line (otherwise let it as is to wrap multilines later):
				def _get_msg(self):
					if not wrap['NL'] and len(r[3].get('matches', [])) <= 1:
						return self['matches']
					else: # pseudo tag for future replacement:
						wrap['NL'] = 1
						return "\x00msg\x00"
				aInfo['msg'] = _get_msg
				# not recursive interpolation (use safe escape):
				v = CommandAction.replaceDynamicTags(ofmt, aInfo, escapeVal=_escOut)
				if wrap['NL']: # contains multiline tags (msg):
					rows.append((r, v))
					continue
				output(v)
			# wrap multiline tag (msg) interpolations to single line:
			for r, v in rows:
				for r in r[3].get('matches'):
					if not isinstance(r, basestring):
						r = ''.join(r for r in r)
					r = v.replace("\x00msg\x00", r)
					output(r)
	return _out
def process(self, test_lines):
t0 = time.time()
if self._opts.out: # get out function
out = self._prepaireOutput()
for line in test_lines:
if isinstance(line, tuple):
line_datetimestripped, ret, is_ignored = self.testRegex(
line[0], line[1])
line_datetimestripped, ret, is_ignored = self.testRegex(line[0], line[1])
line = "".join(line[0])
else:
line = line.rstrip('\r\n')
@ -505,8 +564,10 @@ class Fail2banRegex(object):
# skip comment and empty lines
continue
line_datetimestripped, ret, is_ignored = self.testRegex(line)
if not is_ignored:
is_ignored = self.testIgnoreRegex(line_datetimestripped)
if self._opts.out: # (formated) output:
if len(ret) > 0 and not is_ignored: out(ret)
continue
if is_ignored:
self._line_stats.ignored += 1
@ -514,42 +575,25 @@ class Fail2banRegex(object):
self._line_stats.ignored_lines.append(line)
if self._debuggex:
self._line_stats.ignored_lines_timeextracted.append(line_datetimestripped)
if len(ret) > 0:
assert(not is_ignored)
if self._opts.out:
if self._opts.out in ('id', 'ip'):
for ret in ret:
output(ret[1])
elif self._opts.out == 'msg':
for ret in ret:
output('\n'.join(map(lambda v:''.join(v for v in v), ret[3].get('matches'))))
elif self._opts.out == 'row':
for ret in ret:
output('[%r,\t%r,\t%r],' % (ret[1],ret[2],dict((k,v) for k, v in ret[3].iteritems() if k != 'matches')))
else:
for ret in ret:
output(ret[3].get(self._opts.out))
continue
elif len(ret) > 0:
self._line_stats.matched += 1
if self._print_all_matched:
self._line_stats.matched_lines.append(line)
if self._debuggex:
self._line_stats.matched_lines_timeextracted.append(line_datetimestripped)
else:
if not is_ignored:
self._line_stats.missed += 1
if not self._print_no_missed and (self._print_all_missed or self._line_stats.missed <= self._maxlines + 1):
self._line_stats.missed_lines.append(line)
if self._debuggex:
self._line_stats.missed_lines_timeextracted.append(line_datetimestripped)
self._line_stats.missed += 1
if not self._print_no_missed and (self._print_all_missed or self._line_stats.missed <= self._maxlines + 1):
self._line_stats.missed_lines.append(line)
if self._debuggex:
self._line_stats.missed_lines_timeextracted.append(line_datetimestripped)
self._line_stats.tested += 1
self._time_elapsed = time.time() - t0
def printLines(self, ltype):
lstats = self._line_stats
assert(self._line_stats.missed == lstats.tested - (lstats.matched + lstats.ignored))
assert(lstats.missed == lstats.tested - (lstats.matched + lstats.ignored))
lines = lstats[ltype]
l = lstats[ltype + '_lines']
multiline = self._filter.getMaxLines() > 1
@ -686,10 +730,10 @@ class Fail2banRegex(object):
test_lines = journal_lines_gen(flt, myjournal)
else:
# if single line parsing (without buffering)
if self._filter.getMaxLines() <= 1:
if self._filter.getMaxLines() <= 1 and '\n' not in cmd_log:
self.output( "Use single line : %s" % shortstr(cmd_log.replace("\n", r"\n")) )
test_lines = [ cmd_log ]
else: # multi line parsing (with buffering)
else: # multi line parsing (with and without buffering)
test_lines = cmd_log.split("\n")
self.output( "Use multi line : %s line(s)" % len(test_lines) )
for i, l in enumerate(test_lines):

View File

@ -809,7 +809,7 @@ class CommandAction(ActionBase):
ESCAPE_VN_CRE = re.compile(r"\W")
@classmethod
def replaceDynamicTags(cls, realCmd, aInfo):
def replaceDynamicTags(cls, realCmd, aInfo, escapeVal=None):
"""Replaces dynamical tags in `query` with property values.
**Important**
@ -834,16 +834,17 @@ class CommandAction(ActionBase):
# array for escaped vars:
varsDict = dict()
def escapeVal(tag, value):
# if the value should be escaped:
if cls.ESCAPE_CRE.search(value):
# That one needs to be escaped since its content is
# out of our control
tag = 'f2bV_%s' % cls.ESCAPE_VN_CRE.sub('_', tag)
varsDict[tag] = value # add variable
value = '$'+tag # replacement as variable
# replacement for tag:
return value
if not escapeVal:
def escapeVal(tag, value):
# if the value should be escaped:
if cls.ESCAPE_CRE.search(value):
# That one needs to be escaped since its content is
# out of our control
tag = 'f2bV_%s' % cls.ESCAPE_VN_CRE.sub('_', tag)
varsDict[tag] = value # add variable
value = '$'+tag # replacement as variable
# replacement for tag:
return value
# additional replacement as calling map:
ADD_REPL_TAGS_CM = CallingMap(ADD_REPL_TAGS)

View File

@ -138,6 +138,8 @@ class Regex:
except sre_constants.error:
raise RegexException("Unable to compile regular expression '%s'" %
regex)
# set fetch handler depending on presence of alternate tags:
self.getGroups = self._getGroupsWithAlt if self._altValues else self._getGroups
def __str__(self):
return "%s(%r)" % (self.__class__.__name__, self._regex)
@ -277,11 +279,12 @@ class Regex:
# Returns all matched groups.
#
def getGroups(self):
if not self._altValues:
return self._matchCache.groupdict()
# merge alternate values (e. g. 'alt_user_1' -> 'user' or 'alt_host' -> 'host'):
def _getGroups(self):
return self._matchCache.groupdict()
def _getGroupsWithAlt(self):
fail = self._matchCache.groupdict()
# merge alternate values (e. g. 'alt_user_1' -> 'user' or 'alt_host' -> 'host'):
#fail = fail.copy()
for k,n in self._altValues:
v = fail.get(k)
@ -289,6 +292,9 @@ class Regex:
fail[n] = v
return fail
def getGroups(self): # pragma: no cover - abstract function (replaced in __init__)
pass
##
# Returns skipped lines.
#

View File

@ -106,6 +106,10 @@ class Filter(JailThread):
self.returnRawHost = False
## check each regex (used for test purposes):
self.checkAllRegex = False
## avoid finding of pending failures (without ID/IP, used in fail2ban-regex):
self.ignorePending = True
## callback called on ignoreregex match :
self.onIgnoreRegex = None
## if true ignores obsolete failures (failure time < now - findTime):
self.checkFindTime = True
## Ticks counter
@ -169,7 +173,7 @@ class Filter(JailThread):
# @param value the regular expression
def addFailRegex(self, value):
multiLine = self.getMaxLines() > 1
multiLine = self.__lineBufferSize > 1
try:
regex = FailRegex(value, prefRegex=self.__prefRegex, multiline=multiLine,
useDns=self.__useDns)
@ -574,20 +578,33 @@ class Filter(JailThread):
"""
if date:
tupleLine = line
self.__lastTimeText = tupleLine[1]
self.__lastDate = date
else:
l = line.rstrip('\r\n')
logSys.log(7, "Working on line %r", line)
(timeMatch, template) = self.dateDetector.matchTime(l)
if timeMatch:
tupleLine = (
l[:timeMatch.start(1)],
l[timeMatch.start(1):timeMatch.end(1)],
l[timeMatch.end(1):],
(timeMatch, template)
)
# try to parse date:
timeMatch = self.dateDetector.matchTime(line)
m = timeMatch[0]
if m:
s = m.start(1)
e = m.end(1)
m = line[s:e]
tupleLine = (line[:s], m, line[e:])
if m: # found and not empty - retrive date:
date = self.dateDetector.getTime(m, timeMatch)
if date is None:
if m: logSys.error("findFailure failed to parse timeText: %s", m)
date = self.__lastDate
else:
# Lets get the time part
date = date[0]
self.__lastTimeText = m
self.__lastDate = date
else:
tupleLine = (l, "", "", None)
tupleLine = (line, self.__lastTimeText, "")
date = self.__lastDate
# save last line (lazy convert of process line tuple to string on demand):
self.processedLine = lambda: "".join(tupleLine[::2])
@ -632,20 +649,26 @@ class Filter(JailThread):
self._errors //= 2
self.idle = True
##
# Returns true if the line should be ignored.
#
# Uses ignoreregex.
# @param line: the line
# @return: a boolean
def ignoreLine(self, tupleLines):
buf = Regex._tupleLinesBuf(tupleLines)
def _ignoreLine(self, buf, orgBuffer, failRegex=None):
# if multi-line buffer - use matched only, otherwise (single line) - original buf:
if failRegex and self.__lineBufferSize > 1:
orgBuffer = failRegex.getMatchedTupleLines()
buf = Regex._tupleLinesBuf(orgBuffer)
# search ignored:
fnd = None
for ignoreRegexIndex, ignoreRegex in enumerate(self.__ignoreRegex):
ignoreRegex.search(buf, tupleLines)
ignoreRegex.search(buf, orgBuffer)
if ignoreRegex.hasMatched():
return ignoreRegexIndex
return None
fnd = ignoreRegexIndex
logSys.log(7, " Matched ignoreregex %d and was ignored", fnd)
if self.onIgnoreRegex: self.onIgnoreRegex(fnd, ignoreRegex)
# remove ignored match:
if not self.checkAllRegex or self.__lineBufferSize > 1:
# todo: check ignoreRegex.getUnmatchedTupleLines() would be better (fix testGetFailuresMultiLineIgnoreRegex):
if failRegex:
self.__lineBuffer = failRegex.getUnmatchedTupleLines()
if not self.checkAllRegex: break
return fnd
def _updateUsers(self, fail, user=()):
users = fail.get('users')
@ -655,54 +678,31 @@ class Filter(JailThread):
fail['users'] = users = set()
users.add(user)
return users
return None
# # ATM incremental (non-empty only) merge deactivated ...
# @staticmethod
# def _updateFailure(self, mlfidGroups, fail):
# # reset old failure-ids when new types of id available in this failure:
# fids = set()
# for k in ('fid', 'ip4', 'ip6', 'dns'):
# if fail.get(k):
# fids.add(k)
# if fids:
# for k in ('fid', 'ip4', 'ip6', 'dns'):
# if k not in fids:
# try:
# del mlfidGroups[k]
# except:
# pass
# # update not empty values:
# mlfidGroups.update(((k,v) for k,v in fail.iteritems() if v))
return users
def _mergeFailure(self, mlfid, fail, failRegex):
mlfidFail = self.mlfidCache.get(mlfid) if self.__mlfidCache else None
users = None
nfflgs = 0
if fail.get("mlfgained"):
nfflgs |= 9
nfflgs |= (8|1)
if not fail.get('nofail'):
fail['nofail'] = fail["mlfgained"]
elif fail.get('nofail'): nfflgs |= 1
if fail.get('mlfforget'): nfflgs |= 2
if fail.pop('mlfforget', None): nfflgs |= 2
# if multi-line failure id (connection id) known:
if mlfidFail:
mlfidGroups = mlfidFail[1]
# update users set (hold all users of connect):
users = self._updateUsers(mlfidGroups, fail.get('user'))
# be sure we've correct current state ('nofail' and 'mlfgained' only from last failure)
try:
del mlfidGroups['nofail']
del mlfidGroups['mlfgained']
except KeyError:
pass
# # ATM incremental (non-empty only) merge deactivated (for future version only),
# # it can be simulated using alternate value tags, like <F-ALT_VAL>...</F-ALT_VAL>,
# # so previous value 'val' will be overwritten only if 'alt_val' is not empty...
# _updateFailure(mlfidGroups, fail)
#
if mlfidGroups.pop('nofail', None): nfflgs |= 4
if mlfidGroups.pop('mlfgained', None): nfflgs |= 4
# if we had no pending failures then clear the matches (they are already provided):
if (nfflgs & 4) == 0 and not mlfidGroups.get('mlfpending', 0):
mlfidGroups.pop("matches", None)
# overwrite multi-line failure with all values, available in fail:
mlfidGroups.update(fail)
mlfidGroups.update(((k,v) for k,v in fail.iteritems() if v is not None))
# new merged failure data:
fail = mlfidGroups
# if forget (disconnect/reset) - remove cached entry:
@ -713,24 +713,19 @@ class Filter(JailThread):
mlfidFail = [self.__lastDate, fail]
self.mlfidCache.set(mlfid, mlfidFail)
# check users in order to avoid reset failure by multiple logon-attempts:
if users and len(users) > 1:
# we've new user, reset 'nofail' because of multiple users attempts:
try:
del fail['nofail']
nfflgs &= ~1 # reset nofail
except KeyError:
pass
if fail.pop('mlfpending', 0) or users and len(users) > 1:
# we've pending failures or new user, reset 'nofail' because of failures or multiple users attempts:
fail.pop('nofail', None)
fail.pop('mlfgained', None)
nfflgs &= ~(8|1) # reset nofail and gained
# merge matches:
if not (nfflgs & 1): # current nofail state (corresponding users)
try:
m = fail.pop("nofail-matches")
m += fail.get("matches", [])
except KeyError:
m = fail.get("matches", [])
if not (nfflgs & 8): # no gain signaled
if (nfflgs & 1) == 0: # current nofail state (corresponding users)
m = fail.pop("nofail-matches", [])
m += fail.get("matches", [])
if (nfflgs & 8) == 0: # no gain signaled
m += failRegex.getMatchedTupleLines()
fail["matches"] = m
elif not (nfflgs & 2) and (nfflgs & 1): # not mlfforget and nofail:
elif (nfflgs & 3) == 1: # not mlfforget and nofail:
fail["nofail-matches"] = fail.get("nofail-matches", []) + failRegex.getMatchedTupleLines()
# return merged:
return fail
@ -743,7 +738,7 @@ class Filter(JailThread):
# to find the logging time.
# @return a dict with IP and timestamp.
def findFailure(self, tupleLine, date=None):
def findFailure(self, tupleLine, date):
failList = list()
ll = logSys.getEffectiveLevel()
@ -753,62 +748,38 @@ class Filter(JailThread):
returnRawHost = True
cidr = IPAddr.CIDR_RAW
# Checks if we must ignore this line.
if self.ignoreLine([tupleLine[::2]]) is not None:
# The ignoreregex matched. Return.
if ll <= 7: logSys.log(7, "Matched ignoreregex and was \"%s\" ignored",
"".join(tupleLine[::2]))
return failList
timeText = tupleLine[1]
if date:
self.__lastTimeText = timeText
self.__lastDate = date
elif timeText:
dateTimeMatch = self.dateDetector.getTime(timeText, tupleLine[3])
if dateTimeMatch is None:
logSys.error("findFailure failed to parse timeText: %s", timeText)
date = self.__lastDate
else:
# Lets get the time part
date = dateTimeMatch[0]
self.__lastTimeText = timeText
self.__lastDate = date
else:
timeText = self.__lastTimeText or "".join(tupleLine[::2])
date = self.__lastDate
if self.checkFindTime and date is not None and date < MyTime.time() - self.getFindTime():
if ll <= 5: logSys.log(5, "Ignore line since time %s < %s - %s",
date, MyTime.time(), self.getFindTime())
return failList
if self.__lineBufferSize > 1:
orgBuffer = self.__lineBuffer = (
self.__lineBuffer + [tupleLine[:3]])[-self.__lineBufferSize:]
self.__lineBuffer.append(tupleLine)
orgBuffer = self.__lineBuffer = self.__lineBuffer[-self.__lineBufferSize:]
else:
orgBuffer = self.__lineBuffer = [tupleLine[:3]]
if ll <= 5: logSys.log(5, "Looking for match of %r", self.__lineBuffer)
buf = Regex._tupleLinesBuf(self.__lineBuffer)
orgBuffer = self.__lineBuffer = [tupleLine]
if ll <= 5: logSys.log(5, "Looking for match of %r", orgBuffer)
buf = Regex._tupleLinesBuf(orgBuffer)
# Checks if we must ignore this line (only if fewer ignoreregex than failregex).
if self.__ignoreRegex and len(self.__ignoreRegex) < len(self.__failRegex) - 2:
if self._ignoreLine(buf, orgBuffer) is not None:
# The ignoreregex matched. Return.
return failList
# Pre-filter fail regex (if available):
preGroups = {}
if self.__prefRegex:
if ll <= 5: logSys.log(5, " Looking for prefregex %r", self.__prefRegex.getRegex())
self.__prefRegex.search(buf, self.__lineBuffer)
self.__prefRegex.search(buf, orgBuffer)
if not self.__prefRegex.hasMatched():
if ll <= 5: logSys.log(5, " Prefregex not matched")
return failList
preGroups = self.__prefRegex.getGroups()
if ll <= 7: logSys.log(7, " Pre-filter matched %s", preGroups)
repl = preGroups.get('content')
repl = preGroups.pop('content', None)
# Content replacement:
if repl:
del preGroups['content']
self.__lineBuffer, buf = [('', '', repl)], None
# Iterates over all the regular expressions.
@ -826,15 +797,12 @@ class Filter(JailThread):
# The failregex matched.
if ll <= 7: logSys.log(7, " Matched failregex %d: %s", failRegexIndex, fail)
# Checks if we must ignore this match.
if self.ignoreLine(failRegex.getMatchedTupleLines()) \
is not None:
if self.__ignoreRegex and self._ignoreLine(buf, orgBuffer, failRegex) is not None:
# The ignoreregex matched. Remove ignored match.
self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
if ll <= 7: logSys.log(7, " Matched ignoreregex and was ignored")
buf = None
if not self.checkAllRegex:
break
else:
continue
continue
if date is None:
logSys.warning(
"Found a match for %r but no valid date/time "
@ -844,10 +812,10 @@ class Filter(JailThread):
"file a detailed issue on"
" https://github.com/fail2ban/fail2ban/issues "
"in order to get support for this format.",
"\n".join(failRegex.getMatchedLines()), timeText)
"\n".join(failRegex.getMatchedLines()), tupleLine[1])
continue
# we should check all regex (bypass on multi-line, otherwise too complex):
if not self.checkAllRegex or self.getMaxLines() > 1:
if not self.checkAllRegex or self.__lineBufferSize > 1:
self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
# merge data if multi-line failure:
raw = returnRawHost
@ -892,7 +860,8 @@ class Filter(JailThread):
if host is None:
if ll <= 7: logSys.log(7, "No failure-id by mlfid %r in regex %s: %s",
mlfid, failRegexIndex, fail.get('mlfforget', "waiting for identifier"))
if not self.checkAllRegex: return failList
fail['mlfpending'] = 1; # mark failure is pending
if not self.checkAllRegex and self.ignorePending: return failList
ips = [None]
# if raw - add single ip or failure-id,
# otherwise expand host to multiple ips using dns (or ignore it if not valid):
@ -905,6 +874,9 @@ class Filter(JailThread):
# otherwise, try to use dns conversion:
else:
ips = DNSUtils.textToIp(host, self.__useDns)
# if checkAllRegex we must make a copy (to be sure next RE doesn't change merged/cached failure):
if self.checkAllRegex and mlfid is not None:
fail = fail.copy()
# append failure with match to the list:
for ip in ips:
failList.append([failRegexIndex, ip, date, fail])
@ -1082,7 +1054,7 @@ class FileFilter(Filter):
if not line or not self.active:
# The jail reached the bottom or has been stopped
break
self.processLineAndAdd(line)
self.processLineAndAdd(line.rstrip('\r\n'))
finally:
log.close()
db = self.jail.database

View File

@ -58,6 +58,23 @@ except ImportError: # pragma: no cover
def _thread_name():
return threading.current_thread().__class__.__name__
try:
FileExistsError
except NameError: # pragma: 3.x no cover
FileExistsError = OSError
def _make_file_path(name):
	"""Creates path of file (last level only) on demand.

	Given a file name (e. g. pid-file, socket or database path), extracts
	its directory part and, if that directory is an absolute path, creates
	the last directory level only (no recursive makedirs).  An already
	existing directory is not an error.
	"""
	name = os.path.dirname(name)
	# only if it is absolute (e. g. important for socket, so if unix path):
	if os.path.isabs(name):
		# be sure path exists (create last level of directory on demand):
		try:
			os.mkdir(name)
		except (OSError, FileExistsError) as e:
			# 17 == errno.EEXIST (directory already exists - not an error):
			if e.errno != 17: # pragma: no cover - not EEXIST is not covered
				raise
class Server:
@ -97,7 +114,7 @@ class Server:
def start(self, sock, pidfile, force=False, observer=True, conf={}):
# First set the mask to only allow access to owner
os.umask(0077)
os.umask(0o077)
# Second daemonize before logging etc, because it will close all handles:
if self.__daemon: # pragma: no cover
logSys.info("Starting in daemon mode")
@ -142,6 +159,7 @@ class Server:
# Creates a PID file.
try:
logSys.debug("Creating PID file %s", pidfile)
_make_file_path(pidfile)
pidFile = open(pidfile, 'w')
pidFile.write("%s\n" % os.getpid())
pidFile.close()
@ -157,6 +175,7 @@ class Server:
# Start the communication
logSys.debug("Starting communication")
try:
_make_file_path(sock)
self.__asyncServer = AsyncServer(self.__transm)
self.__asyncServer.onstart = conf.get('onstart')
self.__asyncServer.start(sock, force)
@ -781,6 +800,7 @@ class Server:
self.__db = None
else:
if Fail2BanDb is not None:
_make_file_path(filename)
self.__db = Fail2BanDb(filename)
self.__db.delAllJails()
else: # pragma: no cover

View File

@ -37,7 +37,7 @@ __pam_auth = pam_[a-z]+
cmnfailre = ^%(__prefix_line_sl)s[aA]uthentication (?:failure|error|failed) for .* from <HOST>( via \S+)?\s*%(__suff)s$
^%(__prefix_line_sl)sUser not known to the underlying authentication module for .* from <HOST>\s*%(__suff)s$
^%(__prefix_line_sl)sFailed \S+ for invalid user <F-USER>(?P<cond_user>\S+)|(?:(?! from ).)*?</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line_sl)sFailed \b(?!publickey)\S+ for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line_sl)sFailed (?:<F-NOFAIL>publickey</F-NOFAIL>|\S+) for (?P<cond_inv>invalid user )?<F-USER>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)</F-USER> from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line_sl)sROOT LOGIN REFUSED FROM <HOST>
^%(__prefix_line_sl)s[iI](?:llegal|nvalid) user .*? from <HOST>%(__suff)s$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because not listed in AllowUsers\s*%(__suff)s$

View File

@ -83,13 +83,29 @@ def _test_exec_command_line(*args):
STR_00 = "Dec 31 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 192.0.2.0"
RE_00 = r"(?:(?:Authentication failure|Failed [-/\w+]+) for(?: [iI](?:llegal|nvalid) user)?|[Ii](?:llegal|nvalid) user|ROOT LOGIN REFUSED) .*(?: from|FROM) <HOST>"
RE_00_ID = r"Authentication failure for <F-ID>.*?</F-ID> from <HOST>$"
RE_00_USER = r"Authentication failure for <F-USER>.*?</F-USER> from <HOST>$"
RE_00_ID = r"Authentication failure for <F-ID>.*?</F-ID> from <ADDR>$"
RE_00_USER = r"Authentication failure for <F-USER>.*?</F-USER> from <ADDR>$"
FILENAME_01 = os.path.join(TEST_FILES_DIR, "testcase01.log")
FILENAME_02 = os.path.join(TEST_FILES_DIR, "testcase02.log")
FILENAME_WRONGCHAR = os.path.join(TEST_FILES_DIR, "testcase-wrong-char.log")
# STR_ML_SSHD -- multiline log-excerpt with two sessions:
# 192.0.2.1 (sshd[32307]) makes 2 failed attempts using public keys (without "Disconnecting: Too many authentication"),
# and delayed success on accepted (STR_ML_SSHD_OK) or no success by close on preauth phase (STR_ML_SSHD_FAIL)
# 192.0.2.2 (sshd[32310]) makes 2 failed attempts using public keys (with "Disconnecting: Too many authentication"),
# and closed on preauth phase
STR_ML_SSHD = """Nov 28 09:16:03 srv sshd[32307]: Failed publickey for git from 192.0.2.1 port 57904 ssh2: ECDSA 0e:ff:xx:xx:xx:xx:xx:xx:xx:xx:xx:...
Nov 28 09:16:03 srv sshd[32307]: Failed publickey for git from 192.0.2.1 port 57904 ssh2: RSA 04:bc:xx:xx:xx:xx:xx:xx:xx:xx:xx:...
Nov 28 09:16:03 srv sshd[32307]: Postponed publickey for git from 192.0.2.1 port 57904 ssh2 [preauth]
Nov 28 09:16:05 srv sshd[32310]: Failed publickey for git from 192.0.2.2 port 57910 ssh2: ECDSA 1e:fe:xx:xx:xx:xx:xx:xx:xx:xx:xx:...
Nov 28 09:16:05 srv sshd[32310]: Failed publickey for git from 192.0.2.2 port 57910 ssh2: RSA 14:ba:xx:xx:xx:xx:xx:xx:xx:xx:xx:...
Nov 28 09:16:05 srv sshd[32310]: Disconnecting: Too many authentication failures for git [preauth]
Nov 28 09:16:05 srv sshd[32310]: Connection closed by 192.0.2.2 [preauth]"""
STR_ML_SSHD_OK = "Nov 28 09:16:06 srv sshd[32307]: Accepted publickey for git from 192.0.2.1 port 57904 ssh2: DSA 36:48:xx:xx:xx:xx:xx:xx:xx:xx:xx:..."
STR_ML_SSHD_FAIL = "Nov 28 09:16:06 srv sshd[32307]: Connection closed by 192.0.2.1 [preauth]"
FILENAME_SSHD = os.path.join(TEST_FILES_DIR, "logs", "sshd")
FILTER_SSHD = os.path.join(CONFIG_DIR, 'filter.d', 'sshd.conf')
FILENAME_ZZZ_SSHD = os.path.join(TEST_FILES_DIR, 'zzz-sshd-obsolete-multiline.log')
@ -291,10 +307,10 @@ class Fail2banRegexTest(LogCaptureTestCase):
#
self.assertTrue(_test_exec(
"--usedns", "no", "-d", "^Epoch", "--print-all-matched",
"1490349000 FAIL: failure\nhost: 192.0.2.35",
"-L", "2", "1490349000 FAIL: failure\nhost: 192.0.2.35",
r"^\s*FAIL:\s*.*\nhost:\s+<HOST>$"
))
self.assertLogged('Lines: 1 lines, 0 ignored, 1 matched, 0 missed')
self.assertLogged('Lines: 2 lines, 0 ignored, 2 matched, 0 missed')
def testRegexEpochPatterns(self):
self.assertTrue(_test_exec(
@ -340,6 +356,55 @@ class Fail2banRegexTest(LogCaptureTestCase):
self.assertTrue(_test_exec('-o', 'user', STR_00, RE_00_USER))
self.assertLogged('kevin')
self.pruneLog()
# complex substitution using tags (ip, user, family):
self.assertTrue(_test_exec('-o', '<ip>, <F-USER>, <family>', STR_00, RE_00_USER))
self.assertLogged('192.0.2.0, kevin, inet4')
self.pruneLog()
def testFrmtOutputWrapML(self):
"""Check `-o` output substitution for multi-line (buffered) sshd matches.

Runs fail2ban-regex over STR_ML_SSHD (two sshd sessions, see the
constant's comment) with the stock sshd filter and verifies that tags
resolved late (ip/user) are merged into every matched message line.
Requires the stock config, hence the skip guard.
"""
unittest.F2B.SkipIfCfgMissing(stock=True)
# complex substitution using tags and message (ip, user, msg):
self.assertTrue(_test_exec('-o', '<ip>, <F-USER>, <msg>',
'-c', CONFIG_DIR, '--usedns', 'no',
STR_ML_SSHD + "\n" + STR_ML_SSHD_OK, 'sshd[logtype=short, publickey=invalid]'))
# be sure we don't have IP in one line and have it in another:
lines = STR_ML_SSHD.split("\n")
self.assertTrue('192.0.2.2' not in lines[-2] and '192.0.2.2' in lines[-1])
# but both are in output "merged" with IP and user:
self.assertLogged(
'192.0.2.2, git, '+lines[-2],
'192.0.2.2, git, '+lines[-1],
all=True)
# nothing should be found for 192.0.2.1 (mode is not aggressive):
self.assertNotLogged('192.0.2.1, git, ')
# test with publickey (nofail) - would not produce output for 192.0.2.1 because accepted:
self.pruneLog("[test-phase 1] mode=aggressive & publickey=nofail + OK (accepted)")
self.assertTrue(_test_exec('-o', '<ip>, <F-USER>, <msg>',
'-c', CONFIG_DIR, '--usedns', 'no',
STR_ML_SSHD + "\n" + STR_ML_SSHD_OK, 'sshd[logtype=short, mode=aggressive]'))
# aggressive mode: all four buffered 192.0.2.2 lines are reported:
self.assertLogged(
'192.0.2.2, git, '+lines[-4],
'192.0.2.2, git, '+lines[-3],
'192.0.2.2, git, '+lines[-2],
'192.0.2.2, git, '+lines[-1],
all=True)
# nothing should be found for 192.0.2.1 (access gained so failures ignored):
self.assertNotLogged('192.0.2.1, git, ')
# now same test but "accepted" replaced with "closed" on preauth phase:
self.pruneLog("[test-phase 2] mode=aggressive & publickey=nofail + FAIL (closed on preauth)")
self.assertTrue(_test_exec('-o', '<ip>, <F-USER>, <msg>',
'-c', CONFIG_DIR, '--usedns', 'no',
STR_ML_SSHD + "\n" + STR_ML_SSHD_FAIL, 'sshd[logtype=short, mode=aggressive]'))
# 192.0.2.1 should be found for every failure (2x failed key + 1x closed):
lines = STR_ML_SSHD.split("\n")[0:2] + STR_ML_SSHD_FAIL.split("\n")[-1:]
self.assertLogged(
'192.0.2.1, git, '+lines[-3],
'192.0.2.1, git, '+lines[-2],
'192.0.2.1, git, '+lines[-1],
all=True)
def testWrongFilterFile(self):
# use test log as filter file to cover error cases...

View File

@ -137,6 +137,11 @@ Jan 14 16:18:16 xxx postfix/smtpd[14933]: warning: host[192.0.2.5]: SASL CRAM-MD
# filterOptions: [{"mode": "ddos"}, {"mode": "aggressive"}]
# failJSON: { "time": "2005-02-10T13:26:34", "match": true , "host": "192.0.2.1" }
Feb 10 13:26:34 srv postfix/smtpd[123]: disconnect from unknown[192.0.2.1] helo=1 auth=0/1 quit=1 commands=2/3
# failJSON: { "time": "2005-02-10T13:26:34", "match": true , "host": "192.0.2.2" }
Feb 10 13:26:34 srv postfix/smtpd[123]: disconnect from unknown[192.0.2.2] ehlo=1 auth=0/1 rset=1 quit=1 commands=3/4
# failJSON: { "time": "2005-02-18T09:45:10", "match": true , "host": "192.0.2.10" }
Feb 18 09:45:10 xxx postfix/smtpd[42]: lost connection after CONNECT from spammer.example.com[192.0.2.10]
# failJSON: { "time": "2005-02-18T09:45:12", "match": true , "host": "192.0.2.42" }

View File

@ -134,7 +134,7 @@ Sep 29 17:15:02 spaceman sshd[12946]: Failed password for user from 127.0.0.1 po
# failJSON: { "time": "2004-09-29T17:15:02", "match": true , "host": "127.0.0.1", "desc": "Injecting while exhausting initially present {0,100} match length limits set for ruser etc" }
Sep 29 17:15:02 spaceman sshd[12946]: Failed password for user from 127.0.0.1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
# failJSON: { "time": "2004-09-29T17:15:03", "match": true , "host": "aaaa:bbbb:cccc:1234::1:1", "desc": "Injecting while exhausting initially present {0,100} match length limits set for ruser etc" }
Sep 29 17:15:03 spaceman sshd[12946]: Failed password for user from aaaa:bbbb:cccc:1234::1:1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
Sep 29 17:15:03 spaceman sshd[12947]: Failed password for user from aaaa:bbbb:cccc:1234::1:1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
# failJSON: { "time": "2004-11-11T08:04:51", "match": true , "host": "127.0.0.1", "desc": "Injecting on username ssh 'from 10.10.1.1'@localhost" }
Nov 11 08:04:51 redbamboo sshd[2737]: Failed password for invalid user from 10.10.1.1 from 127.0.0.1 port 58946 ssh2
@ -166,9 +166,11 @@ Nov 28 09:16:03 srv sshd[32307]: Connection closed by 192.0.2.1
Nov 28 09:16:05 srv sshd[32310]: Failed publickey for git from 192.0.2.111 port 57910 ssh2: ECDSA 1e:fe:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx
# failJSON: { "match": false }
Nov 28 09:16:05 srv sshd[32310]: Failed publickey for git from 192.0.2.111 port 57910 ssh2: RSA 14:ba:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx
# failJSON: { "match": false }
# failJSON: { "constraint": "name == 'sshd'", "time": "2004-11-28T09:16:05", "match": true , "attempts": 3, "desc": "Should catch failure - no success/no accepted public key" }
Nov 28 09:16:05 srv sshd[32310]: Disconnecting: Too many authentication failures for git [preauth]
# failJSON: { "time": "2004-11-28T09:16:05", "match": true , "host": "192.0.2.111", "desc": "Should catch failure - no success/no accepted public key" }
# failJSON: { "constraint": "opts.get('mode') != 'aggressive'", "match": false, "desc": "Nofail in normal mode, failure already produced above" }
Nov 28 09:16:05 srv sshd[32310]: Connection closed by 192.0.2.111 [preauth]
# failJSON: { "constraint": "opts.get('mode') == 'aggressive'", "time": "2004-11-28T09:16:05", "match": true , "host": "192.0.2.111", "attempts":1, "desc": "Matches in aggressive mode only" }
Nov 28 09:16:05 srv sshd[32310]: Connection closed by 192.0.2.111 [preauth]
# failJSON: { "match": false }
@ -215,7 +217,7 @@ Apr 27 13:02:04 host sshd[29116]: Received disconnect from 1.2.3.4: 11: Normal S
# Match sshd auth errors on OpenSUSE systems (gh-1024)
# failJSON: { "match": false, "desc": "No failure until closed or another fail (e. g. F-MLFFORGET by success/accepted password can avoid failure, see gh-2070)" }
2015-04-16T18:02:50.321974+00:00 host sshd[2716]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=192.0.2.112 user=root
# failJSON: { "time": "2015-04-16T20:02:50", "match": true , "host": "192.0.2.112", "desc": "Should catch failure - no success/no accepted password" }
# failJSON: { "constraint": "opts.get('mode') == 'aggressive'", "time": "2015-04-16T20:02:50", "match": true , "host": "192.0.2.112", "desc": "Should catch failure - no success/no accepted password" }
2015-04-16T18:02:50.568798+00:00 host sshd[2716]: Connection closed by 192.0.2.112 [preauth]
# disable this test-cases block for obsolete multi-line filter (zzz-sshd-obsolete...):
@ -238,7 +240,7 @@ Mar 7 18:53:20 bar sshd[1556]: Connection closed by 192.0.2.113
Mar 7 18:53:22 bar sshd[1558]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.114
# failJSON: { "time": "2005-03-07T18:53:23", "match": true , "attempts": 2, "users": ["root", "sudoer"], "host": "192.0.2.114", "desc": "Failure: attempt 2nd user" }
Mar 7 18:53:23 bar sshd[1558]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=sudoer rhost=192.0.2.114
# failJSON: { "time": "2005-03-07T18:53:24", "match": true , "attempts": 2, "users": ["root", "sudoer", "known"], "host": "192.0.2.114", "desc": "Failure: attempt 3rd user" }
# failJSON: { "time": "2005-03-07T18:53:24", "match": true , "attempts": 1, "users": ["root", "sudoer", "known"], "host": "192.0.2.114", "desc": "Failure: attempt 3rd user" }
Mar 7 18:53:24 bar sshd[1558]: Accepted password for known from 192.0.2.114 port 52100 ssh2
# failJSON: { "match": false , "desc": "No failure" }
Mar 7 18:53:24 bar sshd[1558]: pam_unix(sshd:session): session opened for user known by (uid=0)
@ -248,14 +250,14 @@ Mar 7 18:53:24 bar sshd[1558]: pam_unix(sshd:session): session opened for user
Mar 7 18:53:32 bar sshd[1559]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.116
# failJSON: { "match": false , "desc": "Still no failure (second try, same user)" }
Mar 7 18:53:32 bar sshd[1559]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.116
# failJSON: { "time": "2005-03-07T18:53:34", "match": true , "attempts": 2, "users": ["root", "known"], "host": "192.0.2.116", "desc": "Failure: attempt 2nd user" }
# failJSON: { "time": "2005-03-07T18:53:34", "match": true , "attempts": 3, "users": ["root", "known"], "host": "192.0.2.116", "desc": "Failure: attempt 2nd user" }
Mar 7 18:53:34 bar sshd[1559]: Accepted password for known from 192.0.2.116 port 52100 ssh2
# failJSON: { "match": false , "desc": "No failure" }
Mar 7 18:53:38 bar sshd[1559]: Connection closed by 192.0.2.116
# failJSON: { "time": "2005-03-19T16:47:48", "match": true , "attempts": 1, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt invalid user" }
Mar 19 16:47:48 test sshd[5672]: Invalid user admin from 192.0.2.117 port 44004
# failJSON: { "time": "2005-03-19T16:47:49", "match": true , "attempts": 2, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt to change user (disallowed)" }
# failJSON: { "time": "2005-03-19T16:47:49", "match": true , "attempts": 1, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt to change user (disallowed)" }
Mar 19 16:47:49 test sshd[5672]: Disconnecting invalid user admin 192.0.2.117 port 44004: Change of username or service not allowed: (admin,ssh-connection) -> (user,ssh-connection) [preauth]
# failJSON: { "time": "2005-03-19T16:47:50", "match": false, "desc": "Disconnected during preauth phase (no failure in normal mode)" }
Mar 19 16:47:50 srv sshd[5672]: Disconnected from authenticating user admin 192.0.2.6 port 33553 [preauth]
@ -332,7 +334,7 @@ Nov 26 13:03:30 srv sshd[45]: fatal: Unable to negotiate with 192.0.2.2 port 554
Nov 26 15:03:30 host sshd[22440]: Connection from 192.0.2.3 port 39678 on 192.168.1.9 port 22
# failJSON: { "time": "2004-11-26T15:03:31", "match": true , "host": "192.0.2.3", "desc": "Multiline - no matching key exchange method" }
Nov 26 15:03:31 host sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]
# failJSON: { "time": "2004-11-26T15:03:32", "match": true , "host": "192.0.2.3", "filter": "sshd", "desc": "Second attempt within the same connect" }
# failJSON: { "time": "2004-11-26T15:03:32", "match": true , "host": "192.0.2.3", "constraint": "name == 'sshd'", "desc": "Second attempt within the same connect" }
Nov 26 15:03:32 host sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]
# gh-1943 (previous OpenSSH log-format)

View File

@ -135,7 +135,7 @@ srv sshd[12946]: Failed password for user from 127.0.0.1 port 20000 ssh1: ruser
# failJSON: { "match": true , "host": "127.0.0.1", "desc": "Injecting while exhausting initially present {0,100} match length limits set for ruser etc" }
srv sshd[12946]: Failed password for user from 127.0.0.1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
# failJSON: { "match": true , "host": "aaaa:bbbb:cccc:1234::1:1", "desc": "Injecting while exhausting initially present {0,100} match length limits set for ruser etc" }
srv sshd[12946]: Failed password for user from aaaa:bbbb:cccc:1234::1:1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
srv sshd[12947]: Failed password for user from aaaa:bbbb:cccc:1234::1:1 port 20000 ssh1: ruser XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX from 1.2.3.4
# failJSON: { "match": true , "host": "127.0.0.1", "desc": "Injecting on username ssh 'from 10.10.1.1'@localhost" }
srv sshd[2737]: Failed password for invalid user from 10.10.1.1 from 127.0.0.1 port 58946 ssh2
@ -167,9 +167,11 @@ srv sshd[32307]: Connection closed by 192.0.2.1
srv sshd[32310]: Failed publickey for git from 192.0.2.111 port 57910 ssh2: ECDSA 1e:fe:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx
# failJSON: { "match": false }
srv sshd[32310]: Failed publickey for git from 192.0.2.111 port 57910 ssh2: RSA 14:ba:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx:xx
# failJSON: { "match": false }
# failJSON: { "match": true , "attempts": 3, "desc": "Should catch failure - no success/no accepted public key" }
srv sshd[32310]: Disconnecting: Too many authentication failures for git [preauth]
# failJSON: { "match": true , "host": "192.0.2.111", "desc": "Should catch failure - no success/no accepted public key" }
# failJSON: { "constraint": "opts.get('mode') != 'aggressive'", "match": false, "desc": "Nofail in normal mode, failure already produced above" }
srv sshd[32310]: Connection closed by 192.0.2.111 [preauth]
# failJSON: { "constraint": "opts.get('mode') == 'aggressive'", "match": true , "host": "192.0.2.111", "attempts":1, "desc": "Matches in aggressive mode only" }
srv sshd[32310]: Connection closed by 192.0.2.111 [preauth]
# failJSON: { "match": false }
@ -216,7 +218,7 @@ srv sshd[29116]: Received disconnect from 1.2.3.4: 11: Normal Shutdown, Thank yo
# Match sshd auth errors on OpenSUSE systems (gh-1024)
# failJSON: { "match": false, "desc": "No failure until closed or another fail (e. g. F-MLFFORGET by success/accepted password can avoid failure, see gh-2070)" }
srv sshd[2716]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=192.0.2.112 user=root
# failJSON: { "match": true , "host": "192.0.2.112", "desc": "Should catch failure - no success/no accepted password" }
# failJSON: { "constraint": "opts.get('mode') == 'aggressive'", "match": true , "host": "192.0.2.112", "desc": "Should catch failure - no success/no accepted password" }
srv sshd[2716]: Connection closed by 192.0.2.112 [preauth]
# filterOptions: [{}]
@ -238,7 +240,7 @@ srv sshd[1556]: Connection closed by 192.0.2.113
srv sshd[1558]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.114
# failJSON: { "match": true , "attempts": 2, "users": ["root", "sudoer"], "host": "192.0.2.114", "desc": "Failure: attempt 2nd user" }
srv sshd[1558]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=sudoer rhost=192.0.2.114
# failJSON: { "match": true , "attempts": 2, "users": ["root", "sudoer", "known"], "host": "192.0.2.114", "desc": "Failure: attempt 3rd user" }
# failJSON: { "match": true , "attempts": 1, "users": ["root", "sudoer", "known"], "host": "192.0.2.114", "desc": "Failure: attempt 3rd user" }
srv sshd[1558]: Accepted password for known from 192.0.2.114 port 52100 ssh2
# failJSON: { "match": false , "desc": "No failure" }
srv sshd[1558]: pam_unix(sshd:session): session opened for user known by (uid=0)
@ -248,14 +250,14 @@ srv sshd[1558]: pam_unix(sshd:session): session opened for user known by (uid=0)
srv sshd[1559]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.116
# failJSON: { "match": false , "desc": "Still no failure (second try, same user)" }
srv sshd[1559]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser=root rhost=192.0.2.116
# failJSON: { "match": true , "attempts": 2, "users": ["root", "known"], "host": "192.0.2.116", "desc": "Failure: attempt 2nd user" }
# failJSON: { "match": true , "attempts": 3, "users": ["root", "known"], "host": "192.0.2.116", "desc": "Failure: attempt 2nd user" }
srv sshd[1559]: Accepted password for known from 192.0.2.116 port 52100 ssh2
# failJSON: { "match": false , "desc": "No failure" }
srv sshd[1559]: Connection closed by 192.0.2.116
# failJSON: { "match": true , "attempts": 1, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt invalid user" }
srv sshd[5672]: Invalid user admin from 192.0.2.117 port 44004
# failJSON: { "match": true , "attempts": 2, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt to change user (disallowed)" }
# failJSON: { "match": true , "attempts": 1, "user": "admin", "host": "192.0.2.117", "desc": "Failure: attempt to change user (disallowed)" }
srv sshd[5672]: Disconnecting invalid user admin 192.0.2.117 port 44004: Change of username or service not allowed: (admin,ssh-connection) -> (user,ssh-connection) [preauth]
# failJSON: { "match": false, "desc": "Disconnected during preauth phase (no failure in normal mode)" }
srv sshd[5672]: Disconnected from authenticating user admin 192.0.2.6 port 33553 [preauth]
@ -325,7 +327,7 @@ srv sshd[45]: fatal: Unable to negotiate with 192.0.2.2 port 55419: no matching
srv sshd[22440]: Connection from 192.0.2.3 port 39678 on 192.168.1.9 port 22
# failJSON: { "match": true , "host": "192.0.2.3", "desc": "Multiline - no matching key exchange method" }
srv sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]
# failJSON: { "match": true , "host": "192.0.2.3", "filter": "sshd", "desc": "Second attempt within the same connect" }
# failJSON: { "match": true , "host": "192.0.2.3", "constraint": "name == 'sshd'", "desc": "Second attempt within the same connect" }
srv sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]
# gh-1943 (previous OpenSSH log-format)

View File

@ -63,10 +63,7 @@ def open(*args):
if len(args) == 2:
# ~50kB buffer should be sufficient for all tests here.
args = args + (50000,)
if sys.version_info >= (3,):
return fopen(*args, **{'encoding': 'utf-8', 'errors': 'ignore'})
else:
return fopen(*args)
return fopen(*args)
def _killfile(f, name):
@ -200,7 +197,7 @@ def _copy_lines_between_files(in_, fout, n=None, skip=0, mode='a', terminal_line
# polling filter could detect the change
mtimesleep()
if isinstance(in_, str): # pragma: no branch - only used with str in test cases
fin = open(in_, 'r')
fin = open(in_, 'rb')
else:
fin = in_
# Skip
@ -210,7 +207,7 @@ def _copy_lines_between_files(in_, fout, n=None, skip=0, mode='a', terminal_line
i = 0
lines = []
while n is None or i < n:
l = fin.readline()
l = FileContainer.decode_line(in_, 'UTF-8', fin.readline()).rstrip('\r\n')
if terminal_line is not None and l == terminal_line:
break
lines.append(l)
@ -238,7 +235,7 @@ def _copy_lines_to_journal(in_, fields={},n=None, skip=0, terminal_line=""): # p
Returns None
"""
if isinstance(in_, str): # pragma: no branch - only used with str in test cases
fin = open(in_, 'r')
fin = open(in_, 'rb')
else:
fin = in_
# Required for filtering
@ -249,7 +246,7 @@ def _copy_lines_to_journal(in_, fields={},n=None, skip=0, terminal_line=""): # p
# Read/Write
i = 0
while n is None or i < n:
l = fin.readline()
l = FileContainer.decode_line(in_, 'UTF-8', fin.readline()).rstrip('\r\n')
if terminal_line is not None and l == terminal_line:
break
journal.send(MESSAGE=l.strip(), **fields)
@ -1583,9 +1580,9 @@ class GetFailures(LogCaptureTestCase):
# We first adjust logfile/failures to end with CR+LF
fname = tempfile.mktemp(prefix='tmp_fail2ban', suffix='crlf')
# poor man unix2dos:
fin, fout = open(GetFailures.FILENAME_01), open(fname, 'w')
for l in fin.readlines():
fout.write('%s\r\n' % l.rstrip('\n'))
fin, fout = open(GetFailures.FILENAME_01, 'rb'), open(fname, 'wb')
for l in fin.read().splitlines():
fout.write(l + b'\r\n')
fin.close()
fout.close()

View File

@ -32,7 +32,7 @@ import sys
import time
import unittest
from ..server.failregex import Regex
from ..server.filter import Filter
from ..server.filter import Filter, FileContainer
from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime, TEST_NOW, CONFIG_DIR
@ -157,10 +157,11 @@ def testSampleRegexsFactory(name, basedir):
while i < len(filenames):
filename = filenames[i]; i += 1;
logFile = fileinput.FileInput(os.path.join(TEST_FILES_DIR, "logs",
filename))
filename), mode='rb')
ignoreBlock = False
for line in logFile:
line = FileContainer.decode_line(logFile.filename(), 'UTF-8', line)
jsonREMatch = re.match("^#+ ?(failJSON|(?:file|filter)Options|addFILE):(.+)$", line)
if jsonREMatch:
try:
@ -202,6 +203,7 @@ def testSampleRegexsFactory(name, basedir):
raise ValueError("%s: %s:%i" %
(e, logFile.filename(), logFile.filelineno()))
line = next(logFile)
line = FileContainer.decode_line(logFile.filename(), 'UTF-8', line)
elif ignoreBlock or line.startswith("#") or not line.strip():
continue
else: # pragma: no cover - normally unreachable
@ -214,11 +216,14 @@ def testSampleRegexsFactory(name, basedir):
flt = self._readFilter(fltName, name, basedir, opts=None)
self._filterTests = [(fltName, flt, {})]
line = line.rstrip('\r\n')
# process line using several filter options (if specified in the test-file):
for fltName, flt, opts in self._filterTests:
# Bypass if constraint (as expression) is not valid:
if faildata.get('constraint') and not eval(faildata['constraint']):
continue
flt, regexsUsedIdx = flt
regexList = flt.getFailRegex()
failregex = -1
try:
fail = {}
@ -228,21 +233,24 @@ def testSampleRegexsFactory(name, basedir):
else: # simulate journal processing, time is known from journal (formatJournalEntry):
if opts.get('test.prefix-line'): # journal backends creates common prefix-line:
line = opts.get('test.prefix-line') + line
ret = flt.processLine(('', TEST_NOW_STR, line.rstrip('\r\n')), TEST_NOW)
if not ret:
# Bypass if filter constraint specified:
if faildata.get('filter') and name != faildata.get('filter'):
continue
# Check line is flagged as none match
self.assertFalse(faildata.get('match', True),
"Line not matched when should have")
continue
ret = flt.processLine(('', TEST_NOW_STR, line), TEST_NOW)
if ret:
# filter matched only (in checkAllRegex mode it could return 'nofail' too):
found = []
for ret in ret:
failregex, fid, fail2banTime, fail = ret
# bypass pending and nofail:
if fid is None or fail.get('nofail'):
regexsUsedIdx.add(failregex)
regexsUsedRe.add(regexList[failregex])
continue
found.append(ret)
ret = found
failregex, fid, fail2banTime, fail = ret[0]
# Bypass no failure helpers-regexp:
if not faildata.get('match', False) and (fid is None or fail.get('nofail')):
regexsUsedIdx.add(failregex)
regexsUsedRe.add(regexList[failregex])
if not ret:
# Check line is flagged as none match
self.assertFalse(faildata.get('match', False),
"Line not matched when should have")
continue
# Check line is flagged to match
@ -251,39 +259,41 @@ def testSampleRegexsFactory(name, basedir):
self.assertEqual(len(ret), 1,
"Multiple regexs matched %r" % (map(lambda x: x[0], ret)))
# Verify match captures (at least fid/host) and timestamp as expected
for k, v in faildata.iteritems():
if k not in ("time", "match", "desc", "filter"):
fv = fail.get(k, None)
if fv is None:
# Fallback for backwards compatibility (previously no fid, was host only):
if k == "host":
fv = fid
# special case for attempts counter:
if k == "attempts":
fv = len(fail.get('matches', {}))
# compare sorted (if set)
if isinstance(fv, (set, list, dict)):
self.assertSortedEqual(fv, v)
continue
self.assertEqual(fv, v)
for ret in ret:
failregex, fid, fail2banTime, fail = ret
# Verify match captures (at least fid/host) and timestamp as expected
for k, v in faildata.iteritems():
if k not in ("time", "match", "desc", "constraint"):
fv = fail.get(k, None)
if fv is None:
# Fallback for backwards compatibility (previously no fid, was host only):
if k == "host":
fv = fid
# special case for attempts counter:
if k == "attempts":
fv = len(fail.get('matches', {}))
# compare sorted (if set)
if isinstance(fv, (set, list, dict)):
self.assertSortedEqual(fv, v)
continue
self.assertEqual(fv, v)
t = faildata.get("time", None)
if t is not None:
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
jsonTime = time.mktime(jsonTimeLocal.timetuple())
jsonTime += jsonTimeLocal.microsecond / 1000000.0
self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime) )
t = faildata.get("time", None)
if t is not None:
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
jsonTime = time.mktime(jsonTimeLocal.timetuple())
jsonTime += jsonTimeLocal.microsecond / 1000000.0
self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime) )
regexsUsedIdx.add(failregex)
regexsUsedRe.add(regexList[failregex])
regexsUsedIdx.add(failregex)
regexsUsedRe.add(regexList[failregex])
except AssertionError as e: # pragma: no cover
import pprint
raise AssertionError("%s: %s on: %s:%i, line:\n %sregex (%s):\n %s\n"