mirror of https://github.com/fail2ban/fail2ban
Merge branch '0.10' into 0.10-full
commit 3cf068670c
@@ -29,24 +29,12 @@ import os
 from ConfigParser import NoOptionError, NoSectionError
 
 from .configparserinc import sys, SafeConfigParserWithIncludes, logLevel
-from ..helpers import getLogger, substituteRecursiveTags
+from ..helpers import getLogger, _merge_dicts, substituteRecursiveTags
 
 # Gets the instance of the logger.
 logSys = getLogger(__name__)
 
-
-# if sys.version_info >= (3,5):
-#     def _merge_dicts(x, y):
-#         return {**x, **y}
-# else:
-def _merge_dicts(x, y):
-    r = x
-    if y:
-        r = x.copy()
-        r.update(y)
-    return r
-
-
 class ConfigReader():
     """Generic config reader class.
 
@@ -49,14 +49,14 @@ from ..version import version
 from .jailreader import JailReader
 from .filterreader import FilterReader
 from ..server.filter import Filter, FileContainer
-from ..server.failregex import RegexException
+from ..server.failregex import Regex, RegexException
 
 from ..helpers import str2LogLevel, getVerbosityFormat, FormatterWithTraceBack, getLogger, PREFER_ENC
 # Gets the instance of the logger.
 logSys = getLogger("fail2ban")
 
-def debuggexURL(sample, regex):
-    q = urllib.urlencode({ 're': regex.replace('<HOST>', '(?&.ipv4)'),
+def debuggexURL(sample, regex, useDns="yes"):
+    q = urllib.urlencode({ 're': Regex._resolveHostTag(regex, useDns=useDns),
        'str': sample,
        'flavor': 'python' })
    return 'https://www.debuggex.com/?' + q
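Here the Debuggex helper stops hard-coding a `<HOST>` substitution: it imports `Regex` and asks `Regex._resolveHostTag` to expand the tag according to the `usedns` value that is now passed in. Below is a minimal, self-contained sketch of the same shape for experimentation; `resolve_host_tag` is a crude stand-in for `Regex._resolveHostTag` (an assumption, not fail2ban's implementation), and the sketch uses the Python 3 spelling of `urlencode` where the script itself calls `urllib.urlencode`.

from urllib.parse import urlencode

def resolve_host_tag(regex, useDns="yes"):
    # crude stand-in: expand <HOST> to a named IPv4 group only
    return regex.replace('<HOST>', r'(?P<host>\d{1,3}(?:\.\d{1,3}){3})')

def debuggex_url(sample, regex, useDns="yes"):
    q = urlencode({'re': resolve_host_tag(regex, useDns),
                   'str': sample,
                   'flavor': 'python'})
    return 'https://www.debuggex.com/?' + q

print(debuggex_url('Failed password for root from 192.0.2.1 port 22 ssh2',
                   r'Failed password for \S+ from <HOST>'))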
@@ -198,15 +198,17 @@ class RegexStat(object):
 class LineStats(object):
     """Just a convenience container for stats
     """
-    def __init__(self):
+    def __init__(self, opts):
        self.tested = self.matched = 0
        self.matched_lines = []
        self.missed = 0
        self.missed_lines = []
-       self.missed_lines_timeextracted = []
        self.ignored = 0
        self.ignored_lines = []
-       self.ignored_lines_timeextracted = []
+       if opts.debuggex:
+           self.matched_lines_timeextracted = []
+           self.missed_lines_timeextracted = []
+           self.ignored_lines_timeextracted = []
 
     def __str__(self):
        return "%(tested)d lines, %(ignored)d ignored, %(matched)d matched, %(missed)d missed" % self
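`LineStats` now receives the parsed command-line options and only allocates the `*_lines_timeextracted` lists when `--debuggex` output was requested; later hunks in this commit guard every append and pop on those lists with the same flag. A rough illustration of the effect, using `argparse.Namespace` in place of the real option object (the sketch is not the fail2ban class):

from argparse import Namespace

class LineStatsSketch(object):
    """Cut-down illustration of LineStats' new constructor."""
    def __init__(self, opts):
        self.tested = self.matched = self.missed = self.ignored = 0
        self.matched_lines, self.missed_lines, self.ignored_lines = [], [], []
        # the date-stripped copies are only consumed by the Debuggex URL printer
        if opts.debuggex:
            self.matched_lines_timeextracted = []
            self.missed_lines_timeextracted = []
            self.ignored_lines_timeextracted = []

print(hasattr(LineStatsSketch(Namespace(debuggex=False)), 'matched_lines_timeextracted'))  # False
print(hasattr(LineStatsSketch(Namespace(debuggex=True)), 'matched_lines_timeextracted'))   # True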
@@ -230,7 +232,7 @@ class Fail2banRegex(object):
        self._ignoreregex = list()
        self._failregex = list()
        self._time_elapsed = None
-       self._line_stats = LineStats()
+       self._line_stats = LineStats(opts)
 
        if opts.maxlines:
            self.setMaxLines(opts.maxlines)
@@ -414,9 +416,10 @@ class Fail2banRegex(object):
            try:
                self._line_stats.missed_lines.pop(
                    self._line_stats.missed_lines.index("".join(bufLine)))
-               self._line_stats.missed_lines_timeextracted.pop(
-                   self._line_stats.missed_lines_timeextracted.index(
-                       "".join(bufLine[::2])))
+               if self._debuggex:
+                   self._line_stats.missed_lines_timeextracted.pop(
+                       self._line_stats.missed_lines_timeextracted.index(
+                           "".join(bufLine[::2])))
            except ValueError:
                pass
            else:
@@ -443,19 +446,23 @@ class Fail2banRegex(object):
                self._line_stats.ignored += 1
                if not self._print_no_ignored and (self._print_all_ignored or self._line_stats.ignored <= self._maxlines + 1):
                    self._line_stats.ignored_lines.append(line)
-                   self._line_stats.ignored_lines_timeextracted.append(line_datetimestripped)
+                   if self._debuggex:
+                       self._line_stats.ignored_lines_timeextracted.append(line_datetimestripped)
 
            if len(ret) > 0:
                assert(not is_ignored)
                self._line_stats.matched += 1
                if self._print_all_matched:
                    self._line_stats.matched_lines.append(line)
+                   if self._debuggex:
+                       self._line_stats.matched_lines_timeextracted.append(line_datetimestripped)
            else:
                if not is_ignored:
                    self._line_stats.missed += 1
                    if not self._print_no_missed and (self._print_all_missed or self._line_stats.missed <= self._maxlines + 1):
                        self._line_stats.missed_lines.append(line)
-                       self._line_stats.missed_lines_timeextracted.append(line_datetimestripped)
+                       if self._debuggex:
+                           self._line_stats.missed_lines_timeextracted.append(line_datetimestripped)
            self._line_stats.tested += 1
 
        self._time_elapsed = time.time() - t0
@@ -478,7 +485,7 @@ class Fail2banRegex(object):
                for arg in [l, regexlist]:
                    ans = [ x + [y] for x in ans for y in arg ]
                b = map(lambda a: a[0] + ' | ' + a[1].getFailRegex() + ' | ' +
-                   debuggexURL(self.encode_line(a[0]), a[1].getFailRegex()), ans)
+                   debuggexURL(self.encode_line(a[0]), a[1].getFailRegex(), self._opts.usedns), ans)
                pprint_list([x.rstrip() for x in b], header)
            else:
                output( "%s too many to print. Use --print-all-%s " \
@@ -169,6 +169,36 @@ def splitwords(s):
        return []
    return filter(bool, map(str.strip, re.split('[ ,\n]+', s)))
 
+if sys.version_info >= (3,5):
+   eval(compile(r'''if 1:
+   def _merge_dicts(x, y):
+       """Helper to merge dicts.
+       """
+       if y:
+           return {**x, **y}
+       return x
+
+   def _merge_copy_dicts(x, y):
+       """Helper to merge dicts to guarantee a copy result (r is never x).
+       """
+       return {**x, **y}
+   ''', __file__, 'exec'))
+else:
+   def _merge_dicts(x, y):
+       """Helper to merge dicts.
+       """
+       r = x
+       if y:
+           r = x.copy()
+           r.update(y)
+       return r
+   def _merge_copy_dicts(x, y):
+       """Helper to merge dicts to guarantee a copy result (r is never x).
+       """
+       r = x.copy()
+       if y:
+           r.update(y)
+       return r
 
 #
 # Following "uni_decode" function unified python independent any to string converting
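The `{**x, **y}` merge syntax only parses on Python 3.5 or newer, so compiling it unconditionally would raise a SyntaxError on Python 2; wrapping the definitions in `eval(compile(r'''if 1: ...'''))` defers parsing until the version check has passed (the `if 1:` prefix merely allows the indented block inside the string). Behaviourally, `_merge_dicts` may return `x` itself when `y` is empty, while `_merge_copy_dicts` always returns a fresh dict, as its docstring says. A small check of those contracts, re-implemented inline so it runs on any Python 3 (not the fail2ban code itself):

def merge_dicts(x, y):
    # may hand back x unchanged when there is nothing to merge
    return {**x, **y} if y else x

def merge_copy_dicts(x, y):
    # always a fresh dict, even when y is empty (the result is never x)
    return {**x, **y}

base = {'a': 1}
assert merge_dicts(base, {}) is base
assert merge_copy_dicts(base, {}) is not base
assert merge_dicts(base, {'b': 2}) == {'a': 1, 'b': 2}
print("merge helpers behave as described")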
@@ -240,6 +270,7 @@ def substituteRecursiveTags(inptags, conditional='',
    # init:
    ignore = set(ignore)
    done = set()
+   noRecRepl = hasattr(tags, "getRawItem")
    # repeat substitution while embedded-recursive (repFlag is True)
    while True:
        repFlag = False
@@ -247,6 +278,8 @@ def substituteRecursiveTags(inptags, conditional='',
        for tag in tags.iterkeys():
            # ignore escaped or already done (or in ignore list):
            if tag in ignore or tag in done: continue
+           # ignore replacing callable items from calling map - should be converted on demand only (by get):
+           if noRecRepl and callable(tags.getRawItem(tag)): continue
            value = orgval = str(tags[tag])
            # search and replace all tags within value, that can be interpolated using other tags:
            m = tre_search(value)
@@ -281,6 +314,8 @@ def substituteRecursiveTags(inptags, conditional='',
                # constructs like <STDIN>.
                m = tre_search(value, m.end())
                continue
+           # if calling map - be sure we've string:
+           if noRecRepl: repl = str(repl)
            value = value.replace('<%s>' % rtag, repl)
            #logSys.log(5, 'value now: %s' % value)
            # increment reference count:
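Taken together, these three hunks teach `substituteRecursiveTags` about calling maps without importing one: the map is recognised purely by duck typing (`hasattr(tags, "getRawItem")`), entries whose raw value is callable are skipped so lazy properties are never evaluated during interpolation, and replacements are coerced to `str` because a raw item need not be a string. A toy mapping demonstrating the skip condition (the class and keys are invented for the example, not fail2ban's):

class LazyMap(dict):
    """Toy stand-in for a calling map: exposes raw (possibly callable) items."""
    def getRawItem(self, key):
        return dict.__getitem__(self, key)

tags = LazyMap(port='22', matches=lambda self: 1 / 0)  # the callable is never invoked below

for tag in tags:
    if hasattr(tags, "getRawItem") and callable(tags.getRawItem(tag)):
        continue  # leave lazy items alone; they are resolved on demand only
    print(tag, '->', str(tags[tag]))  # only 'port' is printed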
@@ -36,7 +36,7 @@ from .failregex import mapTag2Opt
 from .ipdns import asip
 from .mytime import MyTime
 from .utils import Utils
-from ..helpers import getLogger, substituteRecursiveTags, TAG_CRE, MAX_TAG_REPLACE_COUNT
+from ..helpers import getLogger, _merge_copy_dicts, substituteRecursiveTags, TAG_CRE, MAX_TAG_REPLACE_COUNT
 
 # Gets the instance of the logger.
 logSys = getLogger(__name__)
@@ -98,6 +98,13 @@ class CallingMap(MutableMapping, object):
        except:
            return dict(self.data, **self.storage)
 
+   def getRawItem(self, key):
+       try:
+           value = self.storage[key]
+       except KeyError:
+           value = self.data[key]
+       return value
+
    def __getitem__(self, key):
        try:
            value = self.storage[key]
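`getRawItem` is the CallingMap side of that contract: it returns whatever is stored for the key, preferring an already computed entry in `storage` and otherwise handing back the raw `data` entry without invoking it, whereas normal item access evaluates callables (the new unit test further down leans on exactly that difference). The following sketch condenses the idea into a minimal class; it is an illustration with invented details, not fail2ban's CallingMap, and it also previews the copy-by-merge change from the next hunk:

class MiniCallingMap(object):
    """Minimal stand-in: lazy items, raw access, evaluated access, copying."""
    def __init__(self, data):
        self.data = data
        self.storage = {}                  # values computed so far

    def getRawItem(self, key):
        try:
            return self.storage[key]       # already computed
        except KeyError:
            return self.data[key]          # possibly a callable, returned as-is

    def __getitem__(self, key):
        try:
            return self.storage[key]
        except KeyError:
            value = self.data[key]
        if callable(value):
            value = value(self)            # evaluate on demand ...
            self.storage[key] = value      # ... and keep the result (sketch behaviour)
        return value

    def copy(self):
        # merge storage over data so computed values travel with the copy
        return MiniCallingMap({**self.data, **self.storage})

m = MiniCallingMap({'ip': lambda self: '192.0.2.1'})
print(callable(m.getRawItem('ip')))   # True  - raw access does not evaluate
print(m['ip'])                        # 192.0.2.1 - item access evaluates
print(m.copy().getRawItem('ip'))      # 192.0.2.1 - the computed value survived the copy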
@@ -141,7 +148,7 @@ class CallingMap(MutableMapping, object):
        return len(self.data)
 
    def copy(self): # pargma: no cover
-       return self.__class__(self.data.copy())
+       return self.__class__(_merge_copy_dicts(self.data, self.storage))
 
 
 class ActionBase(object):
@@ -446,7 +453,7 @@ class CommandAction(ActionBase):
        return value
 
    @classmethod
-   def replaceTag(cls, query, aInfo, conditional='', cache=None):
+   def replaceTag(cls, query, aInfo, conditional='', cache=None, substRec=True):
        """Replaces tags in `query` with property values.
 
        Parameters
@@ -471,23 +478,29 @@ class CommandAction(ActionBase):
            except KeyError:
                pass
 
-       # first try get cached tags dictionary:
-       subInfo = csubkey = None
-       if cache is not None:
-           csubkey = ('subst-tags', id(aInfo), conditional)
-           try:
-               subInfo = cache[csubkey]
-           except KeyError:
-               pass
-       # interpolation of dictionary:
-       if subInfo is None:
-           subInfo = substituteRecursiveTags(aInfo, conditional, ignore=cls._escapedTags)
-           # cache if possible:
-           if csubkey is not None:
-               cache[csubkey] = subInfo
+       # **Important**: don't replace if calling map - contains dynamic values only,
+       # no recursive tags, otherwise may be vulnerable on foreign user-input:
+       noRecRepl = isinstance(aInfo, CallingMap)
+       if noRecRepl:
+           subInfo = aInfo
+       else:
+           # substitute tags recursive (and cache if possible),
+           # first try get cached tags dictionary:
+           subInfo = csubkey = None
+           if cache is not None:
+               csubkey = ('subst-tags', id(aInfo), conditional)
+               try:
+                   subInfo = cache[csubkey]
+               except KeyError:
+                   pass
+           # interpolation of dictionary:
+           if subInfo is None:
+               subInfo = substituteRecursiveTags(aInfo, conditional, ignore=cls._escapedTags)
+               # cache if possible:
+               if csubkey is not None:
+                   cache[csubkey] = subInfo
 
        # substitution callable, used by interpolation of each tag
-       repeatSubst = {0: 0}
        def substVal(m):
            tag = m.group(1) # tagname from match
            value = None
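For an ordinary properties dictionary, `replaceTag` still interpolates the whole dict recursively and memoises the result under a `('subst-tags', id(aInfo), conditional)` key; a CallingMap now bypasses both the recursion and the cache, since its values are dynamic. A compact sketch of that cache-aside pattern, with a plain dict standing in for whatever cache object the caller passes (anything mapping-like that raises KeyError behaves the same here):

def substituted_info(aInfo, conditional='', cache=None, substitute=dict):
    subInfo = csubkey = None
    if cache is not None:
        csubkey = ('subst-tags', id(aInfo), conditional)
        try:
            subInfo = cache[csubkey]
        except KeyError:
            pass
    if subInfo is None:
        subInfo = substitute(aInfo)      # the expensive recursive interpolation
        if csubkey is not None:
            cache[csubkey] = subInfo
    return subInfo

calls = []
def resolve(d):
    calls.append(1)                      # count how often the expensive path runs
    return dict(d)                       # stand-in for substituteRecursiveTags

cache = {}
info = {'port': '22', 'blocktype': 'REJECT'}
substituted_info(info, cache=cache, substitute=resolve)
substituted_info(info, cache=cache, substitute=resolve)
print(len(calls))  # 1 - the second call is served from the cache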
@@ -503,18 +516,17 @@ class CommandAction(ActionBase):
                # That one needs to be escaped since its content is
                # out of our control
                value = cls.escapeTag(value)
-           # possible contains tags:
-           if '<' in value:
-               repeatSubst[0] = 1
+           # replacement for tag:
            return value
 
        # interpolation of query:
        count = MAX_TAG_REPLACE_COUNT + 1
        while True:
-           repeatSubst[0] = 0
            value = TAG_CRE.sub(substVal, query)
+           # **Important**: no recursive replacement for tags from calling map (properties only):
+           if noRecRepl: break
            # possible recursion ?
-           if not repeatSubst or value == query: break
+           if value == query or '<' not in value: break
            query = value
            count -= 1
            if count <= 0:
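The query loop now exits after a single pass whenever the properties come from a calling map: tag values that may derive from matched log content are inserted literally and never re-expanded, so `<tag>` constructs smuggled into the log cannot trigger a second round of substitution (the comments in the diff call this out as protection against foreign user input). The other exit condition is simplified to `value == query or '<' not in value` now that the `repeatSubst` bookkeeping is gone. A condensed stand-alone comparison of single-pass versus recursive substitution; the regex is a simplified stand-in for fail2ban's TAG_CRE:

import re

TAG_CRE = re.compile(r'<([\w\-]+)>')  # simplified stand-in, not fail2ban's pattern

def replace_tags(query, info, recursive=True, max_count=25):
    def subst(m):
        return str(info.get(m.group(1), m.group(0)))
    count = max_count
    while True:
        value = TAG_CRE.sub(subst, query)
        if not recursive:                    # calling-map case: one pass only
            break
        if value == query or '<' not in value:
            break
        query = value
        count -= 1
        if count <= 0:
            break
    return value

info = {'A': 0, 'B': '<A><A>'}               # 'B' mimics dynamic (untrusted) content
print(replace_tags('test=<A>--<B>', info, recursive=False))  # test=0--<A><A>
print(replace_tags('test=<A>--<B>', info, recursive=True))   # test=0--00

With `recursive=False` the payload in `B` stays literal, which is the behaviour the new test below asserts as `test=0--<A><A>--0`.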
@@ -157,6 +157,37 @@ class CommandActionTest(LogCaptureTestCase):
        self.assertEqual(substituteRecursiveTags({'A': 'A <IP<PREF>HOST> B IP<PREF> C', 'PREF': 'V4', 'IPV4HOST': '1.2.3.4'}),
            {'A': 'A 1.2.3.4 B IPV4 C', 'PREF': 'V4', 'IPV4HOST': '1.2.3.4'})
 
+   def testSubstRec_DontTouchUnusedCallable(self):
+       cm = CallingMap({
+           'A':0,
+           'B':lambda self: '<A><A>',
+           'C':'',
+           'D':''
+       })
+       #
+       # should raise no exceptions:
+       substituteRecursiveTags(cm)
+       # add exception tag:
+       cm['C'] = lambda self,i=0: 5 // int(self['A']) # raise error by access
+       # test direct get of callable (should raise an error):
+       self.assertRaises(ZeroDivisionError, lambda: cm['C'])
+       # should raise no exceptions (tag "C" still unused):
+       substituteRecursiveTags(cm)
+       # add reference to "broken" tag:
+       cm['D'] = 'test=<C>'
+       # should raise an exception (BOOM by replacement of tag "D" recursive):
+       self.assertRaises(ZeroDivisionError, lambda: substituteRecursiveTags(cm))
+       #
+       # should raise no exceptions:
+       self.assertEqual(self.__action.replaceTag('test=<A>', cm), "test=0")
+       # **Important**: recursive replacement of dynamic data from calling map should be prohibited,
+       # otherwise may be vulnerable on foreign user-input:
+       self.assertEqual(self.__action.replaceTag('test=<A>--<B>--<A>', cm), "test=0--<A><A>--0")
+       # should raise an exception (BOOM by replacement of tag "C"):
+       self.assertRaises(ZeroDivisionError, lambda: self.__action.replaceTag('test=<C>', cm))
+       # should raise no exceptions (replaces tag "D" only):
+       self.assertEqual(self.__action.replaceTag('<D>', cm), "test=<C>")
+
    def testReplaceTag(self):
        aInfo = {
            'HOST': "192.0.2.0",
@@ -286,11 +286,12 @@ class Fail2banRegexTest(LogCaptureTestCase):
            "-l", "notice", # put down log-level, because of too many debug-messages
            "--datepattern", "^(?:%a )?%b %d %H:%M:%S(?:\.%f)?(?: %ExY)?",
            "--debuggex", "--print-all-matched",
-           Fail2banRegexTest.FILENAME_WRONGCHAR, Fail2banRegexTest.FILTER_SSHD
+           Fail2banRegexTest.FILENAME_WRONGCHAR, Fail2banRegexTest.FILTER_SSHD,
+           r"llinco[^\\]"
        )
        self.assertTrue(fail2banRegex.start(args))
        self.assertLogged('Error decoding line')
-       self.assertLogged('Lines: 4 lines, 0 ignored, 2 matched, 2 missed')
+       self.assertLogged('Lines: 4 lines, 1 ignored, 2 matched, 1 missed')
 
        self.assertLogged('https://')
 
@@ -269,7 +269,9 @@ def initTests(opts):
 
    # precache all invalid ip's (TEST-NET-1, ..., TEST-NET-3 according to RFC 5737):
    c = DNSUtils.CACHE_ipToName
-   for i in xrange(255):
+   # increase max count and max time (too many entries, long time testing):
+   c.setOptions(maxCount=10000, maxTime=5*60)
+   for i in xrange(256):
        c.set('192.0.2.%s' % i, None)
        c.set('198.51.100.%s' % i, None)
        c.set('203.0.113.%s' % i, None)
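A quick arithmetic note on this last hunk: each RFC 5737 block is a full /24, so host octets run 0 through 255 and `xrange(256)` is needed to precache all 3 x 256 = 768 addresses; the previous `xrange(255)` stopped at `.254`. Raising `maxCount` and `maxTime` presumably keeps that many negative DNS entries alive through a long test run, as the added comment suggests. The quick check:

# RFC 5737 reserves three /24 test networks; precaching all of them:
networks = ('192.0.2.', '198.51.100.', '203.0.113.')
entries = [net + str(i) for net in networks for i in range(256)]
print(len(entries))            # 768
print(max(range(255)))         # 254 - the old loop never cached the .255 address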