mirror of https://github.com/fail2ban/fail2ban
- filtering of test cases extended to match methods in addition to classes (a regexp that matches a method name now selects only that method, not the whole class);
- new options introduced for "fail2ban-testcases": "-g" or "--no-gamin" and "-m" or "--memory-db"; both are also enabled if "-f" or "--fast" is specified. For example, `fail2ban-testcases -ngm` runs faster (no network, no gamin, memory database) but keeps the same default sleep intervals as in production (in contrast to `-nf`);
- seekToTime rewritten: accuracy increased, extended to all FileFilter backends (PyInotify and Gamin as well), test cases extended, etc.;
- general performance optimized and code reviewed.
branch: pull/1346/head
parent: 6faffe3201
commit: cf3cf27fa3
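
The first item above (per-method filtering of test cases) is implemented in the FilteredTestSuite.addTest() change near the end of this diff. As a rough standalone sketch of the idea, not the project's code, assuming the usual unittest naming where str(test) yields "testMethod (package.module.ClassName)":

import re
import unittest

def filter_tests(suite, pattern):
	# keep only the individual test methods whose description matches the regexp;
	# suites produced by unittest.makeSuite() iterate over single test-case instances
	rx = re.compile(pattern)
	picked = unittest.TestSuite()
	for test in suite:
		if rx.search(str(test)):
			picked.addTest(test)
	return picked

With such filtering, a regexp like "testSeekToTime" selects just the matching methods instead of every test in the containing class.
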
@@ -58,6 +58,12 @@ def get_opt_parser():
 Option('-n', "--no-network", action="store_true",
 dest="no_network",
 help="Do not run tests that require the network"),
+Option('-g', "--no-gamin", action="store_true",
+dest="no_gamin",
+help="Do not run tests that require the gamin"),
+Option('-m', "--memory-db", action="store_true",
+dest="memory_db",
+help="Run database tests using memory instead of file"),
 Option('-f', "--fast", action="store_true",
 dest="fast",
 help="Try to increase speed of the tests, decreasing of wait intervals, memory database"),

@@ -560,6 +560,7 @@ class FileFilter(Filter):
 Filter.__init__(self, jail, **kwargs)
 ## The log file path.
 self.__logs = dict()
+self.__autoSeek = dict()
 self.setLogEncoding("auto")
 
 ##

@@ -567,7 +568,7 @@ class FileFilter(Filter):
 #
 # @param path log file path
 
-def addLogPath(self, path, tail=False):
+def addLogPath(self, path, tail=False, autoSeek=True):
 if path in self.__logs:
 logSys.error(path + " already exists")
 else:

@@ -579,6 +580,11 @@ class FileFilter(Filter):
 log.setPos(lastpos)
 self.__logs[path] = log
 logSys.info("Added logfile = %s (pos = %s, hash = %s)" , path, log.getPos(), log.getHash())
+if autoSeek:
+# if default, seek to "current time" - "find time":
+if isinstance(autoSeek, bool):
+autoSeek = MyTime.time() - self.getFindTime()
+self.__autoSeek[path] = autoSeek
 self._addLogPath(path) # backend specific
 
 def _addLogPath(self, path):
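
The new autoSeek argument of addLogPath() accepts three kinds of values, which the test changes further below exercise. A brief illustrative sketch (the log paths and the filter_ variable are placeholders, not part of this commit):

# autoSeek=True (default): before the first getFailures() run the filter seeks to
# "current time - findtime", so a large pre-existing log is not re-read from the start
filter_.addLogPath("/var/log/auth.log")
# autoSeek=<epoch seconds>: seek to the line closest to that explicit time
filter_.addLogPath("/var/log/auth.log.1", autoSeek=1417512352)
# autoSeek=False (or 0): skip the initial seek and read from the stored position
filter_.addLogPath("/var/log/auth.log.2", autoSeek=False)
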
@@ -673,7 +679,7 @@ class FileFilter(Filter):
 # MyTime.time()-self.findTime. When a failure is detected, a FailTicket
 # is created and is added to the FailManager.
 
-def getFailures(self, filename, startTime=None):
+def getFailures(self, filename):
 log = self.getLog(filename)
 if log is None:
 logSys.error("Unable to get failures in " + filename)

@@ -695,13 +701,17 @@ class FileFilter(Filter):
 logSys.exception(e)
 return False
 
-# prevent completely read of big files first time (after start of service), initial seek to start time using half-interval search algorithm:
+# seek to find time for first usage only (prevent performance decline with polling of big files)
-if log.getPos() == 0 and startTime is not None:
+if self.__autoSeek.get(filename):
+startTime = self.__autoSeek[filename]
+del self.__autoSeek[filename]
+# prevent completely read of big files first time (after start of service),
+# initial seek to start time using half-interval search algorithm:
 try:
-# startTime = MyTime.time() - self.getFindTime()
 self.seekToTime(log, startTime)
 except Exception, e: # pragma: no cover
 logSys.error("Error during seek to start time in \"%s\"", filename)
+raise
 logSys.exception(e)
 return False
 

@@ -726,71 +736,88 @@ class FileFilter(Filter):
 # Seeks to line with date (search using half-interval search algorithm), to start polling from it
 #
 
-def seekToTime(self, container, date):
+def seekToTime(self, container, date, accuracy=3):
 fs = container.getFileSize()
 if logSys.getEffectiveLevel() <= logging.DEBUG:
 logSys.debug("Seek to find time %s (%s), file size %s", date,
 datetime.datetime.fromtimestamp(date).strftime("%Y-%m-%d %H:%M:%S"), fs)
-date -= 0.009
+minp = container.getPos()
-minp = 0
 maxp = fs
-lastpos = 0
+tryPos = minp
-lastFew = 0
+lastPos = -1
-lastTime = None
+foundPos = 0
+foundTime = None
 cntr = 0
 unixTime = None
-lasti = 0
+movecntr = accuracy
-movecntr = 1
 while maxp > minp:
-i = int(minp + (maxp - minp) / 2)
+if tryPos is None:
-pos = container.seek(i)
+pos = int(minp + (maxp - minp) / 2)
+else:
+pos, tryPos = tryPos, None
+# because container seek will go to start of next line (minus CRLF):
+pos = max(0, pos-2)
+seekpos = pos = container.seek(pos)
 cntr += 1
 # within next 5 lines try to find any legal datetime:
 lncntr = 5;
 dateTimeMatch = None
-llen = 0
+nextp = None
-if lastpos == pos:
-i = pos
 while True:
 line = container.readline()
 if not line:
 break
-llen += len(line)
+(timeMatch, template) = self.dateDetector.matchTime(line)
-l = line.rstrip('\r\n')
-(timeMatch, template) = self.dateDetector.matchTime(l)
 if timeMatch:
-dateTimeMatch = self.dateDetector.getTime2(l[timeMatch.start():timeMatch.end()], (timeMatch, template))
+dateTimeMatch = self.dateDetector.getTime2(line[timeMatch.start():timeMatch.end()], (timeMatch, template))
+else:
+nextp = container.tell()
+if nextp > maxp:
+pos = seekpos
+break
+pos = nextp
 if not dateTimeMatch and lncntr:
 lncntr -= 1
 continue
 break
+# not found at this step - stop searching
+if dateTimeMatch:
+unixTime = dateTimeMatch[0]
+if unixTime >= date:
+if foundTime is None or unixTime <= foundTime:
+foundPos = pos
+foundTime = unixTime
+if pos == maxp:
+pos = seekpos
+if pos < maxp:
+maxp = pos
+else:
+if foundTime is None or unixTime >= foundTime:
+foundPos = pos
+foundTime = unixTime
+if nextp is None:
+nextp = container.tell()
+pos = nextp
+if pos > minp:
+minp = pos
 # if we can't move (position not changed)
-if i + llen == lasti:
+if pos == lastPos:
 movecntr -= 1
 if movecntr <= 0:
 break
-lasti = i + llen;
+# we have found large area without any date mached
-# not found at this step - stop searching
+# or end of search - try min position (because can be end of previous line):
-if not dateTimeMatch:
+if minp != lastPos:
+lastPos = tryPos = minp
+continue
 break
-unixTime = dateTimeMatch[0]
+lastPos = pos
-if unixTime >= date:
+# always use smallest pos, that could be found:
-maxp = i
+foundPos = container.seek(minp, False)
-else:
+container.setPos(foundPos)
-minp = i + llen
-lastFew = pos;
-lastTime = unixTime
-lastpos = pos
-# if found position have a time greater as given - use smallest time we have found
-if unixTime is None or unixTime > date:
-unixTime = lastTime
-lastpos = container.seek(lastFew, False)
-else:
-lastpos = container.seek(lastpos, False)
-container.setPos(lastpos)
 if logSys.getEffectiveLevel() <= logging.DEBUG:
-logSys.debug("Position %s from %s, found time %s (%s) within %s seeks", lastpos, fs, unixTime,
+logSys.debug("Position %s from %s, found time %s (%s) within %s seeks", lastPos, fs, foundTime,
-(datetime.datetime.fromtimestamp(unixTime).strftime("%Y-%m-%d %H:%M:%S") if unixTime is not None else ''), cntr)
+(datetime.datetime.fromtimestamp(foundTime).strftime("%Y-%m-%d %H:%M:%S") if foundTime is not None else ''), cntr)
 
 def status(self, flavor="basic"):
 """Status of Filter plus files being monitored.
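
The rewritten seekToTime() above keeps the half-interval (binary) search but adds handling for lines without a parsable date, for CRLF boundaries, and an accuracy/retry counter. As a minimal standalone sketch of just the underlying idea -- illustrative only, not fail2ban's implementation -- assuming a time-sorted log opened in binary mode whose lines begin with a "YYYY-mm-dd HH:MM:SS" timestamp:

import time

def _epoch(line):
	# assumed format: the first 19 bytes of every line are "YYYY-mm-dd HH:MM:SS"
	return time.mktime(time.strptime(line[:19].decode('ascii'), "%Y-%m-%d %H:%M:%S"))

def seek_to_time(fh, target):
	"""Position fh at the first line whose timestamp is >= target; return that offset."""
	fh.seek(0, 2)
	lo, hi = 0, fh.tell()
	while lo < hi:
		mid = (lo + hi) // 2
		fh.seek(mid)
		fh.readline()                # skip the rest of the line containing offset mid
		line = fh.readline()         # first complete line starting after mid
		if line and _epoch(line) < target:
			lo = mid + 1             # that line is still too old - search to the right
		else:
			hi = mid                 # new enough (or EOF) - search to the left
	fh.seek(lo)                      # lo is now the start of the last too-old line (or 0)
	line = fh.readline()
	if line and _epoch(line) < target:
		lo = fh.tell()               # step over it to reach the first new-enough line
	fh.seek(lo)
	return lo

For example, seek_to_time(open(fname, 'rb'), time.time() - 600) positions the handle at the first entry of the last ten minutes; the FileContainer used by FileFilter adds its own bookkeeping on top of this idea.
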
@@ -898,11 +925,15 @@ class FileContainer:
 # seek to given position
 h.seek(offs, 0)
 # goto end of next line
-if endLine:
+if offs and endLine:
 h.readline()
 # get current real position
 return h.tell()
 
+def tell(self):
+# get current real position
+return self.__handler.tell()
+
 @staticmethod
 def decode_line(filename, enc, line):
 try:

@@ -58,7 +58,6 @@ class FilterPoll(FileFilter):
 ## The time of the last modification of the file.
 self.__prevStats = dict()
 self.__file404Cnt = dict()
-self.__initial = dict()
 logSys.debug("Created FilterPoll")
 
 ##

@@ -109,11 +108,7 @@ class FilterPoll(FileFilter):
 modlst = []
 Utils.wait_for(lambda: self.getModified(modlst), self.sleeptime)
 for filename in modlst:
-# set start time as now - find time for first usage only (prevent performance bug with polling of big files)
+self.getFailures(filename)
-self.getFailures(filename,
-(MyTime.time() - self.getFindTime()) if not self.__initial.get(filename) else None
-)
-self.__initial[filename] = True
 self.__modified = True
 
 if self.__modified:

@@ -176,6 +176,9 @@ class FailTicket(Ticket):
 self.__retry = self._data['failures'];
 
 def setRetry(self, value):
+""" Set artificial retry count, normally equal failures / attempt,
+used in incremental features (BanTimeIncr) to increase retry count for bad IPs
+"""
 self.__retry = value
 if not self._data['failures']:
 self._data['failures'] = 1

@@ -184,6 +187,9 @@ class FailTicket(Ticket):
 self._data['matches'] = []
 
 def getRetry(self):
+""" Returns failures / attempt count or
+artificial retry count increased for bad IPs
+"""
 return max(self.__retry, self._data['failures'])
 
 def inc(self, matches=None, attempt=1, count=1):
@@ -36,18 +36,20 @@ from ..server.actions import Actions
 from .dummyjail import DummyJail
 try:
 from ..server.database import Fail2BanDb as Fail2BanDb
-# because of tests performance use memory instead of file:
-def TestFail2BanDb(filename):
-if unittest.F2B.fast:
-return Fail2BanDb(':memory:')
-return Fail2BanDb(filename)
-except ImportError:
+except ImportError: # pragma: no cover
 Fail2BanDb = None
 from .utils import LogCaptureTestCase
 
 TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
 
 
+# because of tests performance use memory instead of file:
+def getFail2BanDb(filename):
+if unittest.F2B.memory_db: # pragma: no cover
+return Fail2BanDb(':memory:')
+return Fail2BanDb(filename)
+
+
 class DatabaseTest(LogCaptureTestCase):
 
 def setUp(self):

@@ -59,8 +61,10 @@ class DatabaseTest(LogCaptureTestCase):
 "available.")
 elif Fail2BanDb is None:
 return
+self.dbFilename = None
+if not unittest.F2B.memory_db:
 _, self.dbFilename = tempfile.mkstemp(".db", "fail2ban_")
-self.db = TestFail2BanDb(self.dbFilename)
+self.db = getFail2BanDb(self.dbFilename)
 
 def tearDown(self):
 """Call after every test case."""

@@ -68,6 +72,7 @@ class DatabaseTest(LogCaptureTestCase):
 if Fail2BanDb is None: # pragma: no cover
 return
 # Cleanup
+if self.dbFilename is not None:
 os.remove(self.dbFilename)
 
 def testGetFilename(self):

@@ -106,6 +111,9 @@ class DatabaseTest(LogCaptureTestCase):
 def testUpdateDb(self):
 if Fail2BanDb is None: # pragma: no cover
 return
+self.db = None
+if self.dbFilename is None: # pragma: no cover
+_, self.dbFilename = tempfile.mkstemp(".db", "fail2ban_")
 shutil.copyfile(
 os.path.join(TEST_FILES_DIR, 'database_v1.db'), self.dbFilename)
 self.db = Fail2BanDb(self.dbFilename)

@@ -81,6 +81,16 @@ def _killfile(f, name):
 _killfile(None, name + '.bak')
 
 
+def _maxWaitTime(wtime):
+if unittest.F2B.fast:
+wtime /= 10
+return wtime
+
+
+def _tm(time):
+return datetime.datetime.fromtimestamp(time).strftime("%Y-%m-%d %H:%M:%S")
+
+
 def _assert_equal_entries(utest, found, output, count=None):
 """Little helper to unify comparisons with the target entries
 
@@ -122,9 +132,15 @@ def _assert_correct_last_attempt(utest, filter_, output, count=None):
 Test filter to contain target ticket
 """
 if isinstance(filter_, DummyJail):
+# get fail ticket from jail
 found = _ticket_tuple(filter_.getFailTicket())
 else:
 # when we are testing without jails
+# wait for failures (up to max time)
+Utils.wait_for(
+lambda: filter_.failManager.getFailTotal() >= (count if count else output[1]),
+_maxWaitTime(10))
+# get fail ticket from filter
 found = _ticket_tuple(filter_.failManager.toBan())
 
 _assert_equal_entries(utest, found, output, count)

@@ -352,58 +368,131 @@ class LogFileFilterPoll(unittest.TestCase):
 self.assertTrue(self.filter.isModified(LogFileFilterPoll.FILENAME))
 self.assertFalse(self.filter.isModified(LogFileFilterPoll.FILENAME))
 
-def testSeekToTime(self):
+def testSeekToTimeSmallFile(self):
 fname = tempfile.mktemp(prefix='tmp_fail2ban', suffix='.log')
-tm = lambda time: datetime.datetime.fromtimestamp(time).strftime("%Y-%m-%d %H:%M:%S")
 time = 1417512352
 f = open(fname, 'w')
+fc = None
+try:
 fc = FileContainer(fname, self.filter.getLogEncoding())
 fc.open()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-try:
 f.flush()
 # empty :
 fc.setPos(0); self.filter.seekToTime(fc, time)
 self.assertEqual(fc.getPos(), 0)
 # one entry with exact time:
-f.write("%s [sshd] error: PAM: failure len 1\n" % tm(time))
+f.write("%s [sshd] error: PAM: failure len 1\n" % _tm(time))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-# one entry with smaller time:
+# rewrite :
 f.seek(0)
-f.write("%s [sshd] error: PAM: failure len 1\n" % tm(time - 10))
+f.truncate()
+fc.close()
+fc = FileContainer(fname, self.filter.getLogEncoding())
+fc.open()
+# no time - nothing should be found :
+for i in xrange(10):
+f.write("[sshd] error: PAM: failure len 1\n")
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-self.assertEqual(fc.getPos(), 0)
-f.write("%s [sshd] error: PAM: failure len 3 2 1\n" % tm(time - 9))
+# rewrite
+f.seek(0)
+f.truncate()
+fc.close()
+fc = FileContainer(fname, self.filter.getLogEncoding())
+fc.open()
+# one entry with smaller time:
+f.write("%s [sshd] error: PAM: failure len 2\n" % _tm(time - 10))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-self.assertEqual(fc.getPos(), 0)
+self.assertEqual(fc.getPos(), 53)
-# add exact time between:
+# two entries with smaller time:
-f.write("%s [sshd] error: PAM: failure\n" % tm(time - 1))
+f.write("%s [sshd] error: PAM: failure len 3 2 1\n" % _tm(time - 9))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
 self.assertEqual(fc.getPos(), 110)
+# check move after end (all of time smaller):
+f.write("%s [sshd] error: PAM: failure\n" % _tm(time - 1))
+f.flush()
+self.assertEqual(fc.getFileSize(), 157)
+fc.setPos(0); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 157)
+
 # stil one exact line:
-f.write("%s [sshd] error: PAM: Authentication failure\n" % tm(time))
+f.write("%s [sshd] error: PAM: Authentication failure\n" % _tm(time))
-f.write("%s [sshd] error: PAM: failure len 1\n" % tm(time))
+f.write("%s [sshd] error: PAM: failure len 1\n" % _tm(time))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-self.assertEqual(fc.getPos(), 110)
+self.assertEqual(fc.getPos(), 157)
 
 # add something hereafter:
-f.write("%s [sshd] error: PAM: failure len 3 2 1\n" % tm(time + 2))
+f.write("%s [sshd] error: PAM: failure len 3 2 1\n" % _tm(time + 2))
-f.write("%s [sshd] error: PAM: Authentication failure\n" % tm(time + 3))
+f.write("%s [sshd] error: PAM: Authentication failure\n" % _tm(time + 3))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-self.assertEqual(fc.getPos(), 110)
+self.assertEqual(fc.getPos(), 157)
 # add something hereafter:
-f.write("%s [sshd] error: PAM: failure\n" % tm(time + 9))
+f.write("%s [sshd] error: PAM: failure\n" % _tm(time + 9))
-f.write("%s [sshd] error: PAM: failure len 3 2 1\n" % tm(time + 9))
+f.write("%s [sshd] error: PAM: failure len 4 3 2\n" % _tm(time + 9))
 f.flush()
 fc.setPos(0); self.filter.seekToTime(fc, time)
-self.assertEqual(fc.getPos(), 110)
+self.assertEqual(fc.getPos(), 157)
+# start search from current pos :
+fc.setPos(157); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 157)
+# start search from current pos :
+fc.setPos(110); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 157)
+
 finally:
+if fc:
+fc.close()
+_killfile(f, fname)
+
+def testSeekToTimeLargeFile(self):
+fname = tempfile.mktemp(prefix='tmp_fail2ban', suffix='.log')
+time = 1417512352
+f = open(fname, 'w')
+fc = None
+count = 1000 if unittest.F2B.fast else 10000
+try:
+fc = FileContainer(fname, self.filter.getLogEncoding())
+fc.open()
+f.seek(0)
+# variable length of file (ca 45K or 450K before and hereafter):
+# write lines with smaller as search time:
+t = time - count - 1
+for i in xrange(count):
+f.write("%s [sshd] error: PAM: failure\n" % _tm(t))
+t += 1
+f.flush()
+fc.setPos(0); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 47*count)
+# write lines with exact search time:
+for i in xrange(10):
+f.write("%s [sshd] error: PAM: failure\n" % _tm(time))
+f.flush()
+fc.setPos(0); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 47*count)
+fc.setPos(4*count); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 47*count)
+# write lines with greater as search time:
+t = time+1
+for i in xrange(count//500):
+for j in xrange(500):
+f.write("%s [sshd] error: PAM: failure\n" % _tm(t))
+t += 1
+f.flush()
+fc.setPos(0); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 47*count)
+fc.setPos(53); self.filter.seekToTime(fc, time)
+self.assertEqual(fc.getPos(), 47*count)
+
+finally:
+if fc:
 fc.close()
 _killfile(f, fname)
 
@@ -418,7 +507,7 @@ class LogFileMonitor(LogCaptureTestCase):
 _, self.name = tempfile.mkstemp('fail2ban', 'monitorfailures')
 self.file = open(self.name, 'a')
 self.filter = FilterPoll(DummyJail())
-self.filter.addLogPath(self.name)
+self.filter.addLogPath(self.name, autoSeek=False)
 self.filter.active = True
 self.filter.addFailRegex("(?:(?:Authentication failure|Failed [-/\w+]+) for(?: [iI](?:llegal|nvalid) user)?|[Ii](?:llegal|nvalid) user|ROOT LOGIN REFUSED) .*(?: from|FROM) <HOST>")
 

@@ -431,11 +520,12 @@ class LogFileMonitor(LogCaptureTestCase):
 def isModified(self, delay=2.):
 """Wait up to `delay` sec to assure that it was modified or not
 """
-return Utils.wait_for(lambda: self.filter.isModified(self.name), delay)
+return Utils.wait_for(lambda: self.filter.isModified(self.name), _maxWaitTime(delay))
 
-def notModified(self):
+def notModified(self, delay=2.):
-# shorter wait time for not modified status
+"""Wait up to `delay` sec as long as it was not modified
-return not self.isModified(4*Utils.DEFAULT_SLEEP_TIME)
+"""
+return Utils.wait_for(lambda: not self.filter.isModified(self.name), _maxWaitTime(delay))
 
 def testUnaccessibleLogFile(self):
 os.chmod(self.name, 0)

@@ -478,7 +568,7 @@ class LogFileMonitor(LogCaptureTestCase):
 os.rename(self.name, self.name + '.old')
 # we are not signaling as modified whenever
 # it gets away
-self.assertTrue(self.notModified())
+self.assertTrue(self.notModified(1))
 f = open(self.name, 'a')
 self.assertTrue(self.isModified())
 self.assertTrue(self.notModified())

@@ -562,7 +652,7 @@ def get_monitor_failures_testcase(Filter_):
 self.file = open(self.name, 'a')
 self.jail = DummyJail()
 self.filter = Filter_(self.jail)
-self.filter.addLogPath(self.name)
+self.filter.addLogPath(self.name, autoSeek=False)
 self.filter.active = True
 self.filter.addFailRegex("(?:(?:Authentication failure|Failed [-/\w+]+) for(?: [iI](?:llegal|nvalid) user)?|[Ii](?:llegal|nvalid) user|ROOT LOGIN REFUSED) .*(?: from|FROM) <HOST>")
 self.filter.start()

@@ -587,21 +677,21 @@ def get_monitor_failures_testcase(Filter_):
 def isFilled(self, delay=1.):
 """Wait up to `delay` sec to assure that it was modified or not
 """
-return Utils.wait_for(self.jail.isFilled, delay)
+return Utils.wait_for(self.jail.isFilled, _maxWaitTime(delay))
 
 def _sleep_4_poll(self):
 # Since FilterPoll relies on time stamps and some
 # actions might be happening too fast in the tests,
 # sleep a bit to guarantee reliable time stamps
 if isinstance(self.filter, FilterPoll):
-Utils.wait_for(self.filter.isAlive, 4*Utils.DEFAULT_SLEEP_TIME)
+Utils.wait_for(self.filter.isAlive, _maxWaitTime(5))
 
-def isEmpty(self, delay=4*Utils.DEFAULT_SLEEP_TIME):
+def isEmpty(self, delay=_maxWaitTime(5)):
 # shorter wait time for not modified status
-return Utils.wait_for(self.jail.isEmpty, delay)
+return Utils.wait_for(self.jail.isEmpty, _maxWaitTime(delay))
 
 def assert_correct_last_attempt(self, failures, count=None):
-self.assertTrue(self.isFilled(20)) # give Filter a chance to react
+self.assertTrue(self.isFilled(10)) # give Filter a chance to react
 _assert_correct_last_attempt(self, self.jail, failures, count=count)
 
 def test_grow_file(self):

@@ -616,7 +706,7 @@ def get_monitor_failures_testcase(Filter_):
 # since it should have not been enough
 
 _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=5)
-self.assertTrue(self.isFilled(6))
+self.assertTrue(self.isFilled(10))
 # so we sleep for up to 2 sec for it not to become empty,
 # and meanwhile pass to other thread(s) and filter should
 # have gathered new failures and passed them into the

@@ -653,11 +743,11 @@ def get_monitor_failures_testcase(Filter_):
 self.file = _copy_lines_between_files(GetFailures.FILENAME_01, self.name,
 n=14, mode='w')
 # Poll might need more time
-self.assertTrue(self.isEmpty(min(4, 100 * Utils.DEFAULT_SLEEP_TIME)),
+self.assertTrue(self.isEmpty(_maxWaitTime(5)),
 "Queue must be empty but it is not: %s."
 % (', '.join([str(x) for x in self.jail.queue])))
 self.assertRaises(FailManagerEmpty, self.filter.failManager.toBan)
-Utils.wait_for(lambda: self.filter.failManager.getFailTotal() == 2, 50 * Utils.DEFAULT_SLEEP_TIME)
+Utils.wait_for(lambda: self.filter.failManager.getFailTotal() == 2, _maxWaitTime(10))
 self.assertEqual(self.filter.failManager.getFailTotal(), 2)
 
 # move aside, but leaving the handle still open...

@@ -682,7 +772,7 @@ def get_monitor_failures_testcase(Filter_):
 
 if interim_kill:
 _killfile(None, self.name)
-time.sleep(Utils.DEFAULT_SLEEP_TIME) # let them know
+time.sleep(Utils.DEFAULT_SLEEP_INTERVAL) # let them know
 
 # now create a new one to override old one
 _copy_lines_between_files(GetFailures.FILENAME_01, self.name + '.new',

@@ -729,10 +819,10 @@ def get_monitor_failures_testcase(Filter_):
 
 _copy_lines_between_files(GetFailures.FILENAME_01, self.file, n=100)
 # so we should get no more failures detected
-self.assertTrue(self.isEmpty(200 * Utils.DEFAULT_SLEEP_TIME))
+self.assertTrue(self.isEmpty(_maxWaitTime(10)))
 
-# but then if we add it back again
+# but then if we add it back again (no seek to time in FileFilter's, because in file used the same time)
-self.filter.addLogPath(self.name)
+self.filter.addLogPath(self.name, autoSeek=False)
 # Tricky catch here is that it should get them from the
 # tail written before, so let's not copy anything yet
 #_copy_lines_between_files(GetFailures.FILENAME_01, self.name, n=100)
@@ -789,14 +879,14 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
 def isFilled(self, delay=1.):
 """Wait up to `delay` sec to assure that it was modified or not
 """
-return Utils.wait_for(self.jail.isFilled, delay)
+return Utils.wait_for(self.jail.isFilled, _maxWaitTime(delay))
 
-def isEmpty(self, delay=4*Utils.DEFAULT_SLEEP_TIME):
+def isEmpty(self, delay=_maxWaitTime(5)):
 # shorter wait time for not modified status
-return Utils.wait_for(self.jail.isEmpty, delay)
+return Utils.wait_for(self.jail.isEmpty, _maxWaitTime(delay))
 
 def assert_correct_ban(self, test_ip, test_attempts):
-self.assertTrue(self.isFilled(10)) # give Filter a chance to react
+self.assertTrue(self.isFilled(_maxWaitTime(10))) # give Filter a chance to react
 ticket = self.jail.getFailTicket()
 
 attempts = ticket.getAttempt()

@@ -819,7 +909,7 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
 
 _copy_lines_to_journal(
 self.test_file, self.journal_fields, skip=2, n=3)
-self.assertTrue(self.isFilled(6))
+self.assertTrue(self.isFilled(10))
 # so we sleep for up to 6 sec for it not to become empty,
 # and meanwhile pass to other thread(s) and filter should
 # have gathered new failures and passed them into the

@@ -852,7 +942,7 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
 _copy_lines_to_journal(
 self.test_file, self.journal_fields, n=5, skip=5)
 # so we should get no more failures detected
-self.assertTrue(self.isEmpty(200 * Utils.DEFAULT_SLEEP_TIME))
+self.assertTrue(self.isEmpty(_maxWaitTime(10)))
 
 # but then if we add it back again
 self.filter.addJournalMatch([

@@ -863,7 +953,7 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
 _copy_lines_to_journal(
 self.test_file, self.journal_fields, n=6, skip=10)
 # we should detect the failures
-self.assertTrue(self.isFilled(6))
+self.assertTrue(self.isFilled(10))
 
 return MonitorJournalFailures
 

@@ -931,7 +1021,7 @@ class GetFailures(LogCaptureTestCase):
 filename = filename or GetFailures.FILENAME_01
 failures = failures or GetFailures.FAILURES_01
 
-self.filter.addLogPath(filename)
+self.filter.addLogPath(filename, autoSeek=0)
 self.filter.addFailRegex("(?:(?:Authentication failure|Failed [-/\w+]+) for(?: [iI](?:llegal|nvalid) user)?|[Ii](?:llegal|nvalid) user|ROOT LOGIN REFUSED) .*(?: from|FROM) <HOST>$")
 self.filter.getFailures(filename)
 _assert_correct_last_attempt(self, self.filter, failures)

@@ -955,7 +1045,7 @@ class GetFailures(LogCaptureTestCase):
 [u'Aug 14 11:%d:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:141.3.81.106 port 51332 ssh2'
 % m for m in 53, 54, 57, 58])
 
-self.filter.addLogPath(GetFailures.FILENAME_02)
+self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=0)
 self.filter.addFailRegex("Failed .* from <HOST>")
 self.filter.getFailures(GetFailures.FILENAME_02)
 _assert_correct_last_attempt(self, self.filter, output)

@@ -963,25 +1053,35 @@ class GetFailures(LogCaptureTestCase):
 def testGetFailures03(self):
 output = ('203.162.223.135', 7, 1124013544.0)
 
-self.filter.addLogPath(GetFailures.FILENAME_03)
+self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=0)
 self.filter.addFailRegex("error,relay=<HOST>,.*550 User unknown")
 self.filter.getFailures(GetFailures.FILENAME_03)
 _assert_correct_last_attempt(self, self.filter, output)
 
-def testGetFailures03_seek(self):
+def testGetFailures03_Seek1(self):
 # same test as above but with seek to 'Aug 14 11:55:04' - so other output ...
 output = ('203.162.223.135', 5, 1124013544.0)
 
-self.filter.addLogPath(GetFailures.FILENAME_03)
+self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2] - 4*60)
 self.filter.addFailRegex("error,relay=<HOST>,.*550 User unknown")
-self.filter.getFailures(GetFailures.FILENAME_03, output[2] - 4*60 + 1)
+self.filter.getFailures(GetFailures.FILENAME_03)
+_assert_correct_last_attempt(self, self.filter, output)
+
+def testGetFailures03_Seek2(self):
+# same test as above but with seek to 'Aug 14 11:59:04' - so other output ...
+output = ('203.162.223.135', 1, 1124013544.0)
+self.filter.setMaxRetry(1)
+
+self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2])
+self.filter.addFailRegex("error,relay=<HOST>,.*550 User unknown")
+self.filter.getFailures(GetFailures.FILENAME_03)
 _assert_correct_last_attempt(self, self.filter, output)
 
 def testGetFailures04(self):
 output = [('212.41.96.186', 4, 1124013600.0),
 ('212.41.96.185', 4, 1124017198.0)]
 
-self.filter.addLogPath(GetFailures.FILENAME_04)
+self.filter.addLogPath(GetFailures.FILENAME_04, autoSeek=0)
 self.filter.addFailRegex("Invalid user .* <HOST>")
 self.filter.getFailures(GetFailures.FILENAME_04)
 
@@ -1048,7 +1148,7 @@ class GetFailures(LogCaptureTestCase):
 filter_.active = True
 filter_.failManager.setMaxRetry(1) # we might have just few failures
 
-filter_.addLogPath(GetFailures.FILENAME_USEDNS)
+filter_.addLogPath(GetFailures.FILENAME_USEDNS, autoSeek=False)
 filter_.addFailRegex("Failed .* from <HOST>")
 filter_.getFailures(GetFailures.FILENAME_USEDNS)
 _assert_correct_last_attempt(self, filter_, output)

@@ -1056,14 +1156,14 @@ class GetFailures(LogCaptureTestCase):
 def testGetFailuresMultiRegex(self):
 output = ('141.3.81.106', 8, 1124013541.0)
 
-self.filter.addLogPath(GetFailures.FILENAME_02)
+self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=False)
 self.filter.addFailRegex("Failed .* from <HOST>")
 self.filter.addFailRegex("Accepted .* from <HOST>")
 self.filter.getFailures(GetFailures.FILENAME_02)
 _assert_correct_last_attempt(self, self.filter, output)
 
 def testGetFailuresIgnoreRegex(self):
-self.filter.addLogPath(GetFailures.FILENAME_02)
+self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=False)
 self.filter.addFailRegex("Failed .* from <HOST>")
 self.filter.addFailRegex("Accepted .* from <HOST>")
 self.filter.addIgnoreRegex("for roehl")

@@ -1075,7 +1175,7 @@ class GetFailures(LogCaptureTestCase):
 def testGetFailuresMultiLine(self):
 output = [("192.0.43.10", 2, 1124013599.0),
 ("192.0.43.11", 1, 1124013598.0)]
-self.filter.addLogPath(GetFailures.FILENAME_MULTILINE)
+self.filter.addLogPath(GetFailures.FILENAME_MULTILINE, autoSeek=False)
 self.filter.addFailRegex("^.*rsyncd\[(?P<pid>\d+)\]: connect from .+ \(<HOST>\)$<SKIPLINES>^.+ rsyncd\[(?P=pid)\]: rsync error: .*$")
 self.filter.setMaxLines(100)
 self.filter.setMaxRetry(1)

@@ -1093,7 +1193,7 @@ class GetFailures(LogCaptureTestCase):
 
 def testGetFailuresMultiLineIgnoreRegex(self):
 output = [("192.0.43.10", 2, 1124013599.0)]
-self.filter.addLogPath(GetFailures.FILENAME_MULTILINE)
+self.filter.addLogPath(GetFailures.FILENAME_MULTILINE, autoSeek=False)
 self.filter.addFailRegex("^.*rsyncd\[(?P<pid>\d+)\]: connect from .+ \(<HOST>\)$<SKIPLINES>^.+ rsyncd\[(?P=pid)\]: rsync error: .*$")
 self.filter.addIgnoreRegex("rsync error: Received SIGINT")
 self.filter.setMaxLines(100)

@@ -1109,7 +1209,7 @@ class GetFailures(LogCaptureTestCase):
 output = [("192.0.43.10", 2, 1124013599.0),
 ("192.0.43.11", 1, 1124013598.0),
 ("192.0.43.15", 1, 1124013598.0)]
-self.filter.addLogPath(GetFailures.FILENAME_MULTILINE)
+self.filter.addLogPath(GetFailures.FILENAME_MULTILINE, autoSeek=False)
 self.filter.addFailRegex("^.*rsyncd\[(?P<pid>\d+)\]: connect from .+ \(<HOST>\)$<SKIPLINES>^.+ rsyncd\[(?P=pid)\]: rsync error: .*$")
 self.filter.addFailRegex("^.* sendmail\[.*, msgid=<(?P<msgid>[^>]+).*relay=\[<HOST>\].*$<SKIPLINES>^.+ spamd: result: Y \d+ .*,mid=<(?P=msgid)>(,bayes=[.\d]+)?(,autolearn=\S+)?\s*$")
 self.filter.setMaxLines(100)
@@ -76,12 +76,15 @@ class TransmitterBase(unittest.TestCase):
 self.server.quit()
 
 def setGetTest(self, cmd, inValue, outValue=(None,), outCode=0, jail=None, repr_=False):
+"""Process set/get commands and compare both return values
+with outValue if it was given otherwise with inValue"""
 setCmd = ["set", cmd, inValue]
 getCmd = ["get", cmd]
 if jail is not None:
 setCmd.insert(1, jail)
 getCmd.insert(1, jail)
 
+# if outValue was not given (now None is allowed return/compare value also)
 if outValue == (None,):
 outValue = inValue
 

@@ -173,7 +176,7 @@ class Transmitter(TransmitterBase):
 self.assertEqual(self.transm.proceed(["sleep", "0.0001"]), (0, None))
 
 def testDatabase(self):
-if not unittest.F2B.fast:
+if not unittest.F2B.memory_db:
 tmp, tmpFilename = tempfile.mkstemp(".db", "fail2ban_")
 else: # pragma: no cover
 tmpFilename = ':memory:'

@@ -208,7 +211,7 @@ class Transmitter(TransmitterBase):
 self.assertEqual(self.transm.proceed(
 ["set", "dbfile", "None"]),
 (0, None))
-if not unittest.F2B.fast:
+if not unittest.F2B.memory_db:
 os.close(tmp)
 os.unlink(tmpFilename)
 
@@ -23,6 +23,7 @@ __copyright__ = "Copyright (c) 2013 Yaroslav Halchenko"
 __license__ = "GPL"
 
 import logging
+import optparse
 import os
 import re
 import sys

@@ -35,6 +36,7 @@ from ..server.filter import DNSUtils
 from ..server.mytime import MyTime
 from ..server.utils import Utils
 
+
 logSys = getLogger(__name__)
 
 CONFIG_DIR = os.environ.get('FAIL2BAN_CONFIG_DIR', None)

@@ -47,10 +49,14 @@ if not CONFIG_DIR:
 CONFIG_DIR = '/etc/fail2ban'
 
 
-class F2B():
+class F2B(optparse.Values):
-def __init__(self, fast=False, no_network=False):
+def __init__(self, opts={}):
-self.fast=fast
+self.__dict__ = opts.__dict__ if opts else {
-self.no_network=no_network
+'fast': False, 'memory_db':False, 'no_gamin': False, 'no_network': False,
+}
+if self.fast:
+self.memory_db = True
+self.no_gamin = True
 def SkipIfFast(self):
 pass
 def SkipIfNoNetwork(self):

@@ -58,13 +64,11 @@ class F2B():
 
 
 def initTests(opts):
-if opts: # pragma: no cover
+unittest.F2B = F2B(opts)
-unittest.F2B = F2B(opts.fast, opts.no_network)
-else:
-unittest.F2B = F2B()
 # --fast :
 if unittest.F2B.fast: # pragma: no cover
-# prevent long sleeping during test cases...
+# racy decrease default sleep intervals to test it faster
+# (prevent long sleeping during test cases ... less time goes to sleep):
 Utils.DEFAULT_SLEEP_TIME = 0.0025
 Utils.DEFAULT_SLEEP_INTERVAL = 0.0005
 def F2B_SkipIfFast():

@@ -74,7 +78,7 @@ def initTests(opts):
 # sleep intervals are large - use replacement for sleep to check time to sleep:
 _org_sleep = time.sleep
 def _new_sleep(v):
-if (v > Utils.DEFAULT_SLEEP_TIME):
+if (v > Utils.DEFAULT_SLEEP_TIME): # pragma: no cover
 raise ValueError('[BAD-CODE] To long sleep interval: %s, try to use conditional Utils.wait_for instead' % v)
 _org_sleep(min(v, Utils.DEFAULT_SLEEP_TIME))
 time.sleep = _new_sleep

@@ -103,7 +107,7 @@ def setUpMyTime():
 
 def tearDownMyTime():
 os.environ.pop('TZ')
-if old_TZ:
+if old_TZ: # pragma: no cover
 os.environ['TZ'] = old_TZ
 time.tzset()
 MyTime.myTime = None

@@ -135,11 +139,15 @@ def gatherTests(regexps=None, opts=None):
 _regexps = [re.compile(r) for r in regexps]
 
 def addTest(self, suite):
-suite_str = str(suite)
+matched = []
+for test in suite:
+s = str(test)
 for r in self._regexps:
-if r.search(suite_str):
+if r.search(s):
-super(FilteredTestSuite, self).addTest(suite)
+matched.append(test)
-return
+break
+for test in matched:
+super(FilteredTestSuite, self).addTest(test)
 
 tests = FilteredTestSuite()
 

@@ -160,7 +168,7 @@ def gatherTests(regexps=None, opts=None):
 try:
 import dns
 tests.addTest(unittest.makeSuite(banmanagertestcase.StatusExtendedCymruInfo))
-except ImportError:
+except ImportError: # pragma: no cover
 pass
 # ClientReaders
 tests.addTest(unittest.makeSuite(clientreadertestcase.ConfigReaderTest))

@@ -225,7 +233,8 @@ def gatherTests(regexps=None, opts=None):
 try:
 # because gamin can be very slow on some platforms (and can produce many failures
 # with fast sleep interval) - skip it by fast run:
-unittest.F2B.SkipIfFast()
+if unittest.F2B.fast or unittest.F2B.no_gamin: # pragma: no cover
+raise Exception('Skip, fast: %s, no_gamin: %s' % (unittest.F2B.fast, unittest.F2B.no_gamin))
 from ..server.filtergamin import FilterGamin
 filters.append(FilterGamin)
 except Exception, e: # pragma: no cover