Merge remote-tracking branch 'upstream/master'

pull/1264/head
Ross Brown 2015-12-17 18:01:17 +00:00
commit 8d12dba245
9 changed files with 70 additions and 58 deletions


@@ -45,6 +45,8 @@ ver. 0.9.4 (2015/XX/XXX) - wanna-be-released
murmur/mumble-server with an invalid server password or certificate.
* New jails:
- murmur - bans TCP and UDP from the bad host on the default murmur port.
* sshd filter got new failregex to match "maximum authentication
attempts exceeded" (introduced in openssh 6.8)
- Enhancements:
* Do not rotate empty log files
@@ -63,6 +65,9 @@ ver. 0.9.4 (2015/XX/XXX) - wanna-be-released
rest api and web interface (gh-1223)
* Add *_backend options for services to allow distros to set the default
backend per service, set default to systemd for Fedora as appropriate
* Performance improvements while monitoring large number of files (gh-1265).
Use associative array (dict) for monitored log files to speed up lookup
operations. Thanks @kshetragia
ver. 0.9.3 (2015/08/01) - lets-all-stay-friends
----------
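
The performance entry above (gh-1265) swaps the list of monitored log containers for a dict keyed by file path, which is what the filter.py hunks further down implement. As a rough illustration of why that matters (a standalone sketch with made-up names, not fail2ban's actual classes), a membership test against a list scans every container, while a dict lookup is a single hash probe:

# Minimal sketch (hypothetical names, not fail2ban's API): list scan vs dict lookup.

class Container(object):
    """Stand-in for one monitored log file entry."""
    def __init__(self, path):
        self.path = path

# Old style: a list of containers; every lookup walks the whole list (O(n)).
containers = [Container("/var/log/app%d.log" % i) for i in range(10000)]

def contains_list(path):
    for c in containers:
        if c.path == path:
            return True
    return False

# New style: a dict keyed by path; lookups are constant time on average (O(1)).
logs = dict((c.path, c) for c in containers)

def contains_dict(path):
    return path in logs

print(contains_list("/var/log/app9999.log"))  # True, but scans 10000 entries
print(contains_dict("/var/log/app9999.log"))  # True, single hash lookup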


@@ -33,6 +33,7 @@ failregex = ^%(__prefix_line)s(?:error: PAM: )?[aA]uthentication (?:failure|erro
^(?P<__prefix>%(__prefix_line)s)User .+ not allowed because account is locked<SKIPLINES>(?P=__prefix)(?:error: )?Received disconnect from <HOST>: 11: .+ \[preauth\]$
^(?P<__prefix>%(__prefix_line)s)Disconnecting: Too many authentication failures for .+? \[preauth\]<SKIPLINES>(?P=__prefix)(?:error: )?Connection closed by <HOST> \[preauth\]$
^(?P<__prefix>%(__prefix_line)s)Connection from <HOST> port \d+(?: on \S+ port \d+)?<SKIPLINES>(?P=__prefix)Disconnecting: Too many authentication failures for .+? \[preauth\]$
^%(__prefix_line)s(error: )?maximum authentication attempts exceeded for .* from <HOST>(?: port \d*)?(?: ssh\d*)? \[preauth\]$
^%(__prefix_line)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=<HOST>\s.*$
ignoreregex =
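
The new failregex added above (and noted in the ChangeLog) targets the "maximum authentication attempts exceeded" message introduced in openssh 6.8. A rough standalone check of the pattern against the sample line from the test log further down, using simplified stand-ins assumed for fail2ban's %(__prefix_line)s and <HOST> substitutions (the real filter expands these differently):

# Rough check of the new pattern; the prefix and host expressions below are
# assumed, simplified stand-ins, not fail2ban's exact expansions.
import re

prefix = r"\S+ \d+ [\d:]+ \S+ sshd\[\d+\]: "    # simplified __prefix_line
host = r"(?P<host>\S+)"                         # stand-in for <HOST>
failregex = (prefix +
             r"(error: )?maximum authentication attempts exceeded for .* from " +
             host + r"(?: port \d*)?(?: ssh\d*)? \[preauth\]$")

line = ("Nov 23 21:50:37 myhost sshd[21810]: error: maximum authentication "
        "attempts exceeded for root from 61.0.0.1 port 49940 ssh2 [preauth]")

m = re.match(failregex, line)
print(m.group("host") if m else "no match")     # expected: 61.0.0.1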


@@ -552,7 +552,7 @@ class FileFilter(Filter):
def __init__(self, jail, **kwargs):
Filter.__init__(self, jail, **kwargs)
## The log file path.
self.__logPath = []
self.__logs = dict()
self.setLogEncoding("auto")
##
@@ -560,17 +560,17 @@ class FileFilter(Filter):
#
# @param path log file path
def addLogPath(self, path, tail = False):
if self.containsLogPath(path):
def addLogPath(self, path, tail=False):
if path in self.__logs:
logSys.error(path + " already exists")
else:
container = FileContainer(path, self.getLogEncoding(), tail)
log = FileContainer(path, self.getLogEncoding(), tail)
db = self.jail.database
if db is not None:
lastpos = db.addLog(self.jail, container)
lastpos = db.addLog(self.jail, log)
if lastpos and not tail:
container.setPos(lastpos)
self.__logPath.append(container)
log.setPos(lastpos)
self.__logs[path] = log
logSys.info("Added logfile = %s" % path)
self._addLogPath(path) # backend specific
@@ -585,15 +585,16 @@ class FileFilter(Filter):
# @param path the log file to delete
def delLogPath(self, path):
for log in self.__logPath:
if log.getFileName() == path:
self.__logPath.remove(log)
db = self.jail.database
if db is not None:
db.updateLog(self.jail, log)
logSys.info("Removed logfile = %s" % path)
self._delLogPath(path)
return
try:
log = self.__logs.pop(path)
except KeyError:
return
db = self.jail.database
if db is not None:
db.updateLog(self.jail, log)
logSys.info("Removed logfile = %s" % path)
self._delLogPath(path)
return
def _delLogPath(self, path): # pragma: no cover - overwritten function
# nothing to do by default
@@ -601,12 +602,12 @@ class FileFilter(Filter):
pass
##
# Get the log file path
# Get the log containers
#
# @return log file path
# @return log containers
def getLogPath(self):
return self.__logPath
def getLogs(self):
return self.__logs.values()
##
# Check whether path is already monitored.
@@ -615,10 +616,7 @@ class FileFilter(Filter):
# @return True if the path is already monitored else False
def containsLogPath(self, path):
for log in self.__logPath:
if log.getFileName() == path:
return True
return False
return path in self.__logs
##
# Set the log file encoding
@@ -629,7 +627,7 @@ class FileFilter(Filter):
if encoding.lower() == "auto":
encoding = locale.getpreferredencoding()
codecs.lookup(encoding) # Raise LookupError if invalid codec
for log in self.getLogPath():
for log in self.__logs.itervalues():
log.setEncoding(encoding)
self.__encoding = encoding
logSys.info("Set jail log file encoding to %s" % encoding)
@@ -642,11 +640,8 @@ class FileFilter(Filter):
def getLogEncoding(self):
return self.__encoding
def getFileContainer(self, path):
for log in self.__logPath:
if log.getFileName() == path:
return log
return None
def getLog(self, path):
return self.__logs.get(path, None)
##
# Gets all the failure in the log file.
@@ -656,13 +651,13 @@ class FileFilter(Filter):
# is created and is added to the FailManager.
def getFailures(self, filename):
container = self.getFileContainer(filename)
if container is None:
log = self.getLog(filename)
if log is None:
logSys.error("Unable to get failures in " + filename)
return False
# Try to open log file.
try:
has_content = container.open()
has_content = log.open()
# see http://python.org/dev/peps/pep-3151/
except IOError, e:
logSys.error("Unable to open %s" % filename)
@@ -683,22 +678,22 @@ class FileFilter(Filter):
# start reading tested to be empty container -- race condition
# might occur leading at least to tests failures.
while has_content:
line = container.readline()
line = log.readline()
if not line or not self.active:
# The jail reached the bottom or has been stopped
break
self.processLineAndAdd(line)
container.close()
log.close()
db = self.jail.database
if db is not None:
db.updateLog(self.jail, container)
db.updateLog(self.jail, log)
return True
def status(self, flavor="basic"):
"""Status of Filter plus files being monitored.
"""
ret = super(FileFilter, self).status(flavor=flavor)
path = [m.getFileName() for m in self.getLogPath()]
path = self.__logs.keys()
ret.append(("File list", path))
return ret
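
Taken together, the filter.py hunks above replace the list-valued __logPath with a path-keyed __logs dict and expose it through getLog(), getLogs(), and containsLogPath(); the filtergamin.py, filterpoll.py, and server.py hunks below simply switch callers to the new accessors. A compressed, hypothetical model of that bookkeeping (not fail2ban's actual FileFilter, which also updates the jail database and notifies the backend):

# Hypothetical, stripped-down model of the dict-based bookkeeping; names are
# illustrative only and mirror the methods visible in the diff above.
class LogRegistry(object):
    def __init__(self):
        self._logs = {}                      # path -> container, like self.__logs

    def add(self, path, container):
        if path in self._logs:               # membership test is a dict lookup
            raise ValueError("%s already exists" % path)
        self._logs[path] = container

    def remove(self, path):
        try:
            return self._logs.pop(path)      # pop the entry, ignore unknown paths
        except KeyError:
            return None

    def get(self, path):
        return self._logs.get(path, None)    # direct lookup instead of a scan

    def all(self):
        return list(self._logs.values())     # analogous to getLogs()

registry = LogRegistry()
registry.add("/var/log/auth.log", object())
print(registry.get("/var/log/auth.log") is not None)   # True
registry.remove("/var/log/auth.log")
print(registry.all())                                   # []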


@@ -129,6 +129,6 @@ class FilterGamin(FileFilter):
# Desallocates the resources used by Gamin.
def __cleanup(self):
for path in self.getLogPath():
self.monitor.stop_watch(path.getFileName())
for log in self.getLogs():
self.monitor.stop_watch(log.getFileName())
del self.monitor


@@ -88,10 +88,10 @@ class FilterPoll(FileFilter):
while self.active:
if logSys.getEffectiveLevel() <= 6:
logSys.log(6, "Woke up idle=%s with %d files monitored",
self.idle, len(self.getLogPath()))
self.idle, len(self.getLogs()))
if not self.idle:
# Get file modification
for container in self.getLogPath():
for container in self.getLogs():
filename = container.getFileName()
if self.isModified(filename):
self.getFailures(filename)


@@ -212,7 +212,7 @@ class Server:
filter_ = self.__jails[name].filter
if isinstance(filter_, FileFilter):
return [m.getFileName()
for m in filter_.getLogPath()]
for m in filter_.getLogs()]
else: # pragma: systemd no cover
logSys.info("Jail %s is not a FileFilter instance" % name)
return []


@@ -148,6 +148,9 @@ Feb 12 04:09:18 localhost sshd[26713]: Connection from 115.249.163.77 port 51353
# failJSON: { "time": "2005-02-12T04:09:21", "match": true , "host": "115.249.163.77", "desc": "Multiline match with interface address" }
Feb 12 04:09:21 localhost sshd[26713]: Disconnecting: Too many authentication failures for root [preauth]
# failJSON: { "time": "2004-11-23T21:50:37", "match": true , "host": "61.0.0.1", "desc": "New logline format as openssh 6.8 to replace prev multiline version" }
Nov 23 21:50:37 myhost sshd[21810]: error: maximum authentication attempts exceeded for root from 61.0.0.1 port 49940 ssh2 [preauth]
# failJSON: { "match": false }
Apr 27 13:02:04 host sshd[29116]: User root not allowed because account is locked
# failJSON: { "match": false }


@@ -852,12 +852,24 @@ class GetFailures(LogCaptureTestCase):
LogCaptureTestCase.tearDown(self)
def testTail(self):
# There must be no containers registered, otherwise [-1] indexing would be wrong
self.assertEqual(self.filter.getLogs(), [])
self.filter.addLogPath(GetFailures.FILENAME_01, tail=True)
self.assertEqual(self.filter.getLogPath()[-1].getPos(), 1653)
self.filter.getLogPath()[-1].close()
self.assertEqual(self.filter.getLogPath()[-1].readline(), "")
self.assertEqual(self.filter.getLogs()[-1].getPos(), 1653)
self.filter.getLogs()[-1].close()
self.assertEqual(self.filter.getLogs()[-1].readline(), "")
self.filter.delLogPath(GetFailures.FILENAME_01)
self.assertEqual(self.filter.getLogPath(),[])
self.assertEqual(self.filter.getLogs(), [])
def testNoLogAdded(self):
self.filter.addLogPath(GetFailures.FILENAME_01, tail=True)
self.assertTrue(self.filter.containsLogPath(GetFailures.FILENAME_01))
self.filter.delLogPath(GetFailures.FILENAME_01)
self.assertFalse(self.filter.containsLogPath(GetFailures.FILENAME_01))
# and unknown (safety and cover)
self.assertFalse(self.filter.containsLogPath('unknown.log'))
self.filter.delLogPath('unknown.log')
def testGetFailures01(self, filename=None, failures=None):
filename = filename or GetFailures.FILENAME_01


@@ -113,19 +113,15 @@ class TransmitterBase(unittest.TestCase):
self.assertEqual(
self.transm.proceed(["get", jail, cmd]), (0, []))
for n, value in enumerate(values):
self.assertEqual(
self.transm.proceed(["set", jail, cmdAdd, value]),
(0, values[:n+1]))
self.assertEqual(
self.transm.proceed(["get", jail, cmd]),
(0, values[:n+1]))
ret = self.transm.proceed(["set", jail, cmdAdd, value])
self.assertEqual((ret[0], sorted(ret[1])), (0, sorted(values[:n+1])))
ret = self.transm.proceed(["get", jail, cmd])
self.assertEqual((ret[0], sorted(ret[1])), (0, sorted(values[:n+1])))
for n, value in enumerate(values):
self.assertEqual(
self.transm.proceed(["set", jail, cmdDel, value]),
(0, values[n+1:]))
self.assertEqual(
self.transm.proceed(["get", jail, cmd]),
(0, values[n+1:]))
ret = self.transm.proceed(["set", jail, cmdDel, value])
self.assertEqual((ret[0], sorted(ret[1])), (0, sorted(values[n+1:])))
ret = self.transm.proceed(["get", jail, cmd])
self.assertEqual((ret[0], sorted(ret[1])), (0, sorted(values[n+1:])))
def jailAddDelRegexTest(self, cmd, inValues, outValues, jail):
cmdAdd = "add" + cmd