Merge pull request #1209 from sebres/ssh-filter-new-regexp

sshd-aggressive (new ssh rules added (gh-864) and code review...)
Serg G. Brester 2017-01-21 16:29:42 +01:00 committed by GitHub
commit 3ccb026840
12 changed files with 197 additions and 82 deletions


@@ -22,6 +22,19 @@ releases.
    (0.10th resp. IPv6 relevant only, amend for gh-1479)
* config/pathes-freebsd.conf
  - Fixed filenames for apache and nginx log files (gh-1667)
* filter.d/sshd.conf
  - new aggressive rules (gh-864):
    - Connection reset by peer (multi-line rule during authorization process)
    - No supported authentication methods available
  - single-line and multi-line expressions optimized, added optional prefixes
    and suffix (logged from several ssh versions), according to gh-1206;
  - fixed expression for received disconnect auth fail (optional space after the port
    part, gh-1652)
* filter.d/suhosin.conf
  - greedy catch-all before `<HOST>` fixed (potential vulnerability)
* Filter tests extended with a check that every config regexp containing a greedy catch-all
  before `<HOST>` is hard-anchored at the end or followed by a precise sub-expression after `<HOST>`
### New Features
* New Actions:
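The gh-1652 fix noted above is easiest to see on concrete lines: newer OpenSSH prints `... port <n>:<code>: ...` with no space before the colon, while older logs read `... from <ip>: <code>: ...`. A minimal sketch in plain Python `re` (illustrative only: the real filter composes this from `__prefix_line`, `<HOST>` and `__on_port_opt`, and fail2ban strips the timestamp before matching):

```python
import re

# Simplified stand-ins for fail2ban's templates (<HOST> reduced to an IPv4-ish
# pattern, syslog prefix reduced to "host sshd[pid]: ", timestamp already removed).
rx = re.compile(r"^\S+ sshd\[\d+\]: (?:error: )?Received disconnect from "
                r"(?P<host>[\d.]+)(?: port \d+)?:\s*3: .*: Auth fail(?: \[preauth\])?$")

old_style = "mdop sshd[4931]: Received disconnect from 89.24.13.192: 3: com.jcraft.jsch.JSchException: Auth fail"
new_style = "host sshd[11808]: error: Received disconnect from 10.0.0.1 port 7736:3: com.jcraft.jsch.JSchException: Auth fail [preauth]"

for line in (old_style, new_style):
    print(rx.match(line).group("host"))   # 89.24.13.192, then 10.0.0.1
```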


@@ -25,7 +25,7 @@ failregex = ^%(__prefix_line)s\w{14}: ruleset=check_rcpt, arg1=(?P<email><\S+@\S
            ^%(__prefix_line)sruleset=check_relay, arg1=(?P<dom>\S+), arg2=<HOST>, relay=((?P=dom) )?\[(\d+\.){3}\d+\]( \(may be forged\))?, reject=421 4\.3\.2 (Connection rate limit exceeded\.|Too many open connections\.)$
            ^%(__prefix_line)s\w{14}: rejecting commands from (\S* )?\[<HOST>\] due to pre-greeting traffic after \d+ seconds$
            ^%(__prefix_line)s\w{14}: (\S+ )?\[<HOST>\]: ((?i)expn|vrfy) \S+ \[rejected\]$
-           ^(?P<__prefix>%(__prefix_line)s\w+: )<[^@]+@[^>]+>\.\.\. No such user here<SKIPLINES>(?P=__prefix)from=<[^@]+@[^>]+>, size=\d+, class=\d+, nrcpts=\d+, bodytype=\w+, proto=E?SMTP, daemon=MTA, relay=\S+ \[<HOST>\]$
+           ^(?P<__prefix>%(__prefix_line)s\w+: )<[^@]+@[^>]+>\.\.\. No such user here$<SKIPLINES>^(?P=__prefix)from=<[^@]+@[^>]+>, size=\d+, class=\d+, nrcpts=\d+, bodytype=\w+, proto=E?SMTP, daemon=MTA, relay=\S+ \[<HOST>\]$
ignoreregex =


@@ -0,0 +1,11 @@
# Fail2Ban aggressive ssh filter for attempted exploits
#
# Includes failregex of both sshd and sshd-ddos filters
#
[INCLUDES]
before = sshd.conf
[Definition]
mode = %(aggressive)s
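The new sshd-aggressive.conf stays this small because the actual regexps live in the reworked sshd.conf further down: the rule sets are defined as `normal`, `ddos` and `aggressive` values and selected through `mode`. A rough sketch of that `%(...)s` composition using Python's stock `configparser` (fail2ban's own reader additionally resolves the `[INCLUDES]`/`before` mechanics; the rule bodies below are placeholders):

```python
import configparser

# Placeholder rule bodies - the point is only how mode/normal/ddos/aggressive
# compose through %(...)s interpolation, mirroring the reworked sshd.conf.
SSHD_LIKE = """
[DEFAULT]
normal = <normal failregex lines>
ddos = <ddos failregex lines>
aggressive = %(normal)s
             %(ddos)s
mode = %(normal)s

[Definition]
failregex = %(mode)s
"""

cfg = configparser.ConfigParser()
cfg.read_string(SSHD_LIKE)
print(cfg.get("Definition", "failregex"))   # only the "normal" rules

# sshd-aggressive.conf effectively just flips the mode:
cfg.set("DEFAULT", "mode", "%(aggressive)s")
print(cfg.get("Definition", "failregex"))   # "normal" plus "ddos" rules
```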


@@ -10,20 +10,8 @@
[INCLUDES]
-# Read common prefixes. If any customizations available -- read them from
-# common.local
-before = common.conf
+before = sshd.conf
[Definition]
-_daemon = sshd
-failregex = ^%(__prefix_line)sDid not receive identification string from <HOST>\s*$
-ignoreregex =
-[Init]
-journalmatch = _SYSTEMD_UNIT=sshd.service + _COMM=sshd
-# Author: Yaroslav Halchenko
+mode = %(ddos)s


@@ -14,27 +14,54 @@
# common.local
before = common.conf
-[Definition]
+[DEFAULT]
_daemon = sshd
-failregex = ^%(__prefix_line)s(?:error: PAM: )?[aA]uthentication (?:failure|error|failed) for .* from <HOST>( via \S+)?\s*$
-            ^%(__prefix_line)s(?:error: PAM: )?User not known to the underlying authentication module for .* from <HOST>\s*$
-            ^%(__prefix_line)sFailed \S+ for (?P<cond_inv>invalid user )?(?P<user>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)) from <HOST>(?: port \d+)?(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
-            ^%(__prefix_line)sROOT LOGIN REFUSED.* FROM <HOST>\s*$
-            ^%(__prefix_line)s[iI](?:llegal|nvalid) user .*? from <HOST>(?: port \d+)?\s*$
-            ^%(__prefix_line)sUser .+ from <HOST> not allowed because not listed in AllowUsers\s*$
-            ^%(__prefix_line)sUser .+ from <HOST> not allowed because listed in DenyUsers\s*$
-            ^%(__prefix_line)sUser .+ from <HOST> not allowed because not in any group\s*$
-            ^%(__prefix_line)srefused connect from \S+ \(<HOST>\)\s*$
-            ^%(__prefix_line)s(?:error: )?Received disconnect from <HOST>: 3: .*: Auth fail(?: \[preauth\])?$
-            ^%(__prefix_line)sUser .+ from <HOST> not allowed because a group is listed in DenyGroups\s*$
-            ^%(__prefix_line)sUser .+ from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*$
-            ^(?P<__prefix>%(__prefix_line)s)User .+ not allowed because account is locked<SKIPLINES>(?P=__prefix)(?:error: )?Received disconnect from <HOST>: 11: .+ \[preauth\]$
-            ^(?P<__prefix>%(__prefix_line)s)Disconnecting: Too many authentication failures for .+? \[preauth\]<SKIPLINES>(?P=__prefix)(?:error: )?Connection closed by <HOST> \[preauth\]$
-            ^(?P<__prefix>%(__prefix_line)s)Connection from <HOST> port \d+(?: on \S+ port \d+)?<SKIPLINES>(?P=__prefix)Disconnecting: Too many authentication failures for .+? \[preauth\]$
-            ^%(__prefix_line)s(error: )?maximum authentication attempts exceeded for .* from <HOST>(?: port \d*)?(?: ssh\d*)? \[preauth\]$
-            ^%(__prefix_line)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=<HOST>\s.*$
+# optional prefix (logged from several ssh versions) like "error: ", "error: PAM: " or "fatal: "
+__pref = (?:(?:error|fatal): (?:PAM: )?)?
+# optional suffix (logged from several ssh versions) like " [preauth]"
+__suff = (?: \[preauth\])?\s*
+__on_port_opt = (?: port \d+)?(?: on \S+(?: port \d+)?)?
+# single line prefix:
+__prefix_line_sl = %(__prefix_line)s%(__pref)s
+# multi line prefixes (for first and second lines):
+__prefix_line_ml1 = (?P<__prefix>%(__prefix_line)s)%(__pref)s
+__prefix_line_ml2 = %(__suff)s$<SKIPLINES>^(?P=__prefix)%(__pref)s
+mode = %(normal)s
+normal = ^%(__prefix_line_sl)s[aA]uthentication (?:failure|error|failed) for .* from <HOST>( via \S+)?\s*%(__suff)s$
+         ^%(__prefix_line_sl)sUser not known to the underlying authentication module for .* from <HOST>\s*%(__suff)s$
+         ^%(__prefix_line_sl)sFailed \S+ for (?P<cond_inv>invalid user )?(?P<user>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)) from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
+         ^%(__prefix_line_sl)sROOT LOGIN REFUSED.* FROM <HOST>\s*%(__suff)s$
+         ^%(__prefix_line_sl)s[iI](?:llegal|nvalid) user .*? from <HOST>%(__on_port_opt)s\s*$
+         ^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because not listed in AllowUsers\s*%(__suff)s$
+         ^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because listed in DenyUsers\s*%(__suff)s$
+         ^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because not in any group\s*%(__suff)s$
+         ^%(__prefix_line_sl)srefused connect from \S+ \(<HOST>\)\s*%(__suff)s$
+         ^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$
+         ^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because a group is listed in DenyGroups\s*%(__suff)s$
+         ^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$
+         ^%(__prefix_line_sl)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=<HOST>\s.*%(__suff)s$
+         ^%(__prefix_line_sl)s(error: )?maximum authentication attempts exceeded for .* from <HOST>%(__on_port_opt)s(?: ssh\d*)? \[preauth\]$
+         ^%(__prefix_line_ml1)sUser .+ not allowed because account is locked%(__prefix_line_ml2)sReceived disconnect from <HOST>: 11: .+%(__suff)s$
+         ^%(__prefix_line_ml1)sDisconnecting: Too many authentication failures for .+?%(__prefix_line_ml2)sConnection closed by <HOST>%(__suff)s$
+         ^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sDisconnecting: Too many authentication failures for .+%(__suff)s$
+ddos = ^%(__prefix_line_sl)sDid not receive identification string from <HOST>%(__suff)s$
+       ^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s: 14: No supported authentication methods available%(__suff)s$
+       ^%(__prefix_line_sl)sUnable to negotiate with <HOST>%(__on_port_opt)s: no matching (?:cipher|key exchange method) found.
+       ^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sUnable to negotiate a (?:cipher|key exchange method)%(__suff)s$
+       ^%(__prefix_line_ml1)sSSH: Server;Ltype: (?:Authname|Version|Kex);Remote: <HOST>-\d+;[A-Z]\w+:.*%(__prefix_line_ml2)sRead from socket failed: Connection reset by peer%(__suff)s$
+aggressive = %(normal)s
+             %(ddos)s
+[Definition]
+failregex = %(mode)s
ignoreregex =
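The most interesting part of the rework is the multi-line plumbing: `__prefix_line_ml1` captures the log prefix of the first line, and `__prefix_line_ml2` closes it with the optional suffix, skips intermediate lines and requires the second line to carry the same prefix (same PID). A rough model of the gh-864 "Connection reset by peer" rule in plain Python `re` (assumptions: the timestamp is already stripped, as fail2ban does before matching; `<HOST>` is reduced to an IPv4 pattern; the fail2ban-specific `<SKIPLINES>` tag is approximated as "any complete lines in between"):

```python
import re

__pref = r"(?:(?:error|fatal): (?:PAM: )?)?"
__suff = r"(?: \[preauth\])?\s*"
ml1 = r"(?P<__prefix>\S+ sshd\[\d+\]: )" + __pref        # ~ __prefix_line_ml1
ml2 = __suff + r"$\n(?:.*\n)*?(?P=__prefix)" + __pref    # ~ __prefix_line_ml2 with the <SKIPLINES> stand-in

rx = re.compile(
    "^" + ml1 +
    r"SSH: Server;Ltype: (?:Authname|Version|Kex);Remote: (?P<host>[\d.]+)-\d+;[A-Z]\w+:.*" +
    ml2 + r"Read from socket failed: Connection reset by peer" + __suff + "$",
    re.MULTILINE)

buf = "\n".join([
    "host sshd[32686]: SSH: Server;Ltype: Version;Remote: 127.0.0.1-1780;Protocol: 2.0;Client: libssh2_1.4.3",
    "host sshd[32686]: SSH: Server;Ltype: Authname;Remote: 127.0.0.1-1780;Name: root [preauth]",
    "host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]",
]) + "\n"

m = rx.search(buf)
print(m.group("host") if m else None)   # 127.0.0.1 - both lines had to share the "host sshd[32686]: " prefix
```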


@@ -17,7 +17,7 @@ _daemon = (?:lighttpd|suhosin)
_lighttpd_prefix = (?:\(mod_fastcgi\.c\.\d+\) FastCGI-stderr:\s)
-failregex = ^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .* \(attacker '<HOST>', file '.*'(?:, line \d+)?\)$
+failregex = ^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .*? \(attacker '<HOST>', file '[^']*'(?:, line \d+)?\)$
ignoreregex =
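This small-looking change is exactly what the new `RE_WRONG_GREED` check in samplestestcase.py (further down) is designed to flag: a greedy `.*`/`.+` before `<HOST>` combined with another greedy catch-all after it (or a missing hard `$` anchor) can let attacker-influenced message content shift which substring gets captured as the host. A small sketch of that check applied to the old and new suhosin patterns, using the literal `<HOST>` tag where the real test substitutes the expanded `Regex('<HOST>').getRegex()`:

```python
import re

RE_HOST = '<HOST>'   # the real test uses the expanded host regexp here
RE_WRONG_GREED = re.compile(r'\.[+\*](?!\?).*' + re.escape(RE_HOST) +
                            r'.*(?:\.[+\*].*|[^\$])$')

old = r"^ALERT - .* \(attacker '<HOST>', file '.*'(?:, line \d+)?\)$"
new = r"^ALERT - .*? \(attacker '<HOST>', file '[^']*'(?:, line \d+)?\)$"

print(bool(RE_WRONG_GREED.search(old)))   # True  - greedy .* before <HOST> plus a greedy .* after it
print(bool(RE_WRONG_GREED.search(new)))   # False - non-greedy .*? and the precise '[^']*' file field
```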


@@ -223,6 +223,8 @@ action = %(action_)s
[sshd]
# To use more aggressive sshd filter (inclusive sshd-ddos failregex):
#filter = sshd-aggressive
port = ssh
logpath = %(sshd_log)s
backend = %(sshd_backend)s


@@ -597,7 +597,7 @@ class JailsReaderTest(LogCaptureTestCase):
		# grab all filter names
		filters = set(os.path.splitext(os.path.split(a)[1])[0]
			for a in glob.glob(os.path.join('config', 'filter.d', '*.conf'))
-			if not a.endswith('common.conf'))
+			if not (a.endswith('common.conf') or a.endswith('-aggressive.conf')))
		# get filters of all jails (filter names without options inside filter[...])
		filters_jail = set(
			JailReader.extractOptions(jail.options['filter'])[0] for jail in jails.jails


@@ -139,6 +139,8 @@ Nov 23 21:50:37 sshd[7148]: Connection closed by 61.0.0.1 [preauth]
# failJSON: { "time": "2005-07-13T18:44:28", "match": true , "host": "89.24.13.192", "desc": "from gh-289" }
Jul 13 18:44:28 mdop sshd[4931]: Received disconnect from 89.24.13.192: 3: com.jcraft.jsch.JSchException: Auth fail
# failJSON: { "time": "2005-01-02T01:18:41", "match": true , "host": "10.0.0.1", "desc": "space after port is optional (gh-1652)" }
Jan 2 01:18:41 host sshd[11808]: error: Received disconnect from 10.0.0.1 port 7736:3: com.jcraft.jsch.JSchException: Auth fail [preauth]
# failJSON: { "time": "2004-10-01T17:27:44", "match": true , "host": "94.249.236.6", "desc": "newer format per commit 36919d9f" }
Oct 1 17:27:44 localhost sshd[24077]: error: Received disconnect from 94.249.236.6: 3: com.jcraft.jsch.JSchException: Auth fail [preauth]


@@ -0,0 +1,3 @@
# sshd-aggressive includes sshd and sshd-ddos failregex's:
# addFILE: "sshd"
# addFILE: "sshd-ddos"
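This new sample file is pure test data: the two `addFILE` directives tell the extended sample-log loader (see the samplestestcase.py changes below) to pull the sshd and sshd-ddos samples into the same run, so the aggressive filter is exercised against both sets. A minimal sketch of how the loader tells the two comment directives apart, using the same regexp and JSON decoding as the test:

```python
import json
import re

for line in ('# addFILE: "sshd"',
             '# failJSON: { "match": false }'):
    m = re.match(r"^# ?(failJSON|addFILE):(.+)$", line)
    # group(1) names the directive; group(2) is a JSON payload:
    # a file name to enqueue for addFILE, expected match data for failJSON.
    print(m.group(1), json.loads(m.group(2)))
# -> addFILE sshd
# -> failJSON {'match': False}
```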


@@ -1,3 +1,39 @@
# http://forums.powervps.com/showthread.php?t=1667
# failJSON: { "time": "2005-06-07T01:10:56", "match": true , "host": "69.61.56.114" }
Jun 7 01:10:56 host sshd[5937]: Did not receive identification string from 69.61.56.114
# gh-864(1):
# failJSON: { "match": false }
Nov 24 23:46:39 host sshd[32686]: SSH: Server;Ltype: Version;Remote: 127.0.0.1-1780;Protocol: 2.0;Client: libssh2_1.4.3
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (1)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# gh-864(2):
# failJSON: { "match": false }
Nov 24 23:46:40 host sshd[32686]: SSH: Server;Ltype: Kex;Remote: 127.0.0.1-1780;Enc: aes128-ctr;MAC: hmac-sha1;Comp: none [preauth]
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (2)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# gh-864(3):
# failJSON: { "match": false }
Nov 24 23:46:41 host sshd[32686]: SSH: Server;Ltype: Authname;Remote: 127.0.0.1-1780;Name: root [preauth]
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (3)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# several other cases from gh-864:
# failJSON: { "time": "2004-11-25T01:34:12", "match": true , "host": "127.0.0.1", "desc": "No supported authentication methods" }
Nov 25 01:34:12 srv sshd[123]: Received disconnect from 127.0.0.1: 14: No supported authentication methods available [preauth]
# failJSON: { "time": "2004-11-25T01:35:13", "match": true , "host": "127.0.0.1", "desc": "No supported authentication methods" }
Nov 25 01:35:13 srv sshd[123]: error: Received disconnect from 127.0.0.1: 14: No supported authentication methods available [preauth]
# gh-1545:
# failJSON: { "time": "2004-11-26T13:03:29", "match": true , "host": "192.0.2.1", "desc": "No matching cipher" }
Nov 26 13:03:29 srv sshd[45]: Unable to negotiate with 192.0.2.1 port 55419: no matching cipher found. Their offer: aes256-cbc,rijndael-cbc@lysator.liu.se,aes192-cbc,aes128-cbc,arcfour128,arcfour,3des-cbc,none [preauth]
# gh-1117:
# failJSON: { "time": "2004-11-26T13:03:30", "match": true , "host": "192.0.2.2", "desc": "No matching key exchange method" }
Nov 26 13:03:30 srv sshd[45]: fatal: Unable to negotiate with 192.0.2.2 port 55419: no matching key exchange method found. Their offer: diffie-hellman-group1-sha1
# failJSON: { "match": false }
Nov 26 15:03:30 host sshd[22440]: Connection from 192.0.2.3 port 39678 on 192.168.1.9 port 22
# failJSON: { "time": "2004-11-26T15:03:31", "match": true , "host": "192.0.2.3", "desc": "Multiline - no matching key exchange method" }
Nov 26 15:03:31 host sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]
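A quick sanity check of the new single-line ddos rule against the gh-1545/gh-1117 samples above, again in plain `re` with a simplified prefix and host (fail2ban strips the timestamp itself and uses its own `<HOST>` pattern; the rule is not `$`-anchored, so the trailing "Their offer: ..." part may vary):

```python
import re

rx = re.compile(r"^\S+ sshd\[\d+\]: (?:(?:error|fatal): (?:PAM: )?)?"
                r"Unable to negotiate with (?P<host>[\d.]+)(?: port \d+)?(?: on \S+(?: port \d+)?)?"
                r": no matching (?:cipher|key exchange method) found\.")

samples = (
    "srv sshd[45]: Unable to negotiate with 192.0.2.1 port 55419: no matching cipher found. Their offer: aes256-cbc,3des-cbc",
    "srv sshd[45]: fatal: Unable to negotiate with 192.0.2.2 port 55419: no matching key exchange method found. Their offer: diffie-hellman-group1-sha1",
)
for s in samples:
    print(rx.match(s).group("host"))   # 192.0.2.1, then 192.0.2.2
```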


@@ -31,6 +31,7 @@ import re
import sys
import time
import unittest
+from ..server.failregex import Regex
from ..server.filter import Filter
from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR
@@ -38,6 +39,10 @@ from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR
TEST_CONFIG_DIR = os.path.join(os.path.dirname(__file__), "config")
TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")

+# regexp to test greedy catch-all should be not-greedy:
+RE_HOST = Regex('<HOST>').getRegex()
+RE_WRONG_GREED = re.compile(r'\.[+\*](?!\?).*' + re.escape(RE_HOST) + r'.*(?:\.[+\*].*|[^\$])$')

class FilterSamplesRegex(unittest.TestCase):
@@ -60,6 +65,19 @@
			>= 10,
			"Expected more FilterSampleRegexs tests")

+	def testReWrongGreedyCatchAll(self):
+		"""Tests regexp RE_WRONG_GREED is intact (positive/negative)"""
+		self.assertTrue(
+			RE_WRONG_GREED.search('greedy .* test' + RE_HOST + ' test not hard-anchored'))
+		self.assertTrue(
+			RE_WRONG_GREED.search('greedy .+ test' + RE_HOST + ' test vary .* anchored$'))
+		self.assertFalse(
+			RE_WRONG_GREED.search('greedy .* test' + RE_HOST + ' test no catch-all, hard-anchored$'))
+		self.assertFalse(
+			RE_WRONG_GREED.search('non-greedy .*? test' + RE_HOST + ' test not hard-anchored'))
+		self.assertFalse(
+			RE_WRONG_GREED.search('non-greedy .+? test' + RE_HOST + ' test vary catch-all .* anchored$'))

def testSampleRegexsFactory(name, basedir):
	def testFilter(self):
@@ -85,60 +103,75 @@ def testFilter(self):
			os.path.isfile(os.path.join(TEST_FILES_DIR, "logs", name)),
			"No sample log file available for '%s' filter" % name)

-		logFile = fileinput.FileInput(
-			os.path.join(TEST_FILES_DIR, "logs", name))
		regexsUsed = set()
+		filenames = [name]
+		i = 0
+		while i < len(filenames):
+			filename = filenames[i]; i += 1;
+			logFile = fileinput.FileInput(os.path.join(TEST_FILES_DIR, "logs",
+				filename))

+			# test regexp contains greedy catch-all before <HOST>, that is
+			# not hard-anchored at end or has not precise sub expression after <HOST>:
+			for fr in self.filter.getFailRegex():
+				if RE_WRONG_GREED.search(fr): #pragma: no cover
+					raise AssertionError("Following regexp of \"%s\" contains greedy catch-all before <HOST>, "
+						"that is not hard-anchored at end or has not precise sub expression after <HOST>:\n%s" %
+						(name, str(fr).replace(RE_HOST, '<HOST>')))

			for line in logFile:
-				jsonREMatch = re.match("^# ?failJSON:(.+)$", line)
+				jsonREMatch = re.match("^# ?(failJSON|addFILE):(.+)$", line)
				if jsonREMatch:
					try:
-						faildata = json.loads(jsonREMatch.group(1))
+						faildata = json.loads(jsonREMatch.group(2))
+						if jsonREMatch.group(1) == 'addFILE':
+							filenames.append(faildata)
+							continue
					except ValueError as e:
						raise ValueError("%s: %s:%i" %
							(e, logFile.filename(), logFile.filelineno()))
					line = next(logFile)
				elif line.startswith("#") or not line.strip():
					continue
				else:
					faildata = {}

				ret = self.filter.processLine(
					line, returnRawHost=True, checkAllRegex=True)[1]
				if not ret:
					# Check line is flagged as none match
					self.assertFalse(faildata.get('match', True),
						"Line not matched when should have: %s:%i %r" %
						(logFile.filename(), logFile.filelineno(), line))
				elif ret:
					# Check line is flagged to match
					self.assertTrue(faildata.get('match', False),
						"Line matched when shouldn't have: %s:%i %r" %
						(logFile.filename(), logFile.filelineno(), line))
					self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" %
						(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))

					# Verify timestamp and host as expected
					failregex, host, fail2banTime, lines = ret[0]
					self.assertEqual(host, faildata.get("host", None))

					t = faildata.get("time", None)
					try:
						jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
					except ValueError:
						jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")

					jsonTime = time.mktime(jsonTimeLocal.timetuple())
					jsonTime += jsonTimeLocal.microsecond / 1000000

					self.assertEqual(fail2banTime, jsonTime,
						"UTC Time mismatch fail2ban %s (%s) != failJson %s (%s) (diff %.3f seconds) on: %s:%i %r:" %
						(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
						jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
						fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) )

					regexsUsed.add(failregex)

		for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()):
			self.assertTrue(
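One design detail of the reworked loader worth spelling out: it walks `filenames` by index rather than with a `for` loop because `addFILE` directives append new entries while the list is being processed. A tiny standalone sketch of that pattern, using the file names from this PR's sample data:

```python
filenames, i = ["sshd-aggressive"], 0
while i < len(filenames):
    name = filenames[i]; i += 1
    if name == "sshd-aggressive":
        # in the real test this happens when "# addFILE: ..." lines are read
        filenames += ["sshd", "sshd-ddos"]
print(filenames)   # ['sshd-aggressive', 'sshd', 'sshd-ddos']
```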