Mirror of https://github.com/fail2ban/fail2ban

Merge pull request #1825 from sebres/_0.10/postfix-filter-opti
0.10 - postfix filter optimizations

commit d334a36a60
ChangeLog
@@ -26,8 +26,14 @@ TODO: implementing of options resp. other tasks from PR #1346
   - rewritten using `prefregex` and used MLFID-related multi-line parsing;
   - optional parameter `mode` introduced: normal (default), extra or aggressive
 * `filter.d/haproxy-http-auth`: do not mistake client port for part of an IPv6 address (gh-1745)
-* `filter.d/postfix-sasl.conf`
+* `filter.d/postfix.conf`:
   - updated to latest postfix formats
+  - joined several postfix filters together (normalized and optimized version, gh-1825)
+  - introduced new parameter `mode` (see gh-1825): more (default, combines normal and rbl), auth, normal,
+    rbl, ddos, extra or aggressive (combines all)
+  - postfix postscreen (resp. other RBLs) compatibility fix (gh-1764, gh-1825)
+* `filter.d/postfix-rbl.conf`: removed (replaced with `postfix[mode=rbl]`)
+* `filter.d/postfix-sasl.conf`: removed (replaced with `postfix[mode=auth]`)
 * `filter.d/roundcube-auth.conf`:
   - fixed regex when logging authentication errors to journal instead of to a local file (gh-1159);
   - additionally fixed more complex injections on username (e.g. using a dot after a fake host).
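The composite modes named in the ChangeLog are built from the per-mode rule groups (`mdpr-*`/`mdre-*`) that the reworked `filter.d/postfix.conf` defines further down in this diff. A rough Python sketch of how the modes relate to those groups; the dict and function names here are illustrative only, not fail2ban's actual interpolation mechanism:

```python
# Illustrative mapping of composite modes to the mdpr-*/mdre-* rule groups
# defined in filter.d/postfix.conf below (names here are hypothetical).
MODE_RULES = {
    "normal":     ["normal"],
    "rbl":        ["rbl"],                      # also covered by the first "normal" rule
    "auth":       ["auth"],
    "ddos":       ["ddos"],
    "more":       ["normal"],                   # default; rbl already included in "normal"
    "extra":      ["auth", "normal"],
    "aggressive": ["auth", "normal", "ddos"],   # combines all
}

def enabled_rule_groups(mode="more"):
    """Return the rule groups a given filter mode enables."""
    return MODE_RULES[mode]

print(enabled_rule_groups("aggressive"))   # ['auth', 'normal', 'ddos']
```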
config/filter.d/postfix-rbl.conf (file removed)
@@ -1,19 +0,0 @@
-# Fail2Ban filter for Postfix's RBL based Blocked hosts
-#
-#
-
-[INCLUDES]
-
-# Read common prefixes. If any customizations available -- read them from
-# common.local
-before = common.conf
-
-[Definition]
-
-_daemon = postfix(-\w+)?/smtpd
-
-failregex = ^%(__prefix_line)sNOQUEUE: reject: RCPT from \S+\[<HOST>\]: [45]54 [45]\.7\.1 Service unavailable; Client host \[\S+\] blocked\b
-
-ignoreregex =
-
-# Author: Lee Clemens
config/filter.d/postfix-sasl.conf (file removed)
@@ -1,21 +0,0 @@
-# Fail2Ban filter for postfix authentication failures
-#
-
-[INCLUDES]
-
-before = common.conf
-
-[Definition]
-
-_daemon = postfix(-\w+)?/(?:submission/|smtps/)?smtp[ds]
-
-failregex = ^%(__prefix_line)swarning: [-._\w]+\[<HOST>\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed(:[ A-Za-z0-9+/:]*={0,2})?\s*$
-
-ignoreregex = authentication failed: Connection lost to authentication server$
-
-[Init]
-
-journalmatch = _SYSTEMD_UNIT=postfix.service
-
-
-# Author: Yaroslav Halchenko
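Both removed filters are absorbed by the reworked `postfix` filter (`postfix[mode=rbl]` and `postfix[mode=auth]`). For orientation, a small Python check that a simplified form of the removed SASL failregex still catches one of the sample lines carried over into the postfix test log later in this diff. The prefix and `<HOST>` stand-ins are assumptions (fail2ban expands `%(__prefix_line)s` and `<HOST>` itself), and the inline `(?i)` flag is rewritten as a scoped `(?i:...)` group for modern Python:

```python
import re

# Assumed stand-ins: a loose syslog prefix for %(__prefix_line)s and a named
# group for <HOST>; the rest mirrors the failregex removed above.
PREFIX = r"\w{3} +\d+ \d{2}:\d{2}:\d{2} \S+ postfix(?:-\w+)?/(?:submission/|smtps/)?smtp[ds]\[\d+\]: "
FAILREGEX = re.compile(
    PREFIX +
    r"warning: [-._\w]+\[(?P<host>[\w.:-]+)\]: "
    r"SASL (?i:LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed"
    r"(:[ A-Za-z0-9+/:]*={0,2})?\s*$"
)

line = ("Dec 2 22:24:22 hel postfix/smtpd[7676]: warning: "
        "114-44-142-233.dynamic.hinet.net[114.44.142.233]: "
        "SASL CRAM-MD5 authentication failed: PDc3OTEwNTkyNTEyMzA2NDIuMTIyODI1MzA2MUBoZWw+")

m = FAILREGEX.search(line)
print(m.group("host"))   # 114.44.142.233
```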
config/filter.d/postfix.conf
@@ -10,17 +10,57 @@ before = common.conf

 [Definition]

-_daemon = postfix(-\w+)?/(?:submission/|smtps/)?smtp[ds]
+_daemon = postfix(-\w+)?/\w+(?:/smtp[ds])?
+_port = (?::\d+)?

-prefregex = ^%(__prefix_line)s(?:NOQUEUE: reject:|improper command pipelining) <F-CONTENT>.+</F-CONTENT>$
+prefregex = ^%(__prefix_line)s<mdpr-<mode>> <F-CONTENT>.+</F-CONTENT>$

-failregex = ^RCPT from \S+\[<HOST>\]: 554 5\.7\.1
-            ^RCPT from \S+\[<HOST>\]: 450 4\.7\.1 Client host rejected: cannot find your (reverse )?hostname\b
-            ^RCPT from \S+\[<HOST>\]: 450 4\.7\.1 (<[^>]*>)?: Helo command rejected: Host not found\b
-            ^EHLO from \S+\[<HOST>\]: 504 5\.5\.2 (<[^>]*>)?: Helo command rejected: need fully-qualified hostname\b
-            ^VRFY from \S+\[<HOST>\]: 550 5\.1\.1
-            ^RCPT from \S+\[<HOST>\]: 450 4\.1\.8 (<[^>]*>)?: Sender address rejected: Domain not found\b
-            ^after \S+ from [^[]*\[<HOST>\]:?
+mdpr-normal = (?:NOQUEUE: reject:|improper command pipelining after \S+)
+mdre-normal=^RCPT from [^[]*\[<HOST>\]<_port>: 55[04] 5\.7\.1\s
+            ^RCPT from [^[]*\[<HOST>\]<_port>: 45[04] 4\.7\.1 (?:Service unavailable\b|Client host rejected: cannot find your (reverse )?hostname\b)
+            ^RCPT from [^[]*\[<HOST>\]<_port>: 450 4\.7\.1 (<[^>]*>)?: Helo command rejected: Host not found\b
+            ^EHLO from [^[]*\[<HOST>\]<_port>: 504 5\.5\.2 (<[^>]*>)?: Helo command rejected: need fully-qualified hostname\b
+            ^VRFY from [^[]*\[<HOST>\]<_port>: 550 5\.1\.1\s
+            ^RCPT from [^[]*\[<HOST>\]<_port>: 450 4\.1\.8 (<[^>]*>)?: Sender address rejected: Domain not found\b
+            ^from [^[]*\[<HOST>\]:?
+
+mdpr-auth = warning:
+mdre-auth = ^[^[]*\[<HOST>\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed:(?! Connection lost to authentication server| Invalid authentication mechanism)
+mdre-auth2= ^[^[]*\[<HOST>\]: SASL ((?i)LOGIN|PLAIN|(?:CRAM|DIGEST)-MD5) authentication failed:(?! Connection lost to authentication server)
+# todo: check/remove "Invalid authentication mechanism" from ignore list, if gh-1243 will get finished (see gh-1297).
+
+# Mode "rbl" currently included in mode "normal", but if needed for jail "postfix-rbl" only:
+mdpr-rbl = %(mdpr-normal)s
+mdre-rbl = ^RCPT from [^[]*\[<HOST>\]: [45]54 [45]\.7\.1 Service unavailable; Client host \[\S+\] blocked\b
+
+# Mode "rbl" currently included in mode "normal" (within 1st rule)
+mdpr-more = %(mdpr-normal)s
+mdre-more = %(mdre-normal)s
+
+mdpr-ddos = lost connection after(?! DATA) [A-Z]+
+mdre-ddos = ^from [^[]*\[<HOST>\]:?
+
+mdpr-extra = (?:%(mdpr-auth)s|%(mdpr-normal)s)
+mdre-extra = %(mdre-auth)s
+             %(mdre-normal)s
+
+mdpr-aggressive = (?:%(mdpr-auth)s|%(mdpr-normal)s|%(mdpr-ddos)s)
+mdre-aggressive = %(mdre-auth2)s
+                  %(mdre-normal)s
+
+failregex = <mdre-<mode>>
+
+# Parameter "mode": more (default, combines normal and rbl), auth, normal, rbl, ddos, extra or aggressive (combines all)
+# Usage example (for jail.local):
+# [postfix]
+#   mode = aggressive
+# # or another jail (rewrite filter parameters of jail):
+# [postfix-rbl]
+#   filter = postfix[mode=rbl]
+#
+mode = more

 ignoreregex =
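The rework makes matching two-stage: `prefregex` strips `%(__prefix_line)s` plus the per-mode marker `<mdpr-<mode>>` and captures the remainder as `<F-CONTENT>`; only that captured content is then run against the mode's `mdre-*` rules (selected via `failregex = <mdre-<mode>>`). A minimal Python sketch of the idea, with hand-expanded, simplified stand-ins for `%(__prefix_line)s`, `<HOST>` and `<_port>` (the real expansion happens inside fail2ban's config reader):

```python
import re

# Stage 1: prefregex analogue for mode "normal" -- strip the syslog prefix and
# the mode marker (mdpr-normal), keep the rest as "content" (simplified, assumed).
PREFIX = r"\w{3} +\d+ \d{2}:\d{2}:\d{2} \S+ postfix(?:-\w+)?/\w+(?:/smtp[ds])?\[\d+\]: "
MDPR_NORMAL = r"(?:NOQUEUE: reject:|improper command pipelining after \S+)"
PREFREGEX = re.compile("^" + PREFIX + MDPR_NORMAL + " (?P<content>.+)$")

# Stage 2: one of the mdre-normal rules (simplified; <HOST> and <_port> expanded by hand).
MDRE_NORMAL_1 = re.compile(r"^RCPT from [^[]*\[(?P<host>[\w.:-]+)\](?::\d+)?: 55[04] 5\.7\.1\s")

line = ("Feb 7 12:25:45 xxx12345 postfix/smtpd[13275]: NOQUEUE: reject: "
        "RCPT from unknown[87.236.233.182]: 554 5.7.1 Service unavailable; "
        "Client host [87.236.233.182] blocked using rbl.example.com;")

pre = PREFREGEX.search(line)
if pre:                                   # prefilter matched -> run failregex on content only
    m = MDRE_NORMAL_1.search(pre.group("content"))
    if m:
        print("banned host:", m.group("host"))   # 87.236.233.182
```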
config/jail.conf
@@ -533,14 +533,17 @@ backend = %(syslog_backend)s


 [postfix]
+# To use another modes set filter parameter "mode" in jail.local:
+mode = more
+filter = postfix[mode=%(mode)s]
 port = smtp,465,submission
 logpath = %(postfix_log)s
 backend = %(postfix_backend)s


 [postfix-rbl]

+filter = postfix[mode=rbl]
 port = smtp,465,submission
 logpath = %(postfix_log)s
 backend = %(postfix_backend)s
@@ -626,6 +629,7 @@ backend = %(syslog_backend)s


 [postfix-sasl]

+filter = postfix[mode=auth]
 port = smtp,465,submission,imap3,imaps,pop3,pop3s
 # You might consider monitoring /var/log/mail.warn instead if you are
 # running postfix since it would provide the same log lines at the
fail2ban/server/filter.py
@@ -668,16 +668,19 @@ class Filter(JailThread):
                 self.__lineBuffer + [tupleLine[:3]])[-self.__lineBufferSize:]
         else:
             orgBuffer = self.__lineBuffer = [tupleLine[:3]]
-        logSys.log(5, "Looking for failregex match of %r", self.__lineBuffer)
+        logSys.log(5, "Looking for match of %r", self.__lineBuffer)

         # Pre-filter fail regex (if available):
         preGroups = {}
         if self.__prefRegex:
+            if logSys.getEffectiveLevel() <= logging.HEAVYDEBUG: # pragma: no cover
+                logSys.log(5, " Looking for prefregex %r", self.__prefRegex.getRegex())
             self.__prefRegex.search(self.__lineBuffer)
             if not self.__prefRegex.hasMatched():
+                logSys.log(5, " Prefregex not matched")
                 return failList
             preGroups = self.__prefRegex.getGroups()
-            logSys.log(7, "Pre-filter matched %s", preGroups)
+            logSys.log(7, " Pre-filter matched %s", preGroups)
             repl = preGroups.get('content')
             # Content replacement:
             if repl:
@@ -686,17 +689,19 @@ class Filter(JailThread):

         # Iterates over all the regular expressions.
         for failRegexIndex, failRegex in enumerate(self.__failRegex):
+            if logSys.getEffectiveLevel() <= logging.HEAVYDEBUG: # pragma: no cover
+                logSys.log(5, " Looking for failregex %r", failRegex.getRegex())
             failRegex.search(self.__lineBuffer, orgBuffer)
             if not failRegex.hasMatched():
                 continue
             # The failregex matched.
-            logSys.log(7, "Matched %s", failRegex)
+            logSys.log(7, " Matched %s", failRegex)
             # Checks if we must ignore this match.
             if self.ignoreLine(failRegex.getMatchedTupleLines()) \
                     is not None:
                 # The ignoreregex matched. Remove ignored match.
                 self.__lineBuffer = failRegex.getUnmatchedTupleLines()
-                logSys.log(7, "Matched ignoreregex and was ignored")
+                logSys.log(7, " Matched ignoreregex and was ignored")
                 if not self.checkAllRegex:
                     break
             else:
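The new trace lines are wrapped in an explicit level check so the `getRegex()` argument is not even built unless heavy debugging is active (`logging.HEAVYDEBUG` is fail2ban's own extra level below DEBUG). A standalone sketch of that guard pattern with plain `logging`; the level value 5 and the logger name are assumptions:

```python
import logging
import re

HEAVYDEBUG = 5  # assumption: fail2ban registers this custom level below DEBUG (10)
logging.addLevelName(HEAVYDEBUG, "HEAVYDEBUG")
logSys = logging.getLogger("fail2ban.filter")

def first_match(regexes, line):
    """Return the first compiled regex that matches the line, tracing each attempt."""
    for rx in regexes:
        # The explicit level check skips evaluating the log arguments entirely when
        # heavy debugging is off (in filter.py this avoids a getRegex() call per regex per line):
        if logSys.getEffectiveLevel() <= HEAVYDEBUG:
            logSys.log(HEAVYDEBUG, " Looking for failregex %r", rx.pattern)
        if rx.search(line):
            logSys.log(7, " Matched %r", rx.pattern)
            return rx
    return None

first_match([re.compile(r"authentication failed")],
            "warning: unknown[192.0.2.1]: SASL LOGIN authentication failed:")
```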
fail2ban/tests/files/logs/postfix
@@ -1,3 +1,5 @@
+# filterOptions: [{}, {"mode": "normal"}, {"mode": "aggressive"}]
+
 # per https://github.com/fail2ban/fail2ban/issues/125
 # and https://github.com/fail2ban/fail2ban/issues/126
 # failJSON: { "time": "2005-02-21T09:21:54", "match": true , "host": "192.0.43.10" }
@@ -44,3 +46,93 @@ Jun 12 08:58:35 xxx postfix/smtpd[2931]: NOQUEUE: reject: RCPT from unknown[1.2.
 # failJSON: { "time": "2005-06-12T08:58:35", "match": true , "host": "1.2.3.4" }
 Jun 12 08:58:35 xxx postfix/smtpd[13533]: improper command pipelining after AUTH from unknown[1.2.3.4]: QUIT
+
+# ---------------------------------------
+# Test-cases of postfix-postscreen:
+# ---------------------------------------
+
+# failJSON: { "time": "2005-05-05T15:51:11", "match": true , "host": "216.245.194.173", "desc": "postfix postscreen / gh-1764" }
+May 5 15:51:11 xxx postfix/postscreen[1148]: NOQUEUE: reject: RCPT from [216.245.194.173]:60591: 550 5.7.1 Service unavailable; client [216.245.194.173] blocked using rbl.example.com; from=<spammer@example.com>, to=<goodguy@example.com>, proto=ESMTP, helo=<badguy.example.com>
+
+# ---------------------------------------
+# Test-cases of postfix-rbl:
+# ---------------------------------------
+# filterOptions: [{}, {"mode": "rbl"}, {"mode": "aggressive"}]
+
+# failJSON: { "time": "2004-12-30T18:19:15", "match": true , "host": "93.184.216.34" }
+Dec 30 18:19:15 xxx postfix/smtpd[1574]: NOQUEUE: reject: RCPT from badguy.example.com[93.184.216.34]: 454 4.7.1 Service unavailable; Client host [93.184.216.34] blocked using rbl.example.com; http://www.example.com/query?ip=93.184.216.34; from=<spammer@example.com> to=<goodguy@example.com> proto=ESMTP helo=<badguy.example.com>
+
+# failJSON: { "time": "2004-12-30T18:19:15", "match": true , "host": "93.184.216.34" }
+Dec 30 18:19:15 xxx postfix-incoming/smtpd[1574]: NOQUEUE: reject: RCPT from badguy.example.com[93.184.216.34]: 454 4.7.1 Service unavailable; Client host [93.184.216.34] blocked using rbl.example.com; http://www.example.com/query?ip=93.184.216.34; from=<spammer@example.com> to=<goodguy@example.com> proto=ESMTP helo=<badguy.example.com>
+
+# failJSON: { "time": "2005-02-07T12:25:45", "match": true , "host": "87.236.233.182" }
+Feb 7 12:25:45 xxx12345 postfix/smtpd[13275]: NOQUEUE: reject: RCPT from unknown[87.236.233.182]: 554 5.7.1 Service unavailable; Client host [87.236.233.182] blocked using rbl.example.com; https://www.example.com/query/ip/87.236.233.182; from=<spammer@example.com> to=<goodguy@example.com> proto=SMTP helo=<WIN-5N8GBBS0R5I>
+
+# ---------------------------------------
+# Test-cases of postfix-sasl:
+# ---------------------------------------
+# filterOptions: [{"mode": "auth"}, {"mode": "aggressive"}]
+
+#1 Example from postfix from dbts #507990
+# failJSON: { "time": "2004-12-02T22:24:22", "match": true , "host": "114.44.142.233" }
+Dec 2 22:24:22 hel postfix/smtpd[7676]: warning: 114-44-142-233.dynamic.hinet.net[114.44.142.233]: SASL CRAM-MD5 authentication failed: PDc3OTEwNTkyNTEyMzA2NDIuMTIyODI1MzA2MUBoZWw+
+#2 Example from postfix from dbts #573314
+# failJSON: { "time": "2005-03-10T13:33:30", "match": true , "host": "1.1.1.1" }
+Mar 10 13:33:30 gandalf postfix/smtpd[3937]: warning: HOSTNAME[1.1.1.1]: SASL LOGIN authentication failed: authentication failure
+
+#3 Example from postfix post-debian changes to rename to add "submission" to syslog name
+# failJSON: { "time": "2004-09-06T00:44:56", "match": true , "host": "82.221.106.233" }
+Sep 6 00:44:56 trianon postfix/submission/smtpd[11538]: warning: unknown[82.221.106.233]: SASL LOGIN authentication failed: UGFzc3dvcmQ6
+
+#4 Example from postfix post-debian changes to rename to add "submission" to syslog name + downcase
+# failJSON: { "time": "2004-09-06T00:44:57", "match": true , "host": "82.221.106.233" }
+Sep 6 00:44:57 trianon postfix/submission/smtpd[11538]: warning: unknown[82.221.106.233]: SASL login authentication failed: UGFzc3dvcmQ6
+
+#5 Example to add :
+# failJSON: { "time": "2005-01-29T08:11:45", "match": true , "host": "1.1.1.1" }
+Jan 29 08:11:45 mail postfix/smtpd[10752]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Password:
+
+# failJSON: { "time": "2005-01-29T08:11:45", "match": true , "host": "1.1.1.1" }
+Jan 29 08:11:45 mail postfix-incoming/smtpd[10752]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Password:
+
+# failJSON: { "time": "2005-04-12T02:24:11", "match": true , "host": "62.138.2.143" }
+Apr 12 02:24:11 xxx postfix/smtps/smtpd[42]: warning: astra4139.startdedicated.de[62.138.2.143]: SASL LOGIN authentication failed: UGFzc3dvcmQ6
+
+# failJSON: { "time": "2005-08-03T15:30:49", "match": true , "host": "98.191.84.74" }
+Aug 3 15:30:49 ksusha postfix/smtpd[17041]: warning: mail.foldsandwalker.com[98.191.84.74]: SASL Plain authentication failed:
+
+# failJSON: { "time": "2004-11-04T09:11:01", "match": true , "host": "192.0.2.150", "desc": "without reason for fail, see gh-1245" }
+Nov 4 09:11:01 mail postfix/submission/smtpd[27133]: warning: unknown[192.0.2.150]: SASL PLAIN authentication failed:
+
+#6 Example to ignore because due to a failed attempt to connect to authentication service - no malicious activities whatsoever
+# failJSON: { "match": false }
+Feb 3 08:29:28 mail postfix/smtpd[21022]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Connection lost to authentication server
+
+# filterOptions: [{"mode": "auth"}]
+
+# failJSON: { "match": false, "desc": "not aggressive" }
+Jan 14 16:18:16 xxx postfix/smtpd[14933]: warning: host[192.0.2.5]: SASL CRAM-MD5 authentication failed: Invalid authentication mechanism
+
+# filterOptions: [{"mode": "aggressive"}]
+
+# failJSON: { "time": "2005-01-14T16:18:16", "match": true , "host": "192.0.2.5", "desc": "aggressive only" }
+Jan 14 16:18:16 xxx postfix/smtpd[14933]: warning: host[192.0.2.5]: SASL CRAM-MD5 authentication failed: Invalid authentication mechanism
+
+# ---------------------------------------
+# Test-cases of postfix DDOS mode:
+# ---------------------------------------
+
+# filterOptions: [{"mode": "ddos"}, {"mode": "aggressive"}]
+
+# failJSON: { "time": "2005-02-18T09:45:10", "match": true , "host": "192.0.2.10" }
+Feb 18 09:45:10 xxx postfix/smtpd[42]: lost connection after CONNECT from spammer.example.com[192.0.2.10]
+# failJSON: { "time": "2005-02-18T09:45:12", "match": true , "host": "192.0.2.42" }
+Feb 18 09:45:12 xxx postfix/smtpd[42]: lost connection after STARTTLS from spammer.example.com[192.0.2.42]
+# failJSON: { "time": "2005-02-18T09:48:04", "match": true , "host": "192.0.2.23" }
+Feb 18 09:48:04 xxx postfix/smtpd[23]: lost connection after AUTH from unknown[192.0.2.23]
+# failJSON: { "time": "2005-02-18T09:48:04", "match": true , "host": "192.0.2.23" }
+Feb 18 09:48:04 xxx postfix/smtpd[23]: lost connection after AUTH from unknown[192.0.2.23]
+
+# filterOptions: [{}, {"mode": "ddos"}, {"mode": "aggressive"}]
+# failJSON: { "match": false, "desc": "don't affect lawful data (sporadical connection aborts within DATA-phase, see gh-1813 for discussion)" }
+Feb 18 09:50:05 xxx postfix/smtpd[42]: lost connection after DATA from good-host.example.com[192.0.2.10]
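The sample-log files drive the tests through structured comments: a `# filterOptions: [...]` line lists the option sets (one filter instance per set) against which the following lines must be evaluated, and each `# failJSON: {...}` line states the expected result for the log line after it. A rough sketch of how such a file could be walked; the real parsing lives in fail2ban/tests/samplestestcase.py, and the regex here is an assumption, simplified:

```python
import json
import re

# Assumed, simplified form of the test-file header comments:
HEADER_RE = re.compile(r"^#\s*(failJSON|filterOptions):\s*(.+)$")

def walk_samples(lines):
    """Yield (options_list, expectation, log_line) triples from a sample file."""
    opts_list = [{}]          # default: filter read without extra options
    expect = None
    for line in lines:
        m = HEADER_RE.match(line)
        if m:
            data = json.loads(m.group(2))
            if m.group(1) == "filterOptions":
                # a list means: test the following lines against several option sets
                opts_list = data if isinstance(data, list) else [data]
            else:
                expect = data
            continue
        if expect is not None:
            yield opts_list, expect, line
            expect = None

sample = [
    '# filterOptions: [{"mode": "auth"}, {"mode": "aggressive"}]',
    '# failJSON: { "time": "2005-01-29T08:11:45", "match": true , "host": "1.1.1.1" }',
    'Jan 29 08:11:45 mail postfix/smtpd[10752]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Password:',
]
for opts, exp, line in walk_samples(sample):
    print(opts, exp["match"], exp.get("host"))
```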
fail2ban/tests/files/logs/postfix-rbl (file removed)
@@ -1,8 +0,0 @@
-# failJSON: { "time": "2004-12-30T18:19:15", "match": true , "host": "93.184.216.34" }
-Dec 30 18:19:15 xxx postfix/smtpd[1574]: NOQUEUE: reject: RCPT from badguy.example.com[93.184.216.34]: 454 4.7.1 Service unavailable; Client host [93.184.216.34] blocked using rbl.example.com; http://www.example.com/query?ip=93.184.216.34; from=<spammer@example.com> to=<goodguy@example.com> proto=ESMTP helo=<badguy.example.com>
-
-# failJSON: { "time": "2004-12-30T18:19:15", "match": true , "host": "93.184.216.34" }
-Dec 30 18:19:15 xxx postfix-incoming/smtpd[1574]: NOQUEUE: reject: RCPT from badguy.example.com[93.184.216.34]: 454 4.7.1 Service unavailable; Client host [93.184.216.34] blocked using rbl.example.com; http://www.example.com/query?ip=93.184.216.34; from=<spammer@example.com> to=<goodguy@example.com> proto=ESMTP helo=<badguy.example.com>
-
-# failJSON: { "time": "2005-02-07T12:25:45", "match": true , "host": "87.236.233.182" }
-Feb 7 12:25:45 xxx12345 postfix/smtpd[13275]: NOQUEUE: reject: RCPT from unknown[87.236.233.182]: 554 5.7.1 Service unavailable; Client host [87.236.233.182] blocked using rbl.example.com; https://www.example.com/query/ip/87.236.233.182; from=<spammer@example.com> to=<goodguy@example.com> proto=SMTP helo=<WIN-5N8GBBS0R5I>
fail2ban/tests/files/logs/postfix-sasl (file removed)
@@ -1,32 +0,0 @@
-#1 Example from postfix from dbts #507990
-# failJSON: { "time": "2004-12-02T22:24:22", "match": true , "host": "114.44.142.233" }
-Dec 2 22:24:22 hel postfix/smtpd[7676]: warning: 114-44-142-233.dynamic.hinet.net[114.44.142.233]: SASL CRAM-MD5 authentication failed: PDc3OTEwNTkyNTEyMzA2NDIuMTIyODI1MzA2MUBoZWw+
-#2 Example from postfix from dbts #573314
-# failJSON: { "time": "2005-03-10T13:33:30", "match": true , "host": "1.1.1.1" }
-Mar 10 13:33:30 gandalf postfix/smtpd[3937]: warning: HOSTNAME[1.1.1.1]: SASL LOGIN authentication failed: authentication failure
-
-#3 Example from postfix post-debian changes to rename to add "submission" to syslog name
-# failJSON: { "time": "2004-09-06T00:44:56", "match": true , "host": "82.221.106.233" }
-Sep 6 00:44:56 trianon postfix/submission/smtpd[11538]: warning: unknown[82.221.106.233]: SASL LOGIN authentication failed: UGFzc3dvcmQ6
-
-#4 Example from postfix post-debian changes to rename to add "submission" to syslog name + downcase
-# failJSON: { "time": "2004-09-06T00:44:57", "match": true , "host": "82.221.106.233" }
-Sep 6 00:44:57 trianon postfix/submission/smtpd[11538]: warning: unknown[82.221.106.233]: SASL login authentication failed: UGFzc3dvcmQ6
-
-#5 Example to add :
-# failJSON: { "time": "2005-01-29T08:11:45", "match": true , "host": "1.1.1.1" }
-Jan 29 08:11:45 mail postfix/smtpd[10752]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Password:
-
-#6 Example to ignore because due to a failed attempt to connect to authentication service - no malicious activities whatsoever
-# failJSON: { "time": "2005-02-03T08:29:28", "match": false , "host": "1.1.1.1" }
-Feb 3 08:29:28 mail postfix/smtpd[21022]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Connection lost to authentication server
-
-# failJSON: { "time": "2005-01-29T08:11:45", "match": true , "host": "1.1.1.1" }
-Jan 29 08:11:45 mail postfix-incoming/smtpd[10752]: warning: unknown[1.1.1.1]: SASL LOGIN authentication failed: Password:
-
-# failJSON: { "time": "2005-04-12T02:24:11", "match": true , "host": "62.138.2.143" }
-Apr 12 02:24:11 xxx postfix/smtps/smtpd[42]: warning: astra4139.startdedicated.de[62.138.2.143]: SASL LOGIN authentication failed: UGFzc3dvcmQ6
-
-# failJSON: { "time": "2005-08-03T15:30:49", "match": true , "host": "98.191.84.74" }
-Aug 3 15:30:49 ksusha postfix/smtpd[17041]: warning: mail.foldsandwalker.com[98.191.84.74]: SASL Plain authentication failed:
fail2ban/tests/files/logs/sshd
@@ -1,3 +1,5 @@
+# filterOptions: [{}, {"mode": "aggressive"}]
+
 #1
 # failJSON: { "time": "2005-06-21T16:47:48", "match": true , "host": "192.030.0.6" }
 Jun 21 16:47:48 digital-mlhhyiqscv sshd[13709]: error: PAM: Authentication failure for myhlj1374 from 192.030.0.6
@@ -189,7 +191,7 @@ Apr 27 13:02:04 host sshd[29116]: Received disconnect from 1.2.3.4: 11: Normal S
 # failJSON: { "time": "2015-04-16T20:02:50", "match": true , "host": "222.186.21.217", "desc": "Authentication for user failed" }
 2015-04-16T18:02:50.321974+00:00 host sshd[2716]: pam_unix(sshd:auth): authentication failure; logname= uid=0 euid=0 tty=ssh ruser= rhost=222.186.21.217 user=root

-# filterOptions: {"mode": "ddos"}
+# filterOptions: [{"mode": "ddos"}, {"mode": "aggressive"}]

 # http://forums.powervps.com/showthread.php?t=1667
 # failJSON: { "time": "2005-06-07T01:10:56", "match": true , "host": "69.61.56.114" }
@@ -218,7 +220,7 @@ Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection res
 Mar 15 09:20:57 host sshd[28972]: Connection reset by 192.0.2.39 port 14282 [preauth]


-# filterOptions: {"mode": "extra"}
+# filterOptions: [{"mode": "extra"}, {"mode": "aggressive"}]

 # several other cases from gh-864:
 # failJSON: { "time": "2004-11-25T01:34:12", "match": true , "host": "127.0.0.1", "desc": "No supported authentication methods" }
fail2ban/tests/samplestestcase.py
@@ -49,7 +49,8 @@ class FilterSamplesRegex(unittest.TestCase):
     def setUp(self):
         """Call before every test case."""
         super(FilterSamplesRegex, self).setUp()
-        self.filter = None
+        self._filters = dict()
+        self._filterTests = None
         setUpMyTime()

     def tearDown(self):
@@ -79,14 +80,20 @@ class FilterSamplesRegex(unittest.TestCase):
         RE_WRONG_GREED.search('non-greedy .+? test' + RE_HOST + ' test vary catch-all .* anchored$'))


-    def _readFilter(self, name, basedir, opts=None):
-        self.filter = Filter(None)
-        self.filter.returnRawHost = True
-        self.filter.checkAllRegex = True
-        self.filter.checkFindTime = False
-        self.filter.active = True
+    def _readFilter(self, fltName, name, basedir, opts=None):
+        # Check filter with this option combination was already used:
+        flt = self._filters.get(fltName)
+        if flt:
+            return flt
+        # First time:
+        flt = Filter(None)
+        flt.returnRawHost = True
+        flt.checkAllRegex = True
+        flt.checkFindTime = False
+        flt.active = True
+        # Read filter:
         if opts is None: opts = dict()
-        # Check filter exists
+        opts = opts.copy()
         filterConf = FilterReader(name, "jail", opts,
             basedir=basedir, share_config=unittest.F2B.share_config)
         self.assertEqual(filterConf.getFile(), name)
@@ -103,25 +110,28 @@ class FilterSamplesRegex(unittest.TestCase):
                 self.fail('Unexpected config-token %r in stream' % (opt,))
             for optval in optval:
                 if opt[2] == "prefregex":
-                    self.filter.prefRegex = optval
+                    flt.prefRegex = optval
                 elif opt[2] == "addfailregex":
-                    self.filter.addFailRegex(optval)
+                    flt.addFailRegex(optval)
                 elif opt[2] == "addignoreregex":
-                    self.filter.addIgnoreRegex(optval)
+                    flt.addIgnoreRegex(optval)
                 elif opt[2] == "maxlines":
-                    self.filter.setMaxLines(optval)
+                    flt.setMaxLines(optval)
                 elif opt[2] == "datepattern":
-                    self.filter.setDatePattern(optval)
+                    flt.setDatePattern(optval)

         # test regexp contains greedy catch-all before <HOST>, that is
         # not hard-anchored at end or has not precise sub expression after <HOST>:
-        regexList = self.filter.getFailRegex()
+        regexList = flt.getFailRegex()
         for fr in regexList:
             if RE_WRONG_GREED.search(fr): # pragma: no cover
                 raise AssertionError("Following regexp of \"%s\" contains greedy catch-all before <HOST>, "
                     "that is not hard-anchored at end or has not precise sub expression after <HOST>:\n%s" %
-                    (name, str(fr).replace(RE_HOST, '<HOST>')))
-        return regexList
+                    (fltName, str(fr).replace(RE_HOST, '<HOST>')))
+        # Cache within used filter combinations and return:
+        flt = [flt, set()]
+        self._filters[fltName] = flt
+        return flt

 def testSampleRegexsFactory(name, basedir):
     def testFilter(self):
@@ -129,18 +139,11 @@ def testSampleRegexsFactory(name, basedir):
         self.assertTrue(
             os.path.isfile(os.path.join(TEST_FILES_DIR, "logs", name)),
             "No sample log file available for '%s' filter" % name)

-        regexList = None
-        regexsUsedIdx = set()
-        regexsUsedRe = set()
         filenames = [name]
+        regexsUsedRe = set()

-        def _testMissingSamples():
-            for failRegexIndex, failRegex in enumerate(regexList):
-                self.assertTrue(
-                    failRegexIndex in regexsUsedIdx or failRegex in regexsUsedRe,
-                    "Regex for filter '%s' has no samples: %i: %r" %
-                    (name, failRegexIndex, failRegex))
+        # process each test-file (note: array filenames can grow during processing):
         i = 0
         while i < len(filenames):
             filename = filenames[i]; i += 1;
@@ -154,13 +157,17 @@ def testSampleRegexsFactory(name, basedir):
                 faildata = json.loads(jsonREMatch.group(2))
                 # filterOptions - dict in JSON to control filter options (e. g. mode, etc.):
                 if jsonREMatch.group(1) == 'filterOptions':
-                    # another filter mode - we should check previous also:
-                    if self.filter is not None:
-                        _testMissingSamples()
-                        regexsUsedIdx = set() # clear used indices (possible overlapping by mode change)
-                    # read filter with another setting:
-                    self.filter = None
-                    regexList = self._readFilter(name, basedir, opts=faildata)
+                    # following lines with another filter options:
+                    self._filterTests = []
+                    for opts in (faildata if isinstance(faildata, list) else [faildata]):
+                        # unique filter name (using options combination):
+                        self.assertTrue(isinstance(opts, dict))
+                        fltName = opts.get('filterName')
+                        if not fltName: fltName = str(opts) if opts else ''
+                        fltName = name + fltName
+                        # read it:
+                        flt = self._readFilter(fltName, name, basedir, opts=opts)
+                        self._filterTests.append((fltName, flt))
                     continue
                 # addFILE - filename to "include" test-files should be additionally parsed:
                 if jsonREMatch.group(1) == 'addFILE':
@@ -176,65 +183,81 @@ def testSampleRegexsFactory(name, basedir):
                 else: # pragma: no cover - normally unreachable
                     faildata = {}

-                if self.filter is None:
-                    regexList = self._readFilter(name, basedir, opts=None)
+                # if filter options was not yet specified:
+                if not self._filterTests:
+                    fltName = name
+                    flt = self._readFilter(fltName, name, basedir, opts=None)
+                    self._filterTests = [(fltName, flt)]

-                try:
-                    ret = self.filter.processLine(line)
-                    if not ret:
-                        # Bypass if filter constraint specified:
-                        if faildata.get('filter') and name != faildata.get('filter'):
-                            continue
-                        # Check line is flagged as none match
-                        self.assertFalse(faildata.get('match', True),
-                            "Line not matched when should have")
-                        continue
+                # process line using several filter options (if specified in the test-file):
+                for fltName, flt in self._filterTests:
+                    flt, regexsUsedIdx = flt
+                    regexList = flt.getFailRegex()
+
+                    try:
+                        ret = flt.processLine(line)
+                        if not ret:
+                            # Bypass if filter constraint specified:
+                            if faildata.get('filter') and name != faildata.get('filter'):
+                                continue
+                            # Check line is flagged as none match
+                            self.assertFalse(faildata.get('match', True),
+                                "Line not matched when should have")
+                            continue

-                        # Check line is flagged as none match
-                        self.assertFalse(faildata.get('match', True),
-                            "Line not matched when should have")
-                        continue
-
                         failregex, fid, fail2banTime, fail = ret[0]
                         # Bypass no failure helpers-regexp:
                         if not faildata.get('match', False) and (fid is None or fail.get('nofail')):
+                            regexsUsedIdx.add(failregex)
+                            regexsUsedRe.add(regexList[failregex])
+                            continue
+
+                        # Check line is flagged to match
+                        self.assertTrue(faildata.get('match', False),
+                            "Line matched when shouldn't have")
+                        self.assertEqual(len(ret), 1,
+                            "Multiple regexs matched %r" % (map(lambda x: x[0], ret)))
+
+                        # Verify match captures (at least fid/host) and timestamp as expected
+                        for k, v in faildata.iteritems():
+                            if k not in ("time", "match", "desc", "filter"):
+                                fv = fail.get(k, None)
+                                # Fallback for backwards compatibility (previously no fid, was host only):
+                                if k == "host" and fv is None:
+                                    fv = fid
+                                self.assertEqual(fv, v)
+
+                        t = faildata.get("time", None)
+                        try:
+                            jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
+                        except ValueError:
+                            jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
+
+                        jsonTime = time.mktime(jsonTimeLocal.timetuple())
+
+                        jsonTime += jsonTimeLocal.microsecond / 1000000
+
+                        self.assertEqual(fail2banTime, jsonTime,
+                            "UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
+                            (fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
+                            jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
+                            fail2banTime - jsonTime) )
+
                         regexsUsedIdx.add(failregex)
                         regexsUsedRe.add(regexList[failregex])
-                        continue
-
-                    # Check line is flagged to match
-                    self.assertTrue(faildata.get('match', False),
-                        "Line matched when shouldn't have")
-                    self.assertEqual(len(ret), 1,
-                        "Multiple regexs matched %r" % (map(lambda x: x[0], ret)))
-
-                    # Verify match captures (at least fid/host) and timestamp as expected
-                    for k, v in faildata.iteritems():
-                        if k not in ("time", "match", "desc", "filter"):
-                            fv = fail.get(k, None)
-                            # Fallback for backwards compatibility (previously no fid, was host only):
-                            if k == "host" and fv is None:
-                                fv = fid
-                            self.assertEqual(fv, v)
-
-                    t = faildata.get("time", None)
-                    try:
-                        jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
-                    except ValueError:
-                        jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
-
-                    jsonTime = time.mktime(jsonTimeLocal.timetuple())
-
-                    jsonTime += jsonTimeLocal.microsecond / 1000000
-
-                    self.assertEqual(fail2banTime, jsonTime,
-                        "UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
-                        (fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
-                        jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
-                        fail2banTime - jsonTime) )
-
-                    regexsUsedIdx.add(failregex)
-                    regexsUsedRe.add(regexList[failregex])
-                except AssertionError as e: # pragma: no cover
-                    raise AssertionError("%s on: %s:%i, line:\n%s" % (
-                        e, logFile.filename(), logFile.filelineno(), line))
-
-        _testMissingSamples()
+                    except AssertionError as e: # pragma: no cover
+                        raise AssertionError("%s: %s on: %s:%i, line:\n%s" % (
+                            fltName, e, logFile.filename(), logFile.filelineno(), line))
+
+        # check missing samples for regex using each filter-options combination:
+        for fltName, flt in self._filters.iteritems():
+            flt, regexsUsedIdx = flt
+            regexList = flt.getFailRegex()
+            for failRegexIndex, failRegex in enumerate(regexList):
+                self.assertTrue(
+                    failRegexIndex in regexsUsedIdx or failRegex in regexsUsedRe,
+                    "%s: Regex has no samples: %i: %r" %
+                    (fltName, failRegexIndex, failRegex))

     return testFilter
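The test refactoring replaces the single `self.filter` with a `self._filters` cache keyed by a name derived from the filter options, so each option combination is read once and its used-regex indices are tracked per combination. A stripped-down sketch of that caching idea; the class and parameter names here are hypothetical, not the test class itself:

```python
class FilterCache:
    """Cache one [filter, used_regex_indices] pair per option combination."""

    def __init__(self, read_filter):
        self._read_filter = read_filter   # callable: (name, opts) -> filter object
        self._filters = {}

    def get(self, name, opts=None):
        # Derive a stable key from the option combination (same idea as
        # fltName = name + str(opts) in the patched test case):
        key = name + (str(sorted((opts or {}).items())) if opts else "")
        entry = self._filters.get(key)
        if entry is None:
            entry = [self._read_filter(name, opts or {}), set()]  # [filter, used indices]
            self._filters[key] = entry
        return key, entry

# usage sketch: a string stands in for the real filter object
cache = FilterCache(lambda name, opts: "filter:%s%r" % (name, opts))
key, (flt, used) = cache.get("postfix", {"mode": "rbl"})
used.add(0)                        # record that regex #0 got a sample
key2, entry2 = cache.get("postfix", {"mode": "rbl"})
assert entry2[1] == {0}            # same combination -> same cached entry
```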