Merge branch 'master' into master

pull/1690/head
Serg G. Brester 2017-02-17 09:06:09 +01:00 committed by GitHub
commit 2fa18a74c4
17 changed files with 295 additions and 85 deletions

@@ -25,12 +25,26 @@ releases.
* action.d/bsd-ipfw.conf
- Make the rule number at which the action starts looking for a free slot to insert
the new rule configurable (gh-1689)
* filter.d/sshd.conf
- new aggressive rules (gh-864):
- Connection reset by peer (multi-line rule during authorization process)
- No supported authentication methods available
- single-line and multi-line expressions optimized; added optional prefixes
and suffix (logged by several ssh versions), according to gh-1206;
- fixed the "received disconnect ... auth fail" expression (optional space after
the port part, gh-1652)
* filter.d/suhosin.conf
- greedy catch-all before `<HOST>` fixed (potential vulnerability)
* Filter tests extended with a check of every config regexp that contains a greedy catch-all
before `<HOST>`, ensuring it is hard-anchored at the end or has a precise sub-expression after `<HOST>`
### New Features
* New Actions:
- action.d/netscaler: Block IPs on a Citrix Netscaler ADC (gh-1663)
* New Filters:
- filter.d/domino-smtp: IBM Domino SMTP task (gh-1603)
### Enhancements

@@ -0,0 +1,33 @@
# Fail2ban Citrix Netscaler Action
# by Juliano Jeziorny
# juliano@jeziorny.eu
#
# The script adds offender IPs to a dataset on the Netscaler; the dataset can then be used to block
# the IPs at cs/vserver or global level, e.g. via responder policies on the Netscaler.
#
# The script assumes HTTPS access to the Netscaler with an untrusted (e.g. self-signed) certificate.
# If you have a valid certificate installed, remove the -k from the curl lines; if you want plain
# HTTP, change the URLs accordingly (and remove the -k).
#
# This action depends on curl
#
# You need to populate the 3 options inside Init
#
# ns_host: IP or hostname of the Netscaler appliance
# ns_auth: username:password, base64-encoded (echo -n "username:password" | base64), as required for the Basic Authorization header
# ns_dataset: Name of the netscaler dataset holding the IPs to be blocked.
#
# For further details on how to use it please check http://blog.ckzone.eu/2017/01/fail2ban-action-for-citrix-netscaler.html
[Init]
ns_host =
ns_auth =
ns_dataset =
[Definition]
actionstart = curl -kH 'Authorization: Basic <ns_auth>' https://<ns_host>/nitro/v1/config
actioncheck =
actionban = curl -k -H 'Authorization: Basic <ns_auth>' -X PUT -d '{"policydataset_value_binding":{"name":"<ns_dataset>","value":"<ip>"}}' https://<ns_host>/nitro/v1/config/
actionunban = curl -H 'Authorization: Basic <ns_auth>' -X DELETE -k "https://<ns_host>/nitro/v1/config/policydataset_value_binding/<ns_dataset>?args=value:<ip>"
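
For reference, the actionban call above can be reproduced outside of fail2ban. Below is a minimal Python sketch of the same Nitro API request (hypothetical host, credentials, dataset name and IP; urllib stands in for curl, certificate verification is disabled to mirror curl -k, and the Content-Type header is an addition the curl example leaves to the API defaults):

import base64
import json
import ssl
import urllib.request

# Hypothetical example values -- substitute your real <ns_host>, <ns_auth>, <ns_dataset> and the banned <ip>.
NS_HOST = "netscaler.example.com"
NS_AUTH = base64.b64encode(b"username:password").decode()   # same as: echo -n "username:password" | base64
NS_DATASET = "fail2ban_blocklist"
BAN_IP = "192.0.2.10"

# Mirror of curl -k: skip certificate verification (only sensible for a self-signed appliance cert).
ctx = ssl.create_default_context()
ctx.check_hostname = False
ctx.verify_mode = ssl.CERT_NONE

# Equivalent of the actionban line: PUT a policydataset_value_binding for the banned IP.
body = json.dumps({"policydataset_value_binding": {"name": NS_DATASET, "value": BAN_IP}}).encode()
req = urllib.request.Request(
    "https://%s/nitro/v1/config/" % NS_HOST, data=body, method="PUT",
    headers={"Authorization": "Basic " + NS_AUTH, "Content-Type": "application/json"})
with urllib.request.urlopen(req, context=ctx) as resp:
    print(resp.status)

The actionunban line is the same idea with an HTTP DELETE against /nitro/v1/config/policydataset_value_binding/<ns_dataset>?args=value:<ip>.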

@@ -0,0 +1,47 @@
# Fail2Ban configuration file for IBM Domino SMTP Server TASK to detect failed login attempts
#
# Author: Christian Brandlehner
#
# $Revision: 003 $
#
# Configuration:
# Set the following Domino Server parameters in notes.ini:
# console_log_enabled=1
# log_sessions=2
# You also have to use a date and time format supported by fail2ban. Recommended notes.ini configuration is:
# DateOrder=DMY
# DateSeparator=-
# ClockType=24_Hour
# TimeSeparator=:
#
# Depending on your locale you might have to tweak the date and time format so fail2ban can read the log
#[INCLUDES]
# Read common prefixes. If any customizations available -- read them from
# common.local
#before = common.conf
[Definition]
# Option: failregex
# Notes.: regex to match the password failure messages in the logfile. The
# host must be matched by a group named "host". The tag "<HOST>" can
# be used for standard IP/hostname matching and is only an alias for
# (?:::f{4,6}:)?(?P<host>\S+)
# Values: TEXT
#
# Sample log entries (different time formats are used, plus an extra sample with process info in front of the date)
# 01-23-2009 19:54:51 SMTP Server: Authentication failed for user postmaster ; connecting host 1.2.3.4
# [28325:00010-3735542592] 22-06-2014 09:56:12 smtp: postmaster [1.2.3.4] authentication failure using internet password
# 08-09-2014 06:14:27 smtp: postmaster [1.2.3.4] authentication failure using internet password
# 08-09-2014 06:14:27 SMTP Server: Authentication failed for user postmaster ; connecting host 1.2.3.4
__prefix = (?:\[[^\]]+\])?\s+
failregex = ^%(__prefix)sSMTP Server: Authentication failed for user .*? \; connecting host <HOST>$
^%(__prefix)ssmtp: (?:[^\[]+ )*\[<HOST>\] authentication failure using internet password\s*$
# Option: ignoreregex
# Notes.: regex to ignore. If this regex matches, the line is ignored.
# Values: TEXT
#
ignoreregex =
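
The two expressions above can be sanity-checked outside of fail2ban by expanding %(__prefix)s and the <HOST> alias by hand; a sketch, with the sample lines written as the filter effectively sees them (the timestamp already matched and cut out by fail2ban's date detection):

import re

# Manual expansion of the interpolations above: %(__prefix)s and the <HOST>
# alias for (?:::f{4,6}:)?(?P<host>\S+).
PREFIX = r'(?:\[[^\]]+\])?\s+'
HOST = r'(?:::f{4,6}:)?(?P<host>\S+)'

failregex = [
    r'^' + PREFIX + r'SMTP Server: Authentication failed for user .*? \; connecting host ' + HOST + r'$',
    r'^' + PREFIX + r'smtp: (?:[^\[]+ )*\[' + HOST + r'\] authentication failure using internet password\s*$',
]

# Sample lines with the date portion removed, based on the samples in the header.
samples = [
    ' SMTP Server: Authentication failed for user postmaster ; connecting host 1.2.3.4',
    '[28325:00010-3735542592] smtp: postmaster [1.2.3.4] authentication failure using internet password',
]

for line in samples:
    for rx in failregex:
        m = re.search(rx, line)
        if m:
            print(m.group('host'))   # both samples yield 1.2.3.4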

@@ -25,7 +25,7 @@ failregex = ^%(__prefix_line)s\w{14}: ruleset=check_rcpt, arg1=(?P<email><\S+@\S
^%(__prefix_line)sruleset=check_relay, arg1=(?P<dom>\S+), arg2=<HOST>, relay=((?P=dom) )?\[(\d+\.){3}\d+\]( \(may be forged\))?, reject=421 4\.3\.2 (Connection rate limit exceeded\.|Too many open connections\.)$
^%(__prefix_line)s\w{14}: rejecting commands from (\S* )?\[<HOST>\] due to pre-greeting traffic after \d+ seconds$
^%(__prefix_line)s\w{14}: (\S+ )?\[<HOST>\]: ((?i)expn|vrfy) \S+ \[rejected\]$
^(?P<__prefix>%(__prefix_line)s\w+: )<[^@]+@[^>]+>\.\.\. No such user here<SKIPLINES>(?P=__prefix)from=<[^@]+@[^>]+>, size=\d+, class=\d+, nrcpts=\d+, bodytype=\w+, proto=E?SMTP, daemon=MTA, relay=\S+ \[<HOST>\]$
^(?P<__prefix>%(__prefix_line)s\w+: )<[^@]+@[^>]+>\.\.\. No such user here$<SKIPLINES>^(?P=__prefix)from=<[^@]+@[^>]+>, size=\d+, class=\d+, nrcpts=\d+, bodytype=\w+, proto=E?SMTP, daemon=MTA, relay=\S+ \[<HOST>\]$
ignoreregex =
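
The change above anchors each physical line of the multi-line expression ($ before the skip part and ^ after it). A rough illustration of why that matters, with plain re.MULTILINE and <SKIPLINES> replaced by a lazy skip-lines stand-in (an approximation for illustration only, not fail2ban's actual internal expansion; the sample text is invented):

import re

# Two buffered lines, loosely modelled on sendmail-reject entries (invented text).
buf = ("sm-mta[123]: xyz: <u@example.com>... No such user here\n"
       "sm-mta[123]: xyz: from=<u@example.com>, relay=mx [192.0.2.1]\n")

# Stand-in for <SKIPLINES>: lazily skip whole intermediate lines.
skip = r'(?:.*\n)*?'

unanchored = r'No such user here' + skip + r'.*from=<[^>]+>, relay=\S+ \[(?P<host>[\d.]+)\]'
anchored   = r'No such user here$' + skip + r'^.*from=<[^>]+>, relay=\S+ \[(?P<host>[\d.]+)\]$'

print(bool(re.search(unanchored, buf, re.MULTILINE)))   # True
print(bool(re.search(anchored, buf, re.MULTILINE)))     # True

# With trailing text after "here", only the line-anchored form refuses to match:
buf2 = buf.replace("No such user here\n", "No such user here (ignored)\n")
print(bool(re.search(unanchored, buf2, re.MULTILINE)))  # True  -- too permissive
print(bool(re.search(anchored, buf2, re.MULTILINE)))    # False -- first line must end right there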

@@ -0,0 +1,11 @@
# Fail2Ban aggressive ssh filter for attempted exploits
#
# Includes failregex of both sshd and sshd-ddos filters
#
[INCLUDES]
before = sshd.conf
[Definition]
mode = %(aggressive)s
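
This wrapper works through the %(...)s interpolation of fail2ban's configuration reader (built on Python's ConfigParser): sshd.conf, shown further below, defines normal, ddos and aggressive rule sets plus failregex = %(mode)s, and this file merely flips mode. A rough sketch of that mechanism with the stock configparser module and placeholder rules (fail2ban's own reader adds [INCLUDES] and .local handling on top, so this is an approximation):

import configparser

cfg = configparser.ConfigParser()
cfg.read_string(r"""
[DEFAULT]
normal = ^<normal sshd rule 1>$
         ^<normal sshd rule 2>$
ddos = ^<ddos sshd rule>$
aggressive = %(normal)s
             %(ddos)s
mode = %(aggressive)s

[Definition]
failregex = %(mode)s
""")

# failregex resolves through mode -> aggressive -> normal + ddos,
# so all three placeholder rules come back, one per line.
print(cfg.get("Definition", "failregex"))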

@@ -10,20 +10,8 @@
[INCLUDES]
# Read common prefixes. If any customizations available -- read them from
# common.local
before = common.conf
before = sshd.conf
[Definition]
_daemon = sshd
failregex = ^%(__prefix_line)sDid not receive identification string from <HOST>\s*$
ignoreregex =
[Init]
journalmatch = _SYSTEMD_UNIT=sshd.service + _COMM=sshd
# Author: Yaroslav Halchenko
mode = %(ddos)s

@@ -14,27 +14,54 @@
# common.local
before = common.conf
[Definition]
[DEFAULT]
_daemon = sshd
failregex = ^%(__prefix_line)s(?:error: PAM: )?[aA]uthentication (?:failure|error|failed) for .* from <HOST>( via \S+)?\s*$
^%(__prefix_line)s(?:error: PAM: )?User not known to the underlying authentication module for .* from <HOST>\s*$
^%(__prefix_line)sFailed \S+ for (?P<cond_inv>invalid user )?(?P<user>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)) from <HOST>(?: port \d+)?(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line)sROOT LOGIN REFUSED.* FROM <HOST>\s*$
^%(__prefix_line)s[iI](?:llegal|nvalid) user .*? from <HOST>(?: port \d+)?\s*$
^%(__prefix_line)sUser .+ from <HOST> not allowed because not listed in AllowUsers\s*$
^%(__prefix_line)sUser .+ from <HOST> not allowed because listed in DenyUsers\s*$
^%(__prefix_line)sUser .+ from <HOST> not allowed because not in any group\s*$
^%(__prefix_line)srefused connect from \S+ \(<HOST>\)\s*$
^%(__prefix_line)s(?:error: )?Received disconnect from <HOST>: 3: .*: Auth fail(?: \[preauth\])?$
^%(__prefix_line)sUser .+ from <HOST> not allowed because a group is listed in DenyGroups\s*$
^%(__prefix_line)sUser .+ from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*$
^(?P<__prefix>%(__prefix_line)s)User .+ not allowed because account is locked<SKIPLINES>(?P=__prefix)(?:error: )?Received disconnect from <HOST>: 11: .+ \[preauth\]$
^(?P<__prefix>%(__prefix_line)s)Disconnecting: Too many authentication failures for .+? \[preauth\]<SKIPLINES>(?P=__prefix)(?:error: )?Connection closed by <HOST> \[preauth\]$
^(?P<__prefix>%(__prefix_line)s)Connection from <HOST> port \d+(?: on \S+ port \d+)?<SKIPLINES>(?P=__prefix)Disconnecting: Too many authentication failures for .+? \[preauth\]$
^%(__prefix_line)s(error: )?maximum authentication attempts exceeded for .* from <HOST>(?: port \d*)?(?: ssh\d*)? \[preauth\]$
^%(__prefix_line)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=<HOST>\s.*$
# optional prefix (logged from several ssh versions) like "error: ", "error: PAM: " or "fatal: "
__pref = (?:(?:error|fatal): (?:PAM: )?)?
# optional suffix (logged from several ssh versions) like " [preauth]"
__suff = (?: \[preauth\])?\s*
__on_port_opt = (?: port \d+)?(?: on \S+(?: port \d+)?)?
# single line prefix:
__prefix_line_sl = %(__prefix_line)s%(__pref)s
# multi line prefixes (for first and second lines):
__prefix_line_ml1 = (?P<__prefix>%(__prefix_line)s)%(__pref)s
__prefix_line_ml2 = %(__suff)s$<SKIPLINES>^(?P=__prefix)%(__pref)s
mode = %(normal)s
normal = ^%(__prefix_line_sl)s[aA]uthentication (?:failure|error|failed) for .* from <HOST>( via \S+)?\s*%(__suff)s$
^%(__prefix_line_sl)sUser not known to the underlying authentication module for .* from <HOST>\s*%(__suff)s$
^%(__prefix_line_sl)sFailed \S+ for (?P<cond_inv>invalid user )?(?P<user>(?P<cond_user>\S+)|(?(cond_inv)(?:(?! from ).)*?|[^:]+)) from <HOST>%(__on_port_opt)s(?: ssh\d*)?(?(cond_user): |(?:(?:(?! from ).)*)$)
^%(__prefix_line_sl)sROOT LOGIN REFUSED.* FROM <HOST>\s*%(__suff)s$
^%(__prefix_line_sl)s[iI](?:llegal|nvalid) user .*? from <HOST>%(__on_port_opt)s\s*$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because not listed in AllowUsers\s*%(__suff)s$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because listed in DenyUsers\s*%(__suff)s$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because not in any group\s*%(__suff)s$
^%(__prefix_line_sl)srefused connect from \S+ \(<HOST>\)\s*%(__suff)s$
^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s:\s*3: .*: Auth fail%(__suff)s$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because a group is listed in DenyGroups\s*%(__suff)s$
^%(__prefix_line_sl)sUser .+ from <HOST> not allowed because none of user's groups are listed in AllowGroups\s*%(__suff)s$
^%(__prefix_line_sl)spam_unix\(sshd:auth\):\s+authentication failure;\s*logname=\S*\s*uid=\d*\s*euid=\d*\s*tty=\S*\s*ruser=\S*\s*rhost=<HOST>\s.*%(__suff)s$
^%(__prefix_line_sl)s(error: )?maximum authentication attempts exceeded for .* from <HOST>%(__on_port_opt)s(?: ssh\d*)? \[preauth\]$
^%(__prefix_line_ml1)sUser .+ not allowed because account is locked%(__prefix_line_ml2)sReceived disconnect from <HOST>: 11: .+%(__suff)s$
^%(__prefix_line_ml1)sDisconnecting: Too many authentication failures for .+?%(__prefix_line_ml2)sConnection closed by <HOST>%(__suff)s$
^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sDisconnecting: Too many authentication failures for .+%(__suff)s$
ddos = ^%(__prefix_line_sl)sDid not receive identification string from <HOST>%(__suff)s$
^%(__prefix_line_sl)sReceived disconnect from <HOST>%(__on_port_opt)s:\s*14: No supported authentication methods available%(__suff)s$
^%(__prefix_line_sl)sUnable to negotiate with <HOST>%(__on_port_opt)s: no matching (?:cipher|key exchange method) found.
^%(__prefix_line_ml1)sConnection from <HOST>%(__on_port_opt)s%(__prefix_line_ml2)sUnable to negotiate a (?:cipher|key exchange method)%(__suff)s$
^%(__prefix_line_ml1)sSSH: Server;Ltype: (?:Authname|Version|Kex);Remote: <HOST>-\d+;[A-Z]\w+:.*%(__prefix_line_ml2)sRead from socket failed: Connection reset by peer%(__suff)s$
aggressive = %(normal)s
%(ddos)s
[Definition]
failregex = %(mode)s
ignoreregex =
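
One of the ddos-mode rules can be exercised stand-alone by expanding the new option parts by hand; a sketch in which %(__prefix_line)s (syslog host/daemon/pid) is omitted, so the sample strings start directly at the sshd message text, and <HOST> is expanded per the alias documented in the bundled filters (the samples mirror entries in the sshd-ddos test log further below):

import re

# Option parts from above, copied verbatim, plus the <HOST> alias.
__pref = r'(?:(?:error|fatal): (?:PAM: )?)?'
__suff = r'(?: \[preauth\])?\s*'
__on_port_opt = r'(?: port \d+)?(?: on \S+(?: port \d+)?)?'
HOST = r'(?:::f{4,6}:)?(?P<host>\S+)'

# The "No supported authentication methods available" ddos rule with the
# %(...)s parts and <HOST> expanded by hand.
rule = (r'^' + __pref + r'Received disconnect from ' + HOST + __on_port_opt +
        r':\s*14: No supported authentication methods available' + __suff + r'$')

samples = [
    'Received disconnect from 127.0.0.1: 14: No supported authentication methods available [preauth]',
    'error: Received disconnect from 192.168.2.92 port 1684:14: No supported authentication methods available [preauth]',
]
for s in samples:
    m = re.search(rule, s)
    print(m.group('host') if m else None)   # 127.0.0.1, then 192.168.2.92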

@@ -17,7 +17,7 @@ _daemon = (?:lighttpd|suhosin)
_lighttpd_prefix = (?:\(mod_fastcgi\.c\.\d+\) FastCGI-stderr:\s)
failregex = ^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .* \(attacker '<HOST>', file '.*'(?:, line \d+)?\)$
failregex = ^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .*? \(attacker '<HOST>', file '[^']*'(?:, line \d+)?\)$
ignoreregex =
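
The tightened expression also satisfies the greedy catch-all check this commit adds to the test suite (RE_WRONG_GREED in fail2ban/tests/samplestestcase.py, shown near the end). A small self-contained sketch of that check applied to the old and new failregex, with <HOST> expanded by hand to the alias documented in the bundled filters and the %(...)s prefixes left as literal text:

import re

# The check added to the test suite; RE_HOST is assumed to be the documented <HOST> alias.
RE_HOST = r'(?:::f{4,6}:)?(?P<host>\S+)'
RE_WRONG_GREED = re.compile(r'\.[+\*](?!\?).*' + re.escape(RE_HOST) + r'.*(?:\.[+\*].*|[^\$])$')

old = r"^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .* \(attacker '<HOST>', file '.*'(?:, line \d+)?\)$"
new = r"^%(__prefix_line)s%(_lighttpd_prefix)s?ALERT - .*? \(attacker '<HOST>', file '[^']*'(?:, line \d+)?\)$"

for label, rx in (("old", old), ("new", new)):
    rx = rx.replace('<HOST>', RE_HOST)              # %(...)s prefixes are left as literal text here
    print(label, bool(RE_WRONG_GREED.search(rx)))   # old True (flagged), new False (passes)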

@@ -223,6 +223,8 @@ action = %(action_)s
[sshd]
# To use the more aggressive sshd filter (including the sshd-ddos failregex):
#filter = sshd-aggressive
port = ssh
logpath = %(sshd_log)s
backend = %(sshd_backend)s
@@ -853,3 +855,8 @@ logpath = /var/log/haproxy.log
port = ldap,ldaps
filter = slapd
logpath = /var/log/slapd.log
[domino-smtp]
port = smtp,ssmtp
filter = domino-smtp
logpath = /home/domino01/data/IBM_TECHNICAL_SUPPORT/console.log

@@ -597,7 +597,7 @@ class JailsReaderTest(LogCaptureTestCase):
# grab all filter names
filters = set(os.path.splitext(os.path.split(a)[1])[0]
for a in glob.glob(os.path.join('config', 'filter.d', '*.conf'))
if not a.endswith('common.conf'))
if not (a.endswith('common.conf') or a.endswith('-aggressive.conf')))
# get filters of all jails (filter names without options inside filter[...])
filters_jail = set(
JailReader.extractOptions(jail.options['filter'])[0] for jail in jails.jails

@@ -1 +0,0 @@
../../../../config/filter.d/common.conf

@@ -8,7 +8,7 @@
# Read common prefixes. If any customizations available -- read them from
# common.local. common.conf is a symlink to the original common.conf and
# should be copied (dereferenced) during installation
before = common.conf
before = ../../../../config/filter.d/common.conf
[Definition]

@@ -0,0 +1,8 @@
# failJSON: { "time": "2005-07-03T23:07:20", "match": true , "host": "1.2.3.4" }
03-07-2005 23:07:20 SMTP Server: Authentication failed for user postmaster ; connecting host 1.2.3.4
# failJSON: { "time": "2014-06-22T09:56:12", "match": true , "host": "1.2.3.4" }
[28325:00010-3735542592] 22-06-2014 09:56:12 smtp: postmaster [1.2.3.4] authentication failure using internet password
# failJSON: { "time": "2014-09-08T06:14:27", "match": true , "host": "1.2.3.4" }
08-09-2014 06:14:27 smtp: postmaster [1.2.3.4] authentication failure using internet password
# failJSON: { "time": "2016-11-07T22:21:20", "match": true , "host": "1.2.3.4" }
2016-11-07 22:21:20 smtp: postmaster [1.2.3.4] authentication failure using internet password

@@ -139,6 +139,8 @@ Nov 23 21:50:37 sshd[7148]: Connection closed by 61.0.0.1 [preauth]
# failJSON: { "time": "2005-07-13T18:44:28", "match": true , "host": "89.24.13.192", "desc": "from gh-289" }
Jul 13 18:44:28 mdop sshd[4931]: Received disconnect from 89.24.13.192: 3: com.jcraft.jsch.JSchException: Auth fail
# failJSON: { "time": "2005-01-02T01:18:41", "match": true , "host": "10.0.0.1", "desc": "space after port is optional (gh-1652)" }
Jan 2 01:18:41 host sshd[11808]: error: Received disconnect from 10.0.0.1 port 7736:3: com.jcraft.jsch.JSchException: Auth fail [preauth]
# failJSON: { "time": "2004-10-01T17:27:44", "match": true , "host": "94.249.236.6", "desc": "newer format per commit 36919d9f" }
Oct 1 17:27:44 localhost sshd[24077]: error: Received disconnect from 94.249.236.6: 3: com.jcraft.jsch.JSchException: Auth fail [preauth]

@@ -0,0 +1,3 @@
# sshd-aggressive includes the sshd and sshd-ddos failregexes:
# addFILE: "sshd"
# addFILE: "sshd-ddos"

@@ -1,3 +1,41 @@
# http://forums.powervps.com/showthread.php?t=1667
# failJSON: { "time": "2005-06-07T01:10:56", "match": true , "host": "69.61.56.114" }
Jun 7 01:10:56 host sshd[5937]: Did not receive identification string from 69.61.56.114
# gh-864(1):
# failJSON: { "match": false }
Nov 24 23:46:39 host sshd[32686]: SSH: Server;Ltype: Version;Remote: 127.0.0.1-1780;Protocol: 2.0;Client: libssh2_1.4.3
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (1)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# gh-864(2):
# failJSON: { "match": false }
Nov 24 23:46:40 host sshd[32686]: SSH: Server;Ltype: Kex;Remote: 127.0.0.1-1780;Enc: aes128-ctr;MAC: hmac-sha1;Comp: none [preauth]
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (2)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# gh-864(3):
# failJSON: { "match": false }
Nov 24 23:46:41 host sshd[32686]: SSH: Server;Ltype: Authname;Remote: 127.0.0.1-1780;Name: root [preauth]
# failJSON: { "time": "2004-11-24T23:46:43", "match": true , "host": "127.0.0.1", "desc": "Multiline for connection reset by peer (3)" }
Nov 24 23:46:43 host sshd[32686]: fatal: Read from socket failed: Connection reset by peer [preauth]
# several other cases from gh-864:
# failJSON: { "time": "2004-11-25T01:34:12", "match": true , "host": "127.0.0.1", "desc": "No supported authentication methods" }
Nov 25 01:34:12 srv sshd[123]: Received disconnect from 127.0.0.1: 14: No supported authentication methods available [preauth]
# failJSON: { "time": "2004-11-25T01:35:13", "match": true , "host": "127.0.0.1", "desc": "No supported authentication methods" }
Nov 25 01:35:13 srv sshd[123]: error: Received disconnect from 127.0.0.1: 14: No supported authentication methods available [preauth]
# failJSON: { "time": "2004-11-25T01:35:14", "match": true , "host": "192.168.2.92", "desc": "Optional space after port" }
Nov 25 01:35:14 srv sshd[3625]: error: Received disconnect from 192.168.2.92 port 1684:14: No supported authentication methods available [preauth]
# gh-1545:
# failJSON: { "time": "2004-11-26T13:03:29", "match": true , "host": "192.0.2.1", "desc": "No matching cipher" }
Nov 26 13:03:29 srv sshd[45]: Unable to negotiate with 192.0.2.1 port 55419: no matching cipher found. Their offer: aes256-cbc,rijndael-cbc@lysator.liu.se,aes192-cbc,aes128-cbc,arcfour128,arcfour,3des-cbc,none [preauth]
# gh-1117:
# failJSON: { "time": "2004-11-26T13:03:30", "match": true , "host": "192.0.2.2", "desc": "No matching key exchange method" }
Nov 26 13:03:30 srv sshd[45]: fatal: Unable to negotiate with 192.0.2.2 port 55419: no matching key exchange method found. Their offer: diffie-hellman-group1-sha1
# failJSON: { "match": false }
Nov 26 15:03:30 host sshd[22440]: Connection from 192.0.2.3 port 39678 on 192.168.1.9 port 22
# failJSON: { "time": "2004-11-26T15:03:31", "match": true , "host": "192.0.2.3", "desc": "Multiline - no matching key exchange method" }
Nov 26 15:03:31 host sshd[22440]: fatal: Unable to negotiate a key exchange method [preauth]

@@ -31,6 +31,8 @@ import re
import sys
import time
import unittest
from ..server.failregex import Regex
from ..server.filter import Filter
from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR
@@ -38,6 +39,10 @@ from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR
TEST_CONFIG_DIR = os.path.join(os.path.dirname(__file__), "config")
TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
# regexp to test greedy catch-all should be not-greedy:
RE_HOST = Regex('<HOST>').getRegex()
RE_WRONG_GREED = re.compile(r'\.[+\*](?!\?).*' + re.escape(RE_HOST) + r'.*(?:\.[+\*].*|[^\$])$')
class FilterSamplesRegex(unittest.TestCase):
@@ -60,6 +65,19 @@ class FilterSamplesRegex(unittest.TestCase):
>= 10,
"Expected more FilterSampleRegexs tests")
def testReWrongGreedyCatchAll(self):
"""Tests regexp RE_WRONG_GREED is intact (positive/negative)"""
self.assertTrue(
RE_WRONG_GREED.search('greedy .* test' + RE_HOST + ' test not hard-anchored'))
self.assertTrue(
RE_WRONG_GREED.search('greedy .+ test' + RE_HOST + ' test vary .* anchored$'))
self.assertFalse(
RE_WRONG_GREED.search('greedy .* test' + RE_HOST + ' test no catch-all, hard-anchored$'))
self.assertFalse(
RE_WRONG_GREED.search('non-greedy .*? test' + RE_HOST + ' test not hard-anchored'))
self.assertFalse(
RE_WRONG_GREED.search('non-greedy .+? test' + RE_HOST + ' test vary catch-all .* anchored$'))
def testSampleRegexsFactory(name, basedir):
def testFilter(self):
@@ -85,60 +103,75 @@ def testSampleRegexsFactory(name, basedir):
os.path.isfile(os.path.join(TEST_FILES_DIR, "logs", name)),
"No sample log file available for '%s' filter" % name)
logFile = fileinput.FileInput(
os.path.join(TEST_FILES_DIR, "logs", name))
regexsUsed = set()
for line in logFile:
jsonREMatch = re.match("^# ?failJSON:(.+)$", line)
if jsonREMatch:
try:
faildata = json.loads(jsonREMatch.group(1))
except ValueError as e:
raise ValueError("%s: %s:%i" %
(e, logFile.filename(), logFile.filelineno()))
line = next(logFile)
elif line.startswith("#") or not line.strip():
continue
else:
faildata = {}
filenames = [name]
i = 0
while i < len(filenames):
filename = filenames[i]; i += 1;
logFile = fileinput.FileInput(os.path.join(TEST_FILES_DIR, "logs",
filename))
ret = self.filter.processLine(
line, returnRawHost=True, checkAllRegex=True)[1]
if not ret:
# Check line is flagged as none match
self.assertFalse(faildata.get('match', True),
"Line not matched when should have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
elif ret:
# Check line is flagged to match
self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" %
(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))
# test regexp contains greedy catch-all before <HOST>, that is
# not hard-anchored at end or has not precise sub expression after <HOST>:
for fr in self.filter.getFailRegex():
if RE_WRONG_GREED.search(fr): #pragma: no cover
raise AssertionError("Following regexp of \"%s\" contains greedy catch-all before <HOST>, "
"that is not hard-anchored at end or has not precise sub expression after <HOST>:\n%s" %
(name, str(fr).replace(RE_HOST, '<HOST>')))
# Verify timestamp and host as expected
failregex, host, fail2banTime, lines = ret[0]
self.assertEqual(host, faildata.get("host", None))
for line in logFile:
jsonREMatch = re.match("^# ?(failJSON|addFILE):(.+)$", line)
if jsonREMatch:
try:
faildata = json.loads(jsonREMatch.group(2))
if jsonREMatch.group(1) == 'addFILE':
filenames.append(faildata)
continue
except ValueError as e:
raise ValueError("%s: %s:%i" %
(e, logFile.filename(), logFile.filelineno()))
line = next(logFile)
elif line.startswith("#") or not line.strip():
continue
else:
faildata = {}
t = faildata.get("time", None)
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
ret = self.filter.processLine(
line, returnRawHost=True, checkAllRegex=True)[1]
if not ret:
# Check line is flagged as none match
self.assertFalse(faildata.get('match', True),
"Line not matched when should have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
elif ret:
# Check line is flagged to match
self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have: %s:%i %r" %
(logFile.filename(), logFile.filelineno(), line))
self.assertEqual(len(ret), 1, "Multiple regexs matched %r - %s:%i" %
(map(lambda x: x[0], ret),logFile.filename(), logFile.filelineno()))
jsonTime = time.mktime(jsonTimeLocal.timetuple())
# Verify timestamp and host as expected
failregex, host, fail2banTime, lines = ret[0]
self.assertEqual(host, faildata.get("host", None))
jsonTime += jsonTimeLocal.microsecond / 1000000
t = faildata.get("time", None)
try:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch fail2ban %s (%s) != failJson %s (%s) (diff %.3f seconds) on: %s:%i %r:" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) )
jsonTime = time.mktime(jsonTimeLocal.timetuple())
regexsUsed.add(failregex)
jsonTime += jsonTimeLocal.microsecond / 1000000
self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch fail2ban %s (%s) != failJson %s (%s) (diff %.3f seconds) on: %s:%i %r:" %
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime, logFile.filename(), logFile.filelineno(), line ) )
regexsUsed.add(failregex)
for failRegexIndex, failRegex in enumerate(self.filter.getFailRegex()):
self.assertTrue(