mirror of https://github.com/fail2ban/fail2ban
Merge branch '0.11'
commit 7327fee2c8

ChangeLog
@@ -43,13 +43,21 @@ ver. 1.0.1-dev-1 (20??/??/??) - development nightly edition
  - `aggressive`: matches 401 and any variant (with and without username)
* `filter.d/sshd.conf`: normalizing of user pattern in all RE's, allowing empty user (gh-2749)

### New Features
### New Features and Enhancements
* new filter and jail for GitLab recognizing failed application logins (gh-2689)

### Enhancements
* introduced new prefix `{UNB}` for `datepattern` to disable word boundaries in regex;
* datetemplate: improved anchor detection for capturing groups `(^...)`;
* datepattern: improved handling of wrongly recognized timestamps (timezones, no datepattern, etc.)
  as well as some warnings signaling the user about an invalid pattern or zone (gh-2814):
  - filter gets an in-operation mode, which gets activated once the filter starts processing new messages;
    in this mode a timestamp read from a log line that appeared recently (not an old line), deviating too much
    from now (up to 24h), will be considered as now (assuming a timezone issue), so an unexpected
    bypass of the failure (previously exceeding `findtime`) is avoided (an illustrative sketch follows this hunk);
  - better interaction with a non-matching optional datepattern or invalid timestamps;
  - implements the special datepattern `{NONE}` - allows finding failures entirely without date-time in log messages,
    in which case the filter will use now as the timestamp (gh-2802)
* performance optimization of `datepattern` (better search algorithm in datedetector, especially for a single template);
* fail2ban-client: extended to unban IP range(s) by subnet (CIDR/mask) or hostname (DNS), gh-2791;
* extended capturing of alternate tags in filter, allowing combination of multiple groups into a single tuple token with the new tag
  prefix `<F-TUPLE_`, which combines the value of `<F-V>` with all values of `<F-TUPLE_V?_n?>` tags (gh-2755)
* `actioncheck` behavior is changed now (gh-488), so invariant check as well as restore or repair
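The in-operation clamping described above fits in a few lines. The following is a minimal illustrative sketch only (stdlib Python, hypothetical function name), not fail2ban's implementation; the 60-second window mirrors the value used in the filter changes further below.

    import time

    DEVIATION = 60  # seconds tolerated around "now" while in operation

    def effective_time(parsed, in_operation, now=None):
        """Return the timestamp a filter should use for a freshly read line."""
        now = time.time() if now is None else now
        if parsed is None:
            # no usable date at all -> assume the line was written just now
            return now
        if in_operation and abs(parsed - now) > DEVIATION:
            # probably a timezone problem -> treat the line as current
            return now
        return parsed

    # usage: a line stamped two hours in the past is treated as "now",
    # a line stamped ten seconds ago keeps its own timestamp
    now = time.time()
    assert effective_time(now - 7200, in_operation=True, now=now) == now
    assert effective_time(now - 10, in_operation=True, now=now) == now - 10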
@@ -168,19 +168,6 @@ class Fail2banClient(Fail2banCmdLine, Thread):
        if not ret:
            return None

        # verify that directory for the socket file exists
        socket_dir = os.path.dirname(self._conf["socket"])
        if not os.path.exists(socket_dir):
            logSys.error(
                "There is no directory %s to contain the socket file %s."
                % (socket_dir, self._conf["socket"]))
            return None
        if not os.access(socket_dir, os.W_OK | os.X_OK): # pragma: no cover
            logSys.error(
                "Directory %s exists but not accessible for writing"
                % (socket_dir,))
            return None

        # Check already running
        if not self._conf["force"] and os.path.exists(self._conf["socket"]):
            logSys.error("Fail2ban seems to be in unexpected state (not running but the socket exists)")
@@ -308,6 +308,10 @@ class Fail2banCmdLine():
    # since method is also exposed in API via globally bound variable
    @staticmethod
    def _exit(code=0):
        # implicit flush without to produce broken pipe error (32):
        sys.stderr.close()
        sys.stdout.close()
        # exit:
        if hasattr(os, '_exit') and os._exit:
            os._exit(code)
        else:

@@ -318,8 +322,6 @@ class Fail2banCmdLine():
        logSys.debug("Exit with code %s", code)
        # because of possible buffered output in python, we should flush it before exit:
        logging.shutdown()
        sys.stdout.flush()
        sys.stderr.flush()
        # exit
        Fail2banCmdLine._exit(code)

@@ -262,7 +262,7 @@ class Actions(JailThread, Mapping):
        if ip is None:
            return self.__flushBan(db)
        # Multiple IPs:
        if isinstance(ip, list):
        if isinstance(ip, (list, tuple)):
            missed = []
            cnt = 0
            for i in ip:

@@ -284,6 +284,14 @@ class Actions(JailThread, Mapping):
                # Unban the IP.
                self.__unBan(ticket)
            else:
                # Multiple IPs by subnet or dns:
                if not isinstance(ip, IPAddr):
                    ipa = IPAddr(ip)
                    if not ipa.isSingle: # subnet (mask/cidr) or raw (may be dns/hostname):
                        ips = filter(ipa.contains, self.__banManager.getBanList())
                        if ips:
                            return self.removeBannedIP(ips, db, ifexists)
                # not found:
                msg = "%s is not banned" % ip
                logSys.log(logging.MSG, msg)
                if ifexists:
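The new branch above selects every currently banned address covered by a given subnet before recursing into removeBannedIP. A minimal stdlib sketch of the same idea (using the ipaddress module instead of fail2ban's own IPAddr class; the ban list and function name below are hypothetical):

    import ipaddress

    def select_banned(target, ban_list):
        """Return banned addresses covered by target (single IP or CIDR)."""
        net = ipaddress.ip_network(target, strict=False)
        return [ip for ip in ban_list if ipaddress.ip_address(ip) in net]

    banned = ["192.0.2.2", "192.0.2.3", "192.0.2.8", "198.51.100.7"]
    print(select_banned("192.0.2.2/31", banned))  # ['192.0.2.2', '192.0.2.3']
    print(select_banned("192.0.2.8", banned))     # ['192.0.2.8']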
@@ -282,6 +282,8 @@ class DateDetector(object):
        elif "{DATE}" in key:
            self.addDefaultTemplate(preMatch=pattern, allDefaults=False)
            return
        elif key == "{NONE}":
            template = _getPatternTemplate('{UNB}^', key)
        else:
            template = _getPatternTemplate(pattern, key)

@@ -113,6 +113,8 @@ class Filter(JailThread):
        self.onIgnoreRegex = None
        ## if true ignores obsolete failures (failure time < now - findTime):
        self.checkFindTime = True
        ## shows that filter is in operation mode (processing new messages):
        self.inOperation = True
        ## if true prevents against retarded banning in case of RC by too many failures (disabled only for test purposes):
        self.banASAP = True
        ## Ticks counter
@@ -587,16 +589,26 @@ class Filter(JailThread):
            if self.__ignoreCache: c.set(key, False)
            return False

    def _logWarnOnce(self, nextLTM, *args):
        """Log some issue as warning once per day, otherwise level 7"""
        if MyTime.time() < getattr(self, nextLTM, 0):
            if logSys.getEffectiveLevel() <= 7: logSys.log(7, *(args[0]))
        else:
            setattr(self, nextLTM, MyTime.time() + 24*60*60)
            for args in args:
                logSys.warning('[%s] ' + args[0], self.jailName, *args[1:])

    def processLine(self, line, date=None):
        """Split the time portion from log msg and return findFailures on them
        """
        logSys.log(7, "Working on line %r", line)

        noDate = False
        if date:
            tupleLine = line
            self.__lastTimeText = tupleLine[1]
            self.__lastDate = date
        else:
            logSys.log(7, "Working on line %r", line)

            # try to parse date:
            timeMatch = self.dateDetector.matchTime(line)
            m = timeMatch[0]
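_logWarnOnce above emits a given warning at most once per day and downgrades repeats to a low debug level. A self-contained sketch of that throttling pattern with the stdlib logging module (names and the 24 h interval here are illustrative, not fail2ban's API):

    import logging, time

    logging.basicConfig(level=logging.DEBUG, format="%(levelname)s %(message)s")
    log = logging.getLogger("jail")
    _next_warn = {}  # warning key -> earliest time a real WARNING may be emitted again

    def warn_once_per_day(key, msg, *args):
        now = time.time()
        if now < _next_warn.get(key, 0):
            log.debug(msg, *args)          # already warned recently
        else:
            _next_warn[key] = now + 24 * 60 * 60
            log.warning(msg, *args)

    warn_once_per_day("tz", "Please check jail %s for a timezone issue", "sshd")  # WARNING
    warn_once_per_day("tz", "Please check jail %s for a timezone issue", "sshd")  # DEBUG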
@@ -607,22 +619,59 @@ class Filter(JailThread):
                tupleLine = (line[:s], m, line[e:])
                if m: # found and not empty - retrive date:
                    date = self.dateDetector.getTime(m, timeMatch)

                if date is None:
                    if m: logSys.error("findFailure failed to parse timeText: %s", m)
                    if date is not None:
                        # Lets get the time part
                        date = date[0]
                        self.__lastTimeText = m
                        self.__lastDate = date
                    else:
                        logSys.error("findFailure failed to parse timeText: %s", m)
                # matched empty value - date is optional or not available - set it to last known or now:
                elif self.__lastDate and self.__lastDate > MyTime.time() - 60:
                    # set it to last known:
                    tupleLine = ("", self.__lastTimeText, line)
                    date = self.__lastDate
                else:
                    # Lets get the time part
                    date = date[0]
                    self.__lastTimeText = m
                    # set it to now:
                    date = MyTime.time()
            else:
                tupleLine = ("", "", line)
            # still no date - try to use last known:
            if date is None:
                noDate = True
                if self.__lastDate and self.__lastDate > MyTime.time() - 60:
                    tupleLine = ("", self.__lastTimeText, line)
                    date = self.__lastDate

        if self.checkFindTime:
            # if in operation (modifications have been really found):
            if self.inOperation:
                # if weird date - we'd simulate now for timeing issue (too large deviation from now):
                if (date is None or date < MyTime.time() - 60 or date > MyTime.time() + 60):
                    # log time zone issue as warning once per day:
                    self._logWarnOnce("_next_simByTimeWarn",
                        ("Simulate NOW in operation since found time has too large deviation %s ~ %s +/- %s",
                            date, MyTime.time(), 60),
                        ("Please check jail has possibly a timezone issue. Line with odd timestamp: %s",
                            line))
                    # simulate now as date:
                    date = MyTime.time()
                    self.__lastDate = date
            else:
                tupleLine = (line, self.__lastTimeText, "")
                date = self.__lastDate
                # in initialization (restore) phase, if too old - ignore:
                if date is not None and date < MyTime.time() - self.getFindTime():
                    # log time zone issue as warning once per day:
                    self._logWarnOnce("_next_ignByTimeWarn",
                        ("Ignore line since time %s < %s - %s",
                            date, MyTime.time(), self.getFindTime()),
                        ("Please check jail has possibly a timezone issue. Line with odd timestamp: %s",
                            line))
                    # ignore - too old (obsolete) entry:
                    return []

        # save last line (lazy convert of process line tuple to string on demand):
        self.processedLine = lambda: "".join(tupleLine[::2])
        return self.findFailure(tupleLine, date)
        return self.findFailure(tupleLine, date, noDate=noDate)

    def processLineAndAdd(self, line, date=None):
        """Processes the line for failures and populates failManager
@@ -634,6 +683,9 @@ class Filter(JailThread):
            fail = element[3]
            logSys.debug("Processing line with time:%s and ip:%s",
                    unixTime, ip)
            # ensure the time is not in the future, e. g. by some estimated (assumed) time:
            if self.checkFindTime and unixTime > MyTime.time():
                unixTime = MyTime.time()
            tick = FailTicket(ip, unixTime, data=fail)
            if self._inIgnoreIPList(ip, tick):
                continue
@@ -756,7 +808,7 @@ class Filter(JailThread):
    # to find the logging time.
    # @return a dict with IP and timestamp.

    def findFailure(self, tupleLine, date):
    def findFailure(self, tupleLine, date, noDate=False):
        failList = list()

        ll = logSys.getEffectiveLevel()
@@ -766,11 +818,6 @@ class Filter(JailThread):
            returnRawHost = True
            cidr = IPAddr.CIDR_RAW

        if self.checkFindTime and date is not None and date < MyTime.time() - self.getFindTime():
            if ll <= 5: logSys.log(5, "Ignore line since time %s < %s - %s",
                date, MyTime.time(), self.getFindTime())
            return failList

        if self.__lineBufferSize > 1:
            self.__lineBuffer.append(tupleLine)
            orgBuffer = self.__lineBuffer = self.__lineBuffer[-self.__lineBufferSize:]
@@ -821,17 +868,13 @@ class Filter(JailThread):
                    if not self.checkAllRegex:
                        break
                    continue
                if date is None:
                    logSys.warning(
                        "Found a match for %r but no valid date/time "
                        "found for %r. Please try setting a custom "
                        "date pattern (see man page jail.conf(5)). "
                        "If format is complex, please "
                        "file a detailed issue on"
                        " https://github.com/fail2ban/fail2ban/issues "
                        "in order to get support for this format.",
                        "\n".join(failRegex.getMatchedLines()), tupleLine[1])
                    continue
                if noDate:
                    self._logWarnOnce("_next_noTimeWarn",
                        ("Found a match but no valid date/time found for %r.", tupleLine[1]),
                        ("Match without a timestamp: %s", "\n".join(failRegex.getMatchedLines())),
                        ("Please try setting a custom date pattern (see man page jail.conf(5)).",)
                    )
                if date is None and self.checkFindTime: continue
                # we should check all regex (bypass on multi-line, otherwise too complex):
                if not self.checkAllRegex or self.__lineBufferSize > 1:
                    self.__lineBuffer, buf = failRegex.getUnmatchedTupleLines(), None
@@ -940,7 +983,7 @@ class FileFilter(Filter):
                    log.setPos(lastpos)
        self.__logs[path] = log
        logSys.info("Added logfile: %r (pos = %s, hash = %s)" , path, log.getPos(), log.getHash())
        if autoSeek:
        if autoSeek and not tail:
            self.__autoSeek[path] = autoSeek
        self._addLogPath(path) # backend specific

@@ -1024,7 +1067,7 @@ class FileFilter(Filter):
    # MyTime.time()-self.findTime. When a failure is detected, a FailTicket
    # is created and is added to the FailManager.

    def getFailures(self, filename):
    def getFailures(self, filename, inOperation=None):
        log = self.getLog(filename)
        if log is None:
            logSys.error("Unable to get failures in %s", filename)
@@ -1069,9 +1112,14 @@ class FileFilter(Filter):
            if has_content:
                while not self.idle:
                    line = log.readline()
                    if not line or not self.active:
                        # The jail reached the bottom or has been stopped
                    if not self.active: break; # jail has been stopped
                    if not line:
                        # The jail reached the bottom, simply set in operation for this log
                        # (since we are first time at end of file, growing is only possible after modifications):
                        log.inOperation = True
                        break
                    # acquire in operation from log and process:
                    self.inOperation = inOperation if inOperation is not None else log.inOperation
                    self.processLineAndAdd(line.rstrip('\r\n'))
        finally:
            log.close()
@@ -1210,7 +1258,7 @@ except ImportError: # pragma: no cover

class FileContainer:

    def __init__(self, filename, encoding, tail = False):
    def __init__(self, filename, encoding, tail=False):
        self.__filename = filename
        self.setEncoding(encoding)
        self.__tail = tail
@@ -1231,6 +1279,8 @@ class FileContainer:
                self.__pos = 0
        finally:
            handler.close()
        ## shows that log is in operation mode (expecting new messages only from here):
        self.inOperation = tail

    def getFileName(self):
        return self.__filename
@@ -1304,16 +1354,17 @@ class FileContainer:
            return line.decode(enc, 'strict')
        except (UnicodeDecodeError, UnicodeEncodeError) as e:
            global _decode_line_warn
            lev = logging.DEBUG
            if _decode_line_warn.get(filename, 0) <= MyTime.time():
            lev = 7
            if not _decode_line_warn.get(filename, 0):
                lev = logging.WARNING
                _decode_line_warn[filename] = MyTime.time() + 24*60*60
                _decode_line_warn.set(filename, 1)
            logSys.log(lev,
                "Error decoding line from '%s' with '%s'."
                " Consider setting logencoding=utf-8 (or another appropriate"
                " encoding) for this jail. Continuing"
                " to process line ignoring invalid characters: %r",
                filename, enc, line)
                "Error decoding line from '%s' with '%s'.", filename, enc)
            if logSys.getEffectiveLevel() <= lev:
                logSys.log(lev, "Consider setting logencoding=utf-8 (or another appropriate"
                    " encoding) for this jail. Continuing"
                    " to process line ignoring invalid characters: %r",
                    line)
            # decode with replacing error chars:
            line = line.decode(enc, 'replace')
        return line
@@ -1334,7 +1385,7 @@ class FileContainer:
        ## print "D: Closed %s with pos %d" % (handler, self.__pos)
        ## sys.stdout.flush()

_decode_line_warn = {}
_decode_line_warn = Utils.Cache(maxCount=1000, maxTime=24*60*60);


##
@@ -111,6 +111,8 @@ class FilterPoll(FileFilter):
                modlst = []
                Utils.wait_for(lambda: not self.active or self.getModified(modlst),
                    self.sleeptime)
                if not self.active: # pragma: no cover - timing
                    break
                for filename in modlst:
                    self.getFailures(filename)
                self.__modified = True
@@ -140,7 +142,7 @@ class FilterPoll(FileFilter):
        try:
            logStats = os.stat(filename)
            stats = logStats.st_mtime, logStats.st_ino, logStats.st_size
            pstats = self.__prevStats.get(filename, (0))
            pstats = self.__prevStats.get(filename, (0,))
            if logSys.getEffectiveLevel() <= 4:
                # we do not want to waste time on strftime etc if not necessary
                dt = logStats.st_mtime - pstats[0]
@@ -517,6 +517,11 @@ class IPAddr(object):

        return (self.addr & mask) == net.addr

    def contains(self, ip):
        """Return whether the object (as network) contains given IP
        """
        return isinstance(ip, IPAddr) and (ip == self or ip.isInNet(self))

    # Pre-calculated map: addr to maskplen
    def __getMaskMap():
        m6 = (1 << 128)-1
@@ -291,9 +291,8 @@ def reGroupDictStrptime(found_dict, msec=False, default_tz=None):
            date_result -= datetime.timedelta(days=1)
    if assume_year:
        if not now: now = MyTime.now()
        if date_result > now:
            # Could be last year?
            # also reset month and day as it's not yesterday...
        if date_result > now + datetime.timedelta(days=1): # ignore by timezone issues (+24h)
            # assume last year - also reset month and day as it's not yesterday...
            date_result = date_result.replace(
                year=year-1, month=month, day=day)

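The year-assumption change above can be illustrated with a small stdlib sketch (hedged: the function below is hypothetical and only mirrors the idea, not reGroupDictStrptime itself): a log date without a year gets the current year, and rolls back to the previous year only if that would place the entry more than a day in the future, so mere timezone skew (up to +24h) no longer triggers the rollback.

    from datetime import datetime, timedelta

    def assume_year(month, day, hour, minute, second, now=None):
        """Attach a year to a log timestamp that has none."""
        now = now or datetime.now()
        result = datetime(now.year, month, day, hour, minute, second)
        if result > now + timedelta(days=1):  # ignore timezone issues (+24h)
            result = result.replace(year=now.year - 1)
        return result

    now = datetime(2005, 1, 2, 12, 0, 0)
    print(assume_year(12, 31, 23, 59, 59, now=now))  # 2004-12-31 23:59:59
    print(assume_year(1, 2, 11, 0, 0, now=now))      # 2005-01-02 11:00:00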
@@ -125,6 +125,10 @@ class Utils():
            with self.__lock:
                self._cache.pop(k, None)

        def clear(self):
            with self.__lock:
                self._cache.clear()


    @staticmethod
    def setFBlockMode(fhandle, value):
@@ -655,12 +655,6 @@ class Fail2banClientTest(Fail2banClientServerBase):
        self.assertLogged("Base configuration directory " + pjoin(tmp, "miss") + " does not exist")
        self.pruneLog()

        ## wrong socket
        self.execCmd(FAILED, (),
            "--async", "-c", pjoin(tmp, "config"), "-s", pjoin(tmp, "miss/f2b.sock"), "start")
        self.assertLogged("There is no directory " + pjoin(tmp, "miss") + " to contain the socket file")
        self.pruneLog()

        ## not running
        self.execCmd(FAILED, (),
            "-c", pjoin(tmp, "config"), "-s", pjoin(tmp, "f2b.sock"), "reload")

@@ -756,12 +750,6 @@ class Fail2banServerTest(Fail2banClientServerBase):
        self.assertLogged("Base configuration directory " + pjoin(tmp, "miss") + " does not exist")
        self.pruneLog()

        ## wrong socket
        self.execCmd(FAILED, (),
            "-c", pjoin(tmp, "config"), "-x", "-s", pjoin(tmp, "miss/f2b.sock"))
        self.assertLogged("There is no directory " + pjoin(tmp, "miss") + " to contain the socket file")
        self.pruneLog()

        ## already exists:
        open(pjoin(tmp, "f2b.sock"), 'a').close()
        self.execCmd(FAILED, (),
@@ -1215,13 +1203,41 @@ class Fail2banServerTest(Fail2banClientServerBase):
        self.assertNotLogged("[test-jail1] Found 192.0.2.5")

        # unban single ips:
        self.pruneLog("[test-phase 6]")
        self.pruneLog("[test-phase 6a]")
        self.execCmd(SUCCESS, startparams,
            "--async", "unban", "192.0.2.5", "192.0.2.6")
        self.assertLogged(
            "192.0.2.5 is not banned",
            "[test-jail1] Unban 192.0.2.6", all=True, wait=MID_WAITTIME
        )
        # unban ips by subnet (cidr/mask):
        self.pruneLog("[test-phase 6b]")
        self.execCmd(SUCCESS, startparams,
            "--async", "unban", "192.0.2.2/31")
        self.assertLogged(
            "[test-jail1] Unban 192.0.2.2",
            "[test-jail1] Unban 192.0.2.3", all=True, wait=MID_WAITTIME
        )
        self.execCmd(SUCCESS, startparams,
            "--async", "unban", "192.0.2.8/31", "192.0.2.100/31")
        self.assertLogged(
            "[test-jail1] Unban 192.0.2.8",
            "192.0.2.100/31 is not banned", all=True, wait=MID_WAITTIME)

        # ban/unban subnet(s):
        self.pruneLog("[test-phase 6c]")
        self.execCmd(SUCCESS, startparams,
            "--async", "set", "test-jail1", "banip", "192.0.2.96/28", "192.0.2.112/28")
        self.assertLogged(
            "[test-jail1] Ban 192.0.2.96/28",
            "[test-jail1] Ban 192.0.2.112/28", all=True, wait=MID_WAITTIME
        )
        self.execCmd(SUCCESS, startparams,
            "--async", "set", "test-jail1", "unbanip", "192.0.2.64/26"); # contains both subnets .96/28 and .112/28
        self.assertLogged(
            "[test-jail1] Unban 192.0.2.96/28",
            "[test-jail1] Unban 192.0.2.112/28", all=True, wait=MID_WAITTIME
        )

        # reload all (one jail) with unban all:
        self.pruneLog("[test-phase 7]")
@@ -1232,8 +1248,6 @@ class Fail2banServerTest(Fail2banClientServerBase):
        self.assertLogged(
            "Jail 'test-jail1' reloaded",
            "[test-jail1] Unban 192.0.2.1",
            "[test-jail1] Unban 192.0.2.2",
            "[test-jail1] Unban 192.0.2.3",
            "[test-jail1] Unban 192.0.2.4", all=True
        )
        # no restart occurred, no more ban (unbanned all using option "--unban"):

@@ -1241,8 +1255,6 @@ class Fail2banServerTest(Fail2banClientServerBase):
            "Jail 'test-jail1' stopped",
            "Jail 'test-jail1' started",
            "[test-jail1] Ban 192.0.2.1",
            "[test-jail1] Ban 192.0.2.2",
            "[test-jail1] Ban 192.0.2.3",
            "[test-jail1] Ban 192.0.2.4", all=True
        )

@@ -81,6 +81,7 @@ def _test_exec_command_line(*args):
    return _exit_code

STR_00 = "Dec 31 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 192.0.2.0"
STR_00_NODT = "[sshd] error: PAM: Authentication failure for kevin from 192.0.2.0"

RE_00 = r"(?:(?:Authentication failure|Failed [-/\w+]+) for(?: [iI](?:llegal|nvalid) user)?|[Ii](?:llegal|nvalid) user|ROOT LOGIN REFUSED) .*(?: from|FROM) <HOST>"
RE_00_ID = r"Authentication failure for <F-ID>.*?</F-ID> from <ADDR>$"
@@ -172,7 +173,7 @@ class Fail2banRegexTest(LogCaptureTestCase):
            "--print-all-matched",
            FILENAME_01, RE_00
        ))
        self.assertLogged('Lines: 19 lines, 0 ignored, 13 matched, 6 missed')
        self.assertLogged('Lines: 19 lines, 0 ignored, 16 matched, 3 missed')

        self.assertLogged('Error decoding line');
        self.assertLogged('Continuing to process line ignoring invalid characters')

@@ -186,7 +187,7 @@ class Fail2banRegexTest(LogCaptureTestCase):
            "--print-all-matched", "--raw",
            FILENAME_01, RE_00
        ))
        self.assertLogged('Lines: 19 lines, 0 ignored, 16 matched, 3 missed')
        self.assertLogged('Lines: 19 lines, 0 ignored, 19 matched, 0 missed')

    def testDirectRE_1raw_noDns(self):
        self.assertTrue(_test_exec(

@@ -194,7 +195,7 @@ class Fail2banRegexTest(LogCaptureTestCase):
            "--print-all-matched", "--raw", "--usedns=no",
            FILENAME_01, RE_00
        ))
        self.assertLogged('Lines: 19 lines, 0 ignored, 13 matched, 6 missed')
        self.assertLogged('Lines: 19 lines, 0 ignored, 16 matched, 3 missed')
        # usage of <F-ID>\S+</F-ID> causes raw handling automatically:
        self.pruneLog()
        self.assertTrue(_test_exec(
@@ -378,6 +379,24 @@ class Fail2banRegexTest(LogCaptureTestCase):
        self.assertLogged('192.0.2.0, kevin, inet4')
        self.pruneLog()

    def testNoDateTime(self):
        # datepattern doesn't match:
        self.assertTrue(_test_exec('-d', '{^LN-BEG}EPOCH', '-o', 'Found-ID:<F-ID>', STR_00_NODT, RE_00_ID))
        self.assertLogged(
            "Found a match but no valid date/time found",
            "Match without a timestamp:",
            "Found-ID:kevin", all=True)
        self.pruneLog()
        # explicitly no datepattern:
        self.assertTrue(_test_exec('-d', '{NONE}', '-o', 'Found-ID:<F-ID>', STR_00_NODT, RE_00_ID))
        self.assertLogged(
            "Found-ID:kevin", all=True)
        self.assertNotLogged(
            "Found a match but no valid date/time found",
            "Match without a timestamp:", all=True)

        self.pruneLog()

    def testFrmtOutputWrapML(self):
        unittest.F2B.SkipIfCfgMissing(stock=True)
        # complex substitution using tags and message (ip, user, msg):
@@ -8,9 +8,9 @@ Jul 4 18:39:39 mail courieresmtpd: error,relay=::ffff:1.2.3.4,from=<picaro@astr
Jul 6 03:42:28 whistler courieresmtpd: error,relay=::ffff:1.2.3.4,from=<>,to=<admin at memcpy>: 550 User unknown.
# failJSON: { "time": "2004-11-21T23:16:17", "match": true , "host": "1.2.3.4" }
Nov 21 23:16:17 server courieresmtpd: error,relay=::ffff:1.2.3.4,from=<>,to=<>: 550 User unknown.
# failJSON: { "time": "2004-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
# failJSON: { "time": "2005-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
Aug 14 12:51:04 HOSTNAME courieresmtpd: error,relay=::ffff:1.2.3.4,from=<firozquarl@aclunc.org>,to=<BOGUSUSER@HOSTEDDOMAIN.org>: 550 User unknown.
# failJSON: { "time": "2004-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
# failJSON: { "time": "2005-08-14T12:51:04", "match": true , "host": "1.2.3.4" }
Aug 14 12:51:04 mail.server courieresmtpd[26762]: error,relay=::ffff:1.2.3.4,msg="535 Authentication failed.",cmd: AUTH PLAIN AAAAABBBBCCCCWxlZA== admin
# failJSON: { "time": "2004-08-14T12:51:05", "match": true , "host": "192.0.2.3" }
# failJSON: { "time": "2005-08-14T12:51:05", "match": true , "host": "192.0.2.3" }
Aug 14 12:51:05 mail.server courieresmtpd[425070]: error,relay=::ffff:192.0.2.3,port=43632,msg="535 Authentication failed.",cmd: AUTH LOGIN PlcmSpIp@example.com
@@ -30,8 +30,8 @@ Jun 21 16:55:02 <auth.info> machine kernel: [ 970.699396] @vserver_demo test-
# failJSON: { "time": "2005-06-21T16:55:03", "match": true , "host": "192.0.2.3" }
[Jun 21 16:55:03] <auth.info> machine kernel: [ 970.699396] @vserver_demo test-demo(pam_unix)[13709] [ID 255 test] F2B: failure from 192.0.2.3

# -- wrong time direct in journal-line (used last known date):
# failJSON: { "time": "2005-06-21T16:55:03", "match": true , "host": "192.0.2.1" }
# -- wrong time direct in journal-line (used last known date or now, but null because no checkFindTime in samples test factory):
# failJSON: { "time": null, "match": true , "host": "192.0.2.1" }
0000-12-30 00:00:00 server test-demo[47831]: F2B: failure from 192.0.2.1
# -- wrong time after newline in message (plist without escaped newlines):
# failJSON: { "match": false }

@@ -42,8 +42,8 @@ Jun 22 20:37:04 server test-demo[402]: writeToStorage plist={
    applicationDate = "0000-12-30 00:00:00 +0000";
# failJSON: { "match": false }
}
# -- wrong time direct in journal-line (used last known date):
# failJSON: { "time": "2005-06-22T20:37:04", "match": true , "host": "192.0.2.2" }
# -- wrong time direct in journal-line (used last known date, but null because no checkFindTime in samples test factory):
# failJSON: { "time": null, "match": true , "host": "192.0.2.2" }
0000-12-30 00:00:00 server test-demo[47831]: F2B: failure from 192.0.2.2

# -- test no zone and UTC/GMT named zone "2005-06-21T14:55:10 UTC" == "2005-06-21T16:55:10 CEST" (diff +2h in CEST):
@@ -215,7 +215,7 @@ def _copy_lines_between_files(in_, fout, n=None, skip=0, mode='a', terminal_line
    # Write: all at once and flush
    if isinstance(fout, str):
        fout = open(fout, mode)
    fout.write('\n'.join(lines))
    fout.write('\n'.join(lines)+'\n')
    fout.flush()
    if isinstance(in_, str): # pragma: no branch - only used with str in test cases
        # Opened earlier, therefore must close it
@@ -394,12 +394,13 @@ class IgnoreIP(LogCaptureTestCase):
        finally:
            tearDownMyTime()

    def testTimeJump(self):
    def _testTimeJump(self, inOperation=False):
        try:
            self.filter.addFailRegex('^<HOST>')
            self.filter.setDatePattern(r'{^LN-BEG}%Y-%m-%d %H:%M:%S(?:\s*%Z)?\s')
            self.filter.setFindTime(10); # max 10 seconds back
            self.filter.setMaxRetry(5); # don't ban here
            self.filter.inOperation = inOperation
            #
            self.pruneLog('[phase 1] DST time jump')
            # check local time jump (DST hole):
@@ -430,6 +431,47 @@ class IgnoreIP(LogCaptureTestCase):
            self.assertNotLogged('Ignore line')
        finally:
            tearDownMyTime()
    def testTimeJump(self):
        self._testTimeJump(inOperation=False)
    def testTimeJump_InOperation(self):
        self._testTimeJump(inOperation=True)

    def testWrongTimeZone(self):
        try:
            self.filter.addFailRegex('fail from <ADDR>$')
            self.filter.setDatePattern(r'{^LN-BEG}%Y-%m-%d %H:%M:%S(?:\s*%Z)?\s')
            self.filter.setMaxRetry(5); # don't ban here
            self.filter.inOperation = True; # real processing (all messages are new)
            # current time is 1h later than log-entries:
            MyTime.setTime(1572138000+3600)
            #
            self.pruneLog("[phase 1] simulate wrong TZ")
            for i in (1,2,3):
                self.filter.processLineAndAdd('2019-10-27 02:00:00 fail from 192.0.2.15'); # +3 = 3
            self.assertLogged(
                "Simulate NOW in operation since found time has too large deviation",
                "Please check jail has possibly a timezone issue.",
                "192.0.2.15:1", "192.0.2.15:2", "192.0.2.15:3",
                "Total # of detected failures: 3.", wait=True)
            #
            self.pruneLog("[phase 2] wrong TZ given in log")
            for i in (1,2,3):
                self.filter.processLineAndAdd('2019-10-27 04:00:00 GMT fail from 192.0.2.16'); # +3 = 6
            self.assertLogged(
                "192.0.2.16:1", "192.0.2.16:2", "192.0.2.16:3",
                "Total # of detected failures: 6.", all=True, wait=True)
            self.assertNotLogged("Found a match but no valid date/time found")
            #
            self.pruneLog("[phase 3] other timestamp (don't match datepattern), regex matches")
            for i in range(3):
                self.filter.processLineAndAdd('27.10.2019 04:00:00 fail from 192.0.2.17'); # +3 = 9
            self.assertLogged(
                "Found a match but no valid date/time found",
                "Match without a timestamp:",
                "192.0.2.17:1", "192.0.2.17:2", "192.0.2.17:3",
                "Total # of detected failures: 9.", all=True, wait=True)
        finally:
            tearDownMyTime()

    def testAddAttempt(self):
        self.filter.setMaxRetry(3)
@@ -878,7 +920,7 @@ class LogFileMonitor(LogCaptureTestCase):
        self.assertRaises(FailManagerEmpty, self.filter.failManager.toBan)
        # and it should have not been enough

        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=5)
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=12, n=3)
        self.filter.getFailures(self.name)
        _assert_correct_last_attempt(self, self.filter, GetFailures.FAILURES_01)

@@ -897,7 +939,7 @@ class LogFileMonitor(LogCaptureTestCase):
        # filter "marked" as the known beginning, otherwise it
        # would not detect "rotation"
        self.file = _copy_lines_between_files(GetFailures.FILENAME_01, self.name,
            skip=3, mode='w')
            skip=12, n=3, mode='w')
        self.filter.getFailures(self.name)
        #self.assertRaises(FailManagerEmpty, self.filter.failManager.toBan)
        _assert_correct_last_attempt(self, self.filter, GetFailures.FAILURES_01)

@@ -916,7 +958,7 @@ class LogFileMonitor(LogCaptureTestCase):

        # move aside, but leaving the handle still open...
        os.rename(self.name, self.name + '.bak')
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=14).close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=14, n=1).close()
        self.filter.getFailures(self.name)
        _assert_correct_last_attempt(self, self.filter, GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 3)
@@ -1027,13 +1069,13 @@ def get_monitor_failures_testcase(Filter_):
        self.assertRaises(FailManagerEmpty, self.filter.failManager.toBan)

        # Now let's feed it with entries from the file
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, n=5)
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, n=12)
        self.assertRaises(FailManagerEmpty, self.filter.failManager.toBan)
        # and our dummy jail is empty as well
        self.assertFalse(len(self.jail))
        # since it should have not been enough

        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=5)
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=12, n=3)
        if idle:
            self.waitForTicks(1)
            self.assertTrue(self.isEmpty(1))

@@ -1052,7 +1094,7 @@ def get_monitor_failures_testcase(Filter_):
        #return
        # just for fun let's copy all of them again and see if that results
        # in a new ban
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, n=100)
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=12, n=3)
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)

    def test_rewrite_file(self):

@@ -1066,7 +1108,7 @@ def get_monitor_failures_testcase(Filter_):
        # filter "marked" as the known beginning, otherwise it
        # would not detect "rotation"
        self.file = _copy_lines_between_files(GetFailures.FILENAME_01, self.name,
            skip=3, mode='w')
            skip=12, n=3, mode='w')
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)

    def _wait4failures(self, count=2):

@@ -1087,13 +1129,13 @@ def get_monitor_failures_testcase(Filter_):

        # move aside, but leaving the handle still open...
        os.rename(self.name, self.name + '.bak')
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=14).close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=14, n=1).close()
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 3)

        # now remove the moved file
        _killfile(None, self.name + '.bak')
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, n=100).close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=12, n=3).close()
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 6)

@@ -1169,8 +1211,7 @@ def get_monitor_failures_testcase(Filter_):

    def _test_move_into_file(self, interim_kill=False):
        # if we move a new file into the location of an old (monitored) file
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name,
            n=100).close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name).close()
        # make sure that it is monitored first
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 3)

@@ -1181,14 +1222,14 @@ def get_monitor_failures_testcase(Filter_):

        # now create a new one to override old one
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name + '.new',
            n=100).close()
            skip=12, n=3).close()
        os.rename(self.name + '.new', self.name)
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 6)

        # and to make sure that it now monitored for changes
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name,
            n=100).close()
            skip=12, n=3).close()
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 9)

@@ -1207,7 +1248,7 @@ def get_monitor_failures_testcase(Filter_):

        # create a bogus file in the same directory and see if that doesn't affect
        open(self.name + '.bak2', 'w').close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, n=100).close()
        _copy_lines_between_files(GetFailures.FILENAME_01, self.name, skip=12, n=3).close()
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        self.assertEqual(self.filter.failManager.getFailTotal(), 6)
        _killfile(None, self.name + '.bak2')

@@ -1239,8 +1280,8 @@ def get_monitor_failures_testcase(Filter_):
        self.assert_correct_last_attempt(GetFailures.FAILURES_01, count=6) # was needed if we write twice above

        # now copy and get even more
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, n=100)
        # check for 3 failures (not 9), because 6 already get above...
        _copy_lines_between_files(GetFailures.FILENAME_01, self.file, skip=12, n=3)
        # check for 3 failures (not 9), because 6 already get above...
        self.assert_correct_last_attempt(GetFailures.FAILURES_01)
        # total count in this test:
        self.assertEqual(self.filter.failManager.getFailTotal(), 12)
@@ -1606,16 +1647,24 @@ class GetFailures(LogCaptureTestCase):
        _assert_correct_last_attempt(self, self.filter, output)

    def testGetFailures03(self):
        output = ('203.162.223.135', 7, 1124013544.0)
        output = ('203.162.223.135', 6, 1124013600.0)

        self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=0)
        self.filter.addFailRegex(r"error,relay=<HOST>,.*550 User unknown")
        self.filter.getFailures(GetFailures.FILENAME_03)
        _assert_correct_last_attempt(self, self.filter, output)

    def testGetFailures03_InOperation(self):
        output = ('203.162.223.135', 9, 1124013600.0)

        self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=0)
        self.filter.addFailRegex(r"error,relay=<HOST>,.*550 User unknown")
        self.filter.getFailures(GetFailures.FILENAME_03, inOperation=True)
        _assert_correct_last_attempt(self, self.filter, output)

    def testGetFailures03_Seek1(self):
        # same test as above but with seek to 'Aug 14 11:55:04' - so other output ...
        output = ('203.162.223.135', 5, 1124013544.0)
        output = ('203.162.223.135', 3, 1124013600.0)

        self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2] - 4*60)
        self.filter.addFailRegex(r"error,relay=<HOST>,.*550 User unknown")

@@ -1624,7 +1673,7 @@ class GetFailures(LogCaptureTestCase):

    def testGetFailures03_Seek2(self):
        # same test as above but with seek to 'Aug 14 11:59:04' - so other output ...
        output = ('203.162.223.135', 1, 1124013544.0)
        output = ('203.162.223.135', 2, 1124013600.0)
        self.filter.setMaxRetry(1)

        self.filter.addLogPath(GetFailures.FILENAME_03, autoSeek=output[2])

@@ -1652,6 +1701,7 @@ class GetFailures(LogCaptureTestCase):
        _assert_correct_last_attempt(self, self.filter, output)

    def testGetFailuresWrongChar(self):
        self.filter.checkFindTime = False
        # write wrong utf-8 char:
        fname = tempfile.mktemp(prefix='tmp_fail2ban', suffix='crlf')
        fout = fopen(fname, 'wb')

@@ -1672,6 +1722,8 @@ class GetFailures(LogCaptureTestCase):
        for enc in (None, 'utf-8', 'ascii'):
            if enc is not None:
                self.tearDown();self.setUp();
                if DefLogSys.getEffectiveLevel() > 7: DefLogSys.setLevel(7); # ensure decode_line logs always
            self.filter.checkFindTime = False;
            self.filter.setLogEncoding(enc);
            # speedup search using exact date pattern:
            self.filter.setDatePattern(r'^%ExY-%Exm-%Exd %ExH:%ExM:%ExS')
@@ -460,11 +460,27 @@ Similar to actions, filters have an [Init] section which can be overridden in \f
specifies the maximum number of lines to buffer to match multi-line regexs. For some log formats this will not need to be changed. Other logs may require increasing this value if a particular log file is frequently written to.
.TP
\fBdatepattern\fR
specifies a custom date pattern/regex as an alternative to the default date detectors e.g. %Y-%m-%d %H:%M(?::%S)?. For a list of valid format directives, see Python library documentation for strptime behaviour.
.br
Also, special values of \fIEpoch\fR (UNIX Timestamp), \fITAI64N\fR and \fIISO8601\fR can be used.
specifies a custom date pattern/regex as an alternative to the default date detectors e.g. %%Y-%%m-%%d %%H:%%M(?::%%S)?.
For a list of valid format directives, see Python library documentation for strptime behaviour.
.br
\fBNOTE:\fR due to config file string substitution, %'s must be escaped by an additional % in config files.
.br
Also, special values of \fIEpoch\fR (UNIX Timestamp), \fITAI64N\fR and \fIISO8601\fR can be used as datepattern.
.br
Normally the regexp generated for datepattern additionally gets word-start and word-end boundaries to avoid an accidental match inside of some word in a message.
There are several prefixes and words with special meaning that can be specified with a custom datepattern to control the resulting regex:
.RS
.IP
\fI{DEFAULT}\fR - can be used to add the default date patterns of fail2ban.
.IP
\fI{DATE}\fR - can be used as part of a regex that will be replaced with the default date patterns.
.IP
\fI{^LN-BEG}\fR - prefix (similar to \fI^\fR) changing the word-start boundary to a line-start boundary (ignoring up to 2 characters). If used as value (not as a prefix), it will also set all default date patterns (similar to \fI{DEFAULT}\fR), but anchored at the beginning of the message line.
.IP
\fI{UNB}\fR - prefix to disable automatic word boundaries in the regex.
.IP
\fI{NONE}\fR - as a value, allows finding failures entirely without date-time in the log message; the filter will then use now as the timestamp (or the last known timestamp from a previous line with a timestamp).
.RE
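As an editor's illustration of the anchoring described above (not fail2ban code, and with a hand-written regex standing in for a generated datepattern): a line-begin anchored pattern, in the spirit of {^LN-BEG}, picks the timestamp only at the start of the message and leaves the remainder for the failregex.

    import re
    from datetime import datetime

    LINE = "2005-06-21 16:55:03 server sshd[123]: Failed password for root"

    # hand-written equivalent of %Y-%m-%d %H:%M:%S, anchored at line begin
    anchored = re.compile(r"^(\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2})")
    m = anchored.match(LINE)
    if m:
        ts = datetime.strptime(m.group(1), "%Y-%m-%d %H:%M:%S")
        print(ts.isoformat())      # 2005-06-21T16:55:03
        print(LINE[m.end():])      # remainder of the line goes on to the failregex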
.TP
\fBjournalmatch\fR
specifies the systemd journal match used to filter the journal entries. See \fBjournalctl(1)\fR and \fBsystemd.journal-fields(7)\fR for matches syntax and more details on special journal fields. This option is only valid for the \fIsystemd\fR backend.