Merge branch '0.11'

pull/2656/head
sebres 2020-03-05 14:32:42 +01:00
commit 8b43d54878
13 changed files with 154 additions and 86 deletions

View File

@@ -17,10 +17,16 @@ ver. 1.0.1-dev-1 (20??/??/??) - development nightly edition
### Fixes
* fixed restoring of a large number (500+, depending on the files ulimit) of current bans when using PyPy
* manual ban is written to the database, so it can be restored on restart (gh-2647)
* `filter.d/common.conf`: avoid substitution of default values in the related `lt_*` sections; `__prefix_line`
  should be interpolated in the definition section (inside the filter config, gh-2650)
### New Features
### Enhancements
* introduced a new prefix `{UNB}` for `datepattern` to disable word boundaries in the regex (see the usage sketch after this list);
* datetemplate: improved anchor detection for capturing groups `(^...)`;
* performance optimization of `datepattern` (better search algorithm in datedetector, especially for single template);
* `actioncheck` behavior has changed (gh-488): the invariant check, as well as the restore or repair
  of a sane environment (if an unsane state is recognized), now occurs only on action errors (e.g.
  if a ban or unban operation exits with a return code other than 0)
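A minimal usage sketch of the new `{UNB}` prefix (the module path and sample lines are assumptions, modeled on the date-detector test further below):

```python
from fail2ban.server.datedetector import DateDetector

# default pattern: word boundaries are required around the timestamp
dd = DateDetector()
dd.appendTemplate(r'%H:%M:%S')
print(dd.getTime('00:01:02 test'))    # found

# {UNB} variant: boundaries are not enforced, so the timestamp can also be
# matched when it is glued to the surrounding text (hypothetical sample line):
dd = DateDetector()
dd.appendTemplate(r'{UNB}%H:%M:%S')
print(dd.getTime('test00:01:02'))     # found as well
```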

View File

@@ -25,7 +25,7 @@ __pid_re = (?:\[\d+\])
# Daemon name (with optional source_file:line or whatever)
# EXAMPLES: pam_rhosts_auth, [sshd], pop(pam_unix)
__daemon_re = [\[\(]?%(_daemon)s(?:\(\S+\))?[\]\)]?:?
__daemon_re = [\[\(]?<_daemon>(?:\(\S+\))?[\]\)]?:?
# extra daemon info
# EXAMPLE: [ID 800047 auth.info]
@@ -33,7 +33,7 @@ __daemon_extra_re = \[ID \d+ \S+\]
# Combinations of daemon name and PID
# EXAMPLES: sshd[31607], pop(pam_unix)[4920]
__daemon_combs_re = (?:%(__pid_re)s?:\s+%(__daemon_re)s|%(__daemon_re)s%(__pid_re)s?:?)
__daemon_combs_re = (?:<__pid_re>?:\s+<__daemon_re>|<__daemon_re><__pid_re>?:?)
# Some messages have a kernel prefix with a timestamp
# EXAMPLES: kernel: [769570.846956]
@@ -69,12 +69,12 @@ datepattern = <lt_<logtype>/datepattern>
[lt_file]
# Common line prefixes for logtype "file":
__prefix_line = %(__date_ambit)s?\s*(?:%(__bsd_syslog_verbose)s\s+)?(?:%(__hostname)s\s+)?(?:%(__kernel_prefix)s\s+)?(?:%(__vserver)s\s+)?(?:%(__daemon_combs_re)s\s+)?(?:%(__daemon_extra_re)s\s+)?
__prefix_line = <__date_ambit>?\s*(?:<__bsd_syslog_verbose>\s+)?(?:<__hostname>\s+)?(?:<__kernel_prefix>\s+)?(?:<__vserver>\s+)?(?:<__daemon_combs_re>\s+)?(?:<__daemon_extra_re>\s+)?
datepattern = {^LN-BEG}
[lt_short]
# Common (short) line prefix for logtype "journal" (corresponds to the output of formatJournalEntry):
__prefix_line = \s*(?:%(__hostname)s\s+)?(?:%(_daemon)s%(__pid_re)s?:?\s+)?(?:%(__kernel_prefix)s\s+)?
__prefix_line = \s*(?:<__hostname>\s+)?(?:<_daemon><__pid_re>?:?\s+)?(?:<__kernel_prefix>\s+)?
datepattern = %(lt_file/datepattern)s
[lt_journal]
__prefix_line = %(lt_short/__prefix_line)s

View File

@@ -8,13 +8,17 @@
# common.local
before = common.conf
# [DEFAULT]
# logtype = short
[Definition]
_daemon = monit
_prefix = Warning|HttpRequest
# Regexp for previous (accessing monit httpd) and new (access denied) versions
failregex = ^\[\s*\]\s*error\s*:\s*Warning:\s+Client '<HOST>' supplied (?:unknown user '[^']+'|wrong password for user '[^']*') accessing monit httpd$
^%(__prefix_line)s\w+: access denied -- client <HOST>: (?:unknown user '[^']+'|wrong password for user '[^']*'|empty password)$
failregex = ^%(__prefix_line)s(?:error\s*:\s+)?(?:%(_prefix)s):\s+(?:access denied\s+--\s+)?[Cc]lient '?<HOST>'?(?:\s+supplied|\s*:)\s+(?:unknown user '<F-ALT_USER>[^']+</F-ALT_USER>'|wrong password for user '<F-USER>[^']*</F-USER>'|empty password)
# Ignore login with empty user (first connect, no user specified)
# ignoreregex = %(__prefix_line)s\w+: access denied -- client <HOST>: (?:unknown user '')

View File

@@ -476,7 +476,7 @@ class Actions(JailThread, Mapping):
# do actions :
for name, action in self._actions.iteritems():
try:
if ticket.restored and getattr(action, 'norestored', False):
if bTicket.restored and getattr(action, 'norestored', False):
continue
if not aInfo.immutable: aInfo.reset()
action.ban(aInfo)
@@ -522,6 +522,8 @@ class Actions(JailThread, Mapping):
cnt += self.__reBan(bTicket, actions=rebanacts)
else: # pragma: no cover - unexpected: ticket is not banned for some reasons - reban using all actions:
cnt += self.__reBan(bTicket)
# adding the ban to the database was moved to the observer (it should first check that the IP is not already banned
# and increase the ticket time if "bantime.increment" is set)
if cnt:
logSys.debug("Banned %s / %s, %s ticket(s) in %r", cnt,
self.__banManager.getBanTotal(), self.__banManager.size(), self._jail.name)

View File

@@ -337,65 +337,76 @@ class DateDetector(object):
# if no templates specified - default templates should be used:
if not len(self.__templates):
self.addDefaultTemplate()
logSys.log(logLevel-1, "try to match time for line: %.120s", line)
match = None
log = logSys.log if logSys.getEffectiveLevel() <= logLevel else lambda *args: None
log(logLevel-1, "try to match time for line: %.120s", line)
# first try to use last template with same start/end position:
match = None
found = None, 0x7fffffff, 0x7fffffff, -1
ignoreBySearch = 0x7fffffff
i = self.__lastTemplIdx
if i < len(self.__templates):
ddtempl = self.__templates[i]
template = ddtempl.template
if template.flags & (DateTemplate.LINE_BEGIN|DateTemplate.LINE_END):
if logSys.getEffectiveLevel() <= logLevel-1: # pragma: no cover - very-heavy debug
logSys.log(logLevel-1, " try to match last anchored template #%02i ...", i)
log(logLevel-1, " try to match last anchored template #%02i ...", i)
match = template.matchDate(line)
ignoreBySearch = i
else:
distance, endpos = self.__lastPos[0], self.__lastEndPos[0]
if logSys.getEffectiveLevel() <= logLevel-1:
logSys.log(logLevel-1, " try to match last template #%02i (from %r to %r): ...%r==%r %s %r==%r...",
i, distance, endpos,
line[distance-1:distance], self.__lastPos[1],
line[distance:endpos],
line[endpos:endpos+1], self.__lastEndPos[1])
# check same boundaries left/right, otherwise possible collision/pattern switch:
if (line[distance-1:distance] == self.__lastPos[1] and
line[endpos:endpos+1] == self.__lastEndPos[1]
):
log(logLevel-1, " try to match last template #%02i (from %r to %r): ...%r==%r %s %r==%r...",
i, distance, endpos,
line[distance-1:distance], self.__lastPos[1],
line[distance:endpos],
line[endpos:endpos+1], self.__lastEndPos[2])
# check the same boundaries left/right: outside must be fully equal, inside only if not alphanumeric (e.g. a bound RE
# with a space or some special char); otherwise a collision/pattern switch is possible:
if ((
line[distance-1:distance] == self.__lastPos[1] or
(line[distance] == self.__lastPos[2] and not self.__lastPos[2].isalnum())
) and (
line[endpos:endpos+1] == self.__lastEndPos[2] or
(line[endpos-1] == self.__lastEndPos[1] and not self.__lastEndPos[1].isalnum())
)):
# search in line part only:
log(logLevel-1, " boundaries are correct, search in part %r", line[distance:endpos])
match = template.matchDate(line, distance, endpos)
else:
log(logLevel-1, " boundaries show conflict, try whole search")
match = template.matchDate(line)
ignoreBySearch = i
if match:
distance = match.start()
endpos = match.end()
# if different position, possible collision/pattern switch:
if (
len(self.__templates) == 1 or # single template:
template.flags & (DateTemplate.LINE_BEGIN|DateTemplate.LINE_END) or
(distance == self.__lastPos[0] and endpos == self.__lastEndPos[0])
):
logSys.log(logLevel, " matched last time template #%02i", i)
log(logLevel, " matched last time template #%02i", i)
else:
logSys.log(logLevel, " ** last pattern collision - pattern change, search ...")
log(logLevel, " ** last pattern collision - pattern change, reserve & search ...")
found = match, distance, endpos, i; # save current best alternative
match = None
else:
logSys.log(logLevel, " ** last pattern not found - pattern change, search ...")
log(logLevel, " ** last pattern not found - pattern change, search ...")
# search template and better match:
if not match:
logSys.log(logLevel, " search template (%i) ...", len(self.__templates))
found = None, 0x7fffffff, 0x7fffffff, -1
log(logLevel, " search template (%i) ...", len(self.__templates))
i = 0
for ddtempl in self.__templates:
if logSys.getEffectiveLevel() <= logLevel-1:
logSys.log(logLevel-1, " try template #%02i: %s", i, ddtempl.name)
if i == ignoreBySearch:
i += 1
continue
log(logLevel-1, " try template #%02i: %s", i, ddtempl.name)
template = ddtempl.template
match = template.matchDate(line)
if match:
distance = match.start()
endpos = match.end()
if logSys.getEffectiveLevel() <= logLevel:
logSys.log(logLevel, " matched time template #%02i (at %r <= %r, %r) %s",
i, distance, ddtempl.distance, self.__lastPos[0], template.name)
log(logLevel, " matched time template #%02i (at %r <= %r, %r) %s",
i, distance, ddtempl.distance, self.__lastPos[0], template.name)
## last (or single) template - fast stop:
if i+1 >= len(self.__templates):
break
@@ -408,7 +419,7 @@ class DateDetector(object):
## [grave] if distance changed, possible date-match was found somewhere
## in body of message, so save this template, and search further:
if distance > ddtempl.distance or distance > self.__lastPos[0]:
logSys.log(logLevel, " ** distance collision - pattern change, reserve")
log(logLevel, " ** distance collision - pattern change, reserve")
## shortest of both:
if distance < found[1]:
found = match, distance, endpos, i
@@ -422,7 +433,7 @@ class DateDetector(object):
# check other template was found (use this one with shortest distance):
if not match and found[0]:
match, distance, endpos, i = found
logSys.log(logLevel, " use best time template #%02i", i)
log(logLevel, " use best time template #%02i", i)
ddtempl = self.__templates[i]
template = ddtempl.template
# we've winner, incr hits, set distance, usage, reorder, etc:
@@ -432,8 +443,8 @@ class DateDetector(object):
ddtempl.distance = distance
if self.__firstUnused == i:
self.__firstUnused += 1
self.__lastPos = distance, line[distance-1:distance]
self.__lastEndPos = endpos, line[endpos:endpos+1]
self.__lastPos = distance, line[distance-1:distance], line[distance]
self.__lastEndPos = endpos, line[endpos-1], line[endpos:endpos+1]
# if not first - try to reorder the current template (bubble up); they will not be sorted anymore:
if i and i != self.__lastTemplIdx:
i = self._reorderTemplate(i)
@@ -442,7 +453,7 @@ class DateDetector(object):
return (match, template)
# not found:
logSys.log(logLevel, " no template.")
log(logLevel, " no template.")
return (None, None)
@property
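For clarity, a standalone sketch of the relaxed boundary check above (a hypothetical helper, not part of the fail2ban API): the cached `__lastPos`/`__lastEndPos` tuples now also keep the characters just inside the previous match, so a non-alphanumeric inner boundary character is accepted even when the outer character differs.

```python
def same_boundaries(line, lastPos, lastEndPos):
    # lastPos    = (distance, char before previous match, first char of previous match)
    # lastEndPos = (endpos, last char of previous match, char after previous match)
    distance, outer_left, inner_left = lastPos
    endpos, inner_right, outer_right = lastEndPos
    left_ok = (line[distance-1:distance] == outer_left or
        (line[distance] == inner_left and not inner_left.isalnum()))
    right_ok = (line[endpos:endpos+1] == outer_right or
        (line[endpos-1] == inner_right and not inner_right.isalnum()))
    return left_ok and right_ok
```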

View File

@@ -36,15 +36,16 @@ logSys = getLogger(__name__)
RE_GROUPED = re.compile(r'(?<!(?:\(\?))(?<!\\)\((?!\?)')
RE_GROUP = ( re.compile(r'^((?:\(\?\w+\))?\^?(?:\(\?\w+\))?)(.*?)(\$?)$'), r"\1(\2)\3" )
RE_EXLINE_NO_BOUNDS = re.compile(r'^\{UNB\}')
RE_EXLINE_BOUND_BEG = re.compile(r'^\{\^LN-BEG\}')
RE_EXSANC_BOUND_BEG = re.compile(r'^\(\?:\^\|\\b\|\\W\)')
RE_EXSANC_BOUND_BEG = re.compile(r'^\((?:\?:)?\^\|\\b\|\\W\)')
RE_EXEANC_BOUND_BEG = re.compile(r'\(\?=\\b\|\\W\|\$\)$')
RE_NO_WRD_BOUND_BEG = re.compile(r'^\(*(?:\(\?\w+\))?(?:\^|\(*\*\*|\(\?:\^)')
RE_NO_WRD_BOUND_BEG = re.compile(r'^\(*(?:\(\?\w+\))?(?:\^|\(*\*\*|\((?:\?:)?\^)')
RE_NO_WRD_BOUND_END = re.compile(r'(?<!\\)(?:\$\)?|\\b|\\s|\*\*\)*)$')
RE_DEL_WRD_BOUNDS = ( re.compile(r'^\(*(?:\(\?\w+\))?\(*\*\*|(?<!\\)\*\*\)*$'),
lambda m: m.group().replace('**', '') )
RE_LINE_BOUND_BEG = re.compile(r'^(?:\(\?\w+\))?(?:\^|\(\?:\^(?!\|))')
RE_LINE_BOUND_BEG = re.compile(r'^(?:\(\?\w+\))?(?:\^|\((?:\?:)?\^(?!\|))')
RE_LINE_BOUND_END = re.compile(r'(?<![\\\|])(?:\$\)?)$')
RE_ALPHA_PATTERN = re.compile(r'(?<!\%)\%[aAbBpc]')
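These relaxed expressions implement the improved anchor detection for capturing groups `(^...)` mentioned in the ChangeLog; a quick standalone check (the sample regexes are hypothetical):

```python
import re

# same expression as the updated RE_LINE_BOUND_BEG above
RE_LINE_BOUND_BEG = re.compile(r'^(?:\(\?\w+\))?(?:\^|\((?:\?:)?\^(?!\|))')

assert RE_LINE_BOUND_BEG.search(r'^\d{4}-\d{2}-\d{2}')         # plain anchored regex
assert RE_LINE_BOUND_BEG.search(r'(^\d{2}:\d{2}:\d{2})')       # now also detected: capturing group around ^
assert not RE_LINE_BOUND_BEG.search(r'(?:^|\b)\d{2}:\d{2}')    # alternation is still not treated as line-anchored
```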
@@ -119,7 +120,7 @@ class DateTemplate(object):
if boundBegin:
self.flags |= DateTemplate.WORD_BEGIN if wordBegin != 'start' else DateTemplate.LINE_BEGIN
if wordBegin != 'start':
regex = r'(?:^|\b|\W)' + regex
regex = r'(?=^|\b|\W)' + regex
else:
regex = r"^(?:\W{0,2})?" + regex
if not self.name.startswith('{^LN-BEG}'):
@@ -128,8 +129,10 @@ class DateTemplate(object):
if boundEnd:
self.flags |= DateTemplate.WORD_END
regex += r'(?=\b|\W|$)'
if RE_LINE_BOUND_BEG.search(regex): self.flags |= DateTemplate.LINE_BEGIN
if RE_LINE_BOUND_END.search(regex): self.flags |= DateTemplate.LINE_END
if not (self.flags & DateTemplate.LINE_BEGIN) and RE_LINE_BOUND_BEG.search(regex):
self.flags |= DateTemplate.LINE_BEGIN
if not (self.flags & DateTemplate.LINE_END) and RE_LINE_BOUND_END.search(regex):
self.flags |= DateTemplate.LINE_END
# remove possible special pattern "**" in front and end of regex:
regex = RE_DEL_WRD_BOUNDS[0].sub(RE_DEL_WRD_BOUNDS[1], regex)
self._regex = regex
@@ -188,7 +191,7 @@ class DateTemplate(object):
def unboundPattern(pattern):
return RE_EXEANC_BOUND_BEG.sub('',
RE_EXSANC_BOUND_BEG.sub('',
RE_EXLINE_BOUND_BEG.sub('', pattern)
RE_EXLINE_BOUND_BEG.sub('', RE_EXLINE_NO_BOUNDS.sub('', pattern))
)
)
@@ -297,6 +300,10 @@ class DatePatternRegex(DateTemplate):
def setRegex(self, pattern, wordBegin=True, wordEnd=True):
# original pattern:
self._pattern = pattern
# if unbound signalled - reset boundaries left and right:
if RE_EXLINE_NO_BOUNDS.search(pattern):
pattern = RE_EXLINE_NO_BOUNDS.sub('', pattern)
wordBegin = wordEnd = False
# if explicit given {^LN-BEG} - remove it from pattern and set 'start' in wordBegin:
if wordBegin and RE_EXLINE_BOUND_BEG.search(pattern):
pattern = RE_EXLINE_BOUND_BEG.sub('', pattern)
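The switch of the word-begin bound from the consuming group `(?:^|\b|\W)` to the lookahead `(?=^|\b|\W)` means a match now starts at the timestamp itself rather than at the preceding separator, which keeps the start/end positions used by the date detector exact. A small re-only illustration (the sample line is hypothetical):

```python
import re

line = ' 12:34:56 host sshd[123]: test'
consuming = re.compile(r'(?:^|\b|\W)\d{2}:\d{2}:\d{2}')
lookahead = re.compile(r'(?=^|\b|\W)\d{2}:\d{2}:\d{2}')

print(consuming.search(line).span())   # (0, 9) - the leading space belongs to the match
print(lookahead.search(line).span())   # (1, 9) - the match starts exactly at the timestamp
```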

View File

@@ -92,10 +92,7 @@ class FailManager:
if attempt <= 0:
attempt += 1
unixTime = ticket.getTime()
fData.setLastTime(unixTime)
if fData.getLastReset() < unixTime - self.__maxTime:
fData.setLastReset(unixTime)
fData.setRetry(0)
fData.adjustTime(unixTime, self.__maxTime)
fData.inc(matches, attempt, count)
# truncate to maxMatches:
if self.maxMatches:
@@ -139,7 +136,7 @@ class FailManager:
def cleanup(self, time):
with self.__lock:
todelete = [fid for fid,item in self.__failList.iteritems() \
if item.getLastTime() + self.__maxTime <= time]
if item.getTime() + self.__maxTime <= time]
if len(todelete) == len(self.__failList):
# remove all:
self.__failList = dict()
@@ -153,7 +150,7 @@ class FailManager:
else:
# create new dictionary without items to be deleted:
self.__failList = dict((fid,item) for fid,item in self.__failList.iteritems() \
if item.getLastTime() + self.__maxTime > time)
if item.getTime() + self.__maxTime > time)
self.__bgSvc.service()
def delFailure(self, fid):

View File

@@ -227,15 +227,14 @@ class FailTicket(Ticket):
def __init__(self, ip=None, time=None, matches=None, data={}, ticket=None):
# this class variables:
self._retry = 0
self._lastReset = None
self._firstTime = None
self._retry = 1
# create/copy using default ticket constructor:
Ticket.__init__(self, ip, time, matches, data, ticket)
# init:
if ticket is None:
self._lastReset = time if time is not None else self.getTime()
if not self._retry:
self._retry = self._data['failures'];
if not isinstance(ticket, FailTicket):
self._firstTime = time if time is not None else self.getTime()
self._retry = self._data.get('failures', 1)
def setRetry(self, value):
""" Set artificial retry count, normally equal failures / attempt,
@@ -252,7 +251,20 @@ class FailTicket(Ticket):
""" Returns failures / attempt count or
artificial retry count increased for bad IPs
"""
return max(self._retry, self._data['failures'])
return self._retry
def adjustTime(self, time, maxTime):
""" Adjust time of ticket and current attempts count considering given maxTime
as estimation from rate by previous known interval (if it exceeds the findTime)
"""
if time > self._time:
# expand the current interval and attempts count (considering maxTime):
if self._firstTime < time - maxTime:
# adjust retry, estimated from the rate over the previously known interval:
self._retry = int(round(self._retry / float(time - self._firstTime) * maxTime))
self._firstTime = time - maxTime
# last time of failure:
self._time = time
def inc(self, matches=None, attempt=1, count=1):
self._retry += count
@@ -264,19 +276,6 @@ class FailTicket(Ticket):
else:
self._data['matches'] = matches
def setLastTime(self, value):
if value > self._time:
self._time = value
def getLastTime(self):
return self._time
def getLastReset(self):
return self._lastReset
def setLastReset(self, value):
self._lastReset = value
@staticmethod
def wrap(o):
o.__class__ = FailTicket
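The retry estimation in `adjustTime` can be traced with the numbers used in the ticket test further below (a back-of-the-envelope check with assumed relative times, not code from this commit): 14 retries first seen at time 0, a new failure at 7200 and `maxTime = 3600` scale the retry count down to the rate over the last window.

```python
# estimation step of FailTicket.adjustTime, with the values from the ticket test:
retry, firstTime, time, maxTime = 14, 0, 7200, 3600
if firstTime < time - maxTime:
    retry = int(round(retry / float(time - firstTime) * maxTime))  # 14 / 7200 * 3600
print(retry)  # 7 -- matches the ft.getRetry() == 7 assertion in the test
```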

View File

@@ -543,6 +543,7 @@ class DatabaseTest(LogCaptureTestCase):
# test action together with database functionality
self.testAddJail() # Jail required
self.jail.database = self.db
self.db.addJail(self.jail)
actions = Actions(self.jail)
actions.add(
"action_checkainfo",

View File

@@ -330,6 +330,27 @@ class DateDetectorTest(LogCaptureTestCase):
dt = '2005 Jun 03'; self.assertEqual(t.matchDate(dt).group(1), dt)
dt = '2005 JUN 03'; self.assertEqual(t.matchDate(dt).group(1), dt)
def testNotAnchoredCollision(self):
# try for patterns with and without word boundaries:
for dp in (r'%H:%M:%S', r'{UNB}%H:%M:%S'):
dd = DateDetector()
dd.appendTemplate(dp)
# the timestamp boundary changes on the right and left (and the time appears on the left or right side of the line):
for fmt in ('%s test', '%8s test', 'test %s', 'test %8s'):
for dt in (
'00:01:02',
'00:01:2',
'00:1:2',
'0:1:2',
'00:1:2',
'00:01:2',
'00:01:02',
'0:1:2',
'00:01:02',
):
t = dd.getTime(fmt % dt)
self.assertEqual((t[0], t[1].group()), (1123970462.0, dt))
def testAmbiguousInOrderedTemplates(self):
dd = self.datedetector
for (debit, line, cnt) in (

View File

@@ -1005,8 +1005,8 @@ class Fail2banServerTest(Fail2banClientServerBase):
# leave action2 just to test restored interpolation:
_write_jail_cfg(actions=[2,3])
# write new failures:
self.pruneLog("[test-phase 2b]")
# write new failures:
_write_file(test2log, "w+", *(
(str(int(MyTime.time())) + " error 403 from 192.0.2.2: test 2",) * 3 +
(str(int(MyTime.time())) + " error 403 from 192.0.2.3: test 2",) * 3 +
@@ -1019,13 +1019,19 @@ class Fail2banServerTest(Fail2banClientServerBase):
self.assertLogged(
"2 ticket(s) in 'test-jail2",
"5 ticket(s) in 'test-jail1", all=True, wait=MID_WAITTIME)
# ban manually to cover restore in restart (phase 2c):
self.execCmd(SUCCESS, startparams,
"set", "test-jail2", "banip", "192.0.2.9")
self.assertLogged(
"3 ticket(s) in 'test-jail2", wait=MID_WAITTIME)
self.assertLogged(
"[test-jail1] Ban 192.0.2.2",
"[test-jail1] Ban 192.0.2.3",
"[test-jail1] Ban 192.0.2.4",
"[test-jail1] Ban 192.0.2.8",
"[test-jail2] Ban 192.0.2.4",
"[test-jail2] Ban 192.0.2.8", all=True)
"[test-jail2] Ban 192.0.2.8",
"[test-jail2] Ban 192.0.2.9", all=True)
# these test IPs are not visible at all for jail2:
self.assertNotLogged(
"[test-jail2] Found 192.0.2.2",
@@ -1047,15 +1053,17 @@ class Fail2banServerTest(Fail2banClientServerBase):
self.assertLogged(
"Reload finished.",
"Restore Ban",
"2 ticket(s) in 'test-jail2", all=True, wait=MID_WAITTIME)
"3 ticket(s) in 'test-jail2", all=True, wait=MID_WAITTIME)
# stop/start and unban/restore ban:
self.assertLogged(
"Jail 'test-jail2' stopped",
"Jail 'test-jail2' started",
"[test-jail2] Unban 192.0.2.4",
"[test-jail2] Unban 192.0.2.8",
"[test-jail2] Unban 192.0.2.9",
"Jail 'test-jail2' stopped",
"Jail 'test-jail2' started",
"[test-jail2] Restore Ban 192.0.2.4",
"[test-jail2] Restore Ban 192.0.2.8", all=True
"[test-jail2] Restore Ban 192.0.2.8",
"[test-jail2] Restore Ban 192.0.2.9", all=True
)
# test restored is 1 (only test-action2):
self.assertLogged(
@@ -1100,7 +1108,8 @@ class Fail2banServerTest(Fail2banClientServerBase):
"Jail 'test-jail2' stopped",
"Jail 'test-jail2' started",
"[test-jail2] Unban 192.0.2.4",
"[test-jail2] Unban 192.0.2.8", all=True
"[test-jail2] Unban 192.0.2.8",
"[test-jail2] Unban 192.0.2.9", all=True
)
# test unban (action2):
self.assertLogged(

View File

@@ -60,3 +60,6 @@ Jun 22 20:37:04 server test-demo[402]: writeToStorage plist={
[Jun 21 16:56:03] machine test-demo(pam_unix)[13709] F2B: error from 192.0.2.251
# failJSON: { "match": false, "desc": "test 2nd ignoreregex" }
[Jun 21 16:56:04] machine test-demo(pam_unix)[13709] F2B: error from 192.0.2.252
# failJSON: { "match": false, "desc": "ignore other daemon" }
[Jun 21 16:56:04] machine captain-nemo(pam_unix)[55555] F2B: error from 192.0.2.2

View File

@@ -69,10 +69,10 @@ class TicketTests(unittest.TestCase):
self.assertEqual(ft.getTime(), tm)
self.assertEqual(ft.getMatches(), matches2)
ft.setAttempt(2)
self.assertEqual(ft.getAttempt(), 2)
# retry is max of set retry and failures:
self.assertEqual(ft.getRetry(), 2)
ft.setRetry(1)
self.assertEqual(ft.getAttempt(), 2)
self.assertEqual(ft.getRetry(), 1)
ft.setRetry(2)
self.assertEqual(ft.getRetry(), 2)
ft.setRetry(3)
self.assertEqual(ft.getRetry(), 3)
@@ -86,13 +86,21 @@ class TicketTests(unittest.TestCase):
self.assertEqual(ft.getRetry(), 14)
self.assertEqual(ft.getMatches(), matches3)
# last time (ignored if smaller than the current time):
self.assertEqual(ft.getLastTime(), tm)
ft.setLastTime(tm-60)
self.assertEqual(ft.getTime(), tm)
self.assertEqual(ft.getLastTime(), tm)
ft.setLastTime(tm+60)
ft.adjustTime(tm-60, 3600)
self.assertEqual(ft.getTime(), tm)
self.assertEqual(ft.getRetry(), 14)
ft.adjustTime(tm+60, 3600)
self.assertEqual(ft.getTime(), tm+60)
self.assertEqual(ft.getLastTime(), tm+60)
self.assertEqual(ft.getRetry(), 14)
ft.adjustTime(tm+3600, 3600)
self.assertEqual(ft.getTime(), tm+3600)
self.assertEqual(ft.getRetry(), 14)
# adjust time so the interval becomes larger than the find time (3600), so the retry count gets re-estimated:
ft.adjustTime(tm+7200, 3600)
self.assertEqual(ft.getTime(), tm+7200)
self.assertEqual(ft.getRetry(), 7); # estimated attempts count
self.assertEqual(ft.getAttempt(), 4); # real known failure count
ft.setData('country', 'DE')
self.assertEqual(ft.getData(),
{'matches': ['first', 'second', 'third'], 'failures': 4, 'country': 'DE'})
@@ -102,10 +110,10 @@ class TicketTests(unittest.TestCase):
self.assertEqual(ft, ft2)
self.assertEqual(ft.getData(), ft2.getData())
self.assertEqual(ft2.getAttempt(), 4)
self.assertEqual(ft2.getRetry(), 14)
self.assertEqual(ft2.getRetry(), 7)
self.assertEqual(ft2.getMatches(), matches3)
self.assertEqual(ft2.getTime(), ft.getTime())
self.assertEqual(ft2.getLastTime(), ft.getLastTime())
self.assertEqual(ft2.getTime(), ft.getTime())
self.assertEqual(ft2.getBanTime(), ft.getBanTime())
def testTicketFlags(self):