merge branch gh-2690--mig2to3: fail2ban is python 3.x based now

closes gh-2690
pull/3539/head
sebres 2023-06-16 17:01:12 +02:00
commit 226a59445a
67 changed files with 421 additions and 763 deletions

View File

@@ -22,7 +22,7 @@ jobs:
 runs-on: ubuntu-20.04
 strategy:
 matrix:
-python-version: [2.7, 3.6, 3.7, 3.8, 3.9, '3.10', '3.11', pypy2, pypy3]
+python-version: [3.6, 3.7, 3.8, 3.9, '3.10', '3.11', pypy3]
 fail-fast: false
 # Steps represent a sequence of tasks that will be executed as part of the job
 steps:
@@ -51,11 +51,7 @@ jobs:
 - name: Install dependencies
 run: |
-if [[ "$F2B_PY" = 3 ]]; then python -m pip install --upgrade pip || echo "can't upgrade pip"; fi
-if [[ "$F2B_PY" = 3 ]] && ! command -v 2to3x -v 2to3 > /dev/null; then
-#pip install 2to3
-sudo apt-get -y install 2to3
-fi
+#if [[ "$F2B_PY" = 3 ]]; then python -m pip install --upgrade pip || echo "can't upgrade pip"; fi
 #sudo apt-get -y install python${F2B_PY/2/}-pyinotify || echo 'inotify not available'
 python -m pip install pyinotify || echo 'inotify not available'
 #sudo apt-get -y install python${F2B_PY/2/}-systemd || echo 'systemd not available'
@@ -67,8 +63,6 @@ jobs:
 - name: Before scripts
 run: |
 cd "$GITHUB_WORKSPACE"
-# Manually execute 2to3 for now
-if [[ "$F2B_PY" = 3 ]]; then echo "2to3 ..." && ./fail2ban-2to3; fi
 _debug() { echo -n "$1 "; err=$("${@:2}" 2>&1) && echo 'OK' || echo -e "FAIL\n$err"; }
 # (debug) output current preferred encoding:
 _debug 'Encodings:' python -c 'import locale, sys; from fail2ban.helpers import PREFER_ENC; print(PREFER_ENC, locale.getpreferredencoding(), (sys.stdout and sys.stdout.encoding))'
@@ -80,9 +74,8 @@ jobs:
 - name: Test suite
 run: |
-if [[ "$F2B_PY" = 2 ]]; then
-python setup.py test
-elif dpkg --compare-versions "$F2B_PYV" lt 3.10; then
+#python setup.py test
+if dpkg --compare-versions "$F2B_PYV" lt 3.10; then
 python bin/fail2ban-testcases --verbosity=2
 else
 echo "Skip systemd backend since systemd-python module must be fixed for python >= v.3.10 in GHA ..."

View File

@@ -10,10 +10,6 @@ dist: xenial
 matrix:
 fast_finish: true
 include:
-- python: 2.7
-#- python: pypy
-- python: 3.4
-- python: 3.5
 - python: 3.6
 - python: 3.7
 - python: 3.8
@@ -39,20 +35,14 @@ install:
 # codecov:
 - travis_retry pip install codecov
 # dnspython or dnspython3
-- if [[ "$F2B_PY" = 2 ]]; then travis_retry pip install dnspython || echo 'not installed'; fi
 - if [[ "$F2B_PY" = 3 ]]; then travis_retry pip install dnspython3 || echo 'not installed'; fi
 # python systemd bindings:
-- if [[ "$F2B_PY" = 2 ]]; then travis_retry sudo apt-get install -qq python-systemd || echo 'not installed'; fi
 - if [[ "$F2B_PY" = 3 ]]; then travis_retry sudo apt-get install -qq python3-systemd || echo 'not installed'; fi
-# gamin - install manually (not in PyPI) - travis-ci system Python is 2.7
-- if [[ $TRAVIS_PYTHON_VERSION == 2.7 ]]; then (travis_retry sudo apt-get install -qq python-gamin && cp /usr/share/pyshared/gamin.py /usr/lib/pyshared/python2.7/_gamin.so $VIRTUAL_ENV/lib/python2.7/site-packages/) || echo 'not installed'; fi
 # pyinotify
 - travis_retry pip install pyinotify || echo 'not installed'
 # Install helper tools
 - sudo apt-get install shellcheck
 before_script:
-# Manually execute 2to3 for now
-- if [[ "$F2B_PY" = 3 ]]; then ./fail2ban-2to3; fi
 # (debug) output current preferred encoding:
 - python -c 'import locale, sys; from fail2ban.helpers import PREFER_ENC; print(PREFER_ENC, locale.getpreferredencoding(), (sys.stdout and sys.stdout.encoding))'
 script:

View File

@@ -40,6 +40,7 @@ config/action.d/mail.conf
 config/action.d/mail-whois-common.conf
 config/action.d/mail-whois.conf
 config/action.d/mail-whois-lines.conf
+config/action.d/mikrotik.conf
 config/action.d/mynetwatchman.conf
 config/action.d/netscaler.conf
 config/action.d/nftables-allports.conf
@@ -90,6 +91,7 @@ config/filter.d/counter-strike.conf
 config/filter.d/courier-auth.conf
 config/filter.d/courier-smtp.conf
 config/filter.d/cyrus-imap.conf
+config/filter.d/dante.conf
 config/filter.d/directadmin.conf
 config/filter.d/domino-smtp.conf
 config/filter.d/dovecot.conf
@@ -121,6 +123,7 @@ config/filter.d/nagios.conf
 config/filter.d/named-refused.conf
 config/filter.d/nginx-bad-request.conf
 config/filter.d/nginx-botsearch.conf
+config/filter.d/nginx-forbidden.conf
 config/filter.d/nginx-http-auth.conf
 config/filter.d/nginx-limit-req.conf
 config/filter.d/nsd.conf
@@ -175,7 +178,6 @@ CONTRIBUTING.md
 COPYING
 .coveragerc
 DEVELOP
-fail2ban-2to3
 fail2ban/client/actionreader.py
 fail2ban/client/beautifier.py
 fail2ban/client/configparserinc.py
@@ -204,7 +206,6 @@ fail2ban/server/datedetector.py
 fail2ban/server/datetemplate.py
 fail2ban/server/failmanager.py
 fail2ban/server/failregex.py
-fail2ban/server/filtergamin.py
 fail2ban/server/filterpoll.py
 fail2ban/server/filter.py
 fail2ban/server/filterpyinotify.py
@@ -300,6 +301,7 @@ fail2ban/tests/files/logs/counter-strike
 fail2ban/tests/files/logs/courier-auth
 fail2ban/tests/files/logs/courier-smtp
 fail2ban/tests/files/logs/cyrus-imap
+fail2ban/tests/files/logs/dante
 fail2ban/tests/files/logs/directadmin
 fail2ban/tests/files/logs/domino-smtp
 fail2ban/tests/files/logs/dovecot
@@ -329,6 +331,7 @@ fail2ban/tests/files/logs/nagios
 fail2ban/tests/files/logs/named-refused
 fail2ban/tests/files/logs/nginx-bad-request
 fail2ban/tests/files/logs/nginx-botsearch
+fail2ban/tests/files/logs/nginx-forbidden
 fail2ban/tests/files/logs/nginx-http-auth
 fail2ban/tests/files/logs/nginx-limit-req
 fail2ban/tests/files/logs/nsd

View File

@@ -2,7 +2,7 @@
 / _|__ _(_) |_ ) |__ __ _ _ _
 | _/ _` | | |/ /| '_ \/ _` | ' \
 |_| \__,_|_|_/___|_.__/\__,_|_||_|
-v1.0.3.dev1 20??/??/??
+v1.1.0.dev1 20??/??/??
 ## Fail2Ban: ban hosts that cause multiple authentication errors
@@ -34,13 +34,12 @@ Fail2Ban is likely already packaged for your Linux distribution and [can install
 If your distribution is not listed, you can install from GitHub:
 Required:
-- [Python2 >= 2.7 or Python >= 3.2](https://www.python.org) or [PyPy](https://pypy.org)
-- python-setuptools, python-distutils or python3-setuptools for installation from source
+- [Python >= 3.5](https://www.python.org) or [PyPy3](https://pypy.org)
+- python-setuptools, python-distutils (or python3-setuptools) for installation from source
 Optional:
 - [pyinotify >= 0.8.3](https://github.com/seb-m/pyinotify), may require:
 * Linux >= 2.6.13
-- [gamin >= 0.0.21](http://www.gnome.org/~veillard/gamin)
 - [systemd >= 204](http://www.freedesktop.org/wiki/Software/systemd) and python bindings:
 * [python-systemd package](https://www.freedesktop.org/software/systemd/python-systemd/index.html)
 - [dnspython](http://www.dnspython.org/)

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
 # vi: set ft=python sts=4 ts=4 sw=4 noet :

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
 # vi: set ft=python sts=4 ts=4 sw=4 noet :
 #

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
 # vi: set ft=python sts=4 ts=4 sw=4 noet :

View File

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 # emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
 # vi: set ft=python sts=4 ts=4 sw=4 noet :
 """Script to run Fail2Ban tests battery

View File

@@ -113,19 +113,17 @@ maxretry = 5
 maxmatches = %(maxretry)s
 # "backend" specifies the backend used to get files modification.
-# Available options are "pyinotify", "gamin", "polling", "systemd" and "auto".
+# Available options are "pyinotify", "polling", "systemd" and "auto".
 # This option can be overridden in each jail as well.
 #
 # pyinotify: requires pyinotify (a file alteration monitor) to be installed.
 # If pyinotify is not installed, Fail2ban will use auto.
-# gamin: requires Gamin (a file alteration monitor) to be installed.
-# If Gamin is not installed, Fail2ban will use auto.
 # polling: uses a polling algorithm which does not require external libraries.
 # systemd: uses systemd python library to access the systemd journal.
 # Specifying "logpath" is not valid for this backend.
 # See "journalmatch" in the jails associated filter config
 # auto: will try to use the following backends, in order:
-# pyinotify, gamin, polling.
+# pyinotify, polling.
 #
 # Note: if systemd backend is chosen as the default but you enable a jail
 # for which logs are present only in its own log files, specify some other
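The comment block above is the whole contract for backend selection once gamin is gone. Purely as an illustration of the documented fallback order (this is not fail2ban's implementation), the "auto" choice reduces to preferring pyinotify when the module imports and dropping back to the dependency-free polling backend:

```python
def pick_backend(requested="auto"):
    # Illustrative sketch only, mirroring the jail.conf comments above.
    if requested not in ("auto", "pyinotify"):
        return requested              # explicit polling/systemd choice
    try:
        import pyinotify              # noqa: F401 -- availability check only
        return "pyinotify"
    except ImportError:
        return "polling"              # pyinotify missing -> fall back
```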

View File

@@ -1,7 +0,0 @@
-fail2ban.server.filtergamin module
-==================================
-.. automodule:: fail2ban.server.filtergamin
-:members:
-:undoc-members:
-:show-inheritance:

View File

@@ -13,7 +13,6 @@ fail2ban.server package
 fail2ban.server.failmanager
 fail2ban.server.failregex
 fail2ban.server.filter
-fail2ban.server.filtergamin
 fail2ban.server.filterpoll
 fail2ban.server.filterpyinotify
 fail2ban.server.filtersystemd

View File

@@ -1,14 +0,0 @@
-#!/bin/bash
-# This script carries out conversion of fail2ban to python3
-# A backup of any converted files are created with ".bak"
-# extension
-set -eu
-if 2to3 -w --no-diffs bin/* fail2ban;then
-echo "Success!" >&2
-exit 0
-else
-echo "Fail!" >&2
-exit 1
-fi

View File

@@ -89,11 +89,11 @@ class ActionReader(DefinitionInitConfigReader):
 stream = list()
 stream.append(head + ["addaction", self._name])
 multi = []
-for opt, optval in opts.iteritems():
+for opt, optval in opts.items():
 if opt in self._configOpts and not opt.startswith('known/'):
 multi.append([opt, optval])
 if self._initOpts:
-for opt, optval in self._initOpts.iteritems():
+for opt, optval in self._initOpts.items():
 if opt not in self._configOpts and not opt.startswith('known/'):
 multi.append([opt, optval])
 if len(multi) > 1:
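This `iteritems()` to `items()` substitution is the single most frequent change in the commit. Plain-Python reminder of the semantics, independent of fail2ban: Python 3 dropped `iteritems()`/`iterkeys()`/`itervalues()`, and `items()` already returns a lazy view, so the loop reads the same; an explicit `list(...)` is only needed when the dict is mutated during iteration, which is why several loops further below add it:

```python
opts = {"port": "ssh", "maxretry": 3}

for opt, optval in opts.items():      # py3: items() is a view, no copy made
    print(opt, optval)

for opt in list(opts.keys()):         # snapshot first, because we mutate below
    opts.pop(opt)
```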

View File

@@ -29,49 +29,36 @@ import re
 import sys
 from ..helpers import getLogger
-if sys.version_info >= (3,): # pragma: 2.x no cover
-# SafeConfigParser deprecated from Python 3.2 (renamed to ConfigParser)
-from configparser import ConfigParser as SafeConfigParser, BasicInterpolation, \
-InterpolationMissingOptionError, NoOptionError, NoSectionError
-# And interpolation of __name__ was simply removed, thus we need to
-# decorate default interpolator to handle it
-class BasicInterpolationWithName(BasicInterpolation):
-"""Decorator to bring __name__ interpolation back.
-Original handling of __name__ was removed because of
-functional deficiencies: http://bugs.python.org/issue10489
-commit v3.2a4-105-g61f2761
-Author: Lukasz Langa <lukasz@langa.pl>
-Date: Sun Nov 21 13:41:35 2010 +0000
-Issue #10489: removed broken `__name__` support from configparser
-But should be fine to reincarnate for our use case
-"""
-def _interpolate_some(self, parser, option, accum, rest, section, map,
-*args, **kwargs):
-if section and not (__name__ in map):
-map = map.copy() # just to be safe
-map['__name__'] = section
-# try to wrap section options like %(section/option)s:
-parser._map_section_options(section, option, rest, map)
-return super(BasicInterpolationWithName, self)._interpolate_some(
-parser, option, accum, rest, section, map, *args, **kwargs)
-else: # pragma: 3.x no cover
-from ConfigParser import SafeConfigParser, \
-InterpolationMissingOptionError, NoOptionError, NoSectionError
-# Interpolate missing known/option as option from default section
-SafeConfigParser._cp_interpolate_some = SafeConfigParser._interpolate_some
-def _interpolate_some(self, option, accum, rest, section, map, *args, **kwargs):
-# try to wrap section options like %(section/option)s:
-self._map_section_options(section, option, rest, map)
-return self._cp_interpolate_some(option, accum, rest, section, map, *args, **kwargs)
-SafeConfigParser._interpolate_some = _interpolate_some
+# SafeConfigParser deprecated from Python 3.2 (renamed to ConfigParser)
+from configparser import ConfigParser as SafeConfigParser, BasicInterpolation, \
+InterpolationMissingOptionError, NoOptionError, NoSectionError
+# And interpolation of __name__ was simply removed, thus we need to
+# decorate default interpolator to handle it
+class BasicInterpolationWithName(BasicInterpolation):
+"""Decorator to bring __name__ interpolation back.
+Original handling of __name__ was removed because of
+functional deficiencies: http://bugs.python.org/issue10489
+commit v3.2a4-105-g61f2761
+Author: Lukasz Langa <lukasz@langa.pl>
+Date: Sun Nov 21 13:41:35 2010 +0000
+Issue #10489: removed broken `__name__` support from configparser
+But should be fine to reincarnate for our use case
+"""
+def _interpolate_some(self, parser, option, accum, rest, section, map,
+*args, **kwargs):
+if section and not (__name__ in map):
+map = map.copy() # just to be safe
+map['__name__'] = section
+# try to wrap section options like %(section/option)s:
+parser._map_section_options(section, option, rest, map)
+return super(BasicInterpolationWithName, self)._interpolate_some(
+parser, option, accum, rest, section, map, *args, **kwargs)
 def _expandConfFilesWithLocal(filenames):
 """Expands config files with local extension.
@@ -327,7 +314,7 @@ after = 1.conf
 # mix it with defaults:
 return set(opts.keys()) | set(self._defaults)
 # only own option names:
-return opts.keys()
+return list(opts.keys())
 def read(self, filenames, get_includes=True):
 if not isinstance(filenames, list):
@@ -356,7 +343,7 @@ after = 1.conf
 ret += i
 # merge defaults and all sections to self:
 alld.update(cfg.get_defaults())
-for n, s in cfg.get_sections().iteritems():
+for n, s in cfg.get_sections().items():
 # conditional sections
 cond = SafeConfigParserWithIncludes.CONDITIONAL_RE.match(n)
 if cond:
@@ -366,14 +353,14 @@ after = 1.conf
 del(s['__name__'])
 except KeyError:
 pass
-for k in s.keys():
+for k in list(s.keys()):
 v = s.pop(k)
 s[k + cond] = v
 s2 = alls.get(n)
 if isinstance(s2, dict):
 # save previous known values, for possible using in local interpolations later:
 self.merge_section('KNOWN/'+n,
-dict(filter(lambda i: i[0] in s, s2.iteritems())), '')
+dict([i for i in iter(s2.items()) if i[0] in s]), '')
 # merge section
 s2.update(s)
 else:
@@ -400,7 +387,7 @@ after = 1.conf
 sec.update(options)
 return
 sk = {}
-for k, v in options.iteritems():
+for k, v in options.items():
 if not k.startswith(pref) and k != '__name__':
 sk[pref+k] = v
 sec.update(sk)
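`BasicInterpolationWithName` above exists because Python 3's `configparser` removed `%(__name__)s` interpolation (bpo-10489), which fail2ban's configs still rely on. A minimal standalone sketch of the same idea, using only the public `before_get` hook instead of the private `_interpolate_some` (names here are illustrative, not fail2ban's):

```python
import configparser

class NameInterpolation(configparser.BasicInterpolation):
    """Re-inject the section name as %(__name__)s (removed in Python 3.2)."""
    def before_get(self, parser, section, option, value, defaults):
        defaults = dict(defaults, __name__=section)   # copy, then add __name__
        return super().before_get(parser, section, option, value, defaults)

cfg = configparser.ConfigParser(interpolation=NameInterpolation())
cfg.read_string("[sshd]\nfilter = %(__name__)s\n")
print(cfg.get("sshd", "filter"))   # -> sshd
```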

View File

@ -26,7 +26,7 @@ __license__ = "GPL"
import glob import glob
import os import os
from ConfigParser import NoOptionError, NoSectionError from configparser import NoOptionError, NoSectionError
from .configparserinc import sys, SafeConfigParserWithIncludes, logLevel from .configparserinc import sys, SafeConfigParserWithIncludes, logLevel
from ..helpers import getLogger, _as_bool, _merge_dicts, substituteRecursiveTags from ..helpers import getLogger, _as_bool, _merge_dicts, substituteRecursiveTags
@ -221,7 +221,7 @@ class ConfigReaderUnshared(SafeConfigParserWithIncludes):
config_files += sorted(glob.glob('%s/*.local' % config_dir)) config_files += sorted(glob.glob('%s/*.local' % config_dir))
# choose only existing ones # choose only existing ones
config_files = filter(os.path.exists, config_files) config_files = list(filter(os.path.exists, config_files))
if len(config_files): if len(config_files):
# at least one config exists and accessible # at least one config exists and accessible

View File

@@ -47,7 +47,7 @@ class CSocket:
 def send(self, msg, nonblocking=False, timeout=None):
 # Convert every list member to string
-obj = dumps(map(CSocket.convert, msg), HIGHEST_PROTOCOL)
+obj = dumps(list(map(CSocket.convert, msg)), HIGHEST_PROTOCOL)
 self.__csock.send(obj)
 self.__csock.send(CSPROTO.END)
 return self.receive(self.__csock, nonblocking, timeout)
@@ -72,7 +72,7 @@ class CSocket:
 @staticmethod
 def convert(m):
 """Convert every "unexpected" member of message to string"""
-if isinstance(m, (basestring, bool, int, float, list, dict, set)):
+if isinstance(m, (str, bool, int, float, list, dict, set)):
 return m
 else: # pragma: no cover
 return str(m)
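The added `list(...)` around `map()` here is functional, not stylistic: Python 3's `map()` returns a lazy iterator and `pickle` refuses to serialize it. A quick standalone check (plain Python, not fail2ban code):

```python
import pickle

try:
    pickle.dumps(map(str, [1, 2, 3]))            # py3: map object is not picklable
except TypeError as e:
    print("as expected:", e)

data = pickle.dumps(list(map(str, [1, 2, 3])))   # materialized list pickles fine
print(pickle.loads(data))                        # ['1', '2', '3']
```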

View File

@@ -45,7 +45,7 @@ def _thread_name():
 return threading.current_thread().__class__.__name__
 def input_command(): # pragma: no cover
-return raw_input(PROMPT)
+return input(PROMPT)
 ##
 #
@@ -456,7 +456,7 @@ class Fail2banClient(Fail2banCmdLine, Thread):
 return False
 finally:
 self._alive = False
-for s, sh in _prev_signals.iteritems():
+for s, sh in _prev_signals.items():
 signal.signal(s, sh)

View File

@@ -40,10 +40,10 @@ import os
 import shlex
 import sys
 import time
-import urllib
+import urllib.request, urllib.parse, urllib.error
 from optparse import OptionParser, Option
-from ConfigParser import NoOptionError, NoSectionError, MissingSectionHeaderError
+from configparser import NoOptionError, NoSectionError, MissingSectionHeaderError
 try: # pragma: no cover
 from ..server.filtersystemd import FilterSystemd
@@ -67,7 +67,7 @@ def debuggexURL(sample, regex, multiline=False, useDns="yes"):
 'flavor': 'python'
 }
 if multiline: args['flags'] = 'm'
-return 'https://www.debuggex.com/?' + urllib.urlencode(args)
+return 'https://www.debuggex.com/?' + urllib.parse.urlencode(args)
 def output(args): # pragma: no cover (overriden in test-cases)
 print(args)
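Python 3 split the old `urllib` module, so `urllib.urlencode` becomes `urllib.parse.urlencode`; the three-module import above is the standard 2to3 rewrite of a bare `import urllib`. A tiny standalone check with made-up sample values:

```python
from urllib.parse import urlencode

args = {'re': r'failed login from <HOST>', 'flavor': 'python'}   # sample values only
print('https://www.debuggex.com/?' + urlencode(args))
```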
@@ -246,7 +246,7 @@ class Fail2banRegex(object):
 def __init__(self, opts):
 # set local protected members from given options:
-self.__dict__.update(dict(('_'+o,v) for o,v in opts.__dict__.iteritems()))
+self.__dict__.update(dict(('_'+o,v) for o,v in opts.__dict__.items()))
 self._opts = opts
 self._maxlines_set = False # so we allow to override maxlines in cmdline
 self._datepattern_set = False
@@ -313,7 +313,7 @@ class Fail2banRegex(object):
 realopts = {}
 combopts = reader.getCombined()
 # output all options that are specified in filter-argument as well as some special (mostly interested):
-for k in ['logtype', 'datepattern'] + fltOpt.keys():
+for k in ['logtype', 'datepattern'] + list(fltOpt.keys()):
 # combined options win, but they contain only a sub-set in filter expected keys,
 # so get the rest from definition section:
 try:
@@ -440,7 +440,7 @@ class Fail2banRegex(object):
 self.output( "Use %11s line : %s" % (regex, shortstr(value)) )
 regex_values = {regextype: [RegexStat(value)]}
-for regextype, regex_values in regex_values.iteritems():
+for regextype, regex_values in regex_values.items():
 regex = regextype + 'regex'
 setattr(self, "_" + regex, regex_values)
 for regex in regex_values:
@@ -532,13 +532,13 @@ class Fail2banRegex(object):
 def _out(ret):
 for r in ret:
 for r in r[3].get('matches'):
-if not isinstance(r, basestring):
+if not isinstance(r, str):
 r = ''.join(r for r in r)
 output(r)
 elif ofmt == 'row':
 def _out(ret):
 for r in ret:
-output('[%r,\t%r,\t%r],' % (r[1],r[2],dict((k,v) for k, v in r[3].iteritems() if k != 'matches')))
+output('[%r,\t%r,\t%r],' % (r[1],r[2],dict((k,v) for k, v in r[3].items() if k != 'matches')))
 elif '<' not in ofmt:
 def _out(ret):
 for r in ret:
@@ -573,7 +573,7 @@ class Fail2banRegex(object):
 # wrap multiline tag (msg) interpolations to single line:
 for r, v in rows:
 for r in r[3].get('matches'):
-if not isinstance(r, basestring):
+if not isinstance(r, str):
 r = ''.join(r for r in r)
 r = v.replace("\x00msg\x00", r)
 output(r)
@@ -639,9 +639,9 @@ class Fail2banRegex(object):
 ans = [[]]
 for arg in [l, regexlist]:
 ans = [ x + [y] for x in ans for y in arg ]
-b = map(lambda a: a[0] + ' | ' + a[1].getFailRegex() + ' | ' +
-debuggexURL(self.encode_line(a[0]), a[1].getFailRegex(),
-multiline, self._opts.usedns), ans)
+b = [a[0] + ' | ' + a[1].getFailRegex() + ' | ' +
+debuggexURL(self.encode_line(a[0]), a[1].getFailRegex(),
+multiline, self._opts.usedns) for a in ans]
 pprint_list([x.rstrip() for x in b], header)
 else:
 output( "%s too many to print. Use --print-all-%s " \

View File

@@ -71,7 +71,7 @@ class FilterReader(DefinitionInitConfigReader):
 @staticmethod
 def _fillStream(stream, opts, jailName):
 prio0idx = 0
-for opt, value in opts.iteritems():
+for opt, value in opts.items():
 # Do not send a command if the value is not set (empty).
 if value is None: continue
 if opt in ("failregex", "ignoreregex"):

View File

@@ -117,7 +117,7 @@ class JailReader(ConfigReader):
 }
 _configOpts.update(FilterReader._configOpts)
-_ignoreOpts = set(['action', 'filter', 'enabled'] + FilterReader._configOpts.keys())
+_ignoreOpts = set(['action', 'filter', 'enabled'] + list(FilterReader._configOpts.keys()))
 def getOptions(self):
@@ -240,7 +240,7 @@ class JailReader(ConfigReader):
 stream.extend(self.__filter.convert())
 # and using options from jail:
 FilterReader._fillStream(stream, self.__opts, self.__name)
-for opt, value in self.__opts.iteritems():
+for opt, value in self.__opts.items():
 if opt == "logpath":
 if self.__opts.get('backend', '').startswith("systemd"): continue
 found_files = 0

View File

@@ -31,6 +31,7 @@ import traceback
 from threading import Lock
 from .server.mytime import MyTime
+import importlib
 try:
 import ctypes
@@ -47,30 +48,6 @@ if PREFER_ENC.startswith('ANSI_'): # pragma: no cover
 elif all((os.getenv(v) in (None, "") for v in ('LANGUAGE', 'LC_ALL', 'LC_CTYPE', 'LANG'))):
 PREFER_ENC = 'UTF-8';
-# py-2.x: try to minimize influence of sporadic conversion errors on python 2.x,
-# caused by implicit converting of string/unicode (e. g. `str(u"\uFFFD")` produces an error
-# if default encoding is 'ascii');
-if sys.version_info < (3,): # pragma: 3.x no cover
-# correct default (global system) encoding (mostly UTF-8):
-def __resetDefaultEncoding(encoding):
-global PREFER_ENC
-ode = sys.getdefaultencoding().upper()
-if ode == 'ASCII' and ode != PREFER_ENC.upper():
-# setdefaultencoding is normally deleted after site initialized, so hack-in using load of sys-module:
-_sys = sys
-if not hasattr(_sys, "setdefaultencoding"):
-try:
-from imp import load_dynamic as __ldm
-_sys = __ldm('_sys', 'sys')
-except ImportError: # pragma: no cover - only if load_dynamic fails
-reload(sys)
-_sys = sys
-if hasattr(_sys, "setdefaultencoding"):
-_sys.setdefaultencoding(encoding)
-# override to PREFER_ENC:
-__resetDefaultEncoding(PREFER_ENC)
-del __resetDefaultEncoding
 # todo: rewrite explicit (and implicit) str-conversions via encode/decode with IO-encoding (sys.stdout.encoding),
 # e. g. inside tags-replacement by command-actions, etc.
@@ -84,44 +61,24 @@ if sys.version_info < (3,): # pragma: 3.x no cover
 # [True, True, False]; # -- python2
 # [True, False, True]; # -- python3
 #
-if sys.version_info >= (3,): # pragma: 2.x no cover
-def uni_decode(x, enc=PREFER_ENC, errors='strict'):
-try:
-if isinstance(x, bytes):
-return x.decode(enc, errors)
-return x
-except (UnicodeDecodeError, UnicodeEncodeError): # pragma: no cover - unsure if reachable
-if errors != 'strict':
-raise
-return x.decode(enc, 'replace')
-def uni_string(x):
-if not isinstance(x, bytes):
-return str(x)
-return x.decode(PREFER_ENC, 'replace')
-def uni_bytes(x):
-return bytes(x, 'UTF-8')
-else: # pragma: 3.x no cover
-def uni_decode(x, enc=PREFER_ENC, errors='strict'):
-try:
-if isinstance(x, unicode):
-return x.encode(enc, errors)
-return x
-except (UnicodeDecodeError, UnicodeEncodeError): # pragma: no cover - unsure if reachable
-if errors != 'strict':
-raise
-return x.encode(enc, 'replace')
-if sys.getdefaultencoding().upper() != 'UTF-8': # pragma: no cover - utf-8 is default encoding now
-def uni_string(x):
-if not isinstance(x, unicode):
-return str(x)
-return x.encode(PREFER_ENC, 'replace')
-else:
-uni_string = str
-uni_bytes = bytes
+def uni_decode(x, enc=PREFER_ENC, errors='strict'):
+try:
+if isinstance(x, bytes):
+return x.decode(enc, errors)
+return x
+except (UnicodeDecodeError, UnicodeEncodeError): # pragma: no cover - unsure if reachable
+if errors != 'strict':
+raise
+return x.decode(enc, 'replace')
+def uni_string(x):
+if not isinstance(x, bytes):
+return str(x)
+return x.decode(PREFER_ENC, 'replace')
+def uni_bytes(x):
+return bytes(x, 'UTF-8')
 def _as_bool(val):
-return bool(val) if not isinstance(val, basestring) \
+return bool(val) if not isinstance(val, str) \
 else val.lower() in ('1', 'on', 'true', 'yes')
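With the 2.x branch gone, `uni_decode`/`uni_string` only have to tell `bytes` from `str`, and they decode rather than encode. A small standalone illustration of the behavior the kept py3 branch relies on (`PREFER_ENC` stands in for the module-level constant above):

```python
PREFER_ENC = 'UTF-8'   # stand-in for the module-level constant

def uni_decode(x, enc=PREFER_ENC, errors='strict'):
    # py3 branch kept by the diff: bytes are decoded to str, str passes through
    return x.decode(enc, errors) if isinstance(x, bytes) else x

print(uni_decode(b'sshd: 192.0.2.1'))   # -> 'sshd: 192.0.2.1' (str)
print(uni_decode('already text'))       # unchanged
print('a' == b'a')                      # -> False: py3 never mixes str and bytes implicitly
```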
@@ -226,10 +183,7 @@ def __stopOnIOError(logSys=None, logHndlr=None): # pragma: no cover
 pass
 sys.exit(0)
-try:
-BrokenPipeError = BrokenPipeError
-except NameError: # pragma: 3.x no cover
-BrokenPipeError = IOError
+BrokenPipeError = BrokenPipeError
 __origLog = logging.Logger._log
 def __safeLog(self, level, msg, args, **kwargs):
@@ -330,38 +284,19 @@ def splitwords(s):
 """
 if not s:
 return []
-return filter(bool, map(lambda v: v.strip(), re.split('[ ,\n]+', s)))
+return list(filter(bool, [v.strip() for v in re.split('[ ,\n]+', s)]))
-if sys.version_info >= (3,5):
-eval(compile(r'''if 1:
-def _merge_dicts(x, y):
-"""Helper to merge dicts.
-"""
-if y:
-return {**x, **y}
-return x
-def _merge_copy_dicts(x, y):
-"""Helper to merge dicts to guarantee a copy result (r is never x).
-"""
-return {**x, **y}
-''', __file__, 'exec'))
-else:
-def _merge_dicts(x, y):
-"""Helper to merge dicts.
-"""
-r = x
-if y:
-r = x.copy()
-r.update(y)
-return r
-def _merge_copy_dicts(x, y):
-"""Helper to merge dicts to guarantee a copy result (r is never x).
-"""
-r = x.copy()
-if y:
-r.update(y)
-return r
+def _merge_dicts(x, y):
+"""Helper to merge dicts.
+"""
+if y:
+return {**x, **y}
+return x
+def _merge_copy_dicts(x, y):
+"""Helper to merge dicts to guarantee a copy result (r is never x).
+"""
+return {**x, **y}
 #
 # Following function used for parse options from parameter (e.g. `name[p1=0, p2="..."][p3='...']`).
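Dropping Python < 3.5 is what lets the `eval(compile(...))` wrapper disappear: the `{**x, **y}` literal was a syntax error on older interpreters, so it had to be hidden from the parser at import time. The replacement semantics in isolation:

```python
x = {'port': 22, 'maxretry': 5}
y = {'maxretry': 3}

merged = {**x, **y}   # right-hand dict wins on duplicate keys
print(merged)         # {'port': 22, 'maxretry': 3}
print(x)              # left operand is untouched: {'port': 22, 'maxretry': 5}
```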
@@ -447,7 +382,7 @@ def substituteRecursiveTags(inptags, conditional='',
 while True:
 repFlag = False
 # substitute each value:
-for tag in tags.iterkeys():
+for tag in tags.keys():
 # ignore escaped or already done (or in ignore list):
 if tag in ignore or tag in done: continue
 # ignore replacing callable items from calling map - should be converted on demand only (by get):
@@ -487,7 +422,7 @@ def substituteRecursiveTags(inptags, conditional='',
 m = tre_search(value, m.end())
 continue
 # if calling map - be sure we've string:
-if not isinstance(repl, basestring): repl = uni_string(repl)
+if not isinstance(repl, str): repl = uni_string(repl)
 value = value.replace('<%s>' % rtag, repl)
 #logSys.log(5, 'value now: %s' % value)
 # increment reference count:
@@ -520,10 +455,7 @@ if _libcap:
 Side effect: name can be silently truncated to 15 bytes (16 bytes with NTS zero)
 """
 try:
-if sys.version_info >= (3,): # pragma: 2.x no cover
-name = name.encode()
-else: # pragma: 3.x no cover
-name = bytes(name)
+name = name.encode()
 _libcap.prctl(15, name) # PR_SET_NAME = 15
 except: # pragma: no cover
 pass

View File

@@ -114,9 +114,9 @@ class CallingMap(MutableMapping, object):
 def _asdict(self, calculated=False, checker=None):
 d = dict(self.data, **self.storage)
 if not calculated:
-return dict((n,v) for n,v in d.iteritems() \
+return dict((n,v) for n,v in d.items() \
 if not callable(v) or n in self.CM_REPR_ITEMS)
-for n,v in d.items():
+for n,v in list(d.items()):
 if callable(v):
 try:
 # calculate:
@@ -182,7 +182,7 @@ class CallingMap(MutableMapping, object):
 return self.__class__(_merge_copy_dicts(self.data, self.storage))
-class ActionBase(object):
+class ActionBase(object, metaclass=ABCMeta):
 """An abstract base class for actions in Fail2Ban.
 Action Base is a base definition of what methods need to be in
@@ -212,7 +212,6 @@ class ActionBase(object):
 Any additional arguments specified in `jail.conf` or passed
 via `fail2ban-client` will be passed as keyword arguments.
 """
-__metaclass__ = ABCMeta
 @classmethod
 def __subclasshook__(cls, C):
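A class-level `__metaclass__` attribute is silently ignored by Python 3, so the metaclass has to move into the class header for `ActionBase` to stay abstract. Minimal standalone illustration (the `ActionLike` class is hypothetical, not fail2ban's):

```python
from abc import ABCMeta, abstractmethod

class ActionLike(metaclass=ABCMeta):   # py3 spelling; `__metaclass__ = ABCMeta` would be a no-op
    @abstractmethod
    def ban(self, aInfo):
        """Executed when a ban occurs."""

try:
    ActionLike()                       # still abstract, so instantiation fails
except TypeError as e:
    print(e)
```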
@@ -423,7 +422,7 @@ class CommandAction(ActionBase):
 if not callable(family): # pragma: no cover
 return self.__substCache.get(key, {}).get(family)
 # family as expression - use it to filter values:
-return [v for f, v in self.__substCache.get(key, {}).iteritems() if family(f)]
+return [v for f, v in self.__substCache.get(key, {}).items() if family(f)]
 cmd = args[0]
 if cmd: # set:
 try:
@@ -435,7 +434,7 @@
 try:
 famd = self.__substCache[key]
 cmd = famd.pop(family)
-for family, v in famd.items():
+for family, v in list(famd.items()):
 if v == cmd:
 del famd[family]
 except KeyError: # pragma: no cover
@@ -451,7 +450,7 @@
 res = True
 err = 'Script error'
 if not family: # all started:
-family = [famoper for (famoper,v) in self.__started.iteritems() if v]
+family = [famoper for (famoper,v) in self.__started.items() if v]
 for famoper in family:
 try:
 cmd = self._getOperation(tag, famoper)
@@ -631,7 +630,7 @@
 and executes the resulting command.
 """
 # collect started families, may be started on demand (conditional):
-family = [f for (f,v) in self.__started.iteritems() if v & 3 == 3]; # started and contains items
+family = [f for (f,v) in self.__started.items() if v & 3 == 3]; # started and contains items
 # if nothing contains items:
 if not family: return True
 # flush:
@@ -656,7 +655,7 @@
 """
 # collect started families, if started on demand (conditional):
 if family is None:
-family = [f for (f,v) in self.__started.iteritems() if v]
+family = [f for (f,v) in self.__started.items() if v]
 # if no started (on demand) actions:
 if not family: return True
 self.__started = {}
@@ -690,7 +689,7 @@
 ret = True
 # for each started family:
 if self.actioncheck:
-for (family, started) in self.__started.items():
+for (family, started) in list(self.__started.items()):
 if started and not self._invariantCheck(family, beforeRepair):
 # reset started flag and command of executed operation:
 self.__started[family] = 0

View File

@@ -156,11 +156,11 @@ class Actions(JailThread, Mapping):
 else:
 if hasattr(self, '_reload_actions'):
 # reload actions after all parameters set via stream:
-for name, initOpts in self._reload_actions.iteritems():
+for name, initOpts in self._reload_actions.items():
 if name in self._actions:
 self._actions[name].reload(**(initOpts if initOpts else {}))
 # remove obsolete actions (untouched by reload process):
-delacts = OrderedDict((name, action) for name, action in self._actions.iteritems()
+delacts = OrderedDict((name, action) for name, action in self._actions.items()
 if name not in self._reload_actions)
 if len(delacts):
 # unban all tickets using removed actions only:
@@ -217,7 +217,7 @@ class Actions(JailThread, Mapping):
 return lst
 if len(ids) == 1:
 return 1 if ids[0] in lst else 0
-return map(lambda ip: 1 if ip in lst else 0, ids)
+return [1 if ip in lst else 0 for ip in ids]
 def getBanList(self, withTime=False):
 """Returns the list of banned IP addresses.
@@ -288,7 +288,7 @@ class Actions(JailThread, Mapping):
 if not isinstance(ip, IPAddr):
 ipa = IPAddr(ip)
 if not ipa.isSingle: # subnet (mask/cidr) or raw (may be dns/hostname):
-ips = filter(ipa.contains, self.banManager.getBanList())
+ips = list(filter(ipa.contains, self.banManager.getBanList()))
 if ips:
 return self.removeBannedIP(ips, db, ifexists)
 # not found:
@@ -305,7 +305,7 @@ class Actions(JailThread, Mapping):
 """
 if actions is None:
 actions = self._actions
-for name, action in reversed(actions.items()):
+for name, action in reversed(list(actions.items())):
 try:
 action.stop()
 except Exception as e:
@@ -328,7 +328,7 @@ class Actions(JailThread, Mapping):
 True when the thread exits nicely.
 """
 cnt = 0
-for name, action in self._actions.iteritems():
+for name, action in self._actions.items():
 try:
 action.start()
 except Exception as e:
@@ -505,7 +505,7 @@ class Actions(JailThread, Mapping):
 Observers.Main.add('banFound', bTicket, self._jail, btime)
 logSys.notice("[%s] %sBan %s", self._jail.name, ('' if not bTicket.restored else 'Restore '), ip)
 # do actions :
-for name, action in self._actions.iteritems():
+for name, action in self._actions.items():
 try:
 if bTicket.restored and getattr(action, 'norestored', False):
 continue
@@ -543,13 +543,13 @@ class Actions(JailThread, Mapping):
 # avoid too often checks:
 if not rebanacts and MyTime.time() > self.__lastConsistencyCheckTM + 3:
 self.__lastConsistencyCheckTM = MyTime.time()
-for action in self._actions.itervalues():
+for action in self._actions.values():
 if hasattr(action, 'consistencyCheck'):
 action.consistencyCheck()
 # check epoch in order to reban it:
 if bTicket.banEpoch < self.banEpoch:
 if not rebanacts: rebanacts = dict(
-(name, action) for name, action in self._actions.iteritems()
+(name, action) for name, action in self._actions.items()
 if action.banEpoch > bTicket.banEpoch)
 cnt += self.__reBan(bTicket, actions=rebanacts)
 else: # pragma: no cover - unexpected: ticket is not banned for some reasons - reban using all actions:
@@ -576,8 +576,8 @@ class Actions(JailThread, Mapping):
 ip = ticket.getID()
 aInfo = self._getActionInfo(ticket)
 if log:
-logSys.notice("[%s] Reban %s%s", self._jail.name, ip, (', action %r' % actions.keys()[0] if len(actions) == 1 else ''))
-for name, action in actions.iteritems():
+logSys.notice("[%s] Reban %s%s", self._jail.name, ip, (', action %r' % list(actions.keys())[0] if len(actions) == 1 else ''))
+for name, action in actions.items():
 try:
 logSys.debug("[%s] action %r: reban %s", self._jail.name, name, ip)
 if not aInfo.immutable: aInfo.reset()
@@ -601,7 +601,7 @@ class Actions(JailThread, Mapping):
 if not self.banManager._inBanList(ticket): return
 # do actions :
 aInfo = None
-for name, action in self._actions.iteritems():
+for name, action in self._actions.items():
 try:
 if ticket.restored and getattr(action, 'norestored', False):
 continue
@@ -650,7 +650,7 @@ class Actions(JailThread, Mapping):
 cnt = 0
 # first we'll execute flush for actions supporting this operation:
 unbactions = {}
-for name, action in (actions if actions is not None else self._actions).iteritems():
+for name, action in (actions if actions is not None else self._actions).items():
 try:
 if hasattr(action, 'flush') and (not isinstance(action, CommandAction) or action.actionflush):
 logSys.notice("[%s] Flush ticket(s) with %s", self._jail.name, name)
@@ -705,7 +705,7 @@ class Actions(JailThread, Mapping):
 aInfo = self._getActionInfo(ticket)
 if log:
 logSys.notice("[%s] Unban %s", self._jail.name, ip)
-for name, action in unbactions.iteritems():
+for name, action in unbactions.items():
 try:
 logSys.debug("[%s] action %r: unban %s", self._jail.name, name, ip)
 if not aInfo.immutable: aInfo.reset()

View File

@@ -178,7 +178,7 @@ def loop(active, timeout=None, use_poll=False, err_count=None):
 elif err_count['listen'] > 100: # pragma: no cover - normally unreachable
 if (
 e.args[0] == errno.EMFILE # [Errno 24] Too many open files
-or sum(err_count.itervalues()) > 1000
+or sum(err_count.values()) > 1000
 ):
 logSys.critical("Too many errors - critical count reached %r", err_count)
 break
@@ -220,7 +220,7 @@ class AsyncServer(asyncore.dispatcher):
 elif self.__errCount['accept'] > 100:
 if (
 (isinstance(e, socket.error) and e.args[0] == errno.EMFILE) # [Errno 24] Too many open files
-or sum(self.__errCount.itervalues()) > 1000
+or sum(self.__errCount.values()) > 1000
 ):
 logSys.critical("Too many errors - critical count reached %r", self.__errCount)
 self.stop()

View File

@@ -103,7 +103,7 @@ class BanManager:
 return list(self.__banList.keys())
 with self.__lock:
 lst = []
-for ticket in self.__banList.itervalues():
+for ticket in self.__banList.values():
 eob = ticket.getEndOfBanTime(self.__banTime)
 lst.append((ticket,eob))
 lst.sort(key=lambda t: t[1])
@@ -161,7 +161,7 @@ class BanManager:
 return return_dict
 # get ips in lock:
 with self.__lock:
-banIPs = [banData.getIP() for banData in self.__banList.values()]
+banIPs = [banData.getIP() for banData in list(self.__banList.values())]
 # get cymru info:
 try:
 for ip in banIPs:
@@ -333,7 +333,7 @@ class BanManager:
 # Gets the list of ticket to remove (thereby correct next unban time).
 unBanList = {}
 nextUnbanTime = BanTicket.MAX_TIME
-for fid,ticket in self.__banList.iteritems():
+for fid,ticket in self.__banList.items():
 # current time greater as end of ban - timed out:
 eob = ticket.getEndOfBanTime(self.__banTime)
 if time > eob:
@@ -349,15 +349,15 @@ class BanManager:
 if len(unBanList):
 if len(unBanList) / 2.0 <= len(self.__banList) / 3.0:
 # few as 2/3 should be removed - remove particular items:
-for fid in unBanList.iterkeys():
+for fid in unBanList.keys():
 del self.__banList[fid]
 else:
 # create new dictionary without items to be deleted:
-self.__banList = dict((fid,ticket) for fid,ticket in self.__banList.iteritems() \
+self.__banList = dict((fid,ticket) for fid,ticket in self.__banList.items() \
 if fid not in unBanList)
 # return list of tickets:
-return unBanList.values()
+return list(unBanList.values())
 ##
 # Flush the ban list.
@@ -367,7 +367,7 @@ class BanManager:
 def flushBanList(self):
 with self.__lock:
-uBList = self.__banList.values()
+uBList = list(self.__banList.values())
 self.__banList = dict()
 return uBList

View File

@@ -45,55 +45,24 @@ def _json_default(x):
 x = list(x)
 return uni_string(x)
-if sys.version_info >= (3,): # pragma: 2.x no cover
-def _json_dumps_safe(x):
-try:
-x = json.dumps(x, ensure_ascii=False, default=_json_default).encode(
-PREFER_ENC, 'replace')
-except Exception as e:
-# adapter handler should be exception-safe
-logSys.error('json dumps failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
-x = '{}'
-return x
-def _json_loads_safe(x):
-try:
-x = json.loads(x.decode(PREFER_ENC, 'replace'))
-except Exception as e:
-# converter handler should be exception-safe
-logSys.error('json loads failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
-x = {}
-return x
-else: # pragma: 3.x no cover
-def _normalize(x):
-if isinstance(x, dict):
-return dict((_normalize(k), _normalize(v)) for k, v in x.iteritems())
-elif isinstance(x, (list, set)):
-return [_normalize(element) for element in x]
-elif isinstance(x, unicode):
-# in 2.x default text_factory is unicode - so return proper unicode here:
-return x.encode(PREFER_ENC, 'replace').decode(PREFER_ENC)
-elif isinstance(x, basestring):
-return x.decode(PREFER_ENC, 'replace')
-return x
-def _json_dumps_safe(x):
-try:
-x = json.dumps(_normalize(x), ensure_ascii=False, default=_json_default)
-except Exception as e:
-# adapter handler should be exception-safe
-logSys.error('json dumps failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
-x = '{}'
-return x
-def _json_loads_safe(x):
-try:
-x = json.loads(x.decode(PREFER_ENC, 'replace'))
-except Exception as e:
-# converter handler should be exception-safe
-logSys.error('json loads failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
-x = {}
-return x
+def _json_dumps_safe(x):
+try:
+x = json.dumps(x, ensure_ascii=False, default=_json_default).encode(
+PREFER_ENC, 'replace')
+except Exception as e:
+# adapter handler should be exception-safe
+logSys.error('json dumps failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
+x = '{}'
+return x
+def _json_loads_safe(x):
+try:
+x = json.loads(x.decode(PREFER_ENC, 'replace'))
+except Exception as e:
+# converter handler should be exception-safe
+logSys.error('json loads failed: %r', e, exc_info=logSys.getEffectiveLevel() <= 4)
+x = {}
+return x
 sqlite3.register_adapter(dict, _json_dumps_safe)
 sqlite3.register_converter("JSON", _json_loads_safe)
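The `register_adapter`/`register_converter` pair at the bottom is what lets tickets be stored as JSON in SQLite; only the py2 `_normalize` shim goes away. A self-contained sketch of the same mechanism with plain `json` (UTF-8 assumed, error handling omitted, table name made up):

```python
import json, sqlite3

sqlite3.register_adapter(dict, lambda d: json.dumps(d, ensure_ascii=False).encode('utf-8'))
sqlite3.register_converter('JSON', lambda b: json.loads(b.decode('utf-8', 'replace')))

con = sqlite3.connect(':memory:', detect_types=sqlite3.PARSE_DECLTYPES)
con.execute('CREATE TABLE bans (data JSON)')
con.execute('INSERT INTO bans VALUES (?)', ({'ip': '192.0.2.1', 'failures': 3},))
print(con.execute('SELECT data FROM bans').fetchone()[0])   # comes back as a dict
```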

View File

@@ -55,7 +55,7 @@ class FailManager:
 def getFailCount(self):
 # may be slow on large list of failures, should be used for test purposes only...
 with self.__lock:
-return len(self.__failList), sum([f.getRetry() for f in self.__failList.values()])
+return len(self.__failList), sum([f.getRetry() for f in list(self.__failList.values())])
 def setMaxRetry(self, value):
 self.__maxRetry = value
@@ -116,7 +116,7 @@ class FailManager:
 # in case of having many active failures, it should be ran only
 # if debug level is "low" enough
 failures_summary = ', '.join(['%s:%d' % (k, v.getRetry())
-for k,v in self.__failList.iteritems()])
+for k,v in self.__failList.items()])
 logSys.log(logLevel, "Total # of detected failures: %d. Current failures from %d IPs (IP:count): %s"
 % (self.__failTotal, len(self.__failList), failures_summary))
@@ -129,7 +129,7 @@ class FailManager:
 def cleanup(self, time):
 time -= self.__maxTime
 with self.__lock:
-todelete = [fid for fid,item in self.__failList.iteritems() \
+todelete = [fid for fid,item in self.__failList.items() \
 if item.getTime() <= time]
 if len(todelete) == len(self.__failList):
 # remove all:
@@ -143,7 +143,7 @@ class FailManager:
 del self.__failList[fid]
 else:
 # create new dictionary without items to be deleted:
-self.__failList = dict((fid,item) for fid,item in self.__failList.iteritems() \
+self.__failList = dict((fid,item) for fid,item in self.__failList.items() \
 if item.getTime() > time)
 self.__bgSvc.service()

View File

@ -142,9 +142,7 @@ class Regex:
self._regex = regex self._regex = regex
self._altValues = [] self._altValues = []
self._tupleValues = [] self._tupleValues = []
for k in filter( for k in [k for k in self._regexObj.groupindex if len(k) > len(COMPLNAME_PRE[0])]:
lambda k: len(k) > len(COMPLNAME_PRE[0]), self._regexObj.groupindex
):
n = COMPLNAME_CRE.match(k) n = COMPLNAME_CRE.match(k)
if n: if n:
g, n = n.group(1), mapTag2Opt(n.group(2)) g, n = n.group(1), mapTag2Opt(n.group(2))
@ -234,7 +232,7 @@ class Regex:
# #
@staticmethod @staticmethod
def _tupleLinesBuf(tupleLines): def _tupleLinesBuf(tupleLines):
return "\n".join(map(lambda v: "".join(v[::2]), tupleLines)) + "\n" return "\n".join(["".join(v[::2]) for v in tupleLines]) + "\n"
## ##
# Searches the regular expression. # Searches the regular expression.
@ -246,7 +244,7 @@ class Regex:
def search(self, tupleLines, orgLines=None): def search(self, tupleLines, orgLines=None):
buf = tupleLines buf = tupleLines
if not isinstance(tupleLines, basestring): if not isinstance(tupleLines, str):
buf = Regex._tupleLinesBuf(tupleLines) buf = Regex._tupleLinesBuf(tupleLines)
self._matchCache = self._regexObj.search(buf) self._matchCache = self._regexObj.search(buf)
if self._matchCache: if self._matchCache:
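
Migration note: two more Python 3 idioms appear in this file. map() and filter() now return lazy iterators, so expressions that need a real list are rewritten as comprehensions, and basestring no longer exists, leaving str as the only text type. A rough equivalent with placeholder data (COMPLNAME_PRE and groupindex below are illustrative, not the real values):

    COMPLNAME_PRE = ('fid',)   # placeholder prefix table
    groupindex = {'fid': 1, 'fid_user': 2, 'ip4': 3}

    # Python 2 filter() returned a list; in Python 3 a comprehension is clearer
    long_names = [k for k in groupindex if len(k) > len(COMPLNAME_PRE[0])]

    # join the even-indexed parts of each tuple: comprehension instead of map/lambda
    tupleLines = [('', '2016-09-05 ', 'auth failure'), ('', '2016-09-05 ', 'auth failure')]
    buf = "\n".join(["".join(v[::2]) for v in tupleLines]) + "\n"

    # basestring -> str: bytes are no longer implicitly text
    print(isinstance(buf, str), isinstance(b"raw", str))   # True False
    print(long_names)                                      # ['fid_user']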


@ -307,7 +307,7 @@ class Filter(JailThread):
dd = DateDetector() dd = DateDetector()
dd.default_tz = self.__logtimezone dd.default_tz = self.__logtimezone
if not isinstance(pattern, (list, tuple)): if not isinstance(pattern, (list, tuple)):
pattern = filter(bool, map(str.strip, re.split('\n+', pattern))) pattern = list(filter(bool, list(map(str.strip, re.split('\n+', pattern)))))
for pattern in pattern: for pattern in pattern:
dd.appendTemplate(pattern) dd.appendTemplate(pattern)
self.dateDetector = dd self.dateDetector = dd
@ -800,7 +800,7 @@ class Filter(JailThread):
if (nfflgs & 4) == 0 and not mlfidGroups.get('mlfpending', 0): if (nfflgs & 4) == 0 and not mlfidGroups.get('mlfpending', 0):
mlfidGroups.pop("matches", None) mlfidGroups.pop("matches", None)
# overwrite multi-line failure with all values, available in fail: # overwrite multi-line failure with all values, available in fail:
mlfidGroups.update(((k,v) for k,v in fail.iteritems() if v is not None)) mlfidGroups.update(((k,v) for k,v in fail.items() if v is not None))
# new merged failure data: # new merged failure data:
fail = mlfidGroups fail = mlfidGroups
# if forget (disconnect/reset) - remove cached entry: # if forget (disconnect/reset) - remove cached entry:
@ -1045,7 +1045,7 @@ class FileFilter(Filter):
# @return log paths # @return log paths
def getLogPaths(self): def getLogPaths(self):
return self.__logs.keys() return list(self.__logs.keys())
## ##
# Get the log containers # Get the log containers
@ -1053,7 +1053,7 @@ class FileFilter(Filter):
# @return log containers # @return log containers
def getLogs(self): def getLogs(self):
return self.__logs.values() return list(self.__logs.values())
## ##
# Get the count of log containers # Get the count of log containers
@ -1079,7 +1079,7 @@ class FileFilter(Filter):
def setLogEncoding(self, encoding): def setLogEncoding(self, encoding):
encoding = super(FileFilter, self).setLogEncoding(encoding) encoding = super(FileFilter, self).setLogEncoding(encoding)
for log in self.__logs.itervalues(): for log in self.__logs.values():
log.setEncoding(encoding) log.setEncoding(encoding)
def getLog(self, path): def getLog(self, path):
@ -1255,7 +1255,7 @@ class FileFilter(Filter):
"""Status of Filter plus files being monitored. """Status of Filter plus files being monitored.
""" """
ret = super(FileFilter, self).status(flavor=flavor) ret = super(FileFilter, self).status(flavor=flavor)
path = self.__logs.keys() path = list(self.__logs.keys())
ret.append(("File list", path)) ret.append(("File list", path))
return ret return ret
@ -1277,7 +1277,7 @@ class FileFilter(Filter):
if self._pendDBUpdates and self.jail.database: if self._pendDBUpdates and self.jail.database:
self._updateDBPending() self._updateDBPending()
# stop files monitoring: # stop files monitoring:
for path in self.__logs.keys(): for path in list(self.__logs.keys()):
self.delLogPath(path) self.delLogPath(path)
def stop(self): def stop(self):
@ -1530,7 +1530,7 @@ class FileContainer:
def __iter__(self): def __iter__(self):
return self return self
def next(self): def __next__(self):
line = self.readline() line = self.readline()
if line is None: if line is None:
self.close() self.close()
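
Migration note: the FileContainer change at the end of this hunk is the Python 3 iterator protocol; the magic method is now __next__, and call sites use the next() builtin (the Cymru test further down turns iter(...).next() into next(iter(...)) for the same reason). A minimal sketch of the same shape:

    class LineReader:
        """Toy stand-in for FileContainer: yields stored lines until exhausted."""
        def __init__(self, lines):
            self._lines = list(lines)

        def __iter__(self):
            return self

        def __next__(self):          # was `def next(self)` under Python 2
            if not self._lines:
                raise StopIteration  # end of iteration
            return self._lines.pop(0)

    r = LineReader(["Aug 14 11:59:59 [sshd] failure", "Aug 14 12:00:01 [sshd] failure"])
    print(next(r))       # the next() builtin works on both major versions
    for line in r:       # for-loops call __next__ under the hood
        print(line)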


@ -1,136 +0,0 @@
# emacs: -*- mode: python; py-indent-offset: 4; indent-tabs-mode: t -*-
# vi: set ft=python sts=4 ts=4 sw=4 noet :
# This file is part of Fail2Ban.
#
# Fail2Ban is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# Fail2Ban is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Fail2Ban; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
# Author: Cyril Jaquier, Yaroslav Halchenko
__author__ = "Cyril Jaquier, Yaroslav Halchenko"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2012 Yaroslav Halchenko"
__license__ = "GPL"
import fcntl
import time
import gamin
from .failmanager import FailManagerEmpty
from .filter import FileFilter
from .mytime import MyTime
from .utils import Utils
from ..helpers import getLogger
# Gets the instance of the logger.
logSys = getLogger(__name__)
##
# Log reader class.
#
# This class reads a log file and detects login failures or anything else
# that matches a given regular expression. This class is instanciated by
# a Jail object.
class FilterGamin(FileFilter):
##
# Constructor.
#
# Initialize the filter object with default values.
# @param jail the jail object
def __init__(self, jail):
FileFilter.__init__(self, jail)
# Gamin monitor
self.monitor = gamin.WatchMonitor()
fd = self.monitor.get_fd()
flags = fcntl.fcntl(fd, fcntl.F_GETFD)
fcntl.fcntl(fd, fcntl.F_SETFD, flags|fcntl.FD_CLOEXEC)
logSys.debug("Created FilterGamin")
def callback(self, path, event):
logSys.log(4, "Got event: " + repr(event) + " for " + path)
if event in (gamin.GAMCreated, gamin.GAMChanged, gamin.GAMExists):
logSys.debug("File changed: " + path)
self.ticks += 1
self.getFailures(path)
##
# Add a log file path
#
# @param path log file path
def _addLogPath(self, path):
self.monitor.watch_file(path, self.callback)
##
# Delete a log path
#
# @param path the log file to delete
def _delLogPath(self, path):
self.monitor.stop_watch(path)
def _handleEvents(self):
ret = False
mon = self.monitor
while mon and mon.event_pending() > 0:
mon.handle_events()
mon = self.monitor
ret = True
return ret
##
# Main loop.
#
# This function is the main loop of the thread. It checks if the
# file has been modified and looks for failures.
# @return True when the thread exits nicely
def run(self):
# Gamin needs a loop to collect and dispatch events
while self.active:
if self.idle:
# wait a little bit here for not idle, to prevent hi-load:
if not Utils.wait_for(lambda: not self.active or not self.idle,
self.sleeptime * 10, self.sleeptime
):
self.ticks += 1
continue
Utils.wait_for(lambda: not self.active or self._handleEvents(),
self.sleeptime)
self.ticks += 1
if self.ticks % 10 == 0:
self.performSvc()
logSys.debug("[%s] filter terminated", self.jailName)
return True
def stop(self):
super(FilterGamin, self).stop()
self.__cleanup()
##
# Desallocates the resources used by Gamin.
def __cleanup(self):
if not self.monitor:
return
for filename in self.getLogPaths():
self.monitor.stop_watch(filename)
self.monitor = None


@ -173,4 +173,4 @@ class FilterPoll(FileFilter):
return False return False
def getPendingPaths(self): def getPendingPaths(self):
return self.__file404Cnt.keys() return list(self.__file404Cnt.keys())


@ -155,7 +155,7 @@ class FilterPyinotify(FileFilter):
except KeyError: pass except KeyError: pass
def getPendingPaths(self): def getPendingPaths(self):
return self.__pending.keys() return list(self.__pending.keys())
def _checkPending(self): def _checkPending(self):
if not self.__pending: if not self.__pending:
@ -181,7 +181,7 @@ class FilterPyinotify(FileFilter):
self.__pendingChkTime = time.time() self.__pendingChkTime = time.time()
self.__pendingMinTime = minTime self.__pendingMinTime = minTime
# process now because we've missed it in monitoring: # process now because we've missed it in monitoring:
for path, isDir in found.iteritems(): for path, isDir in found.items():
self._delPending(path) self._delPending(path)
# refresh monitoring of this: # refresh monitoring of this:
if isDir is not None: if isDir is not None:


@ -253,7 +253,7 @@ class FilterSystemd(JournalFilter): # pragma: systemd no cover
return ((logline[:0], date[0] + ' ', logline.replace('\n', '\\n')), date[1]) return ((logline[:0], date[0] + ' ', logline.replace('\n', '\\n')), date[1])
def seekToTime(self, date): def seekToTime(self, date):
if isinstance(date, (int, long)): if isinstance(date, int):
date = float(date) date = float(date)
self.__journal.seek_realtime(date) self.__journal.seek_realtime(date)


@ -370,7 +370,7 @@ class IPAddr(object):
s[1] = IPAddr.masktoplen(s[2]) s[1] = IPAddr.masktoplen(s[2])
del s[2] del s[2]
try: try:
s[1] = long(s[1]) s[1] = int(s[1])
except ValueError: except ValueError:
return ipstr, IPAddr.CIDR_UNSPEC return ipstr, IPAddr.CIDR_UNSPEC
return s return s
@ -406,7 +406,7 @@ class IPAddr(object):
# mask out host portion if prefix length is supplied # mask out host portion if prefix length is supplied
if cidr is not None and cidr >= 0: if cidr is not None and cidr >= 0:
mask = ~(0xFFFFFFFFL >> cidr) mask = ~(0xFFFFFFFF >> cidr)
self._addr &= mask self._addr &= mask
self._plen = cidr self._plen = cidr
@ -418,13 +418,13 @@ class IPAddr(object):
# mask out host portion if prefix length is supplied # mask out host portion if prefix length is supplied
if cidr is not None and cidr >= 0: if cidr is not None and cidr >= 0:
mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFL >> cidr) mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF >> cidr)
self._addr &= mask self._addr &= mask
self._plen = cidr self._plen = cidr
# if IPv6 address is a IPv4-compatible, make instance a IPv4 # if IPv6 address is a IPv4-compatible, make instance a IPv4
elif self.isInNet(IPAddr.IP6_4COMPAT): elif self.isInNet(IPAddr.IP6_4COMPAT):
self._addr = lo & 0xFFFFFFFFL self._addr = lo & 0xFFFFFFFF
self._family = socket.AF_INET self._family = socket.AF_INET
self._plen = 32 self._plen = 32
else: else:
@ -434,7 +434,7 @@ class IPAddr(object):
return repr(self.ntoa) return repr(self.ntoa)
def __str__(self): def __str__(self):
return self.ntoa if isinstance(self.ntoa, basestring) else str(self.ntoa) return self.ntoa if isinstance(self.ntoa, str) else str(self.ntoa)
def __reduce__(self): def __reduce__(self):
"""IPAddr pickle-handler, that simply wraps IPAddr to the str """IPAddr pickle-handler, that simply wraps IPAddr to the str
@ -548,7 +548,7 @@ class IPAddr(object):
elif self.isIPv6: elif self.isIPv6:
# convert network to host byte order # convert network to host byte order
hi = self._addr >> 64 hi = self._addr >> 64
lo = self._addr & 0xFFFFFFFFFFFFFFFFL lo = self._addr & 0xFFFFFFFFFFFFFFFF
binary = struct.pack("!QQ", hi, lo) binary = struct.pack("!QQ", hi, lo)
if self._plen and self._plen < 128: if self._plen and self._plen < 128:
add = "/%d" % self._plen add = "/%d" % self._plen
@ -606,9 +606,9 @@ class IPAddr(object):
if self.family != net.family: if self.family != net.family:
return False return False
if self.isIPv4: if self.isIPv4:
mask = ~(0xFFFFFFFFL >> net.plen) mask = ~(0xFFFFFFFF >> net.plen)
elif self.isIPv6: elif self.isIPv6:
mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFL >> net.plen) mask = ~(0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF >> net.plen)
else: else:
return False return False
@ -628,7 +628,7 @@ class IPAddr(object):
m4 = (1 << 32)-1 m4 = (1 << 32)-1
mmap = {m6: 128, m4: 32, 0: 0} mmap = {m6: 128, m4: 32, 0: 0}
m = 0 m = 0
for i in xrange(0, 128): for i in range(0, 128):
m |= 1 << i m |= 1 << i
if i < 32: if i < 32:
mmap[m ^ m4] = 32-1-i mmap[m ^ m4] = 32-1-i
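
Migration note: every change in this file comes from Python 3's single integer type: long() and the L literal suffix (0xFFFFFFFFL) are gone, int is arbitrary precision, and xrange is renamed range. The mask arithmetic itself is untouched; ~(0xFFFFFFFF >> cidr) yields a negative int, and &-ing with it still clears the host bits because Python ints act like two's complement of unbounded width. A hedged sketch of the prefix masking, using a non-negative mask for readability:

    def prefix_mask(plen, bits):
        """Network mask for prefix length `plen` in an address of `bits` width."""
        full = (1 << bits) - 1           # 0xFFFFFFFF for IPv4, 128 one-bits for IPv6
        return full ^ (full >> plen)     # same value as ~(full >> plen) truncated to `bits`

    addr4 = 0xC0000201                   # 192.0.2.1
    print(hex(addr4 & prefix_mask(24, 32)))    # 0xc0000200 -> 192.0.2.0/24

    # Python 3 ints never overflow, so identical code covers IPv6
    addr6 = 0x2001_0DB8_0000_0000_0000_0000_0000_0001
    print(hex(addr6 & prefix_mask(64, 128)))   # 0x20010db8000000000000000000000000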


@ -26,7 +26,7 @@ __license__ = "GPL"
import logging import logging
import math import math
import random import random
import Queue import queue
from .actions import Actions from .actions import Actions
from ..helpers import getLogger, _as_bool, extractOptions, MyTime from ..helpers import getLogger, _as_bool, extractOptions, MyTime
@ -66,7 +66,7 @@ class Jail(object):
#Known backends. Each backend should have corresponding __initBackend method #Known backends. Each backend should have corresponding __initBackend method
# yoh: stored in a list instead of a tuple since only # yoh: stored in a list instead of a tuple since only
# list had .index until 2.6 # list had .index until 2.6
_BACKENDS = ['pyinotify', 'gamin', 'polling', 'systemd'] _BACKENDS = ['pyinotify', 'polling', 'systemd']
def __init__(self, name, backend = "auto", db=None): def __init__(self, name, backend = "auto", db=None):
self.__db = db self.__db = db
@ -76,7 +76,7 @@ class Jail(object):
"might not function correctly. Please shorten" "might not function correctly. Please shorten"
% name) % name)
self.__name = name self.__name = name
self.__queue = Queue.Queue() self.__queue = queue.Queue()
self.__filter = None self.__filter = None
# Extra parameters for increase ban time # Extra parameters for increase ban time
self._banExtra = {}; self._banExtra = {};
@ -127,25 +127,19 @@ class Jail(object):
"Failed to initialize any backend for Jail %r" % self.name) "Failed to initialize any backend for Jail %r" % self.name)
def _initPolling(self, **kwargs): def _initPolling(self, **kwargs):
from filterpoll import FilterPoll from .filterpoll import FilterPoll
logSys.info("Jail '%s' uses poller %r" % (self.name, kwargs)) logSys.info("Jail '%s' uses poller %r" % (self.name, kwargs))
self.__filter = FilterPoll(self, **kwargs) self.__filter = FilterPoll(self, **kwargs)
def _initGamin(self, **kwargs):
# Try to import gamin
from filtergamin import FilterGamin
logSys.info("Jail '%s' uses Gamin %r" % (self.name, kwargs))
self.__filter = FilterGamin(self, **kwargs)
def _initPyinotify(self, **kwargs): def _initPyinotify(self, **kwargs):
# Try to import pyinotify # Try to import pyinotify
from filterpyinotify import FilterPyinotify from .filterpyinotify import FilterPyinotify
logSys.info("Jail '%s' uses pyinotify %r" % (self.name, kwargs)) logSys.info("Jail '%s' uses pyinotify %r" % (self.name, kwargs))
self.__filter = FilterPyinotify(self, **kwargs) self.__filter = FilterPyinotify(self, **kwargs)
def _initSystemd(self, **kwargs): # pragma: systemd no cover def _initSystemd(self, **kwargs): # pragma: systemd no cover
# Try to import systemd # Try to import systemd
from filtersystemd import FilterSystemd from .filtersystemd import FilterSystemd
logSys.info("Jail '%s' uses systemd %r" % (self.name, kwargs)) logSys.info("Jail '%s' uses systemd %r" % (self.name, kwargs))
self.__filter = FilterSystemd(self, **kwargs) self.__filter = FilterSystemd(self, **kwargs)
@ -219,7 +213,7 @@ class Jail(object):
try: try:
ticket = self.__queue.get(False) ticket = self.__queue.get(False)
return ticket return ticket
except Queue.Empty: except queue.Empty:
return False return False
def setBanTimeExtra(self, opt, value): def setBanTimeExtra(self, opt, value):
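
Migration note: two Python 3 changes meet in jail.py. The standard-library module Queue was renamed queue (with queue.Empty for the non-blocking case), and implicit relative imports are gone, so sibling modules inside the package must be imported as "from .filterpoll import FilterPoll" and so on. A standalone sketch of the queue part, mirroring the getFailTicket() hunk above:

    import queue                 # Python 2 spelled this "import Queue"

    tickets = queue.Queue()
    tickets.put({'ip': '192.0.2.1', 'failures': 3})

    def get_next_ticket(q):
        """Non-blocking get: return the next ticket, or False if the queue is empty."""
        try:
            return q.get(False)
        except queue.Empty:      # was Queue.Empty
            return False

    print(get_next_ticket(tickets))   # the ticket dict
    print(get_next_ticket(tickets))   # False, queue drained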


@ -78,14 +78,9 @@ class JailThread(Thread):
print(e) print(e)
self.run = run_with_except_hook self.run = run_with_except_hook
if sys.version_info >= (3,): # pragma: 2.x no cover def _bootstrap(self):
def _bootstrap(self): prctl_set_th_name(self.name)
prctl_set_th_name(self.name) return super(JailThread, self)._bootstrap();
return super(JailThread, self)._bootstrap();
else: # pragma: 3.x no cover
def __bootstrap(self):
prctl_set_th_name(self.name)
return Thread._Thread__bootstrap(self)
@abstractmethod @abstractmethod
def status(self, flavor="basic"): # pragma: no cover - abstract def status(self, flavor="basic"): # pragma: no cover - abstract
@ -125,9 +120,6 @@ class JailThread(Thread):
if self.active is not None: if self.active is not None:
super(JailThread, self).join() super(JailThread, self).join()
## python 2.x replace binding of private __bootstrap method:
if sys.version_info < (3,): # pragma: 3.x no cover
JailThread._Thread__bootstrap = JailThread._JailThread__bootstrap
## python 3.9, restore isAlive method: ## python 3.9, restore isAlive method:
elif not hasattr(JailThread, 'isAlive'): # pragma: 2.x no cover if not hasattr(JailThread, 'isAlive'):
JailThread.isAlive = JailThread.is_alive JailThread.isAlive = JailThread.is_alive
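
Migration note: only the Python 3 branch survives here. _bootstrap is a private CPython Thread hook that runs in the new thread just before run(), which is why JailThread overrides it to set the kernel-visible thread name; the removed __bootstrap rebinding was the Python 2 spelling of the same trick. isAlive() itself was removed in Python 3.9, hence the shim that re-adds it as an alias of is_alive. A hedged sketch (prctl_set_th_name is a placeholder for fail2ban's helper, and overriding a private method is CPython-specific):

    import threading

    def prctl_set_th_name(name):
        # placeholder: the real helper calls prctl(PR_SET_NAME); print keeps this portable
        print("thread name ->", name)

    class NamedThread(threading.Thread):
        def _bootstrap(self):                # private CPython hook, not a public API
            prctl_set_th_name(self.name)
            return super(NamedThread, self)._bootstrap()

        def run(self):
            print("running", self.name)

    t = NamedThread(name="f2b/jail-sshd")
    t.start()
    t.join()
    print(t.is_alive())                      # use is_alive(); isAlive() is gone since 3.9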


@ -165,7 +165,7 @@ class MyTime:
@returns number (calculated seconds from expression "val") @returns number (calculated seconds from expression "val")
""" """
if isinstance(val, (int, long, float, complex)): if isinstance(val, (int, float, complex)):
return val return val
# replace together standing abbreviations, example '1d12h' -> '1d 12h': # replace together standing abbreviations, example '1d12h' -> '1d 12h':
val = MyTime._str2sec_prep.sub(r" \1", val) val = MyTime._str2sec_prep.sub(r" \1", val)


@ -58,11 +58,6 @@ except ImportError: # pragma: no cover
def _thread_name(): def _thread_name():
return threading.current_thread().__class__.__name__ return threading.current_thread().__class__.__name__
try:
FileExistsError
except NameError: # pragma: 3.x no cover
FileExistsError = OSError
def _make_file_path(name): def _make_file_path(name):
"""Creates path of file (last level only) on demand""" """Creates path of file (last level only) on demand"""
name = os.path.dirname(name) name = os.path.dirname(name)
@ -209,7 +204,7 @@ class Server:
# Restore default signal handlers: # Restore default signal handlers:
if _thread_name() == '_MainThread': if _thread_name() == '_MainThread':
for s, sh in self.__prev_signals.iteritems(): for s, sh in self.__prev_signals.items():
signal.signal(s, sh) signal.signal(s, sh)
# Give observer a small chance to complete its work before exit # Give observer a small chance to complete its work before exit
@ -287,10 +282,10 @@ class Server:
logSys.info("Stopping all jails") logSys.info("Stopping all jails")
with self.__lock: with self.__lock:
# 1st stop all jails (signal and stop actions/filter thread): # 1st stop all jails (signal and stop actions/filter thread):
for name in self.__jails.keys(): for name in list(self.__jails.keys()):
self.delJail(name, stop=True, join=False) self.delJail(name, stop=True, join=False)
# 2nd wait for end and delete jails: # 2nd wait for end and delete jails:
for name in self.__jails.keys(): for name in list(self.__jails.keys()):
self.delJail(name, stop=False, join=True) self.delJail(name, stop=False, join=True)
def clearCaches(self): def clearCaches(self):
@ -328,7 +323,7 @@ class Server:
if "--restart" in opts: if "--restart" in opts:
self.stopAllJail() self.stopAllJail()
# first set all affected jail(s) to idle and reset filter regex and other lists/dicts: # first set all affected jail(s) to idle and reset filter regex and other lists/dicts:
for jn, jail in self.__jails.iteritems(): for jn, jail in self.__jails.items():
if name == '--all' or jn == name: if name == '--all' or jn == name:
jail.idle = True jail.idle = True
self.__reload_state[jn] = jail self.__reload_state[jn] = jail
@ -339,7 +334,7 @@ class Server:
# end reload, all affected (or new) jails have already all new parameters (via stream) and (re)started: # end reload, all affected (or new) jails have already all new parameters (via stream) and (re)started:
with self.__lock: with self.__lock:
deljails = [] deljails = []
for jn, jail in self.__jails.iteritems(): for jn, jail in self.__jails.items():
# still in reload state: # still in reload state:
if jn in self.__reload_state: if jn in self.__reload_state:
# remove jails that are not reloaded (untouched, so not in new configuration) # remove jails that are not reloaded (untouched, so not in new configuration)
@ -539,7 +534,7 @@ class Server:
jails = [self.__jails[name]] jails = [self.__jails[name]]
else: else:
# in all jails: # in all jails:
jails = self.__jails.values() jails = list(self.__jails.values())
# unban given or all (if value is None): # unban given or all (if value is None):
cnt = 0 cnt = 0
ifexists |= (name is None) ifexists |= (name is None)
@ -553,7 +548,7 @@ class Server:
jails = [self.__jails[name]] jails = [self.__jails[name]]
else: else:
# in all jails: # in all jails:
jails = self.__jails.values() jails = list(self.__jails.values())
# check banned ids: # check banned ids:
res = [] res = []
if name is None and ids: if name is None and ids:
@ -603,7 +598,7 @@ class Server:
def isAlive(self, jailnum=None): def isAlive(self, jailnum=None):
if jailnum is not None and len(self.__jails) != jailnum: if jailnum is not None and len(self.__jails) != jailnum:
return 0 return 0
for jail in self.__jails.values(): for jail in list(self.__jails.values()):
if not jail.isAlive(): if not jail.isAlive():
return 0 return 0
return 1 return 1
@ -818,7 +813,7 @@ class Server:
return DNSUtils.setIPv6IsAllowed(value) return DNSUtils.setIPv6IsAllowed(value)
def setThreadOptions(self, value): def setThreadOptions(self, value):
for o, v in value.iteritems(): for o, v in value.items():
if o == 'stacksize': if o == 'stacksize':
threading.stack_size(int(v)*1024) threading.stack_size(int(v)*1024)
else: # pragma: no cover else: # pragma: no cover
@ -942,7 +937,7 @@ class Server:
maxfd = os.sysconf("SC_OPEN_MAX") maxfd = os.sysconf("SC_OPEN_MAX")
except (AttributeError, ValueError): except (AttributeError, ValueError):
maxfd = 256 # default maximum maxfd = 256 # default maximum
fdlist = xrange(maxfd+1) fdlist = range(maxfd+1)
# urandom should not be closed in Python 3.4.0. Fixed in 3.4.1 # urandom should not be closed in Python 3.4.0. Fixed in 3.4.1
# http://bugs.python.org/issue21207 # http://bugs.python.org/issue21207
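
Migration note: the deleted try/except at the top of this file was a Python 2 shim; FileExistsError has been a builtin OSError subclass since Python 3.3, so it can be used directly. A minimal sketch of the create-on-demand pattern that _make_file_path implements (the path below is illustrative only):

    import os

    def make_file_path(name):
        """Create the directory part of `name` on demand, tolerating races."""
        d = os.path.dirname(name)
        if d and not os.path.exists(d):
            try:
                os.makedirs(d)
            except FileExistsError:   # on Python 2 this had to be caught as OSError
                pass                  # someone else created it first, which is fine

    make_file_path('/tmp/f2b-example/sock/fail2ban.sock')
    print(os.path.isdir('/tmp/f2b-example/sock'))   # True

With Python 2 out of the picture, os.makedirs(d, exist_ok=True) would be an even shorter spelling of the same thing.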


@ -99,7 +99,7 @@ def _updateTimeRE():
if len(exprset) > 1 else "".join(exprset) if len(exprset) > 1 else "".join(exprset)
exprset = set( cent(now[0].year + i) for i in (-1, distance) ) exprset = set( cent(now[0].year + i) for i in (-1, distance) )
if len(now) > 1 and now[1]: if len(now) > 1 and now[1]:
exprset |= set( cent(now[1].year + i) for i in xrange(-1, now[0].year-now[1].year+1, distance) ) exprset |= set( cent(now[1].year + i) for i in range(-1, now[0].year-now[1].year+1, distance) )
return grp(sorted(list(exprset))) return grp(sorted(list(exprset)))
# more precise year patterns, within same century of last year and # more precise year patterns, within same century of last year and
@ -116,7 +116,7 @@ def _updateTimeRE():
_updateTimeRE() _updateTimeRE()
def getTimePatternRE(): def getTimePatternRE():
keys = timeRE.keys() keys = list(timeRE.keys())
patt = (r"%%(%%|%s|[%s])" % ( patt = (r"%%(%%|%s|[%s])" % (
"|".join([k for k in keys if len(k) > 1]), "|".join([k for k in keys if len(k) > 1]),
"".join([k for k in keys if len(k) == 1]), "".join([k for k in keys if len(k) == 1]),
@ -171,7 +171,7 @@ def zone2offset(tz, dt):
""" """
if isinstance(tz, int): if isinstance(tz, int):
return tz return tz
if isinstance(tz, basestring): if isinstance(tz, str):
return validateTimeZone(tz) return validateTimeZone(tz)
tz, tzo = tz tz, tzo = tz
if tzo is None or tzo == '': # without offset if tzo is None or tzo == '': # without offset
@ -208,7 +208,7 @@ def reGroupDictStrptime(found_dict, msec=False, default_tz=None):
year = month = day = tzoffset = \ year = month = day = tzoffset = \
weekday = julian = week_of_year = None weekday = julian = week_of_year = None
hour = minute = second = fraction = 0 hour = minute = second = fraction = 0
for key, val in found_dict.iteritems(): for key, val in found_dict.items():
if val is None: continue if val is None: continue
# Directives not explicitly handled below: # Directives not explicitly handled below:
# c, x, X # c, x, X


@ -55,7 +55,7 @@ class Ticket(object):
self._time = time if time is not None else MyTime.time() self._time = time if time is not None else MyTime.time()
self._data = {'matches': matches or [], 'failures': 0} self._data = {'matches': matches or [], 'failures': 0}
if data is not None: if data is not None:
for k,v in data.iteritems(): for k,v in data.items():
if v is not None: if v is not None:
self._data[k] = v self._data[k] = v
if ticket: if ticket:
@ -88,7 +88,7 @@ class Ticket(object):
def setID(self, value): def setID(self, value):
# guarantee using IPAddr instead of unicode, str for the IP # guarantee using IPAddr instead of unicode, str for the IP
if isinstance(value, basestring): if isinstance(value, str):
value = IPAddr(value) value = IPAddr(value)
self._id = value self._id = value
@ -180,7 +180,7 @@ class Ticket(object):
if len(args) == 1: if len(args) == 1:
# todo: if support >= 2.7 only: # todo: if support >= 2.7 only:
# self._data = {k:v for k,v in args[0].iteritems() if v is not None} # self._data = {k:v for k,v in args[0].iteritems() if v is not None}
self._data = dict([(k,v) for k,v in args[0].iteritems() if v is not None]) self._data = dict([(k,v) for k,v in args[0].items() if v is not None])
# add k,v list or dict (merge): # add k,v list or dict (merge):
elif len(args) == 2: elif len(args) == 2:
self._data.update((args,)) self._data.update((args,))
@ -191,7 +191,7 @@ class Ticket(object):
# filter (delete) None values: # filter (delete) None values:
# todo: if support >= 2.7 only: # todo: if support >= 2.7 only:
# self._data = {k:v for k,v in self._data.iteritems() if v is not None} # self._data = {k:v for k,v in self._data.iteritems() if v is not None}
self._data = dict([(k,v) for k,v in self._data.iteritems() if v is not None]) self._data = dict([(k,v) for k,v in self._data.items() if v is not None])
def getData(self, key=None, default=None): def getData(self, key=None, default=None):
# return whole data dict: # return whole data dict:
@ -200,17 +200,17 @@ class Ticket(object):
# return default if not exists: # return default if not exists:
if not self._data: if not self._data:
return default return default
if not isinstance(key,(str,unicode,type(None),int,float,bool,complex)): if not isinstance(key,(str,type(None),int,float,bool,complex)):
# return filtered by lambda/function: # return filtered by lambda/function:
if callable(key): if callable(key):
# todo: if support >= 2.7 only: # todo: if support >= 2.7 only:
# return {k:v for k,v in self._data.iteritems() if key(k)} # return {k:v for k,v in self._data.iteritems() if key(k)}
return dict([(k,v) for k,v in self._data.iteritems() if key(k)]) return dict([(k,v) for k,v in self._data.items() if key(k)])
# return filtered by keys: # return filtered by keys:
if hasattr(key, '__iter__'): if hasattr(key, '__iter__'):
# todo: if support >= 2.7 only: # todo: if support >= 2.7 only:
# return {k:v for k,v in self._data.iteritems() if k in key} # return {k:v for k,v in self._data.iteritems() if k in key}
return dict([(k,v) for k,v in self._data.iteritems() if k in key]) return dict([(k,v) for k,v in self._data.items() if k in key])
# return single value of data: # return single value of data:
return self._data.get(key, default) return self._data.get(key, default)
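
Migration note: the "todo: if support >= 2.7 only" comments in this file are now moot; with the migration done, the dict(...) constructions could become plain dict comprehensions. The rewritten lines are equivalent to:

    data = {'matches': [], 'failures': 3, 'user': 'root', 'ip-rev': None}

    # drop None values, as setData()/delData() do above
    clean = {k: v for k, v in data.items() if v is not None}

    # filter by a callable or by an iterable of keys, as getData() does above
    by_call = {k: v for k, v in clean.items() if k.startswith('user')}
    by_keys = {k: v for k, v in clean.items() if k in ('failures', 'matches')}

    print(clean)     # {'matches': [], 'failures': 3, 'user': 'root'}
    print(by_call)   # {'user': 'root'}
    print(by_keys)   # {'matches': [], 'failures': 3}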


@ -488,7 +488,7 @@ class Transmitter:
opt = command[1][len("bantime."):] opt = command[1][len("bantime."):]
return self.__server.getBanTimeExtra(name, opt) return self.__server.getBanTimeExtra(name, opt)
elif command[1] == "actions": elif command[1] == "actions":
return self.__server.getActions(name).keys() return list(self.__server.getActions(name).keys())
elif command[1] == "action": elif command[1] == "action":
actionname = command[2] actionname = command[2]
actionvalue = command[3] actionvalue = command[3]


@ -53,7 +53,7 @@ _RETCODE_HINTS = {
# Dictionary to lookup signal name from number # Dictionary to lookup signal name from number
signame = dict((num, name) signame = dict((num, name)
for name, num in signal.__dict__.iteritems() if name.startswith("SIG")) for name, num in signal.__dict__.items() if name.startswith("SIG"))
class Utils(): class Utils():
"""Utilities provide diverse static methods like executes OS shell commands, etc. """Utilities provide diverse static methods like executes OS shell commands, etc.
@ -140,7 +140,7 @@ class Utils():
if not isinstance(realCmd, list): if not isinstance(realCmd, list):
realCmd = [realCmd] realCmd = [realCmd]
i = len(realCmd)-1 i = len(realCmd)-1
for k, v in varsDict.iteritems(): for k, v in varsDict.items():
varsStat += "%s=$%s " % (k, i) varsStat += "%s=$%s " % (k, i)
realCmd.append(v) realCmd.append(v)
i += 1 i += 1


@ -242,14 +242,14 @@ class CommandActionTest(LogCaptureTestCase):
setattr(self.__action, 'ab', "<ac>") setattr(self.__action, 'ab', "<ac>")
setattr(self.__action, 'x?family=inet6', "") setattr(self.__action, 'x?family=inet6', "")
# produce self-referencing properties except: # produce self-referencing properties except:
self.assertRaisesRegexp(ValueError, r"properties contain self referencing definitions", self.assertRaisesRegex(ValueError, r"properties contain self referencing definitions",
lambda: self.__action.replaceTag("<a><b>", lambda: self.__action.replaceTag("<a><b>",
self.__action._properties, conditional="family=inet4") self.__action._properties, conditional="family=inet4")
) )
# remore self-referencing in props: # remore self-referencing in props:
delattr(self.__action, 'ac') delattr(self.__action, 'ac')
# produce self-referencing query except: # produce self-referencing query except:
self.assertRaisesRegexp(ValueError, r"possible self referencing definitions in query", self.assertRaisesRegex(ValueError, r"possible self referencing definitions in query",
lambda: self.__action.replaceTag("<x"*30+">"*30, lambda: self.__action.replaceTag("<x"*30+">"*30,
self.__action._properties, conditional="family=inet6") self.__action._properties, conditional="family=inet6")
) )
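
Migration note: assertRaisesRegexp was renamed assertRaisesRegex in Python 3.2, and the deprecated old spelling is removed in Python 3.12, so the rename is needed for current interpreters. Usage is unchanged; a tiny self-contained case (the raising function is a stand-in for CommandAction.replaceTag blowing up on a self-referencing tag):

    import unittest

    class ReplaceTagTest(unittest.TestCase):
        def test_self_reference_detected(self):
            def explode():
                raise ValueError("properties contain self referencing definitions")
            self.assertRaisesRegex(ValueError, r"self referencing", explode)

    unittest.main(argv=['example'], exit=False)   # run the case in-process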


@ -177,7 +177,7 @@ class StatusExtendedCymruInfo(unittest.TestCase):
super(StatusExtendedCymruInfo, self).setUp() super(StatusExtendedCymruInfo, self).setUp()
unittest.F2B.SkipIfNoNetwork() unittest.F2B.SkipIfNoNetwork()
setUpMyTime() setUpMyTime()
self.__ban_ip = iter(DNSUtils.dnsToIp("resolver1.opendns.com")).next() self.__ban_ip = next(iter(DNSUtils.dnsToIp("resolver1.opendns.com")))
self.__asn = "36692" self.__asn = "36692"
self.__country = "US" self.__country = "US"
self.__rir = "arin" self.__rir = "arin"


@ -419,7 +419,7 @@ class JailReaderTest(LogCaptureTestCase):
# And multiple groups (`][` instead of `,`) # And multiple groups (`][` instead of `,`)
result = extractOptions(option.replace(',', '][')) result = extractOptions(option.replace(',', ']['))
expected2 = (expected[0], expected2 = (expected[0],
dict((k, v.replace(',', '][')) for k, v in expected[1].iteritems()) dict((k, v.replace(',', '][')) for k, v in expected[1].items())
) )
self.assertEqual(expected2, result) self.assertEqual(expected2, result)
@ -1018,7 +1018,7 @@ filter = testfilter1
self.assertEqual(add_actions[-1][-1], "{}") self.assertEqual(add_actions[-1][-1], "{}")
def testLogPathFileFilterBackend(self): def testLogPathFileFilterBackend(self):
self.assertRaisesRegexp(ValueError, r"Have not found any log file for .* jail", self.assertRaisesRegex(ValueError, r"Have not found any log file for .* jail",
self._testLogPath, backend='polling') self._testLogPath, backend='polling')
def testLogPathSystemdBackend(self): def testLogPathSystemdBackend(self):


@ -67,7 +67,7 @@ class DatabaseTest(LogCaptureTestCase):
@property @property
def db(self): def db(self):
if isinstance(self._db, basestring) and self._db == ':auto-create-in-memory:': if isinstance(self._db, str) and self._db == ':auto-create-in-memory:':
self._db = getFail2BanDb(self.dbFilename) self._db = getFail2BanDb(self.dbFilename)
return self._db return self._db
@db.setter @db.setter
@ -159,7 +159,7 @@ class DatabaseTest(LogCaptureTestCase):
self.db = Fail2BanDb(self.dbFilename) self.db = Fail2BanDb(self.dbFilename)
self.assertEqual(self.db.getJailNames(), set(['DummyJail #29162448 with 0 tickets'])) self.assertEqual(self.db.getJailNames(), set(['DummyJail #29162448 with 0 tickets']))
self.assertEqual(self.db.getLogPaths(), set(['/tmp/Fail2BanDb_pUlZJh.log'])) self.assertEqual(self.db.getLogPaths(), set(['/tmp/Fail2BanDb_pUlZJh.log']))
ticket = FailTicket("127.0.0.1", 1388009242.26, [u"abc\n"]) ticket = FailTicket("127.0.0.1", 1388009242.26, ["abc\n"])
self.assertEqual(self.db.getBans()[0], ticket) self.assertEqual(self.db.getBans()[0], ticket)
self.assertEqual(self.db.updateDb(Fail2BanDb.__version__), Fail2BanDb.__version__) self.assertEqual(self.db.updateDb(Fail2BanDb.__version__), Fail2BanDb.__version__)
@ -185,9 +185,9 @@ class DatabaseTest(LogCaptureTestCase):
self.assertEqual(len(bans), 2) self.assertEqual(len(bans), 2)
# compare first ticket completely: # compare first ticket completely:
ticket = FailTicket("1.2.3.7", 1417595494, [ ticket = FailTicket("1.2.3.7", 1417595494, [
u'Dec 3 09:31:08 f2btest test:auth[27658]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7', 'Dec 3 09:31:08 f2btest test:auth[27658]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
u'Dec 3 09:31:32 f2btest test:auth[27671]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7', 'Dec 3 09:31:32 f2btest test:auth[27671]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7',
u'Dec 3 09:31:34 f2btest test:auth[27673]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7' 'Dec 3 09:31:34 f2btest test:auth[27673]: pam_unix(test:auth): authentication failure; logname= uid=0 euid=0 tty=test ruser= rhost=1.2.3.7'
]) ])
ticket.setAttempt(3) ticket.setAttempt(3)
self.assertEqual(bans[0], ticket) self.assertEqual(bans[0], ticket)
@ -287,11 +287,11 @@ class DatabaseTest(LogCaptureTestCase):
# invalid + valid, invalid + valid unicode, invalid + valid dual converted (like in filter:readline by fallback) ... # invalid + valid, invalid + valid unicode, invalid + valid dual converted (like in filter:readline by fallback) ...
tickets = [ tickets = [
FailTicket("127.0.0.1", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']), FailTicket("127.0.0.1", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
FailTicket("127.0.0.2", 0, ['user "test"', u'user "\xd1\xe2\xe5\xf2\xe0"', u'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']), FailTicket("127.0.0.2", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
FailTicket("127.0.0.3", 0, ['user "test"', b'user "\xd1\xe2\xe5\xf2\xe0"', b'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']), FailTicket("127.0.0.3", 0, ['user "test"', b'user "\xd1\xe2\xe5\xf2\xe0"', b'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"']),
FailTicket("127.0.0.4", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', u'user "\xe4\xf6\xfc\xdf"']), FailTicket("127.0.0.4", 0, ['user "test"', 'user "\xd1\xe2\xe5\xf2\xe0"', 'user "\xe4\xf6\xfc\xdf"']),
FailTicket("127.0.0.5", 0, ['user "test"', 'unterminated \xcf']), FailTicket("127.0.0.5", 0, ['user "test"', 'unterminated \xcf']),
FailTicket("127.0.0.6", 0, ['user "test"', u'unterminated \xcf']), FailTicket("127.0.0.6", 0, ['user "test"', 'unterminated \xcf']),
FailTicket("127.0.0.7", 0, ['user "test"', b'unterminated \xcf']) FailTicket("127.0.0.7", 0, ['user "test"', b'unterminated \xcf'])
] ]
for ticket in tickets: for ticket in tickets:
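
Migration note: most of the churn in the test suite is mechanical. The u'...' prefix is a no-op in Python 3 because every str literal is text, while b'...' stays genuinely different, which is why the mixed-encoding tickets above keep their bytes variants. A compact illustration:

    s = 'user "\xe4\xf6\xfc\xdf"'      # str: \xe4 is the code point U+00E4 (ä)
    u = u'user "\xe4\xf6\xfc\xdf"'     # identical in Python 3; the u prefix changes nothing
    b = b'user "\xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f"'   # bytes: UTF-8 encoding of the same text

    print(s == u)                       # True
    print(b.decode('utf-8') == s)       # True
    print(b.decode('utf-8', 'replace')) # lossy-but-safe decode, like the filter does for logs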


@ -288,7 +288,7 @@ class DateDetectorTest(LogCaptureTestCase):
self.assertEqual(logTime, mu) self.assertEqual(logTime, mu)
self.assertEqual(logMatch.group(1), '2012/10/11 02:37:17') self.assertEqual(logMatch.group(1), '2012/10/11 02:37:17')
# confuse it with year being at the end # confuse it with year being at the end
for i in xrange(10): for i in range(10):
( logTime, logMatch ) = self.datedetector.getTime('11/10/2012 02:37:17 [error] 18434#0') ( logTime, logMatch ) = self.datedetector.getTime('11/10/2012 02:37:17 [error] 18434#0')
self.assertEqual(logTime, mu) self.assertEqual(logTime, mu)
self.assertEqual(logMatch.group(1), '11/10/2012 02:37:17') self.assertEqual(logMatch.group(1), '11/10/2012 02:37:17')
@ -538,7 +538,7 @@ class CustomDateFormatsTest(unittest.TestCase):
date = dd.getTime(line) date = dd.getTime(line)
if matched: if matched:
self.assertTrue(date) self.assertTrue(date)
if isinstance(matched, basestring): if isinstance(matched, str):
self.assertEqual(matched, date[1].group(1)) self.assertEqual(matched, date[1].group(1))
else: else:
self.assertEqual(matched, date[0]) self.assertEqual(matched, date[0])
@ -573,7 +573,7 @@ class CustomDateFormatsTest(unittest.TestCase):
date = dd.getTime(line) date = dd.getTime(line)
if matched: if matched:
self.assertTrue(date) self.assertTrue(date)
if isinstance(matched, basestring): # pragma: no cover if isinstance(matched, str): # pragma: no cover
self.assertEqual(matched, date[1].group(1)) self.assertEqual(matched, date[1].group(1))
else: else:
self.assertEqual(matched, date[0]) self.assertEqual(matched, date[0])


@ -367,10 +367,10 @@ def with_foreground_server_thread(startextra={}):
# several commands to server in body of decorated function: # several commands to server in body of decorated function:
return f(self, tmp, startparams, *args, **kwargs) return f(self, tmp, startparams, *args, **kwargs)
except Exception as e: # pragma: no cover except Exception as e: # pragma: no cover
print('=== Catch an exception: %s' % e) print(('=== Catch an exception: %s' % e))
log = self.getLog() log = self.getLog()
if log: if log:
print('=== Error of server, log: ===\n%s===' % log) print(('=== Error of server, log: ===\n%s===' % log))
self.pruneLog() self.pruneLog()
raise raise
finally: finally:
@ -440,7 +440,7 @@ class Fail2banClientServerBase(LogCaptureTestCase):
) )
except: # pragma: no cover except: # pragma: no cover
if _inherited_log(startparams): if _inherited_log(startparams):
print('=== Error by wait fot server, log: ===\n%s===' % self.getLog()) print(('=== Error by wait fot server, log: ===\n%s===' % self.getLog()))
self.pruneLog() self.pruneLog()
log = pjoin(tmp, "f2b.log") log = pjoin(tmp, "f2b.log")
if isfile(log): if isfile(log):
@ -1702,6 +1702,6 @@ class Fail2banServerTest(Fail2banClientServerBase):
self.stopAndWaitForServerEnd(SUCCESS) self.stopAndWaitForServerEnd(SUCCESS)
def testServerStartStop(self): def testServerStartStop(self):
for i in xrange(2000): for i in range(2000):
self._testServerStartStop() self._testServerStartStop()
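
Migration note: the doubled parentheses here are a 2to3 artifact; the tool wraps the whole argument of the old print statement, which is redundant but harmless when a single value is printed. (Where several values were printed, as in the digest helper further down, the extra parentheses actually matter.) xrange simply becomes range:

    e = RuntimeError("boom")

    print('=== Catch an exception: %s' % e)     # idiomatic Python 3
    print(('=== Catch an exception: %s' % e))   # what 2to3 emits; same output, extra parens

    for i in range(2000):                       # xrange() no longer exists
        pass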


@ -596,8 +596,8 @@ class Fail2banRegexTest(LogCaptureTestCase):
# test on unicode string containing \x0A as part of uni-char, # test on unicode string containing \x0A as part of uni-char,
# it must produce exactly 2 lines (both are failures): # it must produce exactly 2 lines (both are failures):
for l in ( for l in (
u'1490349000 \u20AC Failed auth: invalid user Test\u020A from 192.0.2.1\n', '1490349000 \u20AC Failed auth: invalid user Test\u020A from 192.0.2.1\n',
u'1490349000 \u20AC Failed auth: invalid user TestI from 192.0.2.2\n' '1490349000 \u20AC Failed auth: invalid user TestI from 192.0.2.2\n'
): ):
fout.write(l.encode(enc)) fout.write(l.encode(enc))
fout.close() fout.close()


@ -45,11 +45,11 @@ class AddFailure(unittest.TestCase):
super(AddFailure, self).tearDown() super(AddFailure, self).tearDown()
def _addDefItems(self): def _addDefItems(self):
self.__items = [[u'193.168.0.128', 1167605999.0], self.__items = [['193.168.0.128', 1167605999.0],
[u'193.168.0.128', 1167605999.0], ['193.168.0.128', 1167605999.0],
[u'193.168.0.128', 1167605999.0], ['193.168.0.128', 1167605999.0],
[u'193.168.0.128', 1167605999.0], ['193.168.0.128', 1167605999.0],
[u'193.168.0.128', 1167605999.0], ['193.168.0.128', 1167605999.0],
['87.142.124.10', 1167605999.0], ['87.142.124.10', 1167605999.0],
['87.142.124.10', 1167605999.0], ['87.142.124.10', 1167605999.0],
['87.142.124.10', 1167605999.0], ['87.142.124.10', 1167605999.0],


@ -41,7 +41,7 @@ def auth(v):
response="%s" response="%s"
""" % ( username, algorithm, realm, url, nonce, qop, response ) """ % ( username, algorithm, realm, url, nonce, qop, response )
# opaque="%s", # opaque="%s",
print(p.method, p.url, p.headers) print((p.method, p.url, p.headers))
s = requests.Session() s = requests.Session()
return s.send(p) return s.send(p)
@ -76,18 +76,18 @@ r = auth(v)
# [Sun Jul 28 21:41:20 2013] [error] [client 127.0.0.1] Digest: unknown algorithm `super funky chicken' received: /digest/ # [Sun Jul 28 21:41:20 2013] [error] [client 127.0.0.1] Digest: unknown algorithm `super funky chicken' received: /digest/
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
v['algorithm'] = algorithm v['algorithm'] = algorithm
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
nonce = v['nonce'] nonce = v['nonce']
v['nonce']=v['nonce'][5:-5] v['nonce']=v['nonce'][5:-5]
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
# [Sun Jul 28 21:05:31.178340 2013] [auth_digest:error] [pid 24224:tid 139895539455744] [client 127.0.0.1:56906] AH01793: invalid qop `auth' received: /digest/qop_none/ # [Sun Jul 28 21:05:31.178340 2013] [auth_digest:error] [pid 24224:tid 139895539455744] [client 127.0.0.1:56906] AH01793: invalid qop `auth' received: /digest/qop_none/
@ -95,7 +95,7 @@ print(r.status_code,r.headers, r.text)
v['nonce']=nonce[0:11] + 'ZZZ' + nonce[14:] v['nonce']=nonce[0:11] + 'ZZZ' + nonce[14:]
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
#[Sun Jul 28 21:18:11.769228 2013] [auth_digest:error] [pid 24752:tid 139895505884928] [client 127.0.0.1:56964] AH01776: invalid nonce b9YAiJDiBAZZZ1b1abe02d20063ea3b16b544ea1b0d981c1bafe received - hash is not d42d824dee7aaf50c3ba0a7c6290bd453e3dd35b #[Sun Jul 28 21:18:11.769228 2013] [auth_digest:error] [pid 24752:tid 139895505884928] [client 127.0.0.1:56964] AH01776: invalid nonce b9YAiJDiBAZZZ1b1abe02d20063ea3b16b544ea1b0d981c1bafe received - hash is not d42d824dee7aaf50c3ba0a7c6290bd453e3dd35b
@ -107,7 +107,7 @@ import time
time.sleep(1) time.sleep(1)
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
# Obtained by putting the following code in modules/aaa/mod_auth_digest.c # Obtained by putting the following code in modules/aaa/mod_auth_digest.c
# in the function initialize_secret # in the function initialize_secret
@ -137,7 +137,7 @@ s = sha.sha(apachesecret)
v=preauth() v=preauth()
print(v['nonce']) print((v['nonce']))
realm = v['Digest realm'][1:-1] realm = v['Digest realm'][1:-1]
(t,) = struct.unpack('l',base64.b64decode(v['nonce'][1:13])) (t,) = struct.unpack('l',base64.b64decode(v['nonce'][1:13]))
@ -156,13 +156,13 @@ print(v)
r = auth(v) r = auth(v)
#[Mon Jul 29 02:12:55.539813 2013] [auth_digest:error] [pid 9647:tid 139895522670336] [client 127.0.0.1:58474] AH01777: invalid nonce 59QJppTiBAA=b08983fd166ade9840407df1b0f75b9e6e07d88d received - user attempted time travel #[Mon Jul 29 02:12:55.539813 2013] [auth_digest:error] [pid 9647:tid 139895522670336] [client 127.0.0.1:58474] AH01777: invalid nonce 59QJppTiBAA=b08983fd166ade9840407df1b0f75b9e6e07d88d received - user attempted time travel
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
url='/digest_onetime/' url='/digest_onetime/'
v=preauth() v=preauth()
# Need opaque header handling in auth # Need opaque header handling in auth
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
r = auth(v) r = auth(v)
print(r.status_code,r.headers, r.text) print((r.status_code,r.headers, r.text))
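
Migration note: in this helper script the extra parentheses are not redundant. Under Python 2, print(a, b, c) was a print statement applied to a tuple and printed the tuple's repr, so 2to3 preserves that output by passing one tuple to the print() function; dropping the parentheses would switch to space-separated values. For a throwaway debug script either is fine, but the difference is worth knowing:

    status, headers, text = 401, {'WWW-Authenticate': 'Digest ...'}, 'denied'

    print((status, headers, text))   # tuple repr, matching the old Python 2 output
    print(status, headers, text)     # Python 3 style: space-separated, no tuple syntax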


@ -22,7 +22,7 @@
__copyright__ = "Copyright (c) 2004 Cyril Jaquier; 2012 Yaroslav Halchenko" __copyright__ = "Copyright (c) 2004 Cyril Jaquier; 2012 Yaroslav Halchenko"
__license__ = "GPL" __license__ = "GPL"
from __builtin__ import open as fopen from builtins import open as fopen
import unittest import unittest
import os import os
import re import re
@ -213,12 +213,12 @@ def _copy_lines_between_files(in_, fout, n=None, skip=0, mode='a', terminal_line
else: else:
fin = in_ fin = in_
# Skip # Skip
for i in xrange(skip): for i in range(skip):
fin.readline() fin.readline()
# Read # Read
i = 0 i = 0
if lines: if lines:
lines = map(uni_bytes, lines) lines = list(map(uni_bytes, lines))
else: else:
lines = [] lines = []
while n is None or i < n: while n is None or i < n:
@ -257,7 +257,7 @@ def _copy_lines_to_journal(in_, fields={},n=None, skip=0, terminal_line=""): # p
# Required for filtering # Required for filtering
fields.update(TEST_JOURNAL_FIELDS) fields.update(TEST_JOURNAL_FIELDS)
# Skip # Skip
for i in xrange(skip): for i in range(skip):
fin.readline() fin.readline()
# Read/Write # Read/Write
i = 0 i = 0
@ -319,18 +319,18 @@ class BasicFilter(unittest.TestCase):
def testTest_tm(self): def testTest_tm(self):
unittest.F2B.SkipIfFast() unittest.F2B.SkipIfFast()
## test function "_tm" works correct (returns the same as slow strftime): ## test function "_tm" works correct (returns the same as slow strftime):
for i in xrange(1417512352, (1417512352 // 3600 + 3) * 3600): for i in range(1417512352, (1417512352 // 3600 + 3) * 3600):
tm = MyTime.time2str(i) tm = MyTime.time2str(i)
if _tm(i) != tm: # pragma: no cover - never reachable if _tm(i) != tm: # pragma: no cover - never reachable
self.assertEqual((_tm(i), i), (tm, i)) self.assertEqual((_tm(i), i), (tm, i))
def testWrongCharInTupleLine(self): def testWrongCharInTupleLine(self):
## line tuple has different types (ascii after ascii / unicode): ## line tuple has different types (ascii after ascii / unicode):
for a1 in ('', u'', b''): for a1 in ('', '', b''):
for a2 in ('2016-09-05T20:18:56', u'2016-09-05T20:18:56', b'2016-09-05T20:18:56'): for a2 in ('2016-09-05T20:18:56', '2016-09-05T20:18:56', b'2016-09-05T20:18:56'):
for a3 in ( for a3 in (
'Fail for "g\xc3\xb6ran" from 192.0.2.1', 'Fail for "g\xc3\xb6ran" from 192.0.2.1',
u'Fail for "g\xc3\xb6ran" from 192.0.2.1', 'Fail for "g\xc3\xb6ran" from 192.0.2.1',
b'Fail for "g\xc3\xb6ran" from 192.0.2.1' b'Fail for "g\xc3\xb6ran" from 192.0.2.1'
): ):
# join should work if all arguments have the same type: # join should work if all arguments have the same type:
@ -517,7 +517,7 @@ class IgnoreIP(LogCaptureTestCase):
def testAddAttempt(self): def testAddAttempt(self):
self.filter.setMaxRetry(3) self.filter.setMaxRetry(3)
for i in xrange(1, 1+3): for i in range(1, 1+3):
self.filter.addAttempt('192.0.2.1') self.filter.addAttempt('192.0.2.1')
self.assertLogged('Attempt 192.0.2.1', '192.0.2.1:%d' % i, all=True, wait=True) self.assertLogged('Attempt 192.0.2.1', '192.0.2.1:%d' % i, all=True, wait=True)
self.jail.actions._Actions__checkBan() self.jail.actions._Actions__checkBan()
@ -554,7 +554,7 @@ class IgnoreIP(LogCaptureTestCase):
# like both test-cases above, just cached (so once per key)... # like both test-cases above, just cached (so once per key)...
self.filter.ignoreCache = {"key":"<ip>"} self.filter.ignoreCache = {"key":"<ip>"}
self.filter.ignoreCommand = 'if [ "<ip>" = "10.0.0.1" ]; then exit 0; fi; exit 1' self.filter.ignoreCommand = 'if [ "<ip>" = "10.0.0.1" ]; then exit 0; fi; exit 1'
for i in xrange(5): for i in range(5):
self.pruneLog() self.pruneLog()
self.assertTrue(self.filter.inIgnoreIPList("10.0.0.1")) self.assertTrue(self.filter.inIgnoreIPList("10.0.0.1"))
self.assertFalse(self.filter.inIgnoreIPList("10.0.0.0")) self.assertFalse(self.filter.inIgnoreIPList("10.0.0.0"))
@ -565,7 +565,7 @@ class IgnoreIP(LogCaptureTestCase):
# by host of IP: # by host of IP:
self.filter.ignoreCache = {"key":"<ip-host>"} self.filter.ignoreCache = {"key":"<ip-host>"}
self.filter.ignoreCommand = 'if [ "<ip-host>" = "test-host" ]; then exit 0; fi; exit 1' self.filter.ignoreCommand = 'if [ "<ip-host>" = "test-host" ]; then exit 0; fi; exit 1'
for i in xrange(5): for i in range(5):
self.pruneLog() self.pruneLog()
self.assertTrue(self.filter.inIgnoreIPList(FailTicket("2001:db8::1"))) self.assertTrue(self.filter.inIgnoreIPList(FailTicket("2001:db8::1")))
self.assertFalse(self.filter.inIgnoreIPList(FailTicket("2001:db8::ffff"))) self.assertFalse(self.filter.inIgnoreIPList(FailTicket("2001:db8::ffff")))
@ -577,7 +577,7 @@ class IgnoreIP(LogCaptureTestCase):
self.filter.ignoreCache = {"key":"<F-USER>", "max-count":"10", "max-time":"1h"} self.filter.ignoreCache = {"key":"<F-USER>", "max-count":"10", "max-time":"1h"}
self.assertEqual(self.filter.ignoreCache, ["<F-USER>", 10, 60*60]) self.assertEqual(self.filter.ignoreCache, ["<F-USER>", 10, 60*60])
self.filter.ignoreCommand = 'if [ "<F-USER>" = "tester" ]; then exit 0; fi; exit 1' self.filter.ignoreCommand = 'if [ "<F-USER>" = "tester" ]; then exit 0; fi; exit 1'
for i in xrange(5): for i in range(5):
self.pruneLog() self.pruneLog()
self.assertTrue(self.filter.inIgnoreIPList(FailTicket("tester", data={'user': 'tester'}))) self.assertTrue(self.filter.inIgnoreIPList(FailTicket("tester", data={'user': 'tester'})))
self.assertFalse(self.filter.inIgnoreIPList(FailTicket("root", data={'user': 'root'}))) self.assertFalse(self.filter.inIgnoreIPList(FailTicket("root", data={'user': 'root'})))
@ -680,7 +680,7 @@ class LogFile(LogCaptureTestCase):
def testDecodeLineWarn(self): def testDecodeLineWarn(self):
# incomplete line (missing byte at end), warning is suppressed: # incomplete line (missing byte at end), warning is suppressed:
l = u"correct line\n" l = "correct line\n"
r = l.encode('utf-16le') r = l.encode('utf-16le')
self.assertEqual(FileContainer.decode_line('TESTFILE', 'utf-16le', r), l) self.assertEqual(FileContainer.decode_line('TESTFILE', 'utf-16le', r), l)
self.assertEqual(FileContainer.decode_line('TESTFILE', 'utf-16le', r[0:-1]), l[0:-1]) self.assertEqual(FileContainer.decode_line('TESTFILE', 'utf-16le', r[0:-1]), l[0:-1])
@ -740,7 +740,7 @@ class LogFileFilterPoll(unittest.TestCase):
fc = FileContainer(fname, self.filter.getLogEncoding()) fc = FileContainer(fname, self.filter.getLogEncoding())
fc.open() fc.open()
# no time - nothing should be found : # no time - nothing should be found :
for i in xrange(10): for i in range(10):
f.write(b"[sshd] error: PAM: failure len 1\n") f.write(b"[sshd] error: PAM: failure len 1\n")
f.flush() f.flush()
fc.setPos(0); self.filter.seekToTime(fc, time) fc.setPos(0); self.filter.seekToTime(fc, time)
@ -814,14 +814,14 @@ class LogFileFilterPoll(unittest.TestCase):
# variable length of file (ca 45K or 450K before and hereafter): # variable length of file (ca 45K or 450K before and hereafter):
# write lines with smaller as search time: # write lines with smaller as search time:
t = time - count - 1 t = time - count - 1
for i in xrange(count): for i in range(count):
f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(t)) f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(t))
t += 1 t += 1
f.flush() f.flush()
fc.setPos(0); self.filter.seekToTime(fc, time) fc.setPos(0); self.filter.seekToTime(fc, time)
self.assertEqual(fc.getPos(), 47*count) self.assertEqual(fc.getPos(), 47*count)
# write lines with exact search time: # write lines with exact search time:
for i in xrange(10): for i in range(10):
f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(time)) f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(time))
f.flush() f.flush()
fc.setPos(0); self.filter.seekToTime(fc, time) fc.setPos(0); self.filter.seekToTime(fc, time)
@ -830,8 +830,8 @@ class LogFileFilterPoll(unittest.TestCase):
self.assertEqual(fc.getPos(), 47*count) self.assertEqual(fc.getPos(), 47*count)
# write lines with greater as search time: # write lines with greater as search time:
t = time+1 t = time+1
for i in xrange(count//500): for i in range(count//500):
for j in xrange(500): for j in range(500):
f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(t)) f.write(b"%s [sshd] error: PAM: failure\n" % _tmb(t))
t += 1 t += 1
f.flush() f.flush()
@ -1641,10 +1641,10 @@ def get_monitor_failures_journal_testcase(Filter_): # pragma: systemd no cover
# Add direct utf, unicode, blob: # Add direct utf, unicode, blob:
for l in ( for l in (
"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1", "error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1",
u"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1", "error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1",
b"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1".decode('utf-8', 'replace'), b"error: PAM: Authentication failure for \xe4\xf6\xfc\xdf from 192.0.2.1".decode('utf-8', 'replace'),
"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2", "error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2",
u"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2", "error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2",
b"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2".decode('utf-8', 'replace') b"error: PAM: Authentication failure for \xc3\xa4\xc3\xb6\xc3\xbc\xc3\x9f from 192.0.2.2".decode('utf-8', 'replace')
): ):
fields = self.journal_fields fields = self.journal_fields
@ -1673,7 +1673,7 @@ class GetFailures(LogCaptureTestCase):
# so that they could be reused by other tests # so that they could be reused by other tests
FAILURES_01 = ('193.168.0.128', 3, 1124013599.0, FAILURES_01 = ('193.168.0.128', 3, 1124013599.0,
[u'Aug 14 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 193.168.0.128']*3) ['Aug 14 11:59:59 [sshd] error: PAM: Authentication failure for kevin from 193.168.0.128']*3)
def setUp(self): def setUp(self):
"""Call before every test case.""" """Call before every test case."""
@ -1759,8 +1759,8 @@ class GetFailures(LogCaptureTestCase):
# test on unicode string containing \x0A as part of uni-char, # test on unicode string containing \x0A as part of uni-char,
# it must produce exactly 2 lines (both are failures): # it must produce exactly 2 lines (both are failures):
for l in ( for l in (
u'%s \u20AC Failed auth: invalid user Test\u020A from 192.0.2.1\n' % tm, '%s \u20AC Failed auth: invalid user Test\u020A from 192.0.2.1\n' % tm,
u'%s \u20AC Failed auth: invalid user TestI from 192.0.2.2\n' % tm '%s \u20AC Failed auth: invalid user TestI from 192.0.2.2\n' % tm
): ):
fout.write(l.encode(enc)) fout.write(l.encode(enc))
fout.close() fout.close()
@ -1781,8 +1781,8 @@ class GetFailures(LogCaptureTestCase):
def testGetFailures02(self): def testGetFailures02(self):
output = ('141.3.81.106', 4, 1124013539.0, output = ('141.3.81.106', 4, 1124013539.0,
[u'Aug 14 11:%d:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:141.3.81.106 port 51332 ssh2' ['Aug 14 11:%d:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:141.3.81.106 port 51332 ssh2'
% m for m in 53, 54, 57, 58]) % m for m in (53, 54, 57, 58)])
self.filter.setMaxRetry(4) self.filter.setMaxRetry(4)
self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=0) self.filter.addLogPath(GetFailures.FILENAME_02, autoSeek=0)
@ -1893,19 +1893,19 @@ class GetFailures(LogCaptureTestCase):
# We should still catch failures with usedns = no ;-) # We should still catch failures with usedns = no ;-)
output_yes = ( output_yes = (
('93.184.216.34', 1, 1124013299.0, ('93.184.216.34', 1, 1124013299.0,
[u'Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2'] ['Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2']
), ),
('93.184.216.34', 1, 1124013539.0, ('93.184.216.34', 1, 1124013539.0,
[u'Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2'] ['Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
), ),
('2606:2800:220:1:248:1893:25c8:1946', 1, 1124013299.0, ('2606:2800:220:1:248:1893:25c8:1946', 1, 1124013299.0,
[u'Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2'] ['Aug 14 11:54:59 i60p295 sshd[12365]: Failed publickey for roehl from example.com port 51332 ssh2']
), ),
) )
output_no = ( output_no = (
('93.184.216.34', 1, 1124013539.0, ('93.184.216.34', 1, 1124013539.0,
[u'Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2'] ['Aug 14 11:58:59 i60p295 sshd[12365]: Failed publickey for roehl from ::ffff:93.184.216.34 port 51332 ssh2']
) )
) )
@ -2011,9 +2011,9 @@ class DNSUtilsTests(unittest.TestCase):
self.assertTrue(c.get('a') is None) self.assertTrue(c.get('a') is None)
self.assertEqual(c.get('a', 'test'), 'test') self.assertEqual(c.get('a', 'test'), 'test')
# exact 5 elements : # exact 5 elements :
for i in xrange(5): for i in range(5):
c.set(i, i) c.set(i, i)
for i in xrange(5): for i in range(5):
self.assertEqual(c.get(i), i) self.assertEqual(c.get(i), i)
# remove unavailable key: # remove unavailable key:
c.unset('a'); c.unset('a') c.unset('a'); c.unset('a')
@ -2021,30 +2021,30 @@ class DNSUtilsTests(unittest.TestCase):
def testCacheMaxSize(self): def testCacheMaxSize(self):
c = Utils.Cache(maxCount=5, maxTime=60) c = Utils.Cache(maxCount=5, maxTime=60)
# exact 5 elements : # exact 5 elements :
for i in xrange(5): for i in range(5):
c.set(i, i) c.set(i, i)
self.assertEqual([c.get(i) for i in xrange(5)], [i for i in xrange(5)]) self.assertEqual([c.get(i) for i in range(5)], [i for i in range(5)])
self.assertNotIn(-1, (c.get(i, -1) for i in xrange(5))) self.assertNotIn(-1, (c.get(i, -1) for i in range(5)))
# add one - too many: # add one - too many:
c.set(10, i) c.set(10, i)
# one element should be removed : # one element should be removed :
self.assertIn(-1, (c.get(i, -1) for i in xrange(5))) self.assertIn(-1, (c.get(i, -1) for i in range(5)))
# test max size (not expired): # test max size (not expired):
for i in xrange(10): for i in range(10):
c.set(i, 1) c.set(i, 1)
self.assertEqual(len(c), 5) self.assertEqual(len(c), 5)
def testCacheMaxTime(self): def testCacheMaxTime(self):
# test max time (expired, timeout reached) : # test max time (expired, timeout reached) :
c = Utils.Cache(maxCount=5, maxTime=0.0005) c = Utils.Cache(maxCount=5, maxTime=0.0005)
for i in xrange(10): for i in range(10):
c.set(i, 1) c.set(i, 1)
st = time.time() st = time.time()
self.assertTrue(Utils.wait_for(lambda: time.time() >= st + 0.0005, 1)) self.assertTrue(Utils.wait_for(lambda: time.time() >= st + 0.0005, 1))
# we still have 5 elements (or fewer on a slow test machine): # we still have 5 elements (or fewer on a slow test machine):
self.assertTrue(len(c) <= 5) self.assertTrue(len(c) <= 5)
# but all of them are expired as well: # but all of them are expired as well:
for i in xrange(10): for i in range(10):
self.assertTrue(c.get(i) is None) self.assertTrue(c.get(i) is None)
# here the whole cache should be empty: # here the whole cache should be empty:
self.assertEqual(len(c), 0) self.assertEqual(len(c), 0)
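
The cache assertions above pin down two limits: at most maxCount entries at a time, and each entry only valid for maxTime seconds, plus get/set/unset that never raise on missing keys. A minimal sketch with those semantics follows; it only illustrates the behaviour the tests expect, is not fail2ban's actual Utils.Cache, and the class name BoundedCache is invented for the example.

import time
from collections import OrderedDict

class BoundedCache:
    """Toy cache: at most maxCount entries, each valid for maxTime seconds."""
    def __init__(self, maxCount=1000, maxTime=60):
        self.maxCount = maxCount
        self.maxTime = maxTime
        self._data = OrderedDict()                 # key -> (value, expires_at)

    def set(self, k, v):
        if k not in self._data and len(self._data) >= self.maxCount:
            self._data.popitem(last=False)         # evict the oldest entry
        self._data[k] = (v, time.time() + self.maxTime)

    def get(self, k, default=None):
        item = self._data.get(k)
        if item is None or item[1] < time.time():
            self._data.pop(k, None)                # expired entries count as missing
            return default
        return item[0]

    def unset(self, k):
        self._data.pop(k, None)                    # silently ignore unknown keys

    def __len__(self):
        now = time.time()
        return sum(1 for _, exp in self._data.values() if exp >= now)

With maxCount=5 and maxTime=0.0005 this reproduces the shape of the tests above: the sixth insert evicts one key, and once the timeout has passed every get() returns None and len() drops to 0.
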
@ -2065,7 +2065,7 @@ class DNSUtilsTests(unittest.TestCase):
c = count c = count
while c: while c:
c -= 1 c -= 1
s = xrange(0, 256, 1) if forw else xrange(255, -1, -1) s = range(0, 256, 1) if forw else range(255, -1, -1)
if random: shuffle([i for i in s]) if random: shuffle([i for i in s])
for i in s: for i in s:
IPAddr('192.0.2.'+str(i), IPAddr.FAM_IPv4) IPAddr('192.0.2.'+str(i), IPAddr.FAM_IPv4)
@ -2205,16 +2205,16 @@ class DNSUtilsNetworkTests(unittest.TestCase):
def testAddr2bin(self): def testAddr2bin(self):
res = IPAddr('10.0.0.0') res = IPAddr('10.0.0.0')
self.assertEqual(res.addr, 167772160L) self.assertEqual(res.addr, 167772160)
res = IPAddr('10.0.0.0', cidr=None) res = IPAddr('10.0.0.0', cidr=None)
self.assertEqual(res.addr, 167772160L) self.assertEqual(res.addr, 167772160)
res = IPAddr('10.0.0.0', cidr=32L) res = IPAddr('10.0.0.0', cidr=32)
self.assertEqual(res.addr, 167772160L) self.assertEqual(res.addr, 167772160)
res = IPAddr('10.0.0.1', cidr=32L) res = IPAddr('10.0.0.1', cidr=32)
self.assertEqual(res.addr, 167772161L) self.assertEqual(res.addr, 167772161)
self.assertTrue(res.isSingle) self.assertTrue(res.isSingle)
res = IPAddr('10.0.0.1', cidr=31L) res = IPAddr('10.0.0.1', cidr=31)
self.assertEqual(res.addr, 167772160L) self.assertEqual(res.addr, 167772160)
self.assertFalse(res.isSingle) self.assertFalse(res.isSingle)
self.assertEqual(IPAddr('10.0.0.0').hexdump, '0a000000') self.assertEqual(IPAddr('10.0.0.0').hexdump, '0a000000')
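
The addr2bin expectations are plain base-256 arithmetic: an IPv4 address is its four octets read as a big-endian 32-bit integer, so 10.0.0.0 is 10 * 2**24 = 167772160, 10.0.0.1 is one more, and a /31 prefix clears the lowest host bit. The standard library confirms the numbers (a quick check, independent of the IPAddr class under test):

import ipaddress
import socket

# four octets as a big-endian integer
assert int.from_bytes(socket.inet_aton('10.0.0.0'), 'big') == 167772160
assert int(ipaddress.IPv4Address('10.0.0.1')) == 167772161

# a /31 keeps only the 31 high bits, so 10.0.0.1/31 maps back onto 10.0.0.0
net = ipaddress.ip_network('10.0.0.1/31', strict=False)
assert int(net.network_address) == 167772160
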
@ -2305,9 +2305,9 @@ class DNSUtilsNetworkTests(unittest.TestCase):
'93.184.216.34': 'ip4-test', '93.184.216.34': 'ip4-test',
'2606:2800:220:1:248:1893:25c8:1946': 'ip6-test' '2606:2800:220:1:248:1893:25c8:1946': 'ip6-test'
} }
d2 = dict([(IPAddr(k), v) for k, v in d.iteritems()]) d2 = dict([(IPAddr(k), v) for k, v in d.items()])
self.assertTrue(isinstance(d.keys()[0], basestring)) self.assertTrue(isinstance(list(d.keys())[0], str))
self.assertTrue(isinstance(d2.keys()[0], IPAddr)) self.assertTrue(isinstance(list(d2.keys())[0], IPAddr))
self.assertEqual(d.get(ip4[2], ''), 'ip4-test') self.assertEqual(d.get(ip4[2], ''), 'ip4-test')
self.assertEqual(d.get(ip6[2], ''), 'ip6-test') self.assertEqual(d.get(ip6[2], ''), 'ip6-test')
self.assertEqual(d2.get(str(ip4[2]), ''), 'ip4-test') self.assertEqual(d2.get(str(ip4[2]), ''), 'ip4-test')
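
The edits in this test module are the routine 2to3 substitutions: the u'' prefix goes away because every str literal is unicode in Python 3, trailing-L long literals collapse into the single int type, a bare tuple after "for ... in" inside a comprehension now needs parentheses, iteritems() becomes items(), and dict views such as d.keys() are no longer indexable, hence the list(...) wrappers. Condensed into plain Python 3 (generic code, not the project's):

# one text type, one integer type
s = 'Failed auth: invalid user Test\u020A from 192.0.2.1'
assert isinstance(s, str)                    # no u'' prefix needed
assert 167772160 == 0x0A000000               # no trailing L on large ints

# a comprehension over a literal tuple needs the parentheses in Python 3
msgs = ['attempt at 11:%d:59' % m for m in (53, 54, 57, 58)]
assert len(msgs) == 4

# dict views are not lists any more
d = {'93.184.216.34': 'ip4-test'}
assert isinstance(list(d.keys())[0], str)    # d.keys()[0] would raise TypeError
assert dict(d.items()) == d                  # items() replaces iteritems()
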

View File

@ -29,9 +29,9 @@ import tempfile
import shutil import shutil
import fnmatch import fnmatch
from glob import glob from glob import glob
from StringIO import StringIO from io import StringIO
from utils import LogCaptureTestCase, logSys as DefLogSys from .utils import LogCaptureTestCase, logSys as DefLogSys
from ..helpers import formatExceptionInfo, mbasename, TraceBack, FormatterWithTraceBack, getLogger, \ from ..helpers import formatExceptionInfo, mbasename, TraceBack, FormatterWithTraceBack, getLogger, \
getVerbosityFormat, splitwords, uni_decode, uni_string getVerbosityFormat, splitwords, uni_decode, uni_string
@ -67,7 +67,7 @@ class HelpersTest(unittest.TestCase):
self.assertEqual(splitwords(' 1\n 2'), ['1', '2']) self.assertEqual(splitwords(' 1\n 2'), ['1', '2'])
self.assertEqual(splitwords(' 1\n 2, 3'), ['1', '2', '3']) self.assertEqual(splitwords(' 1\n 2, 3'), ['1', '2', '3'])
# string as unicode: # string as unicode:
self.assertEqual(splitwords(u' 1\n 2, 3'), ['1', '2', '3']) self.assertEqual(splitwords(' 1\n 2, 3'), ['1', '2', '3'])
def _sh_call(cmd): def _sh_call(cmd):
@ -191,12 +191,12 @@ class TestsUtilsTest(LogCaptureTestCase):
def testUniConverters(self): def testUniConverters(self):
self.assertRaises(Exception, uni_decode, self.assertRaises(Exception, uni_decode,
(b'test' if sys.version_info >= (3,) else u'test'), 'f2b-test::non-existing-encoding') (b'test' if sys.version_info >= (3,) else 'test'), 'f2b-test::non-existing-encoding')
uni_decode((b'test\xcf' if sys.version_info >= (3,) else u'test\xcf')) uni_decode((b'test\xcf' if sys.version_info >= (3,) else 'test\xcf'))
uni_string(b'test\xcf') uni_string(b'test\xcf')
uni_string('test\xcf') uni_string('test\xcf')
if sys.version_info < (3,) and 'PyPy' not in sys.version: if sys.version_info < (3,) and 'PyPy' not in sys.version:
uni_string(u'test\xcf') uni_string('test\xcf')
def testSafeLogging(self): def testSafeLogging(self):
# logging should be exception-safe, to avoid possible errors (concat, str. conversion, representation failures, etc) # logging should be exception-safe, to avoid possible errors (concat, str. conversion, representation failures, etc)
@ -208,7 +208,7 @@ class TestsUtilsTest(LogCaptureTestCase):
if self.err: if self.err:
raise Exception('no represenation for test!') raise Exception('no represenation for test!')
else: else:
return u'conv-error (\xf2\xf0\xe5\xf2\xe8\xe9), unterminated utf \xcf' return 'conv-error (\xf2\xf0\xe5\xf2\xe8\xe9), unterminated utf \xcf'
test = Test() test = Test()
logSys.log(logging.NOTICE, "test 1a: %r", test) logSys.log(logging.NOTICE, "test 1a: %r", test)
self.assertLogged("Traceback", "no represenation for test!") self.assertLogged("Traceback", "no represenation for test!")
@ -256,7 +256,7 @@ class TestsUtilsTest(LogCaptureTestCase):
func_raise() func_raise()
try: try:
print deep_function(3) print(deep_function(3))
except ValueError: except ValueError:
s = tb() s = tb()
@ -273,7 +273,7 @@ class TestsUtilsTest(LogCaptureTestCase):
self.assertIn(':', s) self.assertIn(':', s)
def _testAssertionErrorRE(self, regexp, fun, *args, **kwargs): def _testAssertionErrorRE(self, regexp, fun, *args, **kwargs):
self.assertRaisesRegexp(AssertionError, regexp, fun, *args, **kwargs) self.assertRaisesRegex(AssertionError, regexp, fun, *args, **kwargs)
def testExtendedAssertRaisesRE(self): def testExtendedAssertRaisesRE(self):
## test _testAssertionErrorRE several fail cases: ## test _testAssertionErrorRE several fail cases:
@ -311,13 +311,13 @@ class TestsUtilsTest(LogCaptureTestCase):
self._testAssertionErrorRE(r"'a' unexpectedly found in 'cba'", self._testAssertionErrorRE(r"'a' unexpectedly found in 'cba'",
self.assertNotIn, 'a', 'cba') self.assertNotIn, 'a', 'cba')
self._testAssertionErrorRE(r"1 unexpectedly found in \[0, 1, 2\]", self._testAssertionErrorRE(r"1 unexpectedly found in \[0, 1, 2\]",
self.assertNotIn, 1, xrange(3)) self.assertNotIn, 1, range(3))
self._testAssertionErrorRE(r"'A' unexpectedly found in \['C', 'A'\]", self._testAssertionErrorRE(r"'A' unexpectedly found in \['C', 'A'\]",
self.assertNotIn, 'A', (c.upper() for c in 'cba' if c != 'b')) self.assertNotIn, 'A', (c.upper() for c in 'cba' if c != 'b'))
self._testAssertionErrorRE(r"'a' was not found in 'xyz'", self._testAssertionErrorRE(r"'a' was not found in 'xyz'",
self.assertIn, 'a', 'xyz') self.assertIn, 'a', 'xyz')
self._testAssertionErrorRE(r"5 was not found in \[0, 1, 2\]", self._testAssertionErrorRE(r"5 was not found in \[0, 1, 2\]",
self.assertIn, 5, xrange(3)) self.assertIn, 5, range(3))
self._testAssertionErrorRE(r"'A' was not found in \['C', 'B'\]", self._testAssertionErrorRE(r"'A' was not found in \['C', 'B'\]",
self.assertIn, 'A', (c.upper() for c in 'cba' if c != 'a')) self.assertIn, 'A', (c.upper() for c in 'cba' if c != 'a'))
## assertLogged, assertNotLogged positive case: ## assertLogged, assertNotLogged positive case:
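
Two more renames recur in this file: StringIO moved into the io module (the Python 2 StringIO and cStringIO modules are gone), print became a function, and unittest's assertRaisesRegexp became assertRaisesRegex. A small self-contained illustration, unrelated to the fail2ban test classes:

import io
import unittest

class RenamedHelpersDemo(unittest.TestCase):
    def test_stringio_and_regex_assert(self):
        buf = io.StringIO()            # replaces StringIO.StringIO / cStringIO.StringIO
        print('hello', file=buf)       # print is a function in Python 3
        self.assertEqual(buf.getvalue(), 'hello\n')
        # assertRaisesRegexp was deprecated and later removed; use assertRaisesRegex
        with self.assertRaisesRegex(ValueError, r'invalid literal'):
            int('not-a-number')

if __name__ == '__main__':
    unittest.main()
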

View File

@ -68,7 +68,7 @@ class BanTimeIncr(LogCaptureTestCase):
a.setBanTimeExtra('multipliers', multipliers) a.setBanTimeExtra('multipliers', multipliers)
# test algorithm and max time 24 hours : # test algorithm and max time 24 hours :
self.assertEqual( self.assertEqual(
[a.calcBanTime(600, i) for i in xrange(1, 11)], [a.calcBanTime(600, i) for i in range(1, 11)],
[1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400] [1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400]
) )
# with extra large max time (30 days): # with extra large max time (30 days):
@ -80,38 +80,38 @@ class BanTimeIncr(LogCaptureTestCase):
if multcnt < 11: if multcnt < 11:
arr = arr[0:multcnt-1] + ([arr[multcnt-2]] * (11-multcnt)) arr = arr[0:multcnt-1] + ([arr[multcnt-2]] * (11-multcnt))
self.assertEqual( self.assertEqual(
[a.calcBanTime(600, i) for i in xrange(1, 11)], [a.calcBanTime(600, i) for i in range(1, 11)],
arr arr
) )
a.setBanTimeExtra('maxtime', '1d') a.setBanTimeExtra('maxtime', '1d')
# change factor : # change factor :
a.setBanTimeExtra('factor', '2'); a.setBanTimeExtra('factor', '2');
self.assertEqual( self.assertEqual(
[a.calcBanTime(600, i) for i in xrange(1, 11)], [a.calcBanTime(600, i) for i in range(1, 11)],
[2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400, 86400] [2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400, 86400]
) )
# factor is float : # factor is float :
a.setBanTimeExtra('factor', '1.33'); a.setBanTimeExtra('factor', '1.33');
self.assertEqual( self.assertEqual(
[int(a.calcBanTime(600, i)) for i in xrange(1, 11)], [int(a.calcBanTime(600, i)) for i in range(1, 11)],
[1596, 3192, 6384, 12768, 25536, 51072, 86400, 86400, 86400, 86400] [1596, 3192, 6384, 12768, 25536, 51072, 86400, 86400, 86400, 86400]
) )
a.setBanTimeExtra('factor', None); a.setBanTimeExtra('factor', None);
# change max time : # change max time :
a.setBanTimeExtra('maxtime', '12h') a.setBanTimeExtra('maxtime', '12h')
self.assertEqual( self.assertEqual(
[a.calcBanTime(600, i) for i in xrange(1, 11)], [a.calcBanTime(600, i) for i in range(1, 11)],
[1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200] [1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200]
) )
a.setBanTimeExtra('maxtime', '24h') a.setBanTimeExtra('maxtime', '24h')
## test randomization - it is not possible that all 10 runs yield random = 0: ## test randomization - it is not possible that all 10 runs yield random = 0:
a.setBanTimeExtra('rndtime', '5m') a.setBanTimeExtra('rndtime', '5m')
self.assertTrue( self.assertTrue(
False in [1200 in [a.calcBanTime(600, 1) for i in xrange(10)] for c in xrange(10)] False in [1200 in [a.calcBanTime(600, 1) for i in range(10)] for c in range(10)]
) )
a.setBanTimeExtra('rndtime', None) a.setBanTimeExtra('rndtime', None)
self.assertFalse( self.assertFalse(
False in [1200 in [a.calcBanTime(600, 1) for i in xrange(10)] for c in xrange(10)] False in [1200 in [a.calcBanTime(600, 1) for i in range(10)] for c in range(10)]
) )
# restore default: # restore default:
a.setBanTimeExtra('multipliers', None) a.setBanTimeExtra('multipliers', None)
@ -123,7 +123,7 @@ class BanTimeIncr(LogCaptureTestCase):
# this multipliers has the same values as default formula, we test stop growing after count 9: # this multipliers has the same values as default formula, we test stop growing after count 9:
self.testDefault('1 2 4 8 16 32 64 128 256') self.testDefault('1 2 4 8 16 32 64 128 256')
# this multipliers has exactly the same values as default formula, test endless growing (stops by count 31 only): # this multipliers has exactly the same values as default formula, test endless growing (stops by count 31 only):
self.testDefault(' '.join([str(1<<i) for i in xrange(31)])) self.testDefault(' '.join([str(1<<i) for i in range(31)]))
def testFormula(self): def testFormula(self):
a = self.__jail; a = self.__jail;
@ -135,38 +135,38 @@ class BanTimeIncr(LogCaptureTestCase):
a.setBanTimeExtra('multipliers', None) a.setBanTimeExtra('multipliers', None)
# test algorithm and max time 24 hours : # test algorithm and max time 24 hours :
self.assertEqual( self.assertEqual(
[int(a.calcBanTime(600, i)) for i in xrange(1, 11)], [int(a.calcBanTime(600, i)) for i in range(1, 11)],
[1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400] [1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400]
) )
# with extra large max time (30 days): # with extra large max time (30 days):
a.setBanTimeExtra('maxtime', '30d') a.setBanTimeExtra('maxtime', '30d')
self.assertEqual( self.assertEqual(
[int(a.calcBanTime(600, i)) for i in xrange(1, 11)], [int(a.calcBanTime(600, i)) for i in range(1, 11)],
[1200, 2400, 4800, 9600, 19200, 38400, 76800, 153601, 307203, 614407] [1200, 2400, 4800, 9600, 19200, 38400, 76800, 153601, 307203, 614407]
) )
a.setBanTimeExtra('maxtime', '24h') a.setBanTimeExtra('maxtime', '24h')
# change factor : # change factor :
a.setBanTimeExtra('factor', '1'); a.setBanTimeExtra('factor', '1');
self.assertEqual( self.assertEqual(
[int(a.calcBanTime(600, i)) for i in xrange(1, 11)], [int(a.calcBanTime(600, i)) for i in range(1, 11)],
[1630, 4433, 12051, 32758, 86400, 86400, 86400, 86400, 86400, 86400] [1630, 4433, 12051, 32758, 86400, 86400, 86400, 86400, 86400, 86400]
) )
a.setBanTimeExtra('factor', '2.0 / 2.885385') a.setBanTimeExtra('factor', '2.0 / 2.885385')
# change max time : # change max time :
a.setBanTimeExtra('maxtime', '12h') a.setBanTimeExtra('maxtime', '12h')
self.assertEqual( self.assertEqual(
[int(a.calcBanTime(600, i)) for i in xrange(1, 11)], [int(a.calcBanTime(600, i)) for i in range(1, 11)],
[1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200] [1200, 2400, 4800, 9600, 19200, 38400, 43200, 43200, 43200, 43200]
) )
a.setBanTimeExtra('maxtime', '24h') a.setBanTimeExtra('maxtime', '24h')
## test randomization - it is not possible that all 10 runs yield random = 0: ## test randomization - it is not possible that all 10 runs yield random = 0:
a.setBanTimeExtra('rndtime', '5m') a.setBanTimeExtra('rndtime', '5m')
self.assertTrue( self.assertTrue(
False in [1200 in [int(a.calcBanTime(600, 1)) for i in xrange(10)] for c in xrange(10)] False in [1200 in [int(a.calcBanTime(600, 1)) for i in range(10)] for c in range(10)]
) )
a.setBanTimeExtra('rndtime', None) a.setBanTimeExtra('rndtime', None)
self.assertFalse( self.assertFalse(
False in [1200 in [int(a.calcBanTime(600, 1)) for i in xrange(10)] for c in xrange(10)] False in [1200 in [int(a.calcBanTime(600, 1)) for i in range(10)] for c in range(10)]
) )
# restore default: # restore default:
a.setBanTimeExtra('factor', None); a.setBanTimeExtra('factor', None);
@ -229,7 +229,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
ticket = FailTicket(ip, stime, []) ticket = FailTicket(ip, stime, [])
# test ticket not yet found # test ticket not yet found
self.assertEqual( self.assertEqual(
[self.incrBanTime(ticket, 10) for i in xrange(3)], [self.incrBanTime(ticket, 10) for i in range(3)],
[10, 10, 10] [10, 10, 10]
) )
# add a ticket banned # add a ticket banned
@ -284,7 +284,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
) )
# increase ban multiple times: # increase ban multiple times:
lastBanTime = 20 lastBanTime = 20
for i in xrange(10): for i in range(10):
ticket.setTime(stime + lastBanTime + 5) ticket.setTime(stime + lastBanTime + 5)
banTime = self.incrBanTime(ticket, 10) banTime = self.incrBanTime(ticket, 10)
self.assertEqual(banTime, lastBanTime * 2) self.assertEqual(banTime, lastBanTime * 2)
@ -483,7 +483,7 @@ class BanTimeIncrDB(LogCaptureTestCase):
ticket = FailTicket(ip, stime-120, []) ticket = FailTicket(ip, stime-120, [])
failManager = jail.filter.failManager = FailManager() failManager = jail.filter.failManager = FailManager()
failManager.setMaxRetry(3) failManager.setMaxRetry(3)
for i in xrange(3): for i in range(3):
failManager.addFailure(ticket) failManager.addFailure(ticket)
obs.add('failureFound', jail, ticket) obs.add('failureFound', jail, ticket)
obs.wait_empty(5) obs.wait_empty(5)
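
The expected ban times in these assertions encode the default growth rule: every further offence doubles the previous ban, and the result is capped by maxtime (86400 s for 24h, 43200 s for 12h). The arithmetic can be reproduced in one line; this is only a sketch of the numbers the tests check, not calcBanTime itself:

# doubling from a 600 s initial ban, capped at 24 hours
expected = [min(600 * 2**i, 86400) for i in range(1, 11)]
assert expected == [1200, 2400, 4800, 9600, 19200, 38400, 76800, 86400, 86400, 86400]

# the same growth capped at 12 hours flattens out earlier
assert [min(600 * 2**i, 43200) for i in range(1, 11)][-4:] == [43200] * 4
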

View File

@ -137,7 +137,7 @@ class FilterSamplesRegex(unittest.TestCase):
@staticmethod @staticmethod
def _filterOptions(opts): def _filterOptions(opts):
return dict((k, v) for k, v in opts.iteritems() if not k.startswith('test.')) return dict((k, v) for k, v in opts.items() if not k.startswith('test.'))
def testSampleRegexsFactory(name, basedir): def testSampleRegexsFactory(name, basedir):
def testFilter(self): def testFilter(self):
@ -258,12 +258,12 @@ def testSampleRegexsFactory(name, basedir):
self.assertTrue(faildata.get('match', False), self.assertTrue(faildata.get('match', False),
"Line matched when shouldn't have") "Line matched when shouldn't have")
self.assertEqual(len(ret), 1, self.assertEqual(len(ret), 1,
"Multiple regexs matched %r" % (map(lambda x: x[0], ret))) "Multiple regexs matched %r" % ([x[0] for x in ret]))
for ret in ret: for ret in ret:
failregex, fid, fail2banTime, fail = ret failregex, fid, fail2banTime, fail = ret
# Verify match captures (at least fid/host) and timestamp as expected # Verify match captures (at least fid/host) and timestamp as expected
for k, v in faildata.iteritems(): for k, v in faildata.items():
if k not in ("time", "match", "desc", "constraint"): if k not in ("time", "match", "desc", "constraint"):
fv = fail.get(k, None) fv = fail.get(k, None)
if fv is None: if fv is None:
@ -305,7 +305,7 @@ def testSampleRegexsFactory(name, basedir):
'\n'.join(pprint.pformat(fail).splitlines()))) '\n'.join(pprint.pformat(fail).splitlines())))
# check missing samples for regex using each filter-options combination: # check missing samples for regex using each filter-options combination:
for fltName, flt in self._filters.iteritems(): for fltName, flt in self._filters.items():
flt, regexsUsedIdx = flt flt, regexsUsedIdx = flt
regexList = flt.getFailRegex() regexList = flt.getFailRegex()
for failRegexIndex, failRegex in enumerate(regexList): for failRegexIndex, failRegex in enumerate(regexList):
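
dict.iteritems() does not exist in Python 3; items() returns a lazy view and is the drop-in spelling, and map() now yields an iterator rather than a list, which is why the error-message formatting above switches to a list comprehension. A generic illustration (the option names here are made up):

opts = {'test.prefix': 1, 'maxlines': 5, 'datepattern': None}

# items() replaces iteritems() and still iterates lazily
filtered = dict((k, v) for k, v in opts.items() if not k.startswith('test.'))
assert filtered == {'maxlines': 5, 'datepattern': None}

# map() returns an iterator; use a comprehension (or list(map(...))) when a list is needed
ret = [('regex-1', 'id-1'), ('regex-2', 'id-2')]
assert [x[0] for x in ret] == list(map(lambda x: x[0], ret)) == ['regex-1', 'regex-2']
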

View File

@ -127,14 +127,14 @@ class TransmitterBase(LogCaptureTestCase):
self.transm.proceed(["get", jail, cmd]), (0, [])) self.transm.proceed(["get", jail, cmd]), (0, []))
for n, value in enumerate(values): for n, value in enumerate(values):
ret = self.transm.proceed(["set", jail, cmdAdd, value]) ret = self.transm.proceed(["set", jail, cmdAdd, value])
self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[:n+1])), level=2) self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[:n+1]))), level=2)
ret = self.transm.proceed(["get", jail, cmd]) ret = self.transm.proceed(["get", jail, cmd])
self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[:n+1])), level=2) self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[:n+1]))), level=2)
for n, value in enumerate(values): for n, value in enumerate(values):
ret = self.transm.proceed(["set", jail, cmdDel, value]) ret = self.transm.proceed(["set", jail, cmdDel, value])
self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[n+1:])), level=2) self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[n+1:]))), level=2)
ret = self.transm.proceed(["get", jail, cmd]) ret = self.transm.proceed(["get", jail, cmd])
self.assertSortedEqual((ret[0], map(str, ret[1])), (0, map(str, values[n+1:])), level=2) self.assertSortedEqual((ret[0], list(map(str, ret[1]))), (0, list(map(str, values[n+1:]))), level=2)
def jailAddDelRegexTest(self, cmd, inValues, outValues, jail): def jailAddDelRegexTest(self, cmd, inValues, outValues, jail):
cmdAdd = "add" + cmd cmdAdd = "add" + cmd
@ -930,7 +930,7 @@ class TransmitterLogging(TransmitterBase):
def testLogTarget(self): def testLogTarget(self):
logTargets = [] logTargets = []
for _ in xrange(3): for _ in range(3):
tmpFile = tempfile.mkstemp("fail2ban", "transmitter") tmpFile = tempfile.mkstemp("fail2ban", "transmitter")
logTargets.append(tmpFile[1]) logTargets.append(tmpFile[1])
os.close(tmpFile[0]) os.close(tmpFile[0])
@ -1003,26 +1003,26 @@ class TransmitterLogging(TransmitterBase):
self.assertEqual(self.transm.proceed(["flushlogs"]), (0, "rolled over")) self.assertEqual(self.transm.proceed(["flushlogs"]), (0, "rolled over"))
l.warning("After flushlogs") l.warning("After flushlogs")
with open(fn2,'r') as f: with open(fn2,'r') as f:
line1 = f.next() line1 = next(f)
if line1.find('Changed logging target to') >= 0: if line1.find('Changed logging target to') >= 0:
line1 = f.next() line1 = next(f)
self.assertTrue(line1.endswith("Before file moved\n")) self.assertTrue(line1.endswith("Before file moved\n"))
line2 = f.next() line2 = next(f)
self.assertTrue(line2.endswith("After file moved\n")) self.assertTrue(line2.endswith("After file moved\n"))
try: try:
n = f.next() n = next(f)
if n.find("Command: ['flushlogs']") >=0: if n.find("Command: ['flushlogs']") >=0:
self.assertRaises(StopIteration, f.next) self.assertRaises(StopIteration, f.__next__)
else: else:
self.fail("Exception StopIteration or Command: ['flushlogs'] expected. Got: %s" % n) self.fail("Exception StopIteration or Command: ['flushlogs'] expected. Got: %s" % n)
except StopIteration: except StopIteration:
pass # on higher debugging levels this is expected pass # on higher debugging levels this is expected
with open(fn,'r') as f: with open(fn,'r') as f:
line1 = f.next() line1 = next(f)
if line1.find('rollover performed on') >= 0: if line1.find('rollover performed on') >= 0:
line1 = f.next() line1 = next(f)
self.assertTrue(line1.endswith("After flushlogs\n")) self.assertTrue(line1.endswith("After flushlogs\n"))
self.assertRaises(StopIteration, f.next) self.assertRaises(StopIteration, f.__next__)
f.close() f.close()
finally: finally:
os.remove(fn2) os.remove(fn2)
@ -1185,7 +1185,7 @@ class LoggingTests(LogCaptureTestCase):
os.remove(f) os.remove(f)
from clientreadertestcase import ActionReader, JailsReader, CONFIG_DIR from .clientreadertestcase import ActionReader, JailsReader, CONFIG_DIR
class ServerConfigReaderTests(LogCaptureTestCase): class ServerConfigReaderTests(LogCaptureTestCase):
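
File objects lost their .next() method in Python 3; line iteration goes through the __next__ protocol, so the replacements are the next() builtin for reading one line and the bound f.__next__ where a callable is needed (for example as the argument to assertRaises). A hedged sketch of that pattern on a temporary file:

import tempfile
import unittest

class NextOnFilesDemo(unittest.TestCase):
    def test_next_builtin(self):
        with tempfile.NamedTemporaryFile('w+', suffix='fail2ban') as f:
            f.write('Before file moved\nAfter file moved\n')
            f.seek(0)
            self.assertTrue(next(f).endswith('Before file moved\n'))   # was f.next()
            self.assertTrue(next(f).endswith('After file moved\n'))
            # the bound __next__ replaces f.next as the callable used to probe exhaustion
            self.assertRaises(StopIteration, f.__next__)

if __name__ == '__main__':
    unittest.main()
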

View File

@ -153,7 +153,7 @@ class Socket(LogCaptureTestCase):
org_handler = RequestHandler.found_terminator org_handler = RequestHandler.found_terminator
try: try:
RequestHandler.found_terminator = lambda self: self.close() RequestHandler.found_terminator = lambda self: self.close()
self.assertRaisesRegexp(Exception, r"reset by peer|Broken pipe", self.assertRaisesRegex(Exception, r"reset by peer|Broken pipe",
lambda: client.send(testMessage, timeout=unittest.F2B.maxWaitTime(10))) lambda: client.send(testMessage, timeout=unittest.F2B.maxWaitTime(10)))
finally: finally:
RequestHandler.found_terminator = org_handler RequestHandler.found_terminator = org_handler

View File

@ -35,7 +35,7 @@ import time
import threading import threading
import unittest import unittest
from cStringIO import StringIO from io import StringIO
from functools import wraps from functools import wraps
from ..helpers import getLogger, str2LogLevel, getVerbosityFormat, uni_decode from ..helpers import getLogger, str2LogLevel, getVerbosityFormat, uni_decode
@ -73,7 +73,7 @@ class DefaultTestOptions(optparse.Values):
self.__dict__ = { self.__dict__ = {
'log_level': None, 'verbosity': None, 'log_lazy': True, 'log_level': None, 'verbosity': None, 'log_lazy': True,
'log_traceback': None, 'full_traceback': None, 'log_traceback': None, 'full_traceback': None,
'fast': False, 'memory_db': False, 'no_gamin': False, 'fast': False, 'memory_db': False,
'no_network': False, 'negate_re': False 'no_network': False, 'negate_re': False
} }
@ -105,9 +105,6 @@ def getOptParser(doc=""):
Option('-n', "--no-network", action="store_true", Option('-n', "--no-network", action="store_true",
dest="no_network", dest="no_network",
help="Do not run tests that require the network"), help="Do not run tests that require the network"),
Option('-g', "--no-gamin", action="store_true",
dest="no_gamin",
help="Do not run tests that require the gamin"),
Option('-m', "--memory-db", action="store_true", Option('-m', "--memory-db", action="store_true",
dest="memory_db", dest="memory_db",
help="Run database tests using memory instead of file"), help="Run database tests using memory instead of file"),
@ -171,8 +168,8 @@ def initProcess(opts):
# Let know the version # Let know the version
if opts.verbosity != 0: if opts.verbosity != 0:
print("Fail2ban %s test suite. Python %s. Please wait..." \ print(("Fail2ban %s test suite. Python %s. Please wait..." \
% (version, str(sys.version).replace('\n', ''))) % (version, str(sys.version).replace('\n', ''))))
return opts; return opts;
@ -186,7 +183,6 @@ class F2B(DefaultTestOptions):
self.__dict__ = opts.__dict__ self.__dict__ = opts.__dict__
if self.fast: # pragma: no cover - normal mode in travis if self.fast: # pragma: no cover - normal mode in travis
self.memory_db = True self.memory_db = True
self.no_gamin = True
self.__dict__['share_config'] = {} self.__dict__['share_config'] = {}
def SkipIfFast(self): def SkipIfFast(self):
pass pass
@ -303,7 +299,7 @@ def initTests(opts):
c.clear = lambda: logSys.warn('clear CACHE_ipToName is disabled in test suite') c.clear = lambda: logSys.warn('clear CACHE_ipToName is disabled in test suite')
# increase max count and max time (too many entries, long time testing): # increase max count and max time (too many entries, long time testing):
c.setOptions(maxCount=10000, maxTime=5*60) c.setOptions(maxCount=10000, maxTime=5*60)
for i in xrange(256): for i in range(256):
c.set('192.0.2.%s' % i, None) c.set('192.0.2.%s' % i, None)
c.set('198.51.100.%s' % i, None) c.set('198.51.100.%s' % i, None)
c.set('203.0.113.%s' % i, None) c.set('203.0.113.%s' % i, None)
@ -492,16 +488,6 @@ def gatherTests(regexps=None, opts=None):
# Additional filters available only if external modules are available # Additional filters available only if external modules are available
# yoh: Since I do not know better way for parametric tests # yoh: Since I do not know better way for parametric tests
# with good old unittest # with good old unittest
try:
# because gamin can be very slow on some platforms (and can produce many failures
# with fast sleep interval) - skip it by fast run:
if unittest.F2B.fast or unittest.F2B.no_gamin: # pragma: no cover
raise ImportError('Skip, fast: %s, no_gamin: %s' % (unittest.F2B.fast, unittest.F2B.no_gamin))
from ..server.filtergamin import FilterGamin
filters.append(FilterGamin)
except ImportError as e: # pragma: no cover
logSys.warning("Skipping gamin backend testing. Got exception '%s'" % e)
try: try:
from ..server.filterpyinotify import FilterPyinotify from ..server.filterpyinotify import FilterPyinotify
filters.append(FilterPyinotify) filters.append(FilterPyinotify)
@ -531,8 +517,8 @@ def gatherTests(regexps=None, opts=None):
import difflib, pprint import difflib, pprint
if not hasattr(unittest.TestCase, 'assertDictEqual'): if not hasattr(unittest.TestCase, 'assertDictEqual'):
def assertDictEqual(self, d1, d2, msg=None): def assertDictEqual(self, d1, d2, msg=None):
self.assert_(isinstance(d1, dict), 'First argument is not a dictionary') self.assertTrue(isinstance(d1, dict), 'First argument is not a dictionary')
self.assert_(isinstance(d2, dict), 'Second argument is not a dictionary') self.assertTrue(isinstance(d2, dict), 'Second argument is not a dictionary')
if d1 != d2: if d1 != d2:
standardMsg = '%r != %r' % (d1, d2) standardMsg = '%r != %r' % (d1, d2)
diff = ('\n' + '\n'.join(difflib.ndiff( diff = ('\n' + '\n'.join(difflib.ndiff(
@ -550,7 +536,7 @@ def assertSortedEqual(self, a, b, level=1, nestedOnly=False, key=repr, msg=None)
# used to recognize having element as nested dict, list or tuple: # used to recognize having element as nested dict, list or tuple:
def _is_nested(v): def _is_nested(v):
if isinstance(v, dict): if isinstance(v, dict):
return any(isinstance(v, (dict, list, tuple)) for v in v.itervalues()) return any(isinstance(v, (dict, list, tuple)) for v in v.values())
return any(isinstance(v, (dict, list, tuple)) for v in v) return any(isinstance(v, (dict, list, tuple)) for v in v)
if nestedOnly: if nestedOnly:
_nest_sorted = sorted _nest_sorted = sorted
@ -570,7 +556,7 @@ def assertSortedEqual(self, a, b, level=1, nestedOnly=False, key=repr, msg=None)
return return
raise ValueError('%r != %r' % (a, b)) raise ValueError('%r != %r' % (a, b))
if isinstance(a, dict) and isinstance(b, dict): # compare dict's: if isinstance(a, dict) and isinstance(b, dict): # compare dict's:
for k, v1 in a.iteritems(): for k, v1 in a.items():
v2 = b[k] v2 = b[k]
if isinstance(v1, (dict, list, tuple)) and isinstance(v2, (dict, list, tuple)): if isinstance(v1, (dict, list, tuple)) and isinstance(v2, (dict, list, tuple)):
_assertSortedEqual(v1, v2, level-1 if level != 0 else 0, nestedOnly, key) _assertSortedEqual(v1, v2, level-1 if level != 0 else 0, nestedOnly, key)
@ -596,23 +582,12 @@ def assertSortedEqual(self, a, b, level=1, nestedOnly=False, key=repr, msg=None)
self.fail(msg) self.fail(msg)
unittest.TestCase.assertSortedEqual = assertSortedEqual unittest.TestCase.assertSortedEqual = assertSortedEqual
if not hasattr(unittest.TestCase, 'assertRaisesRegexp'):
def assertRaisesRegexp(self, exccls, regexp, fun, *args, **kwargs):
try:
fun(*args, **kwargs)
except exccls as e:
if re.search(regexp, str(e)) is None:
self.fail('\"%s\" does not match \"%s\"' % (regexp, e))
else:
self.fail('%s not raised' % getattr(exccls, '__name__'))
unittest.TestCase.assertRaisesRegexp = assertRaisesRegexp
# always custom following methods, because we use atm better version of both (support generators) # always custom following methods, because we use atm better version of both (support generators)
if True: ## if not hasattr(unittest.TestCase, 'assertIn'): if True: ## if not hasattr(unittest.TestCase, 'assertIn'):
def assertIn(self, a, b, msg=None): def assertIn(self, a, b, msg=None):
bb = b bb = b
wrap = False wrap = False
if msg is None and hasattr(b, '__iter__') and not isinstance(b, basestring): if msg is None and hasattr(b, '__iter__') and not isinstance(b, str):
b, bb = itertools.tee(b) b, bb = itertools.tee(b)
wrap = True wrap = True
if a not in b: if a not in b:
@ -623,7 +598,7 @@ if True: ## if not hasattr(unittest.TestCase, 'assertIn'):
def assertNotIn(self, a, b, msg=None): def assertNotIn(self, a, b, msg=None):
bb = b bb = b
wrap = False wrap = False
if msg is None and hasattr(b, '__iter__') and not isinstance(b, basestring): if msg is None and hasattr(b, '__iter__') and not isinstance(b, str):
b, bb = itertools.tee(b) b, bb = itertools.tee(b)
wrap = True wrap = True
if a in b: if a in b:
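
The custom assertIn/assertNotIn kept here accept generators: since a generator can be consumed only once, it is duplicated with itertools.tee so one copy serves the membership test and the other the failure message, and the basestring guard becomes a plain str check. A reduced sketch of that idea (the helper name contains_report is invented for the example):

import itertools

def contains_report(needle, haystack):
    """Membership test that also works for single-use generators."""
    shown = haystack
    if hasattr(haystack, '__iter__') and not isinstance(haystack, str):
        haystack, shown = itertools.tee(haystack)   # keep an unconsumed copy for reporting
    found = needle in haystack
    return found, (list(shown) if shown is not haystack else shown)

ok, seen = contains_report('A', (c.upper() for c in 'cba' if c != 'a'))
assert not ok and seen == ['C', 'B']
ok, seen = contains_report('a', 'xyz')              # plain strings pass through unchanged
assert not ok and seen == 'xyz'
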

View File

@ -24,7 +24,7 @@ __author__ = "Cyril Jaquier, Yaroslav Halchenko, Steven Hiscocks, Daniel Black"
__copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2005-2016 Yaroslav Halchenko, 2013-2014 Steven Hiscocks, Daniel Black" __copyright__ = "Copyright (c) 2004 Cyril Jaquier, 2005-2016 Yaroslav Halchenko, 2013-2014 Steven Hiscocks, Daniel Black"
__license__ = "GPL-v2+" __license__ = "GPL-v2+"
version = "1.0.3.dev1" version = "1.1.0.dev1"
def normVersion(): def normVersion():
""" Returns fail2ban version in normalized machine-readable format""" """ Returns fail2ban version in normalized machine-readable format"""

View File

@ -80,7 +80,7 @@ _fail2ban () {
;; ;;
*) *)
if [[ "${words[$cword-2]}" == "add" ]];then if [[ "${words[$cword-2]}" == "add" ]];then
COMPREPLY=( $( compgen -W "auto polling gamin pyinotify systemd" -- "$cur" ) ) COMPREPLY=( $( compgen -W "auto polling pyinotify systemd" -- "$cur" ) )
return 0 return 0
elif [[ "${words[$cword-2]}" == "set" || "${words[$cword-2]}" == "get" ]];then elif [[ "${words[$cword-2]}" == "set" || "${words[$cword-2]}" == "get" ]];then
cmd="${words[cword-2]}" cmd="${words[cword-2]}"

View File

@ -1,12 +1,12 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.48.1. .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
.TH FAIL2BAN-CLIENT "1" "November 2022" "Fail2Ban v1.0.3.dev1" "User Commands" .TH FAIL2BAN-CLIENT "1" "June 2023" "Fail2Ban v1.1.0.dev1" "User Commands"
.SH NAME .SH NAME
fail2ban-client \- configure and control the server fail2ban-client \- configure and control the server
.SH SYNOPSIS .SH SYNOPSIS
.B fail2ban-client .B fail2ban-client
[\fI\,OPTIONS\/\fR] \fI\,<COMMAND>\/\fR [\fI\,OPTIONS\/\fR] \fI\,<COMMAND>\/\fR
.SH DESCRIPTION .SH DESCRIPTION
Fail2Ban v1.0.3.dev1 reads log file that contains password failure report Fail2Ban v1.1.0.dev1 reads log file that contains password failure report
and bans the corresponding IP addresses using firewall rules. and bans the corresponding IP addresses using firewall rules.
.SH OPTIONS .SH OPTIONS
.TP .TP
@ -425,8 +425,8 @@ gets the usedns setting for <JAIL>
gets the list of of banned IP gets the list of of banned IP
addresses for <JAIL>. Optionally addresses for <JAIL>. Optionally
the separator character ('<SEP>', the separator character ('<SEP>',
default is space) or the option default is space) or the option '
\&'\-\-with\-time' (printing the times \fB\-\-with\-time\fR' (printing the times
of ban) may be specified. The IPs of ban) may be specified. The IPs
are ordered by end of ban. are ordered by end of ban.
.TP .TP

View File

@ -1,72 +1,69 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.48.1. .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
.TH FAIL2BAN-PYTHON "1" "November 2022" "fail2ban-python 1.0.3.1" "User Commands" .TH FAIL2BAN-PYTHON "1" "June 2023" "fail2ban-python 1.1.0.1" "User Commands"
.SH NAME .SH NAME
fail2ban-python \- a helper for Fail2Ban to assure that the same Python is used fail2ban-python \- a helper for Fail2Ban to assure that the same Python is used
.SH DESCRIPTION .SH DESCRIPTION
usage: fail2ban\-python [option] ... [\-c cmd | \fB\-m\fR mod | file | \fB\-]\fR [arg] ... usage: fail2ban\-python [option] ... [\-c cmd | \fB\-m\fR mod | file | \fB\-]\fR [arg] ...
Options and arguments (and corresponding environment variables): Options (and corresponding environment variables):
\fB\-b\fR : issue warnings about comparing bytearray with unicode \fB\-b\fR : issue warnings about str(bytes_instance), str(bytearray_instance)
.IP .IP
(\fB\-bb\fR: issue errors) and comparing bytes/bytearray with str. (\fB\-bb\fR: issue errors)
.PP .PP
\fB\-B\fR : don't write .py[co] files on import; also PYTHONDONTWRITEBYTECODE=x \fB\-B\fR : don't write .pyc files on import; also PYTHONDONTWRITEBYTECODE=x
\fB\-c\fR cmd : program passed in as string (terminates option list) \fB\-c\fR cmd : program passed in as string (terminates option list)
\fB\-d\fR : debug output from parser; also PYTHONDEBUG=x \fB\-d\fR : turn on parser debugging output (for experts only, only works on
.IP
debug builds); also PYTHONDEBUG=x
.PP
\fB\-E\fR : ignore PYTHON* environment variables (such as PYTHONPATH) \fB\-E\fR : ignore PYTHON* environment variables (such as PYTHONPATH)
\fB\-h\fR : print this help message and exit (also \fB\-\-help\fR) \fB\-h\fR : print this help message and exit (also \-? or \fB\-\-help\fR)
\fB\-i\fR : inspect interactively after running script; forces a prompt even \fB\-i\fR : inspect interactively after running script; forces a prompt even
.IP .IP
if stdin does not appear to be a terminal; also PYTHONINSPECT=x if stdin does not appear to be a terminal; also PYTHONINSPECT=x
.PP .PP
\fB\-I\fR : isolate Python from the user's environment (implies \fB\-E\fR and \fB\-s\fR)
\fB\-m\fR mod : run library module as a script (terminates option list) \fB\-m\fR mod : run library module as a script (terminates option list)
\fB\-O\fR : optimize generated bytecode slightly; also PYTHONOPTIMIZE=x \fB\-O\fR : remove assert and __debug__\-dependent statements; add .opt\-1 before
\fB\-OO\fR : remove doc\-strings in addition to the \fB\-O\fR optimizations
\fB\-R\fR : use a pseudo\-random salt to make hash() values of various types be
.IP .IP
unpredictable between separate invocations of the interpreter, as \&.pyc extension; also PYTHONOPTIMIZE=x
a defense against denial\-of\-service attacks
.PP .PP
\fB\-Q\fR arg : division options: \fB\-Qold\fR (default), \fB\-Qwarn\fR, \fB\-Qwarnall\fR, \fB\-Qnew\fR \fB\-OO\fR : do \fB\-O\fR changes and also discard docstrings; add .opt\-2 before
.IP
\&.pyc extension
.PP
\fB\-P\fR : don't prepend a potentially unsafe path to sys.path
\fB\-q\fR : don't print version and copyright messages on interactive startup
\fB\-s\fR : don't add user site directory to sys.path; also PYTHONNOUSERSITE \fB\-s\fR : don't add user site directory to sys.path; also PYTHONNOUSERSITE
\fB\-S\fR : don't imply 'import site' on initialization \fB\-S\fR : don't imply 'import site' on initialization
\fB\-t\fR : issue warnings about inconsistent tab usage (\fB\-tt\fR: issue errors) \fB\-u\fR : force the stdout and stderr streams to be unbuffered;
\fB\-u\fR : unbuffered binary stdout and stderr; also PYTHONUNBUFFERED=x
.IP .IP
see man page for details on internal buffering relating to '\-u' this option has no effect on stdin; also PYTHONUNBUFFERED=x
.PP .PP
\fB\-v\fR : verbose (trace import statements); also PYTHONVERBOSE=x \fB\-v\fR : verbose (trace import statements); also PYTHONVERBOSE=x
.IP .IP
can be supplied multiple times to increase verbosity can be supplied multiple times to increase verbosity
.PP .PP
\fB\-V\fR : print the Python version number and exit (also \fB\-\-version\fR) \fB\-V\fR : print the Python version number and exit (also \fB\-\-version\fR)
.IP
when given twice, print more information about the build
.PP
\fB\-W\fR arg : warning control; arg is action:message:category:module:lineno \fB\-W\fR arg : warning control; arg is action:message:category:module:lineno
.IP .IP
also PYTHONWARNINGS=arg also PYTHONWARNINGS=arg
.PP .PP
\fB\-x\fR : skip first line of source, allowing use of non\-Unix forms of #!cmd \fB\-x\fR : skip first line of source, allowing use of non\-Unix forms of #!cmd
\fB\-3\fR : warn about Python 3.x incompatibilities that 2to3 cannot trivially fix \fB\-X\fR opt : set implementation\-specific option
\fB\-\-check\-hash\-based\-pycs\fR always|default|never:
.IP
control how Python invalidates hash\-based .pyc files
.PP
\fB\-\-help\-env\fR : print help about Python environment variables and exit
\fB\-\-help\-xoptions\fR : print help about implementation\-specific \fB\-X\fR options and exit
\fB\-\-help\-all\fR : print complete help information and exit
Arguments:
file : program read from script file file : program read from script file
\- : program read from stdin (default; interactive mode if a tty) \- : program read from stdin (default; interactive mode if a tty)
arg ...: arguments passed to program in sys.argv[1:] arg ...: arguments passed to program in sys.argv[1:]
.PP
Other environment variables:
PYTHONSTARTUP: file executed on interactive startup (no default)
PYTHONPATH : ':'\-separated list of directories prefixed to the
.TP
default module search path.
The result is sys.path.
.PP
PYTHONHOME : alternate <prefix> directory (or <prefix>:<exec_prefix>).
.IP
The default module search path uses <prefix>/pythonX.X.
.PP
PYTHONCASEOK : ignore case in 'import' statements (Windows).
PYTHONIOENCODING: Encoding[:errors] used for stdin/stdout/stderr.
PYTHONHASHSEED: if this variable is set to 'random', the effect is the same
.IP
as specifying the \fB\-R\fR option: a random value is used to seed the hashes of
str, bytes and datetime objects. It can also be set to an integer
in the range [0,4294967295] to get hash values with a predictable seed.
.SH "SEE ALSO" .SH "SEE ALSO"
.br .br
fail2ban-client(1) fail2ban-client(1)

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.48.1. .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
.TH FAIL2BAN-REGEX "1" "November 2022" "fail2ban-regex 1.0.3.dev1" "User Commands" .TH FAIL2BAN-REGEX "1" "June 2023" "fail2ban-regex 1.1.0.dev1" "User Commands"
.SH NAME .SH NAME
fail2ban-regex \- test Fail2ban "failregex" option fail2ban-regex \- test Fail2ban "failregex" option
.SH SYNOPSIS .SH SYNOPSIS

View File

@ -1,12 +1,12 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.48.1. .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
.TH FAIL2BAN-SERVER "1" "November 2022" "Fail2Ban v1.0.3.dev1" "User Commands" .TH FAIL2BAN-SERVER "1" "June 2023" "Fail2Ban v1.1.0.dev1" "User Commands"
.SH NAME .SH NAME
fail2ban-server \- start the server fail2ban-server \- start the server
.SH SYNOPSIS .SH SYNOPSIS
.B fail2ban-server .B fail2ban-server
[\fI\,OPTIONS\/\fR] [\fI\,OPTIONS\/\fR]
.SH DESCRIPTION .SH DESCRIPTION
Fail2Ban v1.0.3.dev1 reads log file that contains password failure report Fail2Ban v1.1.0.dev1 reads log file that contains password failure report
and bans the corresponding IP addresses using firewall rules. and bans the corresponding IP addresses using firewall rules.
.SH OPTIONS .SH OPTIONS
.TP .TP

View File

@ -1,5 +1,5 @@
.\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.48.1. .\" DO NOT MODIFY THIS FILE! It was generated by help2man 1.49.3.
.TH FAIL2BAN-TESTCASES "1" "November 2022" "fail2ban-testcases 1.0.3.dev1" "User Commands" .TH FAIL2BAN-TESTCASES "1" "June 2023" "fail2ban-testcases 1.1.0.dev1" "User Commands"
.SH NAME .SH NAME
fail2ban-testcases \- run Fail2Ban unit-tests fail2ban-testcases \- run Fail2Ban unit-tests
.SH SYNOPSIS .SH SYNOPSIS
@ -30,9 +30,6 @@ Prevent lazy logging inside tests
\fB\-n\fR, \fB\-\-no\-network\fR \fB\-n\fR, \fB\-\-no\-network\fR
Do not run tests that require the network Do not run tests that require the network
.TP .TP
\fB\-g\fR, \fB\-\-no\-gamin\fR
Do not run tests that require the gamin
.TP
\fB\-m\fR, \fB\-\-memory\-db\fR \fB\-m\fR, \fB\-\-memory\-db\fR
Run database tests using memory instead of file Run database tests using memory instead of file
.TP .TP

View File

@ -123,7 +123,7 @@ filter = test[test.method=POST, baduseragents="badagent|<known/baduseragents>"]
.fi .fi
.RE .RE
Comments: use '#' for comment lines and '; ' (space is important) for inline comments. When using Python2.X, '; ' can only be used on the first line due to an Python library bug. Comments: use '#' for comment lines and '; ' (space is important) for inline comments.
.SH "FAIL2BAN CONFIGURATION FILE(S) (\fIfail2ban.conf\fB)" .SH "FAIL2BAN CONFIGURATION FILE(S) (\fIfail2ban.conf\fB)"
@ -276,7 +276,7 @@ number of failures that have to occur in the last \fBfindtime\fR seconds to ban
.B backend .B backend
backend to be used to detect changes in the logpath. backend to be used to detect changes in the logpath.
.br .br
It defaults to "auto" which will try "pyinotify", "gamin", "systemd" before "polling". Any of these can be specified. "pyinotify" is only valid on Linux systems with the "pyinotify" Python libraries. "gamin" requires the "gamin" libraries. It defaults to "auto" which will try "pyinotify", "systemd" before "polling". Any of these can be specified. "pyinotify" is only valid on Linux systems with the "pyinotify" Python libraries.
.TP .TP
.B usedns .B usedns
use DNS to resolve HOST names that appear in the logs. By default it is "warn" which will resolve hostnames to IPs however it will also log a warning. If you are using DNS here you could be blocking the wrong IPs due to the asymmetric nature of reverse DNS (that the application used to write the domain name to log) compared to forward DNS that fail2ban uses to resolve this back to an IP (but not necessarily the same one). Ideally you should configure your applications to log a real IP. This can be set to "yes" to prevent warnings in the log or "no" to disable DNS resolution altogether (thus ignoring entries where hostname, not an IP is logged).. use DNS to resolve HOST names that appear in the logs. By default it is "warn" which will resolve hostnames to IPs however it will also log a warning. If you are using DNS here you could be blocking the wrong IPs due to the asymmetric nature of reverse DNS (that the application used to write the domain name to log) compared to forward DNS that fail2ban uses to resolve this back to an IP (but not necessarily the same one). Ideally you should configure your applications to log a real IP. This can be set to "yes" to prevent warnings in the log or "no" to disable DNS resolution altogether (thus ignoring entries where hostname, not an IP is logged)..
@ -299,9 +299,6 @@ Available options are listed below.
.B pyinotify .B pyinotify
requires pyinotify (a file alteration monitor) to be installed. If pyinotify is not installed, Fail2ban will use auto. requires pyinotify (a file alteration monitor) to be installed. If pyinotify is not installed, Fail2ban will use auto.
.TP .TP
.B gamin
requires Gamin (a file alteration monitor) to be installed. If Gamin is not installed, Fail2ban will use auto.
.TP
.B polling .B polling
uses a polling algorithm which does not require external libraries. uses a polling algorithm which does not require external libraries.
.TP .TP

View File

@ -29,14 +29,16 @@ try:
from setuptools import setup from setuptools import setup
from setuptools.command.install import install from setuptools.command.install import install
from setuptools.command.install_scripts import install_scripts from setuptools.command.install_scripts import install_scripts
from setuptools.command.build_py import build_py
build_scripts = None
except ImportError: except ImportError:
setuptools = None setuptools = None
from distutils.core import setup from distutils.core import setup
# all versions # older versions
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
if setuptools is None: if setuptools is None:
from distutils.command.build_py import build_py
from distutils.command.build_scripts import build_scripts
from distutils.command.install import install from distutils.command.install import install
from distutils.command.install_scripts import install_scripts from distutils.command.install_scripts import install_scripts
@ -68,15 +70,15 @@ class install_scripts_f2b(install_scripts):
if dry_run: if dry_run:
#bindir = self.install_dir #bindir = self.install_dir
bindir = self.build_dir bindir = self.build_dir
print('creating fail2ban-python binding -> %s (dry-run, real path can be different)' % (bindir,)) print(('creating fail2ban-python binding -> %s (dry-run, real path can be different)' % (bindir,)))
print('Copying content of %s to %s' % (self.build_dir, self.install_dir)); print(('Copying content of %s to %s' % (self.build_dir, self.install_dir)));
return outputs return outputs
fn = None fn = None
for fn in outputs: for fn in outputs:
if os.path.basename(fn) == 'fail2ban-server': if os.path.basename(fn) == 'fail2ban-server':
break break
bindir = os.path.dirname(fn) bindir = os.path.dirname(fn)
print('creating fail2ban-python binding -> %s' % (bindir,)) print(('creating fail2ban-python binding -> %s' % (bindir,)))
updatePyExec(bindir) updatePyExec(bindir)
return outputs return outputs
@ -93,7 +95,7 @@ class install_scripts_f2b(install_scripts):
scripts = ['fail2ban.service', 'fail2ban-openrc.init'] scripts = ['fail2ban.service', 'fail2ban-openrc.init']
for script in scripts: for script in scripts:
print('Creating %s/%s (from %s.in): @BINDIR@ -> %s' % (buildroot, script, script, install_dir)) print(('Creating %s/%s (from %s.in): @BINDIR@ -> %s' % (buildroot, script, script, install_dir)))
with open(os.path.join(source_dir, 'files/%s.in' % script), 'r') as fn: with open(os.path.join(source_dir, 'files/%s.in' % script), 'r') as fn:
lines = fn.readlines() lines = fn.readlines()
fn = None fn = None
@ -205,10 +207,9 @@ setup(
url = "http://www.fail2ban.org", url = "http://www.fail2ban.org",
license = "GPL", license = "GPL",
platforms = "Posix", platforms = "Posix",
cmdclass = { cmdclass = dict({'build_py': build_py, 'build_scripts': build_scripts} if build_scripts else {}, **{
'build_py': build_py, 'build_scripts': build_scripts,
'install_scripts': install_scripts_f2b, 'install': install_command_f2b 'install_scripts': install_scripts_f2b, 'install': install_command_f2b
}, }),
scripts = [ scripts = [
'bin/fail2ban-client', 'bin/fail2ban-client',
'bin/fail2ban-server', 'bin/fail2ban-server',
@ -296,7 +297,7 @@ if obsoleteFiles:
print("Please delete them:") print("Please delete them:")
print("") print("")
for f in obsoleteFiles: for f in obsoleteFiles:
print("\t" + f) print(("\t" + f))
print("") print("")
if isdir("/usr/lib/fail2ban"): if isdir("/usr/lib/fail2ban"):
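
The setup.py hunk assembles cmdclass conditionally: under setuptools, build_scripts is set to None so only the custom install hooks are registered, while the distutils fallback still needs explicit build_py/build_scripts overrides; the dict(base, **extra) idiom merges the two parts. A stripped-down sketch of the same construction (install_scripts_f2b_stub is a placeholder, not the project's real command class):

try:
    from setuptools.command.build_py import build_py   # setuptools is available
    build_scripts = None                                # let setuptools handle script building
except ImportError:                                     # legacy distutils fallback
    from distutils.command.build_py import build_py
    from distutils.command.build_scripts import build_scripts

class install_scripts_f2b_stub:
    """Placeholder for the project's customized install_scripts command."""

# only add the build_* overrides when the distutils fallback supplied them
cmdclass = dict(
    {'build_py': build_py, 'build_scripts': build_scripts} if build_scripts else {},
    **{'install_scripts': install_scripts_f2b_stub}
)
print(sorted(cmdclass))    # setuptools: ['install_scripts']; distutils: build_* entries included too
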