# Fail2Ban filter to match web requests for selected URLs that don't exist
#
# This filter is aimed at blocking specific URLs that don't exist. This
# could be a set of URLs placed in a Disallow: directive in robots.txt or
# just some web services that don't exist, because bots are searching for
# exploitable content. This filter is designed to have a low false positive
# rate.
#
# An alternative to this is the apache-noscript filter which blocks all
# types of scripts that don't exist.
#
# This is normally a predefined list of exploitable or valuable web services
# that are hidden or aren't actually installed.
#

[INCLUDES]

# overwrite with apache-common.local if _apache_error_client is incorrect.
# Load regexes for filtering from botsearch-common.conf
# (the second line is an indented continuation of the "before" value)
before = apache-common.conf
         botsearch-common.conf

[Definition]

# Strip the common apache error-log prefix (client address and, on
# apache >= 2.4, an optional "AHnnnnn:" error code); the remaining
# message text is handed to failregex via <F-CONTENT>.
prefregex = ^%(_apache_error_client)s (?:AH\d+: )?<F-CONTENT>.+</F-CONTENT>$

# Match "file/script not found" errors for the URLs in <block>
# (<block> and <webroot> substitution come from botsearch-common.conf
# and the webroot setting below). The second pattern covers the
# alternate apache message format that quotes the script path.
failregex = ^(?:File does not exist|script not found or unable to stat): <webroot><block>(, referer: \S+)?\s*$
            ^script '<webroot><block>' not found or unable to stat(, referer: \S+)?\s*$

# No lines are excluded from matching.
ignoreregex =

# Webroot represents the webroot on which all other files are based
webroot = /var/www/

# DEV Notes:
#
# Author: Daniel Black