# Fail2Ban filter to match web requests for selected URLs that don't exist
#
# This filter is aimed at blocking specific URLs that don't exist. These
# could be a set of URLs placed in a Disallow: directive in robots.txt, or
# web services that don't exist but that bots probe for while searching for
# exploitable content. This filter is designed to have a low false positive
# rate.
#
# An alternative to this is the apache-noscript filter which blocks all
# types of scripts that don't exist.
#
# The blocked URLs are normally a predefined list of exploitable or valuable
# web services that are hidden or aren't actually installed.
#
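# For illustration only (timestamp, client address and paths below are made-up
# examples, and the exact error-log layout varies with the Apache version),
# the kind of lines this filter targets look like:
#
#   [Sat Jun 01 12:00:00 2024] [error] [client 192.0.2.1] File does not exist: /var/www/phpmyadmin
#   [Sat Jun 01 12:00:00 2024] [error] [client 192.0.2.1] script '/var/www/cgi-bin/setup.php' not found or unable to stat
#
# Whether a given request is actually matched depends on the "block" pattern
# loaded from botsearch-common.conf and on the webroot setting below.
#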
[INCLUDES]
# overwrite with apache-common.local if _apache_error_client is incorrect.
# Load regexes for filtering from botsearch-common.conf
before = apache-common.conf
         botsearch-common.conf

[Definition]
prefregex = ^%(_apache_error_client)s (?:AH\d+: )?<F-CONTENT>.+</F-CONTENT>$
failregex = ^(?:File does not exist|script not found or unable to stat): <webroot>\S*(%(block)s)(, referer: \S+)?\s*$
            ^script '<webroot>\S*(%(block)s)\S*' not found or unable to stat(, referer: \S+)?\s*$
ignoreregex =
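
# The prefregex/failregex above can be tested against an existing error log
# with the fail2ban-regex tool, for example (the log path is an assumption,
# adjust to the local setup):
#
#   fail2ban-regex /var/log/apache2/error.log /etc/fail2ban/filter.d/apache-botsearch.conf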
# Webroot represents the webroot on which all other files are based
webroot = /var/www/
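
# A site-specific webroot is best set in a .local override instead of editing
# this file; a minimal sketch (path and file name are only examples):
#
#   # /etc/fail2ban/filter.d/apache-botsearch.local
#   [Definition]
#   webroot = /srv/www/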
# DEV Notes:
#
# Author: Daniel Black
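#
# Usage sketch: this filter is normally enabled from a jail definition, e.g.
# in /etc/fail2ban/jail.local (jail name, port and logpath below are typical
# values and assumptions that may need adjusting):
#
#   [apache-botsearch]
#   enabled = true
#   port    = http,https
#   logpath = %(apache_error_log)s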