introduces a new configuration parameter "logtype" (default "file" for file-backends, and "journal" for journal-backends);

common.conf: differentiate "__prefix_line" for file/journal logtypes (speedup and fix parsing of systemd-journal);
samplestestcase.py: extend testSampleRegexsFactory to allow coverage of the journal logtype;
closes gh-2383: asterisk can log a timestamp if it logs into systemd-journal (the regex is extended with an optional part matching this)
pull/2387/head
sebres 6 years ago
parent eddd0d2f25
commit e268bf97d4

@ -32,6 +32,10 @@ failregex = ^Registration from '[^']*' failed for '<HOST>(:\d+)?' - (?:Wrong pas
# FreePBX (todo: make optional in v.0.10): # FreePBX (todo: make optional in v.0.10):
# ^(%(__prefix_line)s|\[\]\s*WARNING%(__pid_re)s:?(?:\[C-[\da-f]*\])? )[^:]+: Friendly Scanner from <HOST>$ # ^(%(__prefix_line)s|\[\]\s*WARNING%(__pid_re)s:?(?:\[C-[\da-f]*\])? )[^:]+: Friendly Scanner from <HOST>$
__extra_timestamp = (?:\[[^\]]+\]\s+)?
__prefix_line_journal = %(known/__prefix_line_journal)s%(__extra_timestamp)s
ignoreregex = ignoreregex =
datepattern = {^LN-BEG} datepattern = {^LN-BEG}
@ -44,3 +48,5 @@ datepattern = {^LN-BEG}
# First regex: channels/chan_sip.c # First regex: channels/chan_sip.c
# #
# main/logger.c:ast_log_vsyslog - "in {functionname}:" only occurs in syslog # main/logger.c:ast_log_vsyslog - "in {functionname}:" only occurs in syslog
journalmatch = _SYSTEMD_UNIT=asterisk.service

@ -10,6 +10,8 @@ after = common.local
[DEFAULT] [DEFAULT]
logtype = file
# Daemon definition is to be specialized (if needed) in .conf file # Daemon definition is to be specialized (if needed) in .conf file
_daemon = \S* _daemon = \S*
@ -55,7 +57,13 @@ __date_ambit = (?:\[\])
# [bsdverbose]? [hostname] [vserver tag] daemon_id spaces # [bsdverbose]? [hostname] [vserver tag] daemon_id spaces
# #
# This can be optional (for instance if we match named native log files) # This can be optional (for instance if we match named native log files)
__prefix_line = %(__date_ambit)s?\s*(?:%(__bsd_syslog_verbose)s\s+)?(?:%(__hostname)s\s+)?(?:%(__kernel_prefix)s\s+)?(?:%(__vserver)s\s+)?(?:%(__daemon_combs_re)s\s+)?(?:%(__daemon_extra_re)s\s+)? __prefix_line = <__prefix_line_<logtype>>
# Common line prefixes for logtype "file":
__prefix_line_file = %(__date_ambit)s?\s*(?:%(__bsd_syslog_verbose)s\s+)?(?:%(__hostname)s\s+)?(?:%(__kernel_prefix)s\s+)?(?:%(__vserver)s\s+)?(?:%(__daemon_combs_re)s\s+)?(?:%(__daemon_extra_re)s\s+)?
# Common (short) line prefix for logtype "journal" (corresponds output of formatJournalEntry):
__prefix_line_journal = \s*(?:%(__hostname)s\s+)?(?:%(_daemon)s%(__pid_re)s?:?\s+)?(?:%(__kernel_prefix)s\s+)?
# PAM authentication mechanism check for failures, e.g.: pam_unix, pam_sss, # PAM authentication mechanism check for failures, e.g.: pam_unix, pam_sss,
# pam_ldap # pam_ldap

@ -114,3 +114,10 @@ Nov 4 18:30:40 localhost asterisk[32229]: NOTICE[32257]: chan_sip.c:23417 in han
# failJSON: { "time": "2005-03-01T15:35:53", "match": true , "host": "192.0.2.2", "desc": "log over remote syslog server" } # failJSON: { "time": "2005-03-01T15:35:53", "match": true , "host": "192.0.2.2", "desc": "log over remote syslog server" }
Mar 1 15:35:53 pbx asterisk[2350]: WARNING[1195][C-00000b43]: Ext. s:6 in @ from-sip-external: "Rejecting unknown SIP connection from 192.0.2.2" Mar 1 15:35:53 pbx asterisk[2350]: WARNING[1195][C-00000b43]: Ext. s:6 in @ from-sip-external: "Rejecting unknown SIP connection from 192.0.2.2"
# filterOptions: [{"logtype": "journal", "test.prefix-line": "server asterisk[123]: "}]
# failJSON: { "match": true , "host": "192.0.2.1", "desc": "systemd-journal entry" }
NOTICE[566]: chan_sip.c:28926 handle_request_register: Registration from '"28" <sip:28@127.0.0.100>' failed for '192.0.2.1:7998' - Wrong password
# failJSON: { "match": true , "host": "192.0.2.2", "desc": "systemd-journal entry (with additional timestamp in message)" }
[Mar 27 10:06:14] NOTICE[566]: chan_sip.c:28926 handle_request_register: Registration from '"1000" <sip:1000@127.0.0.100>' failed for '192.0.2.2:7998' - Wrong password

@ -34,7 +34,10 @@ import unittest
from ..server.failregex import Regex from ..server.failregex import Regex
from ..server.filter import Filter from ..server.filter import Filter
from ..client.filterreader import FilterReader from ..client.filterreader import FilterReader
from .utils import setUpMyTime, tearDownMyTime, CONFIG_DIR from .utils import setUpMyTime, tearDownMyTime, TEST_NOW, CONFIG_DIR
# test-time in UTC as string in isoformat (2005-08-14T10:00:00):
TEST_NOW_STR = datetime.datetime.utcfromtimestamp(TEST_NOW).isoformat()
TEST_CONFIG_DIR = os.path.join(os.path.dirname(__file__), "config") TEST_CONFIG_DIR = os.path.join(os.path.dirname(__file__), "config")
TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files") TEST_FILES_DIR = os.path.join(os.path.dirname(__file__), "files")
@ -173,7 +176,7 @@ def testSampleRegexsFactory(name, basedir):
fltName = name + fltName fltName = name + fltName
# read it: # read it:
flt = self._readFilter(fltName, name, basedir, opts=opts) flt = self._readFilter(fltName, name, basedir, opts=opts)
self._filterTests.append((fltName, flt)) self._filterTests.append((fltName, flt, opts))
continue continue
# addFILE - filename to "include" test-files should be additionally parsed: # addFILE - filename to "include" test-files should be additionally parsed:
if jsonREMatch.group(1) == 'addFILE': if jsonREMatch.group(1) == 'addFILE':
@ -194,17 +197,25 @@ def testSampleRegexsFactory(name, basedir):
if not self._filterTests: if not self._filterTests:
fltName = name fltName = name
flt = self._readFilter(fltName, name, basedir, opts=None) flt = self._readFilter(fltName, name, basedir, opts=None)
self._filterTests = [(fltName, flt)] self._filterTests = [(fltName, flt, {})]
# process line using several filter options (if specified in the test-file): # process line using several filter options (if specified in the test-file):
for fltName, flt in self._filterTests: for fltName, flt, opts in self._filterTests:
flt, regexsUsedIdx = flt flt, regexsUsedIdx = flt
regexList = flt.getFailRegex() regexList = flt.getFailRegex()
failregex = -1 failregex = -1
try: try:
fail = {} fail = {}
ret = flt.processLine(line) # for logtype "journal" we don't need parse timestamp (simulate real systemd-backend handling):
checktime = True
if opts.get('logtype') != 'journal':
ret = flt.processLine(line)
else: # simulate journal processing, time is known from journal (formatJournalEntry):
checktime = False
if opts.get('test.prefix-line'): # journal backends creates common prefix-line:
line = opts.get('test.prefix-line') + line
ret = flt.processLine(('', TEST_NOW_STR, line.rstrip('\r\n')), TEST_NOW)
if not ret: if not ret:
# Bypass if filter constraint specified: # Bypass if filter constraint specified:
if faildata.get('filter') and name != faildata.get('filter'): if faildata.get('filter') and name != faildata.get('filter'):
@ -245,20 +256,18 @@ def testSampleRegexsFactory(name, basedir):
self.assertEqual(fv, v) self.assertEqual(fv, v)
t = faildata.get("time", None) t = faildata.get("time", None)
try: if checktime or t is not None:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S") try:
except ValueError: jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f") except ValueError:
jsonTimeLocal = datetime.datetime.strptime(t, "%Y-%m-%dT%H:%M:%S.%f")
jsonTime = time.mktime(jsonTimeLocal.timetuple()) jsonTime = time.mktime(jsonTimeLocal.timetuple())
jsonTime += jsonTimeLocal.microsecond / 1000000
jsonTime += jsonTimeLocal.microsecond / 1000000 self.assertEqual(fail2banTime, jsonTime,
"UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" %
self.assertEqual(fail2banTime, jsonTime, (fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)),
"UTC Time mismatch %s (%s) != %s (%s) (diff %.3f seconds)" % jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
(fail2banTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(fail2banTime)), fail2banTime - jsonTime) )
jsonTime, time.strftime("%Y-%m-%dT%H:%M:%S", time.gmtime(jsonTime)),
fail2banTime - jsonTime) )
regexsUsedIdx.add(failregex) regexsUsedIdx.add(failregex)
regexsUsedRe.add(regexList[failregex]) regexsUsedRe.add(regexList[failregex])

Loading…
Cancel
Save