#
# Copyright 2006 sense.lab e.V.
#
# This file is part of the CryptoBox.
#
# The CryptoBox is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# The CryptoBox is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with the CryptoBox; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
"""The logs feature of the CryptoBox.
"""

__revision__ = "$Id"
import datetime
import os
import re

import cherrypy

import cryptobox.plugins.base
## all log levels known to the CryptoBox, ordered from lowest to highest
LOG_LEVELS = [ 'DEBUG', 'INFO', 'NOTICE', 'WARNING', 'ERROR' ]

## matches one line of the log file, e.g.:
##   "2006-12-11 15:16:10,123 WARNING: some message text"
## the named groups deliver the timestamp parts, the level and the message
_LEVEL_PATTERN = "|".join([ "(?:%s)" % one_level for one_level in LOG_LEVELS ])
LINE_REGEX = re.compile(
		r"(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2}) "
		r"(?P<hour>\d{2}):(?P<minute>\d{2}):\d{2},\d{3} (?P<level>"
		+ _LEVEL_PATTERN + r"): (?P<text>.*)$")
class logs(cryptobox.plugins.base.CryptoBoxPlugin):
    """The logs feature of the CryptoBox.

    Display the most recent part of the CryptoBox log in the web interface
    and offer the complete log file for download (file destination only).
    """

    ## a system-wide plugin (not bound to a specific volume)
    plugin_capabilities = [ "system" ]
    ## reachable via the 'preferences' menu
    plugin_visibility = [ "preferences" ]
    ## reading the log does not require authentication
    request_auth = False
    ## ordering hint for the plugin listing - TODO: confirm sort direction
    rank = 90


    def do_action(self, lines=50, size=3000, level=None):
        """Show the latest part of the log file.

        @param lines: maximum number of log lines to display (positive int)
        @param size: maximum amount of log data in bytes (positive int)
        @param level: if given, show only this level and higher ones
        @return: the name of the template to be rendered
        """
        ## filter input - fall back to the defaults for any invalid value
        try:
            lines = int(lines)
            if lines <= 0:
                raise ValueError
        except (TypeError, ValueError):
            ## TypeError covers non-numeric objects (e.g. None)
            self.cbox.log.info("[logs] invalid line number: %s" % str(lines))
            lines = 50
        try:
            size = int(size)
            if size <= 0:
                raise ValueError
        except (TypeError, ValueError):
            self.cbox.log.info("[logs] invalid log size: %s" % str(size))
            size = 3000
        if level is not None:
            level = str(level)
            if level not in LOG_LEVELS:
                self.cbox.log.info("[logs] invalid log level: %s" % str(level))
                level = None
        ## export every filtered line (with meta data) to the template dataset
        for (index, line) in enumerate(
                self.__filter_log_content(lines, size, level)):
            self.__set_line_hdf_data(self.hdf_prefix + "Content.%d" % index, line)
        self.hdf[self.hdf_prefix + "Destination"] = \
                self.cbox.prefs["Log"]["Destination"].lower()
        return "show_log"


    @cherrypy.expose
    def download(self, **kargs):
        """Download the complete log file.

        **kargs are necessary - we have to ignore 'weblang' and so on ...

        @return: the log file as an attachment, or an empty string if the
            log is not written to a file
        """
        log_file = self.__get_log_destination_file()
        if log_file is None:
            ## logging does not go to a file - there is nothing to deliver
            return ""
        else:
            return cherrypy.lib.cptools.serveFile(log_file,
                    disposition="attachment", name="cryptobox_logfile.txt")


    def get_status(self):
        """The current status includes the log configuration details.

        @return: 'level:destination:details' as found in the [Log] settings
        """
        return "%s:%s:%s" % (
                self.cbox.prefs["Log"]["Level"],
                self.cbox.prefs["Log"]["Destination"],
                self.cbox.prefs["Log"]["Details"])


    def __filter_log_content(self, lines, max_size, level):
        """Filter, sort and shorten the log content.

        @param lines: maximum number of lines (falsy value disables the limit)
        @param max_size: maximum accumulated size in bytes (falsy disables)
        @param level: minimum log level, or None to accept every line
        @return: list of log lines, most recent first
        """
        if level and level in LOG_LEVELS:
            ## only the given and higher levels are accepted
            filtered_levels = LOG_LEVELS[LOG_LEVELS.index(level):]
            content = []
            current_length = 0
            for line in self.__get_log_data():
                ## search for matching lines for the given log level
                for one_level in filtered_levels:
                    if line.find(" %s: " % one_level) != -1:
                        break
                else:
                    ## the line does not contain an appropriate level name
                    continue
                ## we found a line that fits
                content.append(line)
                current_length += len(line)
                if lines and len(content) >= lines:
                    break
                if max_size and current_length >= max_size:
                    break
        else:
            ## no level filter - let the data source apply the limits itself
            content = self.__get_log_data(lines, max_size)
        return content


    def __set_line_hdf_data(self, hdf_prefix, line):
        """Parse the log line for time and log level.

        If parsing fails, then the output line is simply displayed without
        meta information.

        @param hdf_prefix: hdf name prefix for this line's attributes
        @param line: one raw line of the log file
        """
        ## always export the raw text first - it is replaced below on success
        self.hdf[hdf_prefix + ".Text"] = line.strip()
        match = LINE_REGEX.match(line)
        if not match:
            ## we could not parse the line - just return the text without meta info
            return
        ## matching was successful - we can parse the line for details
        ## calculate time difference of log line (aka: age of event)
        try:
            (year, month, day, hour, minute) = [ int(item) for item in
                    match.group('year', 'month', 'day', 'hour', 'minute') ]
            ## timediff is a timedelta object
            timediff = datetime.datetime.today() - \
                    datetime.datetime(year, month, day, hour, minute)
            ## the time units (see below) correspond to the names within the
            ## language file: Text.TimeUnits.Days ...
            ## use floor division ('//') - the template expects whole numbers
            if timediff.days >= 1:
                self.hdf[hdf_prefix + ".TimeDiff.Unit"] = 'Days'
                self.hdf[hdf_prefix + ".TimeDiff.Value"] = timediff.days
            elif timediff.seconds >= 3600:
                self.hdf[hdf_prefix + ".TimeDiff.Unit"] = 'Hours'
                self.hdf[hdf_prefix + ".TimeDiff.Value"] = timediff.seconds // 3600
            elif timediff.seconds >= 60:
                self.hdf[hdf_prefix + ".TimeDiff.Unit"] = 'Minutes'
                self.hdf[hdf_prefix + ".TimeDiff.Value"] = timediff.seconds // 60
            else:
                self.hdf[hdf_prefix + ".TimeDiff.Unit"] = 'Seconds'
                self.hdf[hdf_prefix + ".TimeDiff.Value"] = timediff.seconds
        except (OverflowError, TypeError, ValueError, IndexError):
            ## e.g. an out-of-range date - silently skip the time meta data
            pass
        ## retrieve the level
        try:
            self.hdf[hdf_prefix + ".Level"] = match.group('level')
        except IndexError:
            pass
        try:
            self.hdf[hdf_prefix + ".Text"] = match.group('text').strip()
        except IndexError:
            pass


    def __get_log_destination_file(self):
        """For non-file log destinations return 'None' and output a warning

        @return: the absolute path of the log file, or None
        """
        try:
            if self.cbox.prefs["Log"]["Destination"].upper() == "FILE":
                return os.path.abspath(self.cbox.prefs["Log"]["Details"])
            else:
                return None
        except KeyError:
            self.cbox.log.error(
                    "could not evaluate one of the following config settings: "
                    + "[Log]->Destination or [Log]->Details")
            return None


    def __get_log_data(self, lines=None, max_size=None):
        """get the most recent log entries of the log file

        the maximum number and size of these entries can be limited by
        'lines' and 'max_size'

        @return: list of log lines, most recent first (empty list on failure)
        """
        log_file = self.__get_log_destination_file()
        ## return nothing if the currently selected log output is not a file
        if log_file is None:
            return []
        try:
            fdesc = open(log_file, "r")
            if max_size:
                ## seek relative to the end of the file - but only if the
                ## file is big enough, since seeking to a position before
                ## the beginning of a regular file raises an IOError
                if os.path.getsize(log_file) > max_size:
                    fdesc.seek(-max_size, 2)
            content = fdesc.readlines()
            fdesc.close()
        except (IOError, OSError):
            ## OSError covers failures of os.path.getsize
            self.cbox.log.warn("failed to read the log file (%s)" % log_file)
            return []
        if lines:
            content = content[-lines:]
        ## the most recent entries should come first
        content.reverse()
        return content