Skip to content

Commit

Permalink
Merge pull request #606 from onkelandy/lib.log
Browse files Browse the repository at this point in the history
lib.log: Introduce mapping and cache possibilites for memory logs
  • Loading branch information
msinn authored Dec 20, 2023
2 parents 2e85d81 + f26ebeb commit 53e1ba2
Show file tree
Hide file tree
Showing 2 changed files with 90 additions and 7 deletions.
8 changes: 5 additions & 3 deletions doc/user/source/referenz/logging/logging_handler.rst
Original file line number Diff line number Diff line change
Expand Up @@ -21,14 +21,14 @@ ShngTimedRotatingFileHandler
Der **ShngTimedRotatingFileHandler** ist eine Variante des **TimedRotatingFileHandler**, der im Python
Logging Modul vorhanden ist (logging.handlers.TimedRotatingFileHandler).

Der **TimedRotatingFileHandler** benennt die Backup Versionen einer Log Datei in einer Art und Weise um, die
im Handling umständlich sein kann, da die Datei dabei die normale Extension **.log** verliert:

smarthome-warnings.log --> smarthome-warnings.log.2021-04-06

Der **ShngTimedRotatingFileHandler** hat die gleiche Funktionalität und Konfigurierbarkeit wie der
**TimedRotatingFileHandler**, bildet den Namen für die Backup Dateien jedoch anders. Der Timestamp (2021-04-06)
wird nicht an das Ende des Dateinamens angefügt, sondern vor der Extension **.log** eingefügt:

smarthome-warnings.log --> smarthome-warnings.2021-04-06.log

Expand Down Expand Up @@ -99,15 +99,17 @@ In der Datei ``../etc/logging.yaml`` wird der **ShngMemLogHandler** im Abschnitt
logname: mem_heiz
maxlen: 60
level: INFO
cache: True
**ShngMemLogHandler** hat vier Parameter:

- ``logname:`` - Legt den Namen fest, unter dem das Memory Log aus der smartVISU oder dem **cli** Plugin
angesprochen werden kann.
- ``maxlen:`` - Legt fest, wie viele Einträge ein Memory Log aufnehmen kann, bevor der älteste Eintrag
gelöscht wird.
- ``level:`` - Legt den minimalen Log Level fest, der in das Memory Log geschrieben wird
- ``cache:`` - Ist dieser Parameter True, werden die Einträge im cache Ordner gesichert und beim Neustart geladen

|
Expand Down
89 changes: 85 additions & 4 deletions lib/log.py
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
import logging.config
import os
import datetime
import pickle

import collections

Expand Down Expand Up @@ -389,14 +390,25 @@ def clean(self, dt):

# ================================================================================

"""
"""
In the following part of the code, logging handlers are defined
"""

class ShngTimedRotatingFileHandler(logging.handlers.TimedRotatingFileHandler):
"""
TimedRotatingFilehandler with a different naming scheme for rotated files
"""
def __init__(self, filename, when='MIDNIGHT', interval=0, backupCount=0, encoding=None, delay=False, utc=False):
    """
    Initialize the timed rotating file handler.

    Captures the current date/time parts (year, month, day, hour and unix
    timestamp) as local variables so they can be substituted into
    placeholders in *filename* before the base handler opens the file.

    :param filename: log file name, may contain placeholders such as
                     {year}, {month}, {day}, {hour}, {stamp}
    :param when: rotation trigger, passed through to
                 logging.handlers.TimedRotatingFileHandler
    :param interval: rotation interval, passed through to the base class
    :param backupCount: number of rotated files to keep
    :param encoding: file encoding, passed through to the base class
    :param delay: if True, defer opening the file until first emit
    :param utc: if True, use UTC for rotation times
    """
    year = datetime.datetime.now().strftime("%Y")
    month = datetime.datetime.now().strftime("%m")
    day = datetime.datetime.now().strftime("%d")
    hour = datetime.datetime.now().strftime("%H")
    stamp = datetime.datetime.now().timestamp()
    try:
        # NOTE(review): this line is garbled in this view ("(unknown)" was
        # injected by the page scrape); upstream it presumably reads
        # eval(f"f'{filename}'") so that {year}/{month}/{day}/{hour}/{stamp}
        # placeholders in the configured filename get expanded — TODO confirm
        # against the repository. eval() on a config-supplied string is
        # unsafe; str.format() with an explicit mapping would be safer.
        filename = eval(f"f'(unknown)'")
    except Exception:
        # best-effort: if substitution fails, keep the filename as configured
        pass
    super().__init__(filename, when, interval, backupCount, encoding, delay, utc)

def getFilesToDelete(self):
"""
Expand Down Expand Up @@ -499,7 +511,8 @@ class ShngMemLogHandler(logging.StreamHandler):
"""
LogHandler used by MemLog
"""
def __init__(self, logname='undefined', maxlen=35, level=logging.NOTSET):
def __init__(self, logname='undefined', maxlen=35, level=logging.NOTSET,
mapping=['time', 'thread', 'level', 'message'], cache=False):
super().__init__()
self.setLevel(level)

Expand All @@ -509,11 +522,30 @@ def __init__(self, logname='undefined', maxlen=35, level=logging.NOTSET):

#logs_instance.logger.info(f"ShngMemLogHandler.__init__(): logname={logname}, self={self}, handlername={self.get_name()}, level={self.level}, levelname={logging.getLevelName(self.level)}, maxlen={maxlen}")

self._log = Log(self, logname, ['time', 'thread', 'level', 'message'], maxlen=maxlen, handler=self)

self._log = Log(self, logname, mapping, maxlen=maxlen, handler=self)
self._shtime = logs_instance._sh.shtime
# Dummy baseFileName for output in shngadmin (and priv_develop plugin)
self.baseFilename = "'" + self._log._name + "'"
self._cache = cache
self._maxlen = maxlen
# save cache files in var/log/cache directory
cache_directory = os.path.join(logs_instance._sh._var_dir, 'log'+os.path.sep, 'cache'+os.path.sep)
if cache is True:
if not os.path.isdir(cache_directory):
os.makedirs(cache_directory)
self._cachefile = cache_directory + self._log._name
try:
self.__last_change, self._logcache = self._cache_read(self._cachefile, self._shtime.tzinfo())
self.load(self._logcache)
logs_instance.logger.debug(f"Memory Log {self._log._name}: read cache: {self._logcache}")
except Exception:
try:
self._cache_write(logs_instance.logger, self._cachefile, self._log.export(int(self._maxlen)))
self._cache_read(self._cachefile, self._shtime.tzinfo())
logs_instance.logger.info(f"Memory Log {self._log._name}: generated cache file")
except Exception as e:
pass
logs_instance.logger.warning(f"Memory Log: problem reading cache: {e}")

def emit(self, record):
#logs_instance.logger.info(f"ShngMemLogHandler.emit() #1: logname={self._log._name}, handlername={self.get_name()}, level={self.level}, record.levelno={record.levelno}, record.levelname={record.levelname}, record={record}")
Expand All @@ -524,5 +556,54 @@ def emit(self, record):
self._log.add([timestamp, record.threadName, record.levelname, record.message])
except Exception:
self.handleError(record)
if self._cache is True:
try:
self._cache_write(logs_instance.logger, self._cachefile, self._log.export(int(self._maxlen)))
except Exception as e:
logs_instance.logger.warning(f"Memory Log {self._log._name}: could not update cache {e}")


##############################################################################################
# Cache Methods, taken from operationlog plugin by Jan Troelsen, Oliver Hinckel, Bernd Meiners
##############################################################################################
def load(self, logentries):
    """
    Load log entries (as previously read from the cache file) into the
    log object (see lib.log.Log()).

    Entries are added oldest-first: *logentries* is iterated in reverse,
    because the cached export apparently stores newest entries first —
    TODO confirm against Log.export().

    :param logentries: list of dicts, each keyed by the names in the log's
                       mapping (e.g. 'time', 'thread', 'level', 'message')
    """
    for logentry in reversed(logentries):
        # Build the row in mapping order. Unlike the previous hard-coded
        # if/elif chain over the four default names, this also works for a
        # custom ``mapping`` (a feature of this handler): an unknown name
        # yields None instead of being silently skipped, which would have
        # misaligned the row against the mapping.
        self._log.add([logentry.get(name) for name in self._log.mapping])

def _cache_read(self, filename, tz):
"""
This loads the cache from a file
:param filename: file to load from
:param tz: timezone
:return: [description]
:rtype: a tuple with datetime and values from file
"""
ts = os.path.getmtime(filename)
dt = datetime.datetime.fromtimestamp(ts, tz)
value = None
with open(filename, 'rb') as f:
value = pickle.load(f)
return (dt, value)


def _cache_write(self, logger, filename, value):
try:
with open(filename, 'wb') as f:
pickle.dump(value, f)
except IOError:
logger.warning("Could not write to {}".format(filename))

0 comments on commit 53e1ba2

Please sign in to comment.