logging as-is updated with pydantic model
logging_as-is.py (new file, 160 lines)
@@ -0,0 +1,160 @@
import logging
import sys
from logging import Filter, Formatter, Handler, Logger
from logging.handlers import RotatingFileHandler
from pathlib import Path
from typing import Any, Dict, Optional, Union

from appdaemon.appdaemon import AppDaemon
from appdaemon.logging import AppNameFormatter, DuplicateFilter
from pydantic import BaseModel, ConfigDict, model_validator
from rich import print


class LogConfig(BaseModel):
    name: str
    loglevel: str = 'INFO'

    # file and/or stream options
    filename: Union[str, Path] = 'STDOUT'
    log_generations: int = 3
    """Number of rotated logfiles retained before they are overwritten. If not specified, this defaults to 3 files."""
    log_size: int = 10**6

    # formatter options
    format: str = '{asctime} {levelname} {appname}: {message}'
    """Format string for the log file, in standard str.format() logger format."""
    date_format: str = '%Y-%m-%d %H:%M:%S.%f'
    """Format string specifying how the date is rendered, in standard datetime strftime() format."""
    style: str = '{'

    # filter options
    filter_threshold: float = 1
    """Number of repetitions of a log line allowed before filtering starts (default is 1). Setting filter_threshold to zero turns off log filtering entirely; since AppDaemon relies on this mechanism internally to prevent certain kinds of log loops, this is not recommended."""
    filter_timeout: float = 0.1
    """Timeout for log filtering. Duplicate log entries that are output less frequently than this value will not have filtering applied (default is 0.1 seconds)."""
    filter_repeat_delay: int = 5
    """While filtering, repeated messages are printed periodically, every filter_repeat_delay seconds (default is 5 seconds)."""

    logger: Optional[Logger] = None

    model_config = ConfigDict(arbitrary_types_allowed=True)

    def model_post_init(self, __context: Any) -> None:
        # runs after pydantic validation, so all fields are populated here
        self.logger = logging.getLogger(self.name)
        self.setLevel(self.loglevel)
        self.logger.propagate = False

        if not self.logger.filters:
            self.logger.addFilter(self.create_filter())

        if not self.logger.handlers:
            self.logger.addHandler(self.create_handler())

    def create_filter(self) -> Filter:
        # only safe once model_post_init has assigned self.logger; calling this
        # on a partially built instance would hand DuplicateFilter a None logger
        return DuplicateFilter(
            self.logger,
            self.filter_threshold,
            self.filter_repeat_delay,
            self.filter_timeout,
        )

    def create_handler(self) -> Handler:
        # str() guards against a Path value, which has no .upper() method
        filename = str(self.filename).upper()
        if filename == 'STDOUT':
            handler = logging.StreamHandler(stream=sys.stdout)
        elif filename == 'STDERR':
            handler = logging.StreamHandler(stream=sys.stderr)
        else:
            handler = RotatingFileHandler(
                self.filename,
                maxBytes=self.log_size,
                backupCount=self.log_generations,
            )

        handler.setFormatter(self.create_formatter())
        return handler

    def create_formatter(self) -> Formatter:
        formatter = AppNameFormatter(fmt=self.format, datefmt=self.date_format, style=self.style)
        ### IMPORTANT for production
        # formatter.formatTime = self.get_time
        # (needed for the %f microseconds directive, which time.strftime,
        # used by the default Formatter.formatTime, does not support)
        return formatter

    def setLevel(self, level: str):
        self.logger.setLevel(logging.getLevelNamesMapping()[level])
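        # Note: logging.getLevelNamesMapping() requires Python 3.11+. On older
        # interpreters, Logger.setLevel() also accepts the level name directly,
        # so self.logger.setLevel(level) would be an equivalent fallback.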


class Logging(BaseModel):
    config: Dict[str, LogConfig] = {}
    AD: Optional[AppDaemon] = None
    model_config = ConfigDict(arbitrary_types_allowed=True)

    @model_validator(mode='before')
    @classmethod
    def validate(cls, data):
        # merge the user-supplied config over the built-in log definitions
        full_config = {
            'main_log': {'name': 'AppDaemon'},
            'error_log': {'name': 'Error', 'filename': 'STDERR'},
            'diag_log': {'name': 'Diag'},
            'access_log': {'name': 'Access'},
        }
        if 'config' in data:
            for log_name, usr_cfg in data['config'].items():
                if 'alias' in usr_cfg:
                    # copy() so that aliasing does not mutate the default entry
                    # that is being aliased
                    aliased_cfg = full_config[usr_cfg.pop('alias')].copy()
                    aliased_cfg.update(usr_cfg)
                    usr_cfg = aliased_cfg
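                    # e.g. the __main__ demo's new_log entry {'alias': 'error_log',
                    # 'name': 'yoyoyo', 'log_generations': 5} starts from the
                    # error_log defaults and resolves to
                    # {'name': 'yoyoyo', 'filename': 'STDERR', 'log_generations': 5}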

                if log_name in full_config:
                    full_config[log_name].update(usr_cfg)
                else:
                    full_config[log_name] = usr_cfg

        data['config'] = full_config
        print(data)  # debug aid: rich-printed view of the resolved config
        return data

    @property
    def main_log(self) -> Logger:
        return self.config['main_log'].logger

    @property
    def error_log(self) -> Logger:
        return self.config['error_log'].logger

    @property
    def diag_log(self) -> Logger:
        return self.config['diag_log'].logger

    @property
    def access_log(self) -> Logger:
        return self.config['access_log'].logger

    def get_child(self, name: str) -> Logger:
        logger = self.main_log.getChild(name)
        logger.addFilter(self.config['main_log'].create_filter())

        ### IMPORTANT for production
        # if name in self.AD.module_debug:
        #     logger.setLevel(self.AD.module_debug[name])
        # else:
        #     logger.setLevel(self.AD.loglevel)

        return logger


if __name__ == '__main__':
    logs = Logging(
        config={
            'test_log': {'name': 'TestLog', 'loglevel': 'DEBUG'},
            'main_log': {'filename': './main.log'},
            'new_log': {'name': 'yoyoyo', 'log_generations': 5, 'alias': 'error_log'},
            'access_log': {'loglevel': 'WARNING', 'alias': 'main_log'},
        }
    )
    print(logs.model_dump())
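
For context, a minimal usage sketch of the loggers the demo above builds (the child name 'scheduler' is hypothetical, and ./main.log must be writable):

    logs.main_log.warning('to the rotating file ./main.log')
    logs.error_log.error('to STDERR')
    child = logs.get_child('scheduler')   # logger 'AppDaemon.scheduler'
    child.warning('propagates up to the main_log handlers')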