diff --git a/src/daqpytools/apps/logging_demonstrator.py b/src/daqpytools/apps/logging_demonstrator.py index 3967bcc..400f6f1 100644 --- a/src/daqpytools/apps/logging_demonstrator.py +++ b/src/daqpytools/apps/logging_demonstrator.py @@ -6,6 +6,7 @@ from rich.traceback import install as rich_traceback_install from daqpytools.logging.exceptions import LoggerSetupError +from daqpytools.logging.formatter import CONTEXT_SETTINGS from daqpytools.logging.handlers import ( HandlerType, LogHandlerConf, @@ -37,7 +38,244 @@ def validate_test_configuration( return -@click.command() + +def test_main_functions(main_logger:logging.Logger) -> None: + """Demonstrates the main functionality of the daqpytools logger. + + Args: + main_logger (logging.Logger): A logger to print messages with + """ + main_logger.debug("example debug message") + main_logger.info("example info message") + main_logger.warning("example warning message") + + # Error and critical will print twice when using stream handlers + # This is because StreamHandlers add in a stream each for stdout and stderr + # stderr log level set to error, so at error or above both handlers will fire + main_logger.error("example error message") + main_logger.critical("example critical message") + main_logger.info( + "[dim cyan]You[/dim cyan] " + "[bold green]can[/bold green] " + "[bold yellow]also[/bold yellow] " + "[bold red]add[/bold red] " + "[bold white on red]colours[/bold white on red] " + "[bold red]to[/bold red] " + "[bold yellow]your[/bold yellow] " + "[bold green]log[/bold green] " + "[dim cyan]record[/dim cyan] " + "[bold green]text[/bold green] " + "[bold yellow]with[/bold yellow] " + "[bold green]markdown[/bold green]!" + ) + main_logger.warning( + "Note: [red] the daqpytools.logging.formatter removes markdown-style " + "comments from the log record message [/red]." 
+ ) + +def test_child_logger( + logger_name:str, + log_level: int|str, + disable_logger_inheritance:bool, + rich_handler:bool, + file_handler_path: str, + stream_handlers:bool, +) -> None: + """Demonstrates inheritance with child handlers. + + Args: + logger_name (str): Name of the initial parent logger. + log_level (int | str): Log level to set for the logger. + + disable_logger_inheritance (bool): If true, disable logger inheritance so each + logger instance only uses the logger handlers assigned to the given logger + instance. + rich_handler (bool): If true, set up a rich handler. + file_handler_path (str): If provided, set up a file handler with the given path. + stream_handlers (bool): If true, set up stdout and stderr stream handlers. + """ + nested_logger: logging.Logger = get_daq_logger( + logger_name=f"{logger_name}.child", + log_level=log_level, + use_parent_handlers=not disable_logger_inheritance, + rich_handler=rich_handler, + file_handler_path=file_handler_path, + stream_handlers=stream_handlers, + ) + nested_logger.debug("example debug message") + nested_logger.info("example info message") + nested_logger.warning("example warning message") + nested_logger.error("example error message") + nested_logger.critical("example critical message") + nested_logger.info( + "[dim cyan]You[/dim cyan] " + "[bold green]can[/bold green] " + "[bold yellow]also[/bold yellow] " + "[bold red]add[/bold red] " + "[bold white on red]colours[/bold white on red] " + "[bold red]to[/bold red] " + "[bold yellow]your[/bold yellow] " + "[bold green]log[/bold green] " + "[dim cyan]record[/dim cyan] " + "[bold green]text[/bold green] " + "[bold yellow]with[/bold yellow] " + "[bold green]markdown[/bold green]!" + ) + nested_logger.warning( + "Note: [red] the daqpytools.logging.formatter removes markdown-style " + "comments from the log record message [/red]." + ) + +def test_throttle(main_logger: logging.Logger) -> None: + """Demonstrates the throttle filter. 
+ + Args: + main_logger (logging.Logger): A logger to print messages with + """ + def emit_err(i: int) -> None: + """Short function that prints out a log message. + This is used to ensure that the log message is kept on the same line, + but also to feed in how many repetitions it has gone through + Args: + i (int): Integer to be transmitted in the log message. + + Returns: + None. + """ + throttle_msg = f"Throttle test {i}" + main_logger.info(throttle_msg, extra={"handlers": + [HandlerType.Rich, HandlerType.Throttle] + }) + + for i in range(50): + emit_err(i) + main_logger.warning("Sleeping for 30 seconds") + time.sleep(31) + for i in range(1000): + emit_err(i) + + +def test_handlertypes(main_logger: logging.Logger) -> None: + """Demonstrates the handlertype functionality. + Note - to have the messages published, the relevant handlers must be enabled. + + Args: + main_logger (logging.Logger): A logger to print messages with + """ + #* Test choosing which handler to use individually + main_logger.debug("Default go to tty / rich / file when added") + main_logger.critical("Should only go to tty", + extra={"handlers": [HandlerType.Rich]} + ) + main_logger.critical("Should only go to file", + extra={"handlers": [HandlerType.File]} + ) + main_logger.critical("Should only go to Lstdout", + extra={"handlers": [HandlerType.Lstdout]} + ) + main_logger.critical("Should only go to Throttle", + extra={"handlers": [HandlerType.Throttle]} + ) + main_logger.critical("Should go to tty and Protobufstream", + extra={"handlers": [HandlerType.Rich, HandlerType.Protobufstream]} + ) + + +def test_handlerconf(main_logger: logging.Logger) -> None: + """Demonstrates the main functionality of the handlerconf. With ERS support. 
+ + Args: + main_logger (logging.Logger): A logger to print messages with + """ + #* Test the routing to the Base and Opmon streams + handlerconf = LogHandlerConf(init_ers=False) # False is the default + main_logger.warning("Handlerconf Base", extra=handlerconf.Base) + main_logger.warning("Handlerconf Opmon", extra=handlerconf.Opmon) + + #* Interlude: Inject sample environment variables + os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstdout" + os.environ["DUNEDAQ_ERS_INFO"] = "erstrace,throttle,lstdout" + os.environ["DUNEDAQ_ERS_FATAL"] = "erstrace,lstdout" + os.environ["DUNEDAQ_ERS_ERROR"] = ( + "erstrace," + "throttle," + "lstdout," + "protobufstream(monkafka.cern.ch:30092)" + ) + + info_out = f"{os.getenv('DUNEDAQ_ERS_ERROR')=}" + main_logger.info(info_out) + critical_out = f"{os.getenv('DUNEDAQ_ERS_CRITICAL')=}" + main_logger.info(critical_out) + + #* Init ERS stream + # Note to developers: + # HandlerConf will require that these variables are defined! + # They come from the OKS, so whatever tools you have should have this up + # You can also initialise via handlerconf = LogHandlerConf(init_ers=True) + handlerconf.init_ers_stream() + + #* Test ERS Streams + main_logger.warning("ERS Warning erstrace,throttle,lstdout", extra=handlerconf.ERS) + main_logger.info("ERS Info erstrace,throttle,lstdout", extra=handlerconf.ERS) + main_logger.critical("ERS Fatal erstrace,lstdout", extra=handlerconf.ERS) + main_logger.debug("ERS Debug none", extra=handlerconf.ERS) + main_logger.error("ERS Error erstrace,throttle,lstdout," + "protobufstream(monkafka.cern.ch:30092)", + extra=handlerconf.ERS + ) + +class AllOptionsCommand(click.Command): + """Parse the arguments passed and validate they are acceptable, otherwise print the + relevant options. + + Click's default functionality does not pick up the optional arguments well. This + catches any incorrect options or typos, makes the log clearer. 
+ """ + def parse_args(self, ctx: click.Context, args: list[str]) -> None: + """Parse the arguments passed to the click command, format them if necessary. + + Args: + ctx: click context from which the commands are called + args: list of args to format + + Returns: + None + + Raises: + None + """ + # If the arguments are valid, run the command with the relevant arguments + try: + return super().parse_args(ctx, args) + except click.NoSuchOption as e: + # Get the list of available options, format them in a readable way + formatted_opts = [] + + for param in self.params: + if isinstance(param, click.Option) and param.opts: + # For each option, format as "short_opt (long_opt)" + opts = sorted(param.opts, key=len) + + if len(opts) > 1: + # Multiple options + formatted_opts.append(f"{opts[0]} ({opts[1]})") + else: + # One long option + formatted_opts.append(opts[0]) + + # Inform the user why what they have tried to use is wrong + click.echo(ctx.get_usage() + "\n", err=True) + + # Print formatted available options + msg = ( + f"Error: No such option: {e.option_name}. 
\n" + f"Available options: {', '.join(sorted(formatted_opts))}" + ) + click.echo(msg, err=True) + ctx.exit(2) + +@click.command(cls=AllOptionsCommand, context_settings=CONTEXT_SETTINGS) @click.option( "-l", "--log-level", @@ -45,7 +283,7 @@ def validate_test_configuration( default="DEBUG", help="Set the log level.", ) -@click.option("-r", "--rich_handler", is_flag=True, help=("Set up a rich handler")) +@click.option("-r", "--rich-handler", is_flag=True, help=("Set up a rich handler")) @click.option( "-f", "--file-handler-path", @@ -56,6 +294,7 @@ def validate_test_configuration( ), ) @click.option( + "-e", "--ersprotobufstream", is_flag=True, help=( @@ -63,12 +302,29 @@ def validate_test_configuration( ) ) @click.option( + "-ht", "--handlertypes", is_flag=True, help=( "Demonstrate HandlerTypes functionality" ) ) +@click.option( + "-hc", + "--handlerconf", + is_flag=True, + help=( + "Demonstrate HandlerConf functionality" + ) + ) +@click.option( + "-sb", + "--suppress-basic", + is_flag=True, + help=( + "Suppress demonstration of basic logging functionality" + ) + ) @click.option( "-t", "--throttle", @@ -79,7 +335,7 @@ def validate_test_configuration( ) @click.option( "-s", - "--stream_handlers", + "--stream-handlers", is_flag=True, help=("Set up stdout and stderr stream handlers"), ) @@ -110,7 +366,9 @@ def main( disable_logger_inheritance: bool, ersprotobufstream: bool, handlertypes:bool, + handlerconf:bool, throttle: bool, + suppress_basic: bool ) -> None: """Demonstrate use of the daq_logging class with daqpyutils_logging_demonstrator. Note - if you are seeing output logs without any explicit handlers assigned, this is @@ -127,11 +385,14 @@ def main( logger instance only uses the logger handlers assigned to the given logger instance. ersprotobufstream (bool): If true, sets up an ERS protobuf handler. Error msg - are demonstrated in the HandlerType demonstration, requiring handlertypes - to be set to true. 
- handlertypes (bool): If true, demonstrates the advanced feature of HandlerTypes + are demonstrated in the HandlerConf demonstration, requiring handlerconf + to be set to true. The topic for these tests is session_tester. + handlertypes (bool): If true, demonstrates the advanced feature of HandlerTypes. + handlerconf (bool): If true, demonstrates the advanced feature of HandlerConf and streams. throttle (bool): If true, demonstrates the throttling feature. Requires Rich. + suppress_basic (bool): If true, suppresses basic functionality. + Useful to only test the advanced features of logging Returns: None @@ -140,7 +401,6 @@ LoggerSetupError: If no handlers are set up for the logger. """ logger_name = "daqpytools_logging_demonstrator" - main_logger: logging.Logger = get_daq_logger( logger_name=logger_name, log_level=log_level, @@ -151,145 +411,26 @@ ers_kafka_handler=ersprotobufstream, throttle=throttle ) - main_logger.debug("example debug message") - main_logger.info("example info message") - main_logger.warning("example warning message") - main_logger.error("example error message") - main_logger.critical("example critical message") - main_logger.info( - "[dim cyan]You[/dim cyan] " - "[bold green]can[/bold green] " - "[bold yellow]also[/bold yellow] " - "[bold red]add[/bold red] " - "[bold white on red]colours[/bold white on red] " - "[bold red]to[/bold red] " - "[bold yellow]your[/bold yellow] " - "[bold green]log[/bold green] " - "[dim cyan]record[/dim cyan] " - "[bold green]text[/bold green] " - "[bold yellow]with[/bold yellow] " - "[bold green]markdown[/bold green]!" - ) - main_logger.warning( - "Note: [red] the daqpytools.logging.formatter removes markdown-style " - "comments from the log record message [/red]." 
- ) - if child_logger: - nested_logger: logging.Logger = get_daq_logger( - logger_name=f"{logger_name}.child", - log_level=log_level, - use_parent_handlers=not disable_logger_inheritance, - rich_handler=rich_handler, - file_handler_path=file_handler_path, - stream_handlers=stream_handlers, - ) - nested_logger.debug("example debug message") - nested_logger.info("example info message") - nested_logger.warning("example warning message") - nested_logger.error("example error message") - nested_logger.critical("example critical message") - nested_logger.info( - "[dim cyan]You[/dim cyan] " - "[bold green]can[/bold green] " - "[bold yellow]also[/bold yellow] " - "[bold red]add[/bold red] " - "[bold white on red]colours[/bold white on red] " - "[bold red]to[/bold red] " - "[bold yellow]your[/bold yellow] " - "[bold green]log[/bold green] " - "[dim cyan]record[/dim cyan] " - "[bold green]text[/bold green] " - "[bold yellow]with[/bold yellow] " - "[bold green]markdown[/bold green]!" - ) - nested_logger.warning( - "Note: [red] the daqpytools.logging.formatter removes markdown-style " - "comments from the log record message [/red]." + if not suppress_basic: + test_main_functions(main_logger) + + if child_logger: + test_child_logger( + logger_name, + log_level, + disable_logger_inheritance, + rich_handler, + file_handler_path, + stream_handlers ) - - # Throttle demo - def emit_err(i: int) -> None: - """Short function that prints out a log message. - This is used to ensure that the log message is kept on the same line, - but also to feed in how many repetitions it has gone through - Args: - i (int): Integer to be transmitted in the log message. - - Returns: - None. 
- """ - throttle_msg = f"Throttle test {i}" - main_logger.info(throttle_msg, extra={"handlers": - [HandlerType.Rich, HandlerType.Throttle] - }) - if throttle: - for i in range(50): - emit_err(i) - main_logger.warning("Sleeping for 30 seconds") - time.sleep(31) - for i in range(1000): - emit_err(i) - - - - # HandlerTypes demo - if not handlertypes: - return - - #* Test choosing which handler to use individually - main_logger.debug("Default go to tty / rich / file when added") - main_logger.critical("Should only go to tty", - extra={"handlers": [HandlerType.Rich]} - ) - main_logger.critical("Should only go to file", - extra={"handlers": [HandlerType.File]} - ) - main_logger.critical("Should only go to Lstdout", - extra={"handlers": [HandlerType.Lstdout]} - ) - main_logger.critical("Should only go to Throttle", - extra={"handlers": [HandlerType.Throttle]} - ) - main_logger.critical("Should go to tty and Protobufstream", - extra={"handlers": [HandlerType.Rich, HandlerType.Protobufstream]} - ) - - - #* Interlude: Inject sample environment variables - os.environ["DUNEDAQ_ERS_WARNING"] = "erstrace,throttle,lstdout" - os.environ["DUNEDAQ_ERS_INFO"] = "erstrace,throttle,lstdout" - os.environ["DUNEDAQ_ERS_FATAL"] = "erstrace,lstdout" - os.environ["DUNEDAQ_ERS_ERROR"] = ( - "erstrace," - "throttle," - "lstdout," - "protobufstream(monkafka.cern.ch:30092)" - ) - - info_out = f"{os.getenv('DUNEDAQ_ERS_ERROR')=}" - main_logger.info(info_out) - critical_out = f"{os.getenv('DUNEDAQ_ERS_CRITICAL')=}" - main_logger.info(critical_out) - - #* Test the routing to the Base and Opmon streams - handlerconf = LogHandlerConf() - main_logger.warning("Handlerconf Base", extra=handlerconf.Base) - main_logger.warning("Handlerconf Opmon", extra=handlerconf.Opmon) - - #* Test ERS Streams - main_logger.warning("ERS Warning erstrace,throttle,lstdout", extra=handlerconf.ERS) - main_logger.info("ERS Info erstrace,throttle,lstdout", extra=handlerconf.ERS) - main_logger.critical("ERS Fatal 
erstrace,lstdout", extra=handlerconf.ERS) - main_logger.debug("ERS Debug none", extra=handlerconf.ERS) - main_logger.error("ERS Error erstrace,throttle,lstdout," - "protobufstream(monkafka.cern.ch:30092)", - extra=handlerconf.ERS - ) - - return + test_throttle(main_logger) + if handlertypes: + test_handlertypes(main_logger) + if handlerconf: + test_handlerconf(main_logger) if __name__ == "__main__": diff --git a/src/daqpytools/logging/handlers.py b/src/daqpytools/logging/handlers.py index a4c2886..3b5a1e1 100644 --- a/src/daqpytools/logging/handlers.py +++ b/src/daqpytools/logging/handlers.py @@ -198,18 +198,22 @@ class LogHandlerConf: """Dataclass that holds the various streams and relevant handlers. Attributes: + init_ers: If True, automatically initializes ERS configuration + during construction. _BASE_HANDLERS: Private class variable for default base handlers _OPMON_HANDLERS: Private class variable for opmon handlers BASE_CONFIG: Class variable for base stream configuration OPMON_CONFIG: Class variable for opmon stream configuration ERS: Instance field for ERS configuration (loaded from environment) """ + init_ers: bool = False _BASE_HANDLERS: ClassVar[set] = {HandlerType.Stream, HandlerType.Rich, HandlerType.File } _OPMON_HANDLERS: ClassVar[set] = {HandlerType.Rich, HandlerType.Stream, HandlerType.File} + _ERS: object = None Base: ClassVar[dict] = { "handlers": _BASE_HANDLERS, @@ -221,12 +225,45 @@ class LogHandlerConf: "stream": StreamType.OPMON } - ERS: dict=field(default_factory = lambda: - { - "ers_handlers": LogHandlerConf._get_oks_conf(), - "stream": StreamType.ERS - } - ) + def __post_init__(self) -> None: + """Initialize ERS configuration if init_ers field is True. + + This method is called automatically after dataclass initialization. + If the init_ers attribute was set to True, it triggers the complete + ERS initialization. 
+ """ + if self.init_ers: + self.init_ers_stream() + + @property + def ERS(self) -> dict : # noqa: N802 + """Get the ERS configuration dictionary. + + Returns: + dict: Contains 'ers_handlers' and 'stream' configuration for ERS + + Raises: + AttributeError: If ERS has not been + initialized (call init_ers_stream() first) + """ + if not self._ERS: + err_msg = "ERS stream not initialised. Call init_ers_stream() first" + raise AttributeError(err_msg) + return self._ERS + + def init_ers_stream(self) -> None: + """Initialize ERS configuration from environment variables. + + Loads ERS configuration from OKS environment variables and populates + the _ERS dict with handlers and stream information. + + Called automatically during construction if init_ers=True, or can be + called manually afterwards. + """ + self._ERS = { + "ers_handlers": LogHandlerConf._get_oks_conf(), + "stream": StreamType.ERS + } @staticmethod def _convert_str_to_handlertype(handler_str: str) -> tuple[HandlerType, diff --git a/src/daqpytools/logging/logger.py b/src/daqpytools/logging/logger.py index 44f6793..2f99d17 100644 --- a/src/daqpytools/logging/logger.py +++ b/src/daqpytools/logging/logger.py @@ -1,4 +1,5 @@ import logging +import sys from logging import PlaceHolder import kafka @@ -159,7 +160,8 @@ def get_daq_logger( if log_level is not logging.NOTSET: for handler in logger.handlers: # Ignore stderr handler resets, needs to be fixed at the error level - if type(handler).__name__ == "StderrHandler": + if (isinstance(handler, logging.StreamHandler) + and handler.stream == sys.stderr): continue handler.setLevel(log_level)