Python logging.setLogRecordFactory() Examples

The following are 15 code examples showing how to use logging.setLogRecordFactory(). They are extracted from open source projects; the project, author, file, and license are listed above each example where available.

You may also want to check out all available functions and classes of the logging module.
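
Before diving into the examples, here is a minimal standalone sketch of the usual pattern: wrap the factory returned by logging.getLogRecordFactory(), attach an extra attribute to every record, and reference that attribute from a format string. The hostname field is purely illustrative and does not appear in the examples below.

import logging
import socket

old_factory = logging.getLogRecordFactory()

def record_factory(*args, **kwargs):
    # Delegate record creation to the previous factory, then annotate the record.
    record = old_factory(*args, **kwargs)
    record.hostname = socket.gethostname()
    return record

logging.setLogRecordFactory(record_factory)
logging.basicConfig(format="%(asctime)s %(hostname)s %(levelname)s %(message)s")
logging.getLogger(__name__).warning("factory attributes are visible to formatters")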

Example 1
Project: conjure-up   Author: conjure-up   File: log.py   License: MIT License
def setup_logging(app, logfile, debug=True):
    old_factory = logging.getLogRecordFactory()

    def spell_record_factory(*args, **kwargs):
        record = old_factory(*args, **kwargs)
        if record.name != 'conjure-up':
            record.filename = '{}: {}'.format(record.name, record.filename)
        spell_name = app.config.get('spell', consts.UNSPECIFIED_SPELL)
        record.name = 'conjure-up/{}'.format(spell_name)
        return record

    logging.setLogRecordFactory(spell_record_factory)

    cmdslog = TimedRotatingFileHandler(logfile,
                                       when='D',
                                       interval=1,
                                       backupCount=7)
    cmdslog.setFormatter(logging.Formatter(
        "%(asctime)s [%(levelname)s] %(name)s - "
        "%(filename)s:%(lineno)d - %(message)s"))

    root_logger = logging.getLogger()
    app_logger = logging.getLogger('conjure-up')

    if debug:
        app_logger.setLevel(logging.DEBUG)
        root_logger.setLevel(logging.DEBUG)
    else:
        # always use DEBUG level for app, for now
        app_logger.setLevel(logging.DEBUG)
        root_logger.setLevel(logging.INFO)

    root_logger.addHandler(cmdslog)
    if os.path.exists('/dev/log'):
        st_mode = os.stat('/dev/log').st_mode
        if stat.S_ISSOCK(st_mode):
            syslog_h = SysLogHandler(address='/dev/log')
            syslog_h.set_name('conjure-up')
            app_logger.addHandler(syslog_h)

    return app_logger 
Example 2
Project: Fluid-Designer   Author: Microvellum   File: test_logging.py   License: GNU General Public License v3.0
def test_set_log_record_factory(self):
    man = logging.Manager(None)
    expected = object()
    man.setLogRecordFactory(expected)
    self.assertEqual(man.logRecordFactory, expected)
Example 3
Project: Fluid-Designer   Author: Microvellum   File: test_logging.py   License: GNU General Public License v3.0
def tearDown(self):
    self.root_logger.removeFilter(self.filter)
    BaseTest.tearDown(self)
    logging.setLogRecordFactory(self.orig_factory)
Example 4
Project: Fluid-Designer   Author: Microvellum   File: test_logging.py   License: GNU General Public License v3.0
def test_logrecord_class(self):
    self.assertRaises(TypeError, self.root_logger.warning,
                      self.next_message())
    logging.setLogRecordFactory(DerivedLogRecord)
    self.root_logger.error(self.next_message())
    self.assert_log_lines([
        ('root', 'ERROR', '2'),
    ])
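
Examples 2 to 4 (repeated in Examples 5 to 7 and 10 to 12) appear to come from bundled copies of the standard library's test_logging suite: a replacement record class is installed with setLogRecordFactory() and the original factory, saved beforehand, is put back in tearDown() so later tests are unaffected. A minimal sketch of the same save-and-restore discipline outside a test class; DottedLogRecord is an illustrative name, not the DerivedLogRecord used by the tests:

import logging

class DottedLogRecord(logging.LogRecord):
    def getMessage(self):
        # Append a trailing dot so the substitution is easy to spot in output.
        return super().getMessage() + "."

orig_factory = logging.getLogRecordFactory()
logging.setLogRecordFactory(DottedLogRecord)
try:
    logging.getLogger(__name__).error("messages now end with a dot")
finally:
    # Always restore the original factory, mirroring tearDown() above.
    logging.setLogRecordFactory(orig_factory)
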
Example 5
Project: ironpython3   Author: IronLanguages   File: test_logging.py   License: Apache License 2.0
def test_set_log_record_factory(self):
    man = logging.Manager(None)
    expected = object()
    man.setLogRecordFactory(expected)
    self.assertEqual(man.logRecordFactory, expected)
Example 6
Project: ironpython3   Author: IronLanguages   File: test_logging.py   License: Apache License 2.0
def tearDown(self):
    self.root_logger.removeFilter(self.filter)
    BaseTest.tearDown(self)
    logging.setLogRecordFactory(self.orig_factory)
Example 7
Project: ironpython3   Author: IronLanguages   File: test_logging.py   License: Apache License 2.0
def test_logrecord_class(self):
    self.assertRaises(TypeError, self.root_logger.warning,
                      self.next_message())
    logging.setLogRecordFactory(DerivedLogRecord)
    self.root_logger.error(self.next_message())
    self.assert_log_lines([
        ('root', 'ERROR', '2'),
    ])
Example 8
Project: flambe   Author: asappresearch   File: logging.py   License: MIT License
def __exit__(self, exc_type: Any, exc_value: Any, traceback: Any) -> None:
    """Close the listener and restore original logging config"""
    for handler in self.logger.handlers:
        handler.removeFilter(self.context_filter)
    self.logger.removeFilter(self.context_filter)
    for handler in self.handlers:
        self.logger.removeHandler(handler)
    self.logger.setLevel(self.old_root_log_level)
    logging.setLogRecordFactory(self.old_factory)
    delattr(logging.root, '_log_dir')
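
Example 8 is the __exit__ method of a logging context: it undoes every change made on entry, including putting back the record factory saved in self.old_factory. A hedged sketch of what a matching enter/exit pair can look like; flambe's actual class does considerably more, and the names TaggedLogging and tag are assumptions:

import logging

class TaggedLogging:
    """Swap the record factory in on entry and back out on exit."""

    def __init__(self, tag):
        self.tag = tag

    def __enter__(self):
        self.old_factory = logging.getLogRecordFactory()

        def factory(*args, **kwargs):
            record = self.old_factory(*args, **kwargs)
            record.tag = self.tag  # available to formatters as %(tag)s
            return record

        logging.setLogRecordFactory(factory)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Restore the saved factory even if the block raised.
        logging.setLogRecordFactory(self.old_factory)
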
Example 9
Project: NeMo   Author: NVIDIA   File: nemo_logging.py   License: Apache License 2.0
def _define_logger(self):

    # Use double-checked locking to avoid taking lock unnecessarily.
    if self._logger is not None:
        return self._logger

    with self._logger_lock:
        try:
            self._logger = _logging.getLogger("nemo_logger")
            # By default, silence all loggers except the logger for rank 0
            self.remove_stream_handlers()
            if get_envbool(NEMO_ENV_VARNAME_TESTING, False):
                old_factory = _logging.getLogRecordFactory()

                def record_factory(*args, **kwargs):
                    record = old_factory(*args, **kwargs)
                    record.rank = get_envint("RANK", 0)
                    return record

                _logging.setLogRecordFactory(record_factory)
                self.add_stream_handlers(formatter=DebugNeMoFormatter)
            elif get_envint("RANK", 0) == 0:
                self.add_stream_handlers()

        finally:
            level = Logger.INFO
            if get_envbool(NEMO_ENV_VARNAME_TESTING, False):
                level = Logger.DEBUG
            self.set_verbosity(verbosity_level=level)

    self._logger.propagate = False
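
Example 9 installs the rank-annotating factory only when a testing environment variable is set, and pairs it with a formatter (DebugNeMoFormatter) that prints the extra field. A small sketch of the same idea using a plain logging.Formatter; reading RANK from the environment is the only detail carried over from the example:

import logging
import os

old_factory = logging.getLogRecordFactory()

def rank_factory(*args, **kwargs):
    # Stamp every record with the distributed rank of this process.
    record = old_factory(*args, **kwargs)
    record.rank = int(os.environ.get("RANK", 0))
    return record

logging.setLogRecordFactory(rank_factory)

handler = logging.StreamHandler()
handler.setFormatter(logging.Formatter("[rank %(rank)d] %(levelname)s %(message)s"))
logging.getLogger(__name__).addHandler(handler)
logging.getLogger(__name__).warning("every record now carries a rank")
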
Example 10
def test_set_log_record_factory(self):
    man = logging.Manager(None)
    expected = object()
    man.setLogRecordFactory(expected)
    self.assertEqual(man.logRecordFactory, expected)
Example 11
def tearDown(self):
    self.root_logger.removeFilter(self.filter)
    BaseTest.tearDown(self)
    logging.setLogRecordFactory(self.orig_factory)
Example 12
def test_logrecord_class(self):
    self.assertRaises(TypeError, self.root_logger.warning,
                      self.next_message())
    logging.setLogRecordFactory(DerivedLogRecord)
    self.root_logger.error(self.next_message())
    self.assert_log_lines([
        ('root', 'ERROR', '2'),
    ])
Example 13
Project: modelforge   Author: src-d   File: slogging.py   License: Apache License 2.0
def setup(level: Union[str, int], structured: bool, config_path: str = None):
    """
    Make stdout and stderr unicode friendly in case of misconfigured \
    environments, initialize logging (including structured logging) and \
    enable colored logs where appropriate.

    :param level: The global logging level.
    :param structured: Output JSON logs to stdout.
    :param config_path: Path to a yaml file that configures the level of output of the loggers. \
                        Root logger level is set through the level argument and will override any \
                        root configuration found in the conf file.
    :return: None
    """
    global logs_are_structured
    logs_are_structured = structured

    if not isinstance(level, int):
        level = logging._nameToLevel[level]

    def ensure_utf8_stream(stream):
        if not isinstance(stream, io.StringIO) and hasattr(stream, "buffer"):
            stream = codecs.getwriter("utf-8")(stream.buffer)
            stream.encoding = "utf-8"
        return stream

    sys.stdout, sys.stderr = (ensure_utf8_stream(s)
                              for s in (sys.stdout, sys.stderr))

    # basicConfig is only called to make sure there is at least one handler for the root logger.
    # All the output level setting is done right afterwards.
    logging.basicConfig()
    logging.setLogRecordFactory(NumpyLogRecord)
    if config_path is not None and os.path.isfile(config_path):
        with open(config_path) as fh:
            config = yaml.safe_load(fh)
        for key, val in config.items():
            logging.getLogger(key).setLevel(logging._nameToLevel.get(val, level))
    root = logging.getLogger()
    root.setLevel(level)

    if not structured:
        handler = root.handlers[0]
        handler.emit = check_trailing_dot(handler.emit)
        if not sys.stdin.closed and sys.stdout.isatty():
            handler.setFormatter(AwesomeFormatter())
    else:
        root.handlers[0] = StructuredHandler(level) 
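
Examples 13 and 14 install NumpyLogRecord right after basicConfig(), so every subsequent record in the process is built by that class. Its implementation is not shown here; a record subclass of this kind typically overrides getMessage() so that arguments the default %-formatting handles poorly are rendered sensibly. The numpy handling below is an assumption for illustration, not modelforge's actual code:

import logging
import numpy

class NumpyLogRecord(logging.LogRecord):
    def getMessage(self):
        # Render bare numpy arrays compactly instead of relying on %-formatting.
        if isinstance(self.msg, numpy.ndarray):
            return numpy.array2string(self.msg, threshold=10)
        return super().getMessage()

logging.setLogRecordFactory(NumpyLogRecord)
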
Example 14
Project: fragile   Author: FragileTech   File: slogging.py   License: MIT License
def setup(level: Union[str, int], structured: bool, config_path: str = None):
    """
    Make stdout and stderr unicode friendly in case of misconfigured \
    environments, initialize logging (including structured logging) and \
    enable colored logs where appropriate.

    Args:
        level: The global logging level.
        structured: Output JSON logs to stdout.
        config_path: Path to a yaml file that configures the level of output of the loggers. \
                        Root logger level is set through the level argument and will override any \
                        root configuration found in the conf file.

    Returns:
        None

    """
    global logs_are_structured
    logs_are_structured = structured

    if not isinstance(level, int):
        level = logging._nameToLevel[level]

    def ensure_utf8_stream(stream):
        if not isinstance(stream, io.StringIO) and hasattr(stream, "buffer"):
            stream = codecs.getwriter("utf-8")(stream.buffer)
            stream.encoding = "utf-8"
        return stream

    sys.stdout, sys.stderr = (ensure_utf8_stream(s) for s in (sys.stdout, sys.stderr))

    # basicConfig is only called to make sure there is at least one handler for the root logger.
    # All the output level setting is done right afterwards.
    logging.basicConfig()
    logging.setLogRecordFactory(NumpyLogRecord)
    if config_path is not None and os.path.isfile(config_path):
        with open(config_path) as fh:
            config = yaml.safe_load(fh)
        for key, val in config.items():
            logging.getLogger(key).setLevel(logging._nameToLevel.get(val, level))
    root = logging.getLogger()
    root.setLevel(level)

    if not structured:
        handler = root.handlers[0]
        handler.emit = check_trailing_dot(handler.emit)
        if not hasattr(sys.stdin, "closed"):
            handler.setFormatter(AwesomeFormatter())
        elif not sys.stdin.closed and sys.stdout.isatty():
            handler.setFormatter(AwesomeFormatter())
    else:
        root.handlers[0] = StructuredHandler(level) 
Example 15
Project: hummingbot   Author: CoinAlpha   File: __init__.py   License: Apache License 2.0
def init_logging(conf_filename: str,
                 override_log_level: Optional[str] = None,
                 dev_mode: bool = False,
                 strategy_file_path: str = "hummingbot"):
    import io
    import logging.config
    from os.path import join
    import pandas as pd
    from typing import Dict
    from ruamel.yaml import YAML

    from hummingbot.client.config.global_config_map import global_config_map
    from hummingbot.logger.struct_logger import (
        StructLogRecord,
        StructLogger
    )
    global STRUCT_LOGGER_SET
    if not STRUCT_LOGGER_SET:
        logging.setLogRecordFactory(StructLogRecord)
        logging.setLoggerClass(StructLogger)
        STRUCT_LOGGER_SET = True

    # Do not raise exceptions during log handling
    logging.raiseExceptions = False

    file_path: str = join(prefix_path(), "conf", conf_filename)
    yaml_parser: YAML = YAML()
    with open(file_path) as fd:
        yml_source: str = fd.read()
        yml_source = yml_source.replace("$PROJECT_DIR", prefix_path())
        yml_source = yml_source.replace("$DATETIME", pd.Timestamp.now().strftime("%Y-%m-%d-%H-%M-%S"))
        yml_source = yml_source.replace("$STRATEGY_FILE_PATH", strategy_file_path.replace(".yml", ""))
        io_stream: io.StringIO = io.StringIO(yml_source)
        config_dict: Dict = yaml_parser.load(io_stream)
        if override_log_level is not None and "loggers" in config_dict:
            for logger in config_dict["loggers"]:
                if global_config_map["logger_override_whitelist"].value and \
                        logger in global_config_map["logger_override_whitelist"].value:
                    config_dict["loggers"][logger]["level"] = override_log_level
        logging.config.dictConfig(config_dict)
        # add remote logging to logger if in dev mode
        if dev_mode:
            add_remote_logger_handler(config_dict.get("loggers", []))
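
Example 15 pairs setLogRecordFactory() with setLoggerClass(), so both the record objects and the loggers that create them are custom classes, and it guards the calls with a module-level flag so they only run once. A minimal sketch of that pairing; the class bodies here are placeholders, not hummingbot's actual StructLogRecord and StructLogger:

import logging

class StructLogRecord(logging.LogRecord):
    pass  # placeholder: the real class would add structured fields

class StructLogger(logging.Logger):
    pass  # placeholder: the real class would add structured logging helpers

_STRUCT_LOGGER_SET = False

def install_struct_logging():
    global _STRUCT_LOGGER_SET
    if not _STRUCT_LOGGER_SET:
        # Install both the record factory and the logger class exactly once.
        logging.setLogRecordFactory(StructLogRecord)
        logging.setLoggerClass(StructLogger)
        _STRUCT_LOGGER_SET = True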