Python pythonjsonlogger.jsonlogger.JsonFormatter() Examples

The following code examples show how to use pythonjsonlogger.jsonlogger.JsonFormatter(). They are taken from open source Python projects; a minimal standalone sketch of the common pattern is shown first.
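
A minimal, self-contained sketch of that pattern (not taken from any of the projects below; it assumes only the standard library and the python-json-logger package): attach a JsonFormatter to a handler, attach the handler to a logger, and each record is emitted as one JSON object per line.

import logging
from pythonjsonlogger import jsonlogger

# Attach a JSON formatter to a stream handler on an example logger.
logger = logging.getLogger('example')
handler = logging.StreamHandler()
# The format string only selects which record attributes become JSON keys;
# JsonFormatter extracts the field names, so it behaves the same with or without
# the trailing 's' conversion characters seen in some of the examples below.
handler.setFormatter(jsonlogger.JsonFormatter('%(asctime)s %(levelname)s %(name)s %(message)s'))
logger.addHandler(handler)
logger.setLevel(logging.INFO)

# Keys passed via `extra` are merged into the JSON record.
logger.info('hello', extra={'request_id': 'abc123'})
# -> {"asctime": "...", "levelname": "INFO", "name": "example", "message": "hello", "request_id": "abc123"}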

Example 1
Project: telemetry   Author: jupyter   File: eventlog.py    BSD 3-Clause "New" or "Revised" License
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.log = logging.getLogger(__name__)
        # We don't want events to show up in the default logs
        self.log.propagate = False
        # We will use log.info to emit
        self.log.setLevel(logging.INFO)

        if self.handlers:
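            # json_serializer replaces the json.dumps-style callable JsonFormatter uses to
            # serialize the record dict; _skip_message is defined elsewhere in this project.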
            formatter = jsonlogger.JsonFormatter(json_serializer=_skip_message)
            for handler in self.handlers:
                handler.setFormatter(formatter)
                self.log.addHandler(handler)

        self.schemas = {} 
Example 2
Project: idunn   Author: QwantResearch   File: logging.py    Apache License 2.0
def init_logging(settings: Settings):
    """
    init the logging for the server
    """
    log_format = settings['LOG_FORMAT']
    as_json = settings['LOG_JSON']

    levels = settings['LOG_LEVEL_BY_MODULE']
    for module, lvl in json.loads(levels).items():
        log_level = lvl.upper()
        log_level = logging.getLevelName(log_level)

        logger = logging.getLogger(module)
        logger.setLevel(log_level)

    logHandler = logging.StreamHandler()
    if as_json:
        formatter = jsonlogger.JsonFormatter(log_format)
        logHandler.setFormatter(formatter)
    else:
        logHandler.setFormatter(logging.Formatter(log_format))

    # install this handler as the root logger's only handler
    logging.getLogger().handlers = [logHandler] 
Example 3
Project: qpp-claims-to-quality-public   Author: CMSgov   File: formatter.py    Creative Commons Zero v1.0 Universal
def __init__(
        self,
        fmt=(
            '%(asctime) %(name) %(processName) %(filename) '
            '%(funcName) %(levelname) %(lineno) %(module) %(threadName) %(message)'),
        fields_to_redact=['message', 'exc_info'],
        redacting_filter=pii_scrubber.RedactingPIIFilter(),
        datefmt='%Y-%m-%dT%H:%M:%SZ%z',
        extra={}, *args, **kwargs):
        """
        Initialize JsonFormatter for logging.

        Note - fmt defines the order in which fields appear.
        """
        self._extra = extra
        self._datefmt = datefmt
        self._redacting_filter = redacting_filter
        self._fields_to_redact = fields_to_redact
        jsonlogger.JsonFormatter.__init__(self, fmt=fmt, datefmt=datefmt, *args, **kwargs) 
Example 4
Project: qpp-claims-to-quality-public   Author: CMSgov   File: formatter.py    Creative Commons Zero v1.0 Universal
def process_log_record(self, log_record):
        """Process log records and apply PII logger."""
        # Scrub PIIs.
        if self._redacting_filter:
            for field in self._fields_to_redact:
                self._redact_field(log_record, field)

        # Enforce the presence of a timestamp.
        if 'asctime' in log_record:
            log_record['timestamp'] = log_record['asctime']
        else:
            log_record['timestamp'] = datetime.datetime.utcnow().strftime(self._datefmt)

        if self._extra is not None:
            for key, value in self._extra.items():
                log_record[key] = value
        return super(JsonFormatter, self).process_log_record(log_record) 
Example 5
Project: rest_api   Author: opentargets   File: config.py    Apache License 2.0
def init_app(cls, app):
        console_handler = logging.StreamHandler(stream=sys.stdout)
        console_handler.setLevel(logging.WARN)
        jsonformatter = jsonlogger.JsonFormatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        console_handler.setFormatter(jsonformatter)

        loggers = [app.logger,
                   getLogger('elasticsearch'),
                   getLogger('redislite')]

        # Sadly, this does not work:
        # wlog = getLogger('werkzeug')
        # # log.setLevel(logging.ERROR)
        # wlog.disabled = True

        for logger in loggers:
            logger.addHandler(console_handler)
            logger.setLevel(logging.WARN)

        Config.init_app(app) 
Example 6
Project: DownloaderForReddit   Author: MalloyDelacroix   File: Logger.py    GNU General Public License v3.0
def make_logger():
    logger = logging.getLogger('DownloaderForReddit')
    logger.setLevel(logging.DEBUG)

    stream_formatter = JsonStreamFormatter('%(asctime)s: %(levelname)s : %(name)s : %(message)s',
                                           datefmt='%m/%d/%Y %I:%M:%S %p')

    json_formatter = jsonlogger.JsonFormatter(fmt='%(levelname) %(name) %(filename) %(module) %(funcName) %(lineno) '
                                              '%(message) %(asctime)', datefmt='%m/%d/%Y %I:%M:%S %p')

    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(logging.DEBUG)
    stream_handler.setFormatter(stream_formatter)

    log_path = os.path.join(SystemUtil.get_data_directory(), 'DownloaderForReddit.log')
    file_handler = RotatingFileHandler(log_path, maxBytes=1024*1024, backupCount=2)
    file_handler.setLevel(logging.INFO)
    file_handler.setFormatter(json_formatter)

    logger.addHandler(stream_handler)
    logger.addHandler(file_handler) 
Example 7
Project: binderhub   Author: jupyterhub   File: events.py    BSD 3-Clause "New" or "Revised" License
def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)

        self.log = logging.getLogger(__name__)
        # We don't want events to show up in the default logs
        self.log.propagate = False
        self.log.setLevel(logging.INFO)

        if self.handlers_maker:
            self.handlers = self.handlers_maker(self)
            formatter = jsonlogger.JsonFormatter(json_serializer=_skip_message)
            for handler in self.handlers:
                handler.setFormatter(formatter)
                self.log.addHandler(handler)

        self.schemas = {} 
Example 8
Project: rekcurd-python   Author: rekcurd   File: logger_jsonlogger.py    Apache License 2.0
def __init__(self,
                 logger_name: str = 'rekcurd.system',
                 log_level: int = None,
                 config: RekcurdConfig = RekcurdConfig()) -> None:
        """
        Constructor
        :param logger_name:
        :param log_level:
        :param config: RekcurdConfig
        """
        self.config = config
        log_level = int(log_level or logging.DEBUG if config.DEBUG_MODE else logging.NOTSET)
        self.ml_service = config.APPLICATION_NAME
        self.service_level = config.SERVICE_LEVEL
        self.log = logging.getLogger(logger_name)
        handler = logging.StreamHandler()
        formatter = self.JsonFormatter()
        handler.setFormatter(formatter)
        handler.setLevel(log_level)
        self.log.handlers = []
        self.log.addHandler(handler)
        self.log.setLevel(log_level) 
Example 9
Project: rekcurd-python   Author: rekcurd   File: logger_jsonlogger.py    Apache License 2.0
def __init__(self,
                 logger_name: str = 'rekcurd.service',
                 log_level: int = None,
                 config: RekcurdConfig = RekcurdConfig()):
        """
        Constructor
        :param logger_name:
        :param log_level:
        :param config: RekcurdConfig
        """
        self.logger_name = logger_name
        self.log_level = int(log_level or logging.DEBUG)
        self.config = config
        self.ml_service = config.APPLICATION_NAME
        self.service_level = config.SERVICE_LEVEL
        self.log = logging.getLogger(logger_name)
        handler = logging.StreamHandler()
        formatter = self.JsonFormatter()
        handler.setFormatter(formatter)
        handler.setLevel(self.log_level)
        self.log.addHandler(handler)
        self.log.setLevel(self.log_level) 
Example 10
Project: honeycomb   Author: Cymmetria   File: cli.py    MIT License
def setup_logging(home, verbose):
    """Configure logging for honeycomb."""
    logging.setLoggerClass(MyLogger)
    logging.config.dictConfig({
        "version": 1,
        "disable_existing_loggers": False,
        "formatters": {
            "console": {
                "format": "%(levelname)-8s [%(asctime)s %(name)s] %(filename)s:%(lineno)s %(funcName)s: %(message)s",
            },
            "json": {
                "()": jsonlogger.JsonFormatter,
                "format": "%(levelname)s %(asctime)s %(name)s %(filename)s %(lineno)s %(funcName)s %(message)s",
            },
        },
        "handlers": {
            "default": {
                "level": "DEBUG" if verbose else "INFO",
                "class": "logging.StreamHandler",
                "formatter": "console",
            },
            "file": {
                "level": "DEBUG",
                "class": "logging.handlers.WatchedFileHandler",
                "filename": os.path.join(home, DEBUG_LOG_FILE),
                "formatter": "json",
            },
        },
        "loggers": {
            "": {
                "handlers": ["default", "file"],
                "level": "DEBUG",
                "propagate": True,
            },
        }
    }) 
Example 11
Project: cis   Author: mozilla-iam   File: __init__.py    Mozilla Public License 2.0
def __init__(
        self,
        fmt="%(asctime) %(name) %(processName) %(filename) \
        %(funcName) %(levelname) %(lineno) %(module) %(threadName) %(message)",
        datefmt="%Y-%m-%dT%H:%M:%SZ%z",
        style="%",
        extra={},
        *args,
        **kwargs
    ):
        self._extra = extra
        jsonlogger.JsonFormatter.__init__(self, fmt=fmt, datefmt=datefmt, *args, **kwargs) 
Example 12
Project: cis   Author: mozilla-iam   File: __init__.py    Mozilla Public License 2.0
def process_log_record(self, log_record):
        if "asctime" in log_record:
            log_record["timestamp"] = log_record["asctime"]
        else:
            log_record["timestamp"] = datetime.datetime.now(datetime.timezone.utc).strftime("%Y-%m-%dT%H:%M:%S.%fZ%z")

        if self._extra is not None:
            for key, value in self._extra.items():
                log_record[key] = value
        return super(JsonFormatter, self).process_log_record(log_record) 
Example 13
Project: python-client   Author: rekcurd   File: logger_jsonlogger.py    Apache License 2.0
def __init__(self, logger_name: str = 'rekcurd_client', log_level: int = logging.NOTSET) -> None:
        """
        Constructor
        :param logger_name: logger name
        :param log_level:
        """
        super().__init__()
        self.log = logging.getLogger(logger_name)
        handler = logging.StreamHandler()
        formatter = self.JsonFormatter()
        handler.setFormatter(formatter)
        self.log.handlers = []
        self.log.addHandler(handler)
        self.log.setLevel(log_level) 
Example 14
Project: scrapy-cluster   Author: istresearch   File: log_factory.py    MIT License
def _get_formatter(self, json):
        '''
        Return the proper log formatter

        @param json: Boolean value
        '''
        if json:
            return jsonlogger.JsonFormatter()
        else:
            return logging.Formatter(self.format_string) 
Example 15
Project: PyChunkedGraph   Author: seung-lab   File: jsonformatter.py    Mozilla Public License 2.0
def add_fields(self, log_record, record, message_dict):
        """Remap `log_record`s fields to fluentd-gcp counterparts."""
        super(JsonFormatter, self).add_fields(log_record, record, message_dict)
        log_record["time"] = log_record.get("time", log_record["asctime"])
        log_record["severity"] = log_record.get(
            "severity", log_record["levelname"])
        log_record["source"] = log_record.get("source", log_record["name"])
        del log_record["asctime"]
        del log_record["levelname"]
        del log_record["name"] 
Example 16
Project: isaac-consensus-protocol   Author: bosnet   File: util.py    Apache License 2.0
def add_fields(self, log_record, *a, **kw):
        super(JsonFormatter, self).add_fields(log_record, *a, **kw)

        if 'message' in log_record and log_record['message'] is None:
            del log_record['message']

        return 
Example 17
Project: isaac-consensus-protocol   Author: bosnet   File: util.py    Apache License 2.0
def __init__(self, logger, *a, **kw):
        super(LogStreamHandler, self).__init__(*a, **kw)

        self.logger = logger
        self.json_formatter = JsonFormatter(json_indent=2 if self.in_terminal else None)
        self.json_formatter_output = JsonFormatter()
        self.stream_metric = None 
Example 18
Project: open-raadsinformatie   Author: openstate   File: settings.py    MIT License
def __init__(self, fmt="%(levelname) %(message)", *args, **kwargs):
        jsonlogger.JsonFormatter.__init__(self, fmt=fmt, *args, **kwargs) 
Example 19
Project: rest_api   Author: opentargets   File: config.py    Apache License 2.0
def init_app(cls, app):
        file_handler = logging.FileHandler('output.log')
        file_handler.setLevel(logging.DEBUG)
        jsonformatter = jsonlogger.JsonFormatter(
            '%(asctime)s %(name)-12s %(levelname)-8s %(message)s')
        file_handler.setFormatter(jsonformatter)

        loggers = [app.logger,
                   getLogger('elasticsearch'),
                   getLogger('redislite')]

        for logger in loggers:
            logger.addHandler(file_handler)

        Config.init_app(app) 
Example 20
Project: tests-python   Author: JJ   File: hugitos.py    GNU General Public License v3.0
def __init__(self):
        logger = logging.getLogger()
        logger.setLevel(logging.INFO)
        logHandler = logging.StreamHandler()
        formatter = jsonlogger.JsonFormatter()
        logHandler.setFormatter(formatter)
        logger.addHandler(logHandler)
        super().__init__(logger=logger) 
Example 21
Project: ADReset2   Author: mprahl   File: logger.py    GNU General Public License v3.0
def log_to_stdout(level=logging.INFO):
    """
    Configure loggers to stream to STDOUT.

    :param int level: the logging level
    """
    fmt = '%(asctime)s %(name)s %(levelname)s %(user)s %(message)s'
    datefmt = '%Y-%m-%d %H:%M:%S'
    stream_handler = logging.StreamHandler(sys.stdout)
    stream_handler.setLevel(level)
    stream_handler.setFormatter(jsonlogger.JsonFormatter(fmt, datefmt=datefmt))
    logging.getLogger().addHandler(stream_handler) 
Example 22
Project: tsaws   Author: mozilla   File: tsaws.py    Mozilla Public License 2.0
def initLogger():
    logger.level = logging.DEBUG
    if options.output == 'json':
        formatter = jsonlogger.JsonFormatter()
    else:
        formatter = logging.Formatter('%(asctime)s - %(message)s')
        formatter.formatTime = loggerTimeStamp

    sh = logging.StreamHandler(sys.stderr)
    sh.setFormatter(formatter)
    logger.addHandler(sh) 
Example 23
Project: python-sqs-logging-handler   Author: zillow   File: test_sqs_log_handler.py    MIT License
def sqs_logger(sqs_queue, queue_name, logger_name):
    if queue_name not in sqs_queue.url:
        err_msg = '{} not in QueueUrl={}'.format(queue_name, sqs_queue.url)
        raise ValueError(err_msg)

    # logger
    log_handler = sqsloghandler.SQSHandler(queue_name)
    formatter = jsonlogger.JsonFormatter('%(asctime) %(levelname) %(message)')
    log_handler.setFormatter(formatter)
    logger = logging.getLogger(logger_name)
    logger.addHandler(log_handler)
    yield logger 
Example 24
Project: ansible-later   Author: xoxys   File: logger.py    MIT License
def format(self, record): # noqa
        record.msg = record.msg.replace("\n", " ")
        return jsonlogger.JsonFormatter.format(self, record) 
Example 25
Project: cccatalog-api   Author: creativecommons   File: log_factory.py    MIT License
def _get_formatter(self, json):
        '''
        Return the proper log formatter

        @param json: Boolean value
        '''
        if json:
            return jsonlogger.JsonFormatter()
        else:
            return logging.Formatter(self.format_string) 
Example 26
Project: chaostoolkit   Author: chaostoolkit   File: logging.py    Apache License 2.0
def configure_logger(verbose: bool = False, log_format: str = "string",
                     log_file: str = None, logger_name: str = "chaostoolkit",
                     context_id: str = None):
    """
    Configure the chaostoolkit logger.

    By default logs as strings to stdout and the given file. When `log_format`
    is `"json"`, records are sent to the console as JSON strings but remain
    as strings in the log file. The rationale is that the log file is mostly
    for grepping purposes, while records written to the console can be forwarded
    out of band to anywhere else.
    """
    log_level = logging.INFO

    # we define colors ourselves as critical is missing in default ones
    colors = {
        logging.DEBUG: ForegroundColors.CYAN,
        logging.INFO: ForegroundColors.GREEN,
        logging.WARNING: ForegroundColors.YELLOW,
        logging.ERROR: ForegroundColors.RED,
        logging.CRITICAL: ForegroundColors.RED
    }
    fmt = "%(color)s[%(asctime)s %(levelname)s]%(end_color)s %(message)s"
    if verbose:
        log_level = logging.DEBUG
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"

    formatter = LogFormatter(
        fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S", colors=colors)
    if log_format == 'json':
        fmt = "(process) (asctime) (levelname) (module) (lineno) (message)"
        if context_id:
            fmt = "(context_id) {}".format(fmt)
        formatter = jsonlogger.JsonFormatter(
            fmt, json_default=encoder, timestamp=True)

    # sadly, no other way to specify the name of the default logger publicly
    LOGZERO_DEFAULT_LOGGER = logger_name
    logger = setup_default_logger(level=log_level, formatter=formatter)
    if context_id:
        logger.addFilter(ChaosToolkitContextFilter(logger_name, context_id))

    if log_file:
        # always everything as strings in the log file
        logger.setLevel(logging.DEBUG)
        fmt = "%(color)s[%(asctime)s %(levelname)s] "\
              "[%(module)s:%(lineno)d]%(end_color)s %(message)s"
        formatter = LogFormatter(fmt=fmt, datefmt="%Y-%m-%d %H:%M:%S",
                                 colors=colors)
        logzero.logfile(log_file, formatter=formatter, mode='a',
                        loglevel=logging.DEBUG) 
Example 27
Project: crypto-signal   Author: CryptoSignal   File: logs.py    MIT License
def configure_logging(loglevel, log_mode):
    """Configure the application logger

    Args:
        loglevel (str): The level of logging for the application.
        log_mode (str): What kind of logging output to apply...
            text: Text logging is intended for users / developers.
            json: Json logging is intended for parsing with a log aggregation system.
    """

    if not loglevel:
        loglevel = logging.INFO

    if log_mode == 'json':
        log_formatter = jsonlogger.JsonFormatter()
    elif log_mode == 'text':
        log_formatter = logging.Formatter('%(message)s')
    elif log_mode == 'standard':
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
    else:
        log_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

    handler = logging.StreamHandler(sys.stdout)
    handler.setFormatter(log_formatter)
    root_logger = logging.getLogger()
    root_logger.addHandler(handler)
    root_logger.setLevel(loglevel)

    structlog.configure(
        processors=[
            structlog.stdlib.filter_by_level,
            structlog.stdlib.add_logger_name,
            structlog.stdlib.add_log_level,
            structlog.stdlib.PositionalArgumentsFormatter(),
            structlog.processors.StackInfoRenderer(),
            structlog.processors.format_exc_info,
            structlog.processors.UnicodeDecoder(),
            structlog.stdlib.render_to_log_kwargs,
        ],
        context_class=dict,
        logger_factory=structlog.stdlib.LoggerFactory(),
        wrapper_class=structlog.stdlib.BoundLogger,
        cache_logger_on_first_use=True
    ) 
Example 28
Project: python-sqs-logging-handler   Author: zillow   File: test_sqs_log_handler.py    MIT License
def test_info_global_extra(sqs_queue, queue_name, logger_name):
    # global extra information
    global_extra_dict = {
        "cluster_name": "regression",
        "node_name": "localhost",
    }

    # sqs_logger
    log_handler = sqsloghandler.SQSHandler(queue_name, global_extra=global_extra_dict)
    formatter = jsonlogger.JsonFormatter('%(asctime) %(levelname) %(message)')
    log_handler.setFormatter(formatter)
    sqs_logger = logging.getLogger(logger_name)
    sqs_logger.addHandler(log_handler)

    # set logger level
    sqs_logger.setLevel(logging.INFO)

    # extra information
    extra_dict = {
        "test": "test logging global_extra",
        "num": 1,
        5: "9",
        "float": 1.75,
        "nested": {"more": "data"}
    }

    # log a test message
    test_msg = 'test info global_extra'
    sqs_logger.info(test_msg, extra=extra_dict)

    # fetch message from queue
    found_str = fetch_sqs_message(sqs_queue)
    assert isinstance(found_str, str)

    # verify message content
    result_dict = json.loads(found_str)
    assert isinstance(result_dict, dict)
    expected_dict = {
        'asctime': '2016-01-01 00:00:00,000',
        'levelname': 'INFO',
        'message': test_msg,
    }
    expected_dict.update({str(k): v for k, v in extra_dict.items()})
    expected_dict.update({str(k): v for k, v in global_extra_dict.items()})
    assert result_dict == expected_dict