Python logging.Logger() Examples
The following are 30 code examples of logging.Logger().
You can vote up the ones you like or vote down the ones you don't like,
and go to the original project or source file by following the links above each example.
You may also want to check out all available functions and classes of the module logging, or try the search function.
![](https://www.programcreek.com/common/static/images/search.png)
Example #1
Source File: config.py From query-exporter with GNU General Public License v3.0 | 7 votes |
def _warn_if_unused(config: Config, logger: Logger):
    """Warn about databases or metrics that are defined but never used.

    Scans every configured query and logs a warning listing each entry of the
    "databases" and "metrics" sections that no query references.  Entries in
    GLOBAL_METRICS are exempt from the metrics check.
    """
    referenced_dbs: Set[str] = set()
    referenced_metrics: Set[str] = set()
    for query in config.queries.values():
        referenced_dbs.update(query.databases)
        referenced_metrics.update(m.name for m in query.metrics)

    idle_dbs = sorted(set(config.databases) - referenced_dbs)
    if idle_dbs:
        logger.warning(
            f"unused entries in \"databases\" section: {', '.join(idle_dbs)}"
        )

    idle_metrics = sorted(set(config.metrics) - GLOBAL_METRICS - referenced_metrics)
    if idle_metrics:
        logger.warning(
            f"unused entries in \"metrics\" section: {', '.join(idle_metrics)}"
        )
Example #2
Source File: registry.py From modelforge with Apache License 2.0 | 6 votes |
def initialize_registry(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
    """
    Initialize the registry and the index.

    :param args: :class:`argparse.Namespace` with "backend", "args", "force" and "log_level".
    :param backend: Backend which is responsible for working with model files.
    :param log: Logger supplied by supply_backend
    :return: None on success, 1 on failure (existing backend or failed upload).
    """
    try:
        # Refuses to clobber an existing backend unless args.force is set.
        backend.reset(args.force)
    except ExistingBackendError:
        return 1
    log.info("Resetting the index ...")
    backend.index.reset()
    try:
        # Publish the freshly reset index; "reset" is the operation label.
        backend.index.upload("reset", {})
    except ValueError:
        return 1
    log.info("Successfully initialized")
Example #3
Source File: custom_logging.py From codepost-python with GNU Lesser General Public License v3.0 | 6 votes |
def get_logger(name=None):
    # type: (str) -> _logging.Logger
    """
    Return a logger with the specified name, creating it if necessary.

    If no name (or an empty name) is specified, return the package's default
    logger, configuring it lazily on first use.
    """
    global _logger

    if not name:  # treat both None and "" as "use the default logger"
        # Configure the default logger only the first time it is requested.
        if _logger is None:
            _logger = _setup_logging(LOG_DEFAULT_SCOPE)
        return _logger

    return _setup_logging(name)

# =============================================================================
Example #4
Source File: log.py From jsonrpcclient with MIT License | 6 votes |
def log_(
    message: str,
    logger: logging.Logger,
    level: str = "info",
    extra: Optional[Dict] = None,
    trim: bool = False,
) -> None:
    """
    Log a request or response.

    Args:
        message: JSON-RPC request or response string.
        logger: Logger the entry is emitted on.
        level: Log level (the name of a logger method, e.g. "info").
        extra: More details to include in the log entry.
        trim: Abbreviate log messages.
    """
    details = extra if extra is not None else {}
    # Flatten the message so it fits on a single log line.
    # NOTE(review): the middle replace looks like a no-op as written; the
    # upstream source may have collapsed a two-space pattern — confirm.
    if message:
        message = (
            message.replace("\n", "")
            .replace(" ", " ")
            .replace("{ ", "{")
        )
    if trim:
        message = _trim_message(message)
    # Dispatch to the logger method named by ``level``.
    getattr(logger, level)(message, extra=details)
Example #5
Source File: utils.py From calmjs with GNU General Public License v2.0 | 6 votes |
def enable_pretty_logging(logger='calmjs', level=logging.DEBUG, stream=None):
    """
    Shorthand to enable pretty logging.

    Accepts either a logger name or a ``logging.Logger`` instance, attaches a
    stream handler with a timestamped format, sets the requested level, and
    returns a zero-argument callable that undoes the whole setup (removes the
    handler and restores the level that was in effect before).
    """
    if not isinstance(logger, logging.Logger):
        logger = logging.getLogger(logger)

    previous_level = logger.level
    handler = logging.StreamHandler(stream)
    handler.setFormatter(logging.Formatter(
        u'%(asctime)s %(levelname)s %(name)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)

    def cleanup():
        # Detach our handler and put the level back the way we found it.
        logger.removeHandler(handler)
        logger.level = previous_level

    return cleanup
Example #6
Source File: tools.py From modelforge with Apache License 2.0 | 6 votes |
def install_environment(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
    """
    Install the packages mentioned in the model's metadata.

    :param args: :class:`argparse.Namespace` with "input", "reproduce", "backend", \
        "args", "username", "password", "remote_repo" and "log_level".
    :param backend: Backend which is responsible for working with model files.
    :param log: Logger supplied by supply_backend
    :return: None on success, 1 if the model could not be loaded.
    """
    model = _load_generic_model(args.input, backend, log)
    if model is None:
        return 1
    # Pin each dependency to the exact version recorded in the model metadata.
    packages = ["%s==%s" % (pkg, ver) for pkg, ver in model.environment["packages"]]
    # Install into the current interpreter's environment via pip.
    cmdline = [sys.executable, "-m", "pip", "install"] + args.pip + packages
    log.info(" ".join(cmdline))
    subprocess.check_call(cmdline)
    if args.reproduce:
        # Also fetch the datasets referenced by the model; each dataset entry
        # appears to be a (url, destination) pair — confirm against callers.
        for dataset in model.datasets:
            download_http(dataset[0], dataset[1], log)
Example #7
Source File: skill_http_client.py From botbuilder-python with MIT License | 6 votes |
def __init__(
    self,
    credential_provider: SimpleCredentialProvider,
    skill_conversation_id_factory: ConversationIdFactoryBase,
    channel_provider: ChannelProvider = None,
    logger: Logger = None,
):
    """Create a skill HTTP client.

    :param credential_provider: Provider used to authenticate requests.
    :param skill_conversation_id_factory: Factory mapping between caller and
        skill conversation ids; required.
    :param channel_provider: Optional channel environment provider.
    :param logger: Optional logger; accepted here but not stored or forwarded
        in the visible code — presumably consumed elsewhere (confirm).
    :raises TypeError: If ``skill_conversation_id_factory`` is falsy.
    """
    if not skill_conversation_id_factory:
        raise TypeError(
            "SkillHttpClient(): skill_conversation_id_factory can't be None"
        )

    super().__init__(credential_provider)

    self._skill_conversation_id_factory = skill_conversation_id_factory
    self._channel_provider = channel_provider
Example #8
Source File: registry.py From modelforge with Apache License 2.0 | 6 votes |
def delete_model(args: argparse.Namespace, backend: StorageBackend, log: logging.Logger):
    """
    Delete a model.

    :param args: :class:`argparse.Namespace` with "input", "backend", "args", "meta", \
        "update_default", "username", "password", "remote_repo", \
        "template_model", "template_readme" and "log_level".
    :param backend: Backend which is responsible for working with model files.
    :param log: Logger supplied by supply_backend
    :return: None on success, 1 on failure.
    """
    try:
        # Remove the model from the index and refresh the generated README
        # before touching the stored files.
        meta = backend.index.remove_model(args.input)
        template_readme = backend.index.load_template(args.template_readme)
        backend.index.update_readme(template_readme)
    except ValueError:
        return 1
    backend.delete_model(meta)
    log.info("Updating the models index...")
    try:
        backend.index.upload("delete", meta)
    except ValueError:  # TODO: replace with PorcelainError
        return 1
    log.info("Successfully deleted")
Example #9
Source File: log_copy.py From glazier with Apache License 2.0 | 6 votes |
def _EventLogUpload(self, source_log: Text):
    """Upload the log file contents to the local EventLog.

    Streams every line of the source log into the Windows Event Log through a
    dedicated logger wired only to an NTEventLogHandler.

    Args:
      source_log: Path of the log file to import.

    Raises:
      LogCopyError: If the source log file cannot be read.
    """
    event_handler = logging.handlers.NTEventLogHandler('GlazierBuildLog')
    # Construct a Logger directly (not via getLogger) so it stays outside the
    # logging hierarchy and writes only to the EventLog handler.
    logger = logging.Logger('eventlogger')
    logger.addHandler(event_handler)
    logger.setLevel(logging.DEBUG)
    try:
        with open(source_log, 'r') as f:
            # Iterate the file directly instead of readlines() so the whole
            # log is never held in memory at once.
            for line in f:
                logger.info(line)
    except IOError as e:
        # Chain the original error so the root cause stays in the traceback.
        raise LogCopyError(
            'Unable to open log file. It will not be imported into '
            'the Windows Event Log.') from e
Example #10
Source File: plugin_manager.py From stoq with Apache License 2.0 | 6 votes |
def __init__(
    self,
    plugin_dir_list: List[str],
    plugin_opts: Optional[Dict[str, Dict]] = None,
    stoq_config: Optional[helpers.StoqConfigParser] = None,
) -> None:
    """Initialise the plugin manager and scan the given directories.

    :param plugin_dir_list: Directories to scan for plugins.
    :param plugin_opts: Per-plugin option overrides keyed by plugin name.
    :param stoq_config: Parsed stoq configuration, if any.
    """
    self._stoq_config = stoq_config
    self._plugin_opts = {} if plugin_opts is None else plugin_opts
    # Plugin name -> (info, config) discovered by _collect_plugins below —
    # exact tuple contents defined elsewhere; confirm against _collect_plugins.
    self._plugin_name_to_info: Dict[str, Tuple[str, helpers.StoqConfigParser]] = {}
    # Caches of instantiated plugins, one registry per plugin category.
    self._loaded_plugins: Dict[str, BasePlugin] = {}
    self._loaded_provider_plugins: Dict[str, ProviderPlugin] = {}
    self._loaded_worker_plugins: Dict[str, WorkerPlugin] = {}
    self._loaded_source_archiver_plugins: Dict[str, ArchiverPlugin] = {}
    self._loaded_dest_archiver_plugins: Dict[str, ArchiverPlugin] = {}
    self._loaded_dispatcher_plugins: Dict[str, DispatcherPlugin] = {}
    self._loaded_connector_plugins: List[ConnectorPlugin] = []
    self._loaded_decorator_plugins: Dict[str, DecoratorPlugin] = {}
    # Reuse an existing logger if a subclass already set one up.
    if not hasattr(self, 'log') or self.log is None:
        self.log: logging.Logger = logging.getLogger('stoq')
    self._collect_plugins(plugin_dir_list)
Example #11
Source File: log.py From misp42splunk with GNU Lesser General Public License v3.0 | 6 votes |
def log_enter_exit(logger):
    '''Decorator factory that logs function enter and exit.

    This decorator will generate a lot of debug log, please add this
    only when it is required.

    :param logger: Logger to decorate.
    :type logger: ``logging.Logger``

    Usage::

       >>> @log_enter_exit(logger)
       >>> def myfunc():
       >>>     doSomething()
    '''
    import functools

    def log_decorator(func):
        # functools.wraps preserves the wrapped function's __name__, __doc__
        # and signature metadata, which the original wrapper lost.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            logger.debug('%s entered', func.__name__)
            result = func(*args, **kwargs)
            logger.debug('%s exited', func.__name__)
            return result
        return wrapper
    return log_decorator
Example #12
Source File: log.py From misp42splunk with GNU Lesser General Public License v3.0 | 6 votes |
def log_enter_exit(logger):
    '''Decorator for logger to log function enter and exit.

    This decorator will generate a lot of debug log, please add this
    only when it is required.

    :param logger: Logger to decorate.
    :type logger: ``logging.Logger``

    Usage::

       >>> @log_enter_exit
       >>> def myfunc():
       >>>     doSomething()
    '''
    def log_decorator(func):
        def wrapper(*call_args, **call_kwargs):
            name = func.__name__
            logger.debug('%s entered', name)
            outcome = func(*call_args, **call_kwargs)
            logger.debug('%s exited', name)
            return outcome
        return wrapper
    return log_decorator
Example #13
Source File: log_test.py From tornado-zh with MIT License | 6 votes |
def setUp(self): self.formatter = LogFormatter(color=False) # Fake color support. We can't guarantee anything about the $TERM # variable when the tests are run, so just patch in some values # for testing. (testing with color off fails to expose some potential # encoding issues from the control characters) self.formatter._colors = { logging.ERROR: u("\u0001"), } self.formatter._normal = u("\u0002") # construct a Logger directly to bypass getLogger's caching self.logger = logging.Logger('LogFormatterTest') self.logger.propagate = False self.tempdir = tempfile.mkdtemp() self.filename = os.path.join(self.tempdir, 'log.out') self.handler = self.make_handler(self.filename) self.handler.setFormatter(self.formatter) self.logger.addHandler(self.handler)
Example #14
Source File: log_test.py From tornado-zh with MIT License | 6 votes |
def setUp(self): self.formatter = LogFormatter(color=False) # Fake color support. We can't guarantee anything about the $TERM # variable when the tests are run, so just patch in some values # for testing. (testing with color off fails to expose some potential # encoding issues from the control characters) self.formatter._colors = { logging.ERROR: u("\u0001"), } self.formatter._normal = u("\u0002") # construct a Logger directly to bypass getLogger's caching self.logger = logging.Logger('LogFormatterTest') self.logger.propagate = False self.tempdir = tempfile.mkdtemp() self.filename = os.path.join(self.tempdir, 'log.out') self.handler = self.make_handler(self.filename) self.handler.setFormatter(self.formatter) self.logger.addHandler(self.handler)
Example #15
Source File: test_logger.py From llvm-zorg with Apache License 2.0 | 6 votes |
def test_logger(self):
    # type: () -> ()
    # This test assumes the initial, intended semantics of the various
    # loggers returned. If semantics change, so must the tests.
    init_level = L._root.level
    test_level = 7  # arbitrary non-standard level, distinct from any default
    log_name = "TestLoggerLogger"
    log = L.get_logger(log_name)
    with self.subTest(method=L.get_logger):
        self.assertEqual(log.__class__, logging.Logger)
        self.assertEqual(log_name, log.name)
        # Returned loggers should default to 0 (inherit parent level)
        self.assertEqual(0, log.level)
    with self.subTest(method=L.set_level):
        # Assert set_level() sets root level, not sublogger level
        L.set_level(test_level)
        self.assertEqual(test_level, L._root.level)
        self.assertEqual(0, log.level)
        # Restore and verify
        L.set_level(init_level)
        self.assertEqual(init_level, L._root.level)
        self.assertEqual(0, log.level)
Example #16
Source File: config.py From code2vec with MIT License | 6 votes |
def get_logger(self) -> logging.Logger:
    """Return the lazily-created 'code2vec' logger, configuring it on first use.

    Console output is attached when VERBOSE_MODE >= 1 and file output when
    LOGS_PATH is set; both share the same timestamped format at INFO level.
    """
    if self.__logger is not None:
        return self.__logger

    log = logging.getLogger('code2vec')
    log.setLevel(logging.INFO)
    log.handlers = []  # discard handlers left over from earlier configuration
    log.propagate = 0  # keep records from reaching the root logger
    formatter = logging.Formatter('%(asctime)s %(levelname)-8s %(message)s')
    if self.VERBOSE_MODE >= 1:
        console_handler = logging.StreamHandler(sys.stdout)
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(formatter)
        log.addHandler(console_handler)
    if self.LOGS_PATH:
        file_handler = logging.FileHandler(self.LOGS_PATH)
        file_handler.setLevel(logging.INFO)
        file_handler.setFormatter(formatter)
        log.addHandler(file_handler)
    self.__logger = log
    return self.__logger
Example #17
Source File: utils.py From pySmartDL with The Unlicense | 6 votes |
def create_debugging_logger():
    '''
    Creates a debugging logger that prints to console.

    :rtype: `logging.Logger` instance
    '''
    global DEFAULT_LOGGER_CREATED
    t_log = logging.getLogger('pySmartDL')
    # Only wire up the console handler once per process.
    if DEFAULT_LOGGER_CREATED:
        return t_log
    t_log.setLevel(logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    console.setFormatter(logging.Formatter(
        '[%(levelname)s||%(thread)d@{%(pathname)s:%(lineno)d}] %(message)s'))
    t_log.addHandler(console)
    DEFAULT_LOGGER_CREATED = True
    return t_log
Example #18
Source File: __init__.py From bot with MIT License | 6 votes |
def monkeypatch_trace(self: logging.Logger, msg: str, *args, **kwargs) -> None:
    """
    Log 'msg % args' with severity 'TRACE'.

    To pass exception information, use the keyword argument exc_info with
    a true value, e.g.

    logger.trace("Houston, we have an %s", "interesting problem", exc_info=1)
    """
    if not self.isEnabledFor(TRACE_LEVEL):
        return
    # args is forwarded as a tuple, matching Logger._log's signature.
    self._log(TRACE_LEVEL, msg, args, **kwargs)
Example #19
Source File: keras_checkpoint_saver_callback.py From code2vec with MIT License | 5 votes |
def __init__(self, model_wrapper, nr_epochs_to_save: int = 1, logger: logging.Logger = None):
    """Create a callback that saves the model periodically during training.

    :param model_wrapper: Wrapper exposing the model save operation
        (exact interface defined elsewhere in the project).
    :param nr_epochs_to_save: Save frequency, in epochs.
    :param logger: Logger to report to; defaults to the root logger.
    """
    self.model_wrapper = model_wrapper
    self.nr_epochs_to_save: int = nr_epochs_to_save
    # Fall back to the root logger so later logging calls never hit None.
    self.logger = logger if logger is not None else logging.getLogger()
    # None until a checkpoint is saved — presumably updated by the epoch-end
    # hook defined elsewhere; confirm.
    self.last_saved_epoch: Optional[int] = None
    super(ModelCheckpointSaverCallback, self).__init__()
Example #20
Source File: ansible_executor_v2.py From im with GNU General Public License v3.0 | 5 votes |
def display(self, msg, color=None, stderr=False, screen_only=False, log_only=False):
    """Emit ``msg`` through the configured output channel.

    When ``self.output`` is set, a ``logging.Logger`` receives an info record
    and any other object gets a raw ``write`` with a trailing newline;
    otherwise the message is written to stdout and flushed.  The remaining
    keyword flags are accepted for interface compatibility but unused here.
    """
    if not self.output:
        sys.stdout.write(msg)
        sys.stdout.flush()
        return
    if isinstance(self.output, logging.Logger):
        self.output.info(msg)
    else:
        self.output.write("%s\n" % msg)
Example #21
Source File: workflow.py From Quiver-alfred with MIT License | 5 votes |
def logger(self, logger):
    """Set a custom logger.

    :param logger: The logger to use
    :type logger: `~logging.Logger` instance
    """
    # NOTE(review): presumably decorated with @logger.setter in the full
    # source; only the function body is visible here — confirm.
    self._logger = logger
Example #22
Source File: logger.py From agogosml with MIT License | 5 votes |
def _logger(self) -> logging.Logger:
    """Create the logger.

    Prefers a YAML logging configuration whose location is taken from the
    environment variable named by ``self.env_key`` (falling back to
    ``self.path``); when no such file exists, a plain ``basicConfig`` with a
    standard format is applied instead.
    """
    override = os.getenv(self.env_key)
    config_path = Path(override or self.path)
    if not config_path.is_file():
        # No config file on disk: fall back to a simple leveled basicConfig.
        logging.basicConfig(
            format="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
            level=self.level)
        return logging.getLogger(self.name)
    with config_path.open('rt') as fobj:
        config = yaml.safe_load(fobj.read())
    logging.config.dictConfig(config)
    return logging.getLogger(self.name)
Example #23
Source File: _logging.py From python-zhmcclient with Apache License 2.0 | 5 votes |
def get_logger(name):
    """
    Return a :class:`~py:logging.Logger` object with the specified name.

    A :class:`~py:logging.NullHandler` handler is added to the logger if it
    does not have any handlers yet and if it is not the Python root logger.
    This prevents the propagation of log requests up the Python logger
    hierarchy, and therefore causes this package to be silent by default.
    """
    logger = logging.getLogger(name)
    # The root logger ('') is left untouched; everything else gets exactly
    # one NullHandler, added only if the logger has no handlers at all.
    needs_null_handler = name != '' and not logger.handlers
    if needs_null_handler:
        logger.addHandler(logging.NullHandler())
    return logger
Example #24
Source File: logging.py From tomodachi with MIT License | 5 votes |
def log_setup(service: Any, name: Optional[str] = None, level: Optional[Union[str, int]] = None,
              formatter: Optional[Union[logging.Formatter, str, bool]] = True,
              filename: Optional[str] = None) -> logging.Logger:
    """Attach a CustomServiceLogHandler writing to ``filename`` and return the logger.

    :param service: Service object; its ``name`` is used when ``name`` is not given.
    :param name: Logger name; defaults to ``log.<service.name>``.
    :param level: Optional level for the handler, as an int or a ``logging``
        level name (e.g. ``"INFO"``).
    :param formatter: ``True`` for the default format, a format string, a
        ``logging.Formatter``, or a falsy value for no formatter.
    :param filename: Path of the log file; required.
    :raises Exception: If ``filename`` is not supplied.
    :raises FileNotFoundError: If the path is invalid (also logged).
    :raises PermissionError: If the path is not writable (also logged).
    """
    if not name:
        name = 'log.{}'.format(service.name)
    if not filename:
        raise Exception('log_filename must be specified for logging setup')

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # isinstance() instead of type(...) is str: same effect, idiomatic.
    if level and isinstance(level, str):
        level = getattr(logging, level)

    # Skip attaching a second handler for the same file/level combination.
    already_attached = any(
        isinstance(h, CustomServiceLogHandler) and (level is None or level == h.level)
        for h in logger.handlers)
    if not already_attached:
        try:
            wfh = CustomServiceLogHandler(filename=filename)
        except FileNotFoundError:
            logging.getLogger('logging').warning('Unable to use file for logging - invalid path ("{}")'.format(filename))
            raise
        except PermissionError:
            logging.getLogger('logging').warning('Unable to use file for logging - invalid permissions ("{}")'.format(filename))
            raise

        if level:
            wfh.setLevel(level)

        # Resolve the formatter argument: str -> compiled format,
        # True -> default format, Formatter -> used as-is, falsy -> none.
        if formatter and isinstance(formatter, str):
            formatter = logging.Formatter(formatter)
        elif formatter is True:
            formatter = logging.Formatter('%(asctime)s (%(name)s): %(message)s')
        if isinstance(formatter, logging.Formatter):
            wfh.setFormatter(formatter)

        logger.addHandler(wfh)

    return logger
Example #25
Source File: plugin_base.py From maubot with GNU Affero General Public License v3.0 | 5 votes |
def __init__(self, client: 'MaubotMatrixClient', loop: AbstractEventLoop, http: ClientSession,
             instance_id: str, log: Logger, config: Optional['BaseProxyConfig'],
             database: Optional[Engine], webapp: Optional['PluginWebApp'],
             webapp_url: Optional[str]) -> None:
    """Store the runtime resources handed to a plugin instance.

    :param client: Matrix client the plugin acts through.
    :param loop: Event loop the plugin runs on.
    :param http: Shared aiohttp client session.
    :param instance_id: Unique identifier of this plugin instance.
    :param log: Logger dedicated to this instance.
    :param config: Optional plugin configuration proxy.
    :param database: Optional SQLAlchemy engine for plugin storage.
    :param webapp: Optional web application exposed by the plugin.
    :param webapp_url: Public base URL of the webapp, if any.
    """
    self.client = client
    self.loop = loop
    self.http = http
    self.id = instance_id
    self.log = log
    self.config = config
    self.database = database
    self.webapp = webapp
    # Wrap the raw string in a URL object; stays None when no webapp exists.
    self.webapp_url = URL(webapp_url) if webapp_url else None
    # Presumably filled by handler registration before startup (not shown) —
    # confirm against the rest of the class.
    self._handlers_at_startup = []
Example #26
Source File: downloadclient.py From rucio with Apache License 2.0 | 5 votes |
def __init__(self, program_name, useability_check_args, extract_args, logger):
    """
    Initialises a extraction tool object

    :param program_name: the name of the archive extraction program, e.g., unzip
    :param useability_check_args: the arguments of the extraction program to test if its installed, e.g., --version
    :param extract_args: the arguments that will be passed to the program for extraction
    :param logger: logging.Logger object
    """
    self.program_name = program_name
    self.useability_check_args = useability_check_args
    self.extract_args = extract_args
    self.logger = logger
    # None until a usability probe runs — presumably cached by a check
    # method defined elsewhere in the class; confirm.
    self.is_useable_result = None
Example #27
Source File: test_io_raw_memory.py From moler with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_can_use_provided_logger(memory_connection_class):
    """Connection created with logger_name should expose that exact logger."""
    from moler.connection import Connection
    import logging
    moler_conn = Connection()
    connection = memory_connection_class(moler_connection=moler_conn, logger_name="conn.web_srv")
    # The io connection exposes a real stdlib logger under the requested name.
    assert isinstance(connection.logger, logging.Logger)
    assert connection.logger.name == "conn.web_srv"
Example #28
Source File: commons.py From SalesforcePy with BSD 3-Clause "New" or "Revised" License | 5 votes |
def get_request_vars(self):
    """
    Returns the variables required by `request()` and other functions.

    :return: (headers, logger, request_object, response, service)
    :rtype: (dict, logging.Logger, requests.Request|None, list|dict|None, string)
    """
    headers = self.get_headers()
    logger = logging.getLogger('sfdc_py')
    # request_object and response start out empty; callers fill them in.
    request_object = None
    response = None
    service = self.get_request_url()
    return (headers, logger, request_object, response, service)
Example #29
Source File: test_io_raw_memory.py From moler with BSD 3-Clause "New" or "Revised" License | 5 votes |
def test_can_use_default_logger_based_on_connection_name(memory_connection_class):
    """Without an explicit logger_name, the logger is derived from the connection name."""
    from moler.connection import Connection
    import logging
    # Explicit connection name: logger name embeds it directly.
    moler_conn = Connection()
    connection = memory_connection_class(moler_connection=moler_conn, name="ABC")
    assert isinstance(connection.logger, logging.Logger)
    assert connection.logger.name == "moler.connection.ABC.io"
    # No name given: the auto-generated connection name is used instead.
    moler_conn = Connection()
    connection = memory_connection_class(moler_connection=moler_conn)
    assert isinstance(connection.logger, logging.Logger)
    assert connection.logger.name == "moler.connection.{}.io".format(connection.name)
Example #30
Source File: uploadclient.py From rucio with Apache License 2.0 | 5 votes |
def __init__(self, _client=None, logger=None, tracing=True):
    """
    Initialises the basic settings for an UploadClient object

    :param _client: - Optional: rucio.client.client.Client object. If None, a new object will be created.
    :param logger: - logging.Logger object to use for uploads. If None nothing will be logged.
    :param tracing: - whether trace events are recorded (True by default).
    """
    if not logger:
        # A disabled placeholder logger keeps later logging calls as no-ops.
        logger = logging.getLogger('%s.null' % __name__)
        logger.disabled = True
    self.logger = logger
    self.client = _client if _client else Client()
    self.client_location = detect_client_location()
    # if token should be used, use only JWT tokens
    # (a JWT consists of exactly three dot-separated segments)
    self.auth_token = self.client.auth_token if len(self.client.auth_token.split(".")) == 3 else None
    self.tracing = tracing
    if not self.tracing:
        logger.debug('Tracing is turned off.')
    self.default_file_scope = 'user.' + self.client.account
    # Per-RSE settings cache; populated lazily elsewhere — confirm.
    self.rses = {}
    # Base payload for trace events emitted during uploads.
    self.trace = {}
    self.trace['hostname'] = socket.getfqdn()
    self.trace['account'] = self.client.account
    self.trace['eventType'] = 'upload'
    self.trace['eventVersion'] = version.RUCIO_VERSION[0]