Python logging.DEBUG Examples

The following code examples show how to use logging.DEBUG. They are taken from open source Python projects.
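
logging.DEBUG is not a function but an integer constant (10), the lowest of the standard levels (DEBUG=10, INFO=20, WARNING=30, ERROR=40, CRITICAL=50). It is passed to basicConfig(), Logger.setLevel() and Handler.setLevel() to decide which records get through. A minimal sketch of the pattern most of the examples below build on:

import logging

# Send every record at DEBUG level or above to the console.
logging.basicConfig(
    level=logging.DEBUG,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
)

log = logging.getLogger(__name__)
log.debug('visible, because the configured level is DEBUG')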

Example 1
Project: incubator-spot   Author: apache   File: utilities.py    Apache License 2.0
def get_logger(cls, logger_name, create_file=False):

        # create logger for prd_ci
        log = logging.getLogger(logger_name)
        log.setLevel(level=logging.INFO)

        # create formatter and add it to the handlers
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        if create_file:
            # create file handler for logger.
            fh = logging.FileHandler('SPOT.log')
            fh.setLevel(level=logging.DEBUG)
            fh.setFormatter(formatter)

        # create console handler for logger.
        ch = logging.StreamHandler()
        ch.setLevel(level=logging.DEBUG)
        ch.setFormatter(formatter)

        # add handlers to logger.
        if create_file:
            log.addHandler(fh)

        log.addHandler(ch)
        return log
Example 2
Project: mlbv   Author: kmac   File: util.py    GNU General Public License v3.0
def init_logging(log_file=None, append=False, console_loglevel=logging.INFO):
    """Set up logging to file and console."""
    if log_file is not None:
        if append:
            filemode_val = 'a'
        else:
            filemode_val = 'w'
        logging.basicConfig(level=logging.DEBUG,
                            format="%(asctime)s %(levelname)s %(threadName)s %(name)s %(message)s",
                            # datefmt='%m-%d %H:%M',
                            filename=log_file,
                            filemode=filemode_val)
    # define a Handler which writes messages at console_loglevel or higher to sys.stderr
    console = logging.StreamHandler()
    console.setLevel(console_loglevel)
    # set a format which is simpler for console use
    formatter = logging.Formatter("%(message)s")
    console.setFormatter(formatter)
    # add the handler to the root logger
    logging.getLogger('').addHandler(console)
    global LOG
    LOG = logging.getLogger(__name__) 
Example 3
Project: GreenGuard   Author: D3-AI   File: utils.py    MIT License
def logging_setup(verbosity=1, logfile=None, logger_name=None):
    logger = logging.getLogger(logger_name)
    log_level = (3 - verbosity) * 10
    fmt = '%(asctime)s - %(process)d - %(levelname)s - %(module)s - %(message)s'
    formatter = logging.Formatter(fmt)
    logger.setLevel(log_level)
    logger.propagate = False

    if logfile:
        file_handler = logging.FileHandler(logfile)
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)

    else:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(log_level)
        console_handler.setFormatter(formatter)
        logger.addHandler(console_handler) 
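
The log_level = (3 - verbosity) * 10 line relies on the numeric values of the standard levels (DEBUG is 10, INFO is 20, WARNING is 30), so verbosity 0, 1 and 2 select WARNING, INFO and DEBUG respectively. A hypothetical pair of calls illustrating the mapping (the log file name is made up):

logging_setup(verbosity=2, logfile='greenguard.log')  # logger at DEBUG, file handler at DEBUG
logging_setup(verbosity=0)                            # logger and console handler at WARNING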
Example 4
Project: Hasami   Author: Lokraan   File: main.py    MIT License
def setup_logging(config: dict) -> None:
	logging.getLogger("discord.http").setLevel(logging.WARNING)
	logging.getLogger("discord").setLevel(logging.INFO)

	logger = logging.getLogger()

	level = logging.DEBUG if config["debug"] else logging.INFO

	f_handler = logging.FileHandler(filename="hasami.log", encoding="utf-8", mode="w")
	cl_handler = logging.StreamHandler()

	dt_fmt = "%Y-%m-%d %H:%M:%S"
	out_fmt = "[{asctime}] [{levelname:<6}] {name}: {message}"
	logger_fmt = logging.Formatter(out_fmt, dt_fmt, style="{")

	cl_handler.setFormatter(logger_fmt)
	f_handler.setFormatter(logger_fmt)

	logger.addHandler(cl_handler)
	logger.addHandler(f_handler)
	logger.setLevel(level) 
Example 5
Project: incubator-spot   Author: apache   File: utils.py    Apache License 2.0
def get_logger(cls, logger_name, create_file=False):

        # create logger for prd_ci
        log = logging.getLogger(logger_name)
        log.setLevel(level=logging.INFO)

        # create formatter and add it to the handlers
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        if create_file:
            # create file handler for logger.
            fh = logging.FileHandler('oa.log')
            fh.setLevel(level=logging.DEBUG)
            fh.setFormatter(formatter)
        # create console handler for logger.
        ch = logging.StreamHandler()
        ch.setLevel(level=logging.DEBUG)
        ch.setFormatter(formatter)

        # add handlers to logger.
        if create_file:
            log.addHandler(fh)

        log.addHandler(ch)
        return log 
Example 6
Project: pyblish-win   Author: pyblish   File: handlers.py    GNU Lesser General Public License v3.0
def __init__(self, appname, dllname=None, logtype="Application"):
        logging.Handler.__init__(self)
        try:
            import win32evtlogutil, win32evtlog
            self.appname = appname
            self._welu = win32evtlogutil
            if not dllname:
                dllname = os.path.split(self._welu.__file__)
                dllname = os.path.split(dllname[0])
                dllname = os.path.join(dllname[0], r'win32service.pyd')
            self.dllname = dllname
            self.logtype = logtype
            self._welu.AddSourceToRegistry(appname, dllname, logtype)
            self.deftype = win32evtlog.EVENTLOG_ERROR_TYPE
            self.typemap = {
                logging.DEBUG   : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.INFO    : win32evtlog.EVENTLOG_INFORMATION_TYPE,
                logging.WARNING : win32evtlog.EVENTLOG_WARNING_TYPE,
                logging.ERROR   : win32evtlog.EVENTLOG_ERROR_TYPE,
                logging.CRITICAL: win32evtlog.EVENTLOG_ERROR_TYPE,
            }
        except ImportError:
            print("The Python Win32 extensions for NT (service, event "\
                        "logging) appear not to be available.")
            self._welu = None 
Example 7
Project: pyblish-win   Author: pyblish   File: test_logging.py    GNU Lesser General Public License v3.0
def test_persistent_loggers(self):
        # Logger objects are persistent and retain their configuration, even
        #  if visible references are destroyed.
        self.root_logger.setLevel(logging.INFO)
        foo = logging.getLogger("foo")
        self._watch_for_survival(foo)
        foo.setLevel(logging.DEBUG)
        self.root_logger.debug(self.next_message())
        foo.debug(self.next_message())
        self.assert_log_lines([
            ('foo', 'DEBUG', '2'),
        ])
        del foo
        # foo has survived.
        self._assertTruesurvival()
        # foo has retained its settings.
        bar = logging.getLogger("foo")
        bar.debug(self.next_message())
        self.assert_log_lines([
            ('foo', 'DEBUG', '2'),
            ('foo', 'DEBUG', '3'),
        ]) 
Example 8
Project: backtrader-cn   Author: pandalibin   File: sina.py    GNU General Public License v3.0
def enable_debug_requests():
    # Enable debugging at the http.client level (requests -> urllib3 -> http.client).
    # You will see the REQUEST, including HEADERS and DATA, and the RESPONSE with HEADERS but without DATA.
    # The only thing missing is the response body, which is not logged.
    from http.client import HTTPConnection
    import logging

    HTTPConnection.debuglevel = 1
    logger.setLevel(logging.DEBUG)
    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True


# Uncomment the line below to enable debug mode
# enable_debug_requests() 
Example 9
Project: BASS   Author: Cisco-Talos   File: cmdline.py    GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Bass")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Sample path") 

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO
        }[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG

    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
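
Because -v is declared with action="count", each repetition raises verbosity one step, and a count of three or more misses the dict and lands in the KeyError branch, i.e. logging.DEBUG. A hypothetical condensed form of the same lookup, using dict.get() instead of try/except:

loglevel = {0: logging.ERROR, 1: logging.WARN, 2: logging.INFO}.get(
    args.verbose, logging.DEBUG)  # -vvv and beyond fall back to DEBUG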
Example 10
Project: BASS   Author: Cisco-Talos   File: whitelist.py    GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Add samples to BASS whitelist")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("sample", help = "Whitelist sample")

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO}[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
Example 11
Project: BASS   Author: Cisco-Talos   File: client.py    GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Find common ngrams in binary files")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--output", type = str, default = None, help = "Output to file instead of stdout")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Cluster samples")

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO}[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
Example 12
Project: RelayBot2.0   Author: nukeop   File: relaybot.py    GNU General Public License v3.0
def configure_logging(logfilename=None):
        """Creates a root logger, configures it, and returns it.
        """
        root = logging.getLogger()
        root.setLevel(logging.DEBUG)

        logging.getLogger('SteamClient').setLevel(logging.WARNING)

        formatter = logging.Formatter("[%(levelname)s] - %(asctime)s - %(name)s -"
        " %(message)s")

        console = logging.StreamHandler()
        console.setLevel(logging.DEBUG)
        console.setFormatter(formatter)
        root.addHandler(console)

        if logfilename is not None:
            rfhandler = logging.handlers.RotatingFileHandler(logfilename,
                        maxBytes=2*1024*1024,
                        backupCount=8)
            rfhandler.setLevel(logging.DEBUG)
            rfhandler.setFormatter(formatter)
            root.addHandler(rfhandler)

        return root 
Example 13
Project: RelayBot2.0   Author: nukeop   File: logs.py    GNU General Public License v3.0
def setup_logger(self, steamid, friend=False):
        name = (
            self.make_filename(
                self.bot.user.get_name_from_steamid(steamid),
                steamid
            ) if friend else
            self.make_filename(
                self.bot.user.groups.get_name(steamid),
                steamid
            )
        )
        self.loggers[steamid] = logging.getLogger(__name__+'.'+str(steamid))
        self.loggers[steamid].setLevel(logging.DEBUG)
        self.loggers[steamid].propagate = False
        formatter = logging.Formatter("%(asctime)s - %(message)s")
        filename = (os.path.join(self.friend_logs_path, name) if friend
                    else os.path.join(self.group_logs_path, name))
        handler = logging.FileHandler(filename)
        handler.setFormatter(formatter)
        handler.setLevel(logging.DEBUG)
        self.loggers[steamid].addHandler(handler) 
Example 14
Project: oeffis-paper   Author: djaffry   File: utils.py    MIT License
def get_logger(name):
    """
    Get a preconfigured logger

    Example logging output
    2019-03-03 12:40:20,025 - INFO - __main__: Application start.sh!

    :param name: Logger name
    :return: preconfigured logger
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    handler = logging.StreamHandler(sys.stdout)
    handler.setLevel(logging.INFO)
    formatter = logging.Formatter('%(asctime)s - %(levelname)s - %(name)s:  %(message)s')
    handler.setFormatter(formatter)
    logger.addHandler(handler)
    return logger 
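
Note that the logger itself is set to DEBUG while the stdout handler filters at INFO, so DEBUG records are created but dropped at the handler; a second, more verbose handler can be attached later without reconfiguring the logger. A hypothetical addition (the file name is made up):

logger = get_logger(__name__)
debug_file = logging.FileHandler('debug.log')
debug_file.setLevel(logging.DEBUG)
logger.addHandler(debug_file)  # captures the DEBUG records the stdout handler filters out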
Example 15
Project: wikilinks   Author: trovdimi   File: builder.py    MIT License
def run(self):
        self.print_title('This is the interactive building program')
        self.create_tmp_if_not_exists()

        choice = self.read_choice('Would you like to', [
            'create the database structure', 
            'extract articles and redirects from the wikipedia dump file'
            ])

        # setup logging
        LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
        LOGGING_PATH = self.read_path('Please enter the path of the logging file [.log]', default='./tmp/build-%d.log' % (choice[0]+1), must_exist=False)
        logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')

        if choice[0] == 0:
            self._create_structure()
        if choice[0] == 1:
            self._extract_articles() 
Example 16
Project: shaptools   Author: SUSE   File: shapcli_test.py    Apache License 2.0
def test_run_sr_invalid_params(self, mock_parse_arguments, mock_setup_logger):

        mock_parser = mock.Mock()
        mock_args = mock.Mock(
            verbosity=False, config=False, sid='qas', instance='01', password=False, sr=True)
        mock_logger = mock.Mock()
        mock_hana_instance = mock.Mock()
        mock_parse_arguments.return_value = [mock_parser, mock_args]
        mock_setup_logger.return_value = mock_logger

        with pytest.raises(SystemExit) as my_exit:
            shapcli.run()

        assert my_exit.type == SystemExit
        assert my_exit.value.code == 1

        mock_parse_arguments.assert_called_once_with()
        mock_setup_logger.assert_called_once_with(logging.DEBUG)
        mock_logger.info.assert_called_once_with(
            'Configuration file or sid, instance and passwords parameters must be provided\n')
        mock_parser.print_help.assert_called_once_with() 
Example 17
Project: iSDX   Author: sdn-ixp   File: replay.py    Apache License 2.0
def __init__(self, config, flows_dir, ports_dir, num_timesteps, debug=False):
        self.logger = logging.getLogger("LogHistory")
        if debug:
            self.logger.setLevel(logging.DEBUG)

        self.log_entry = namedtuple("LogEntry", "source destination type")
        self.ports = defaultdict(list)
        self.flows = defaultdict(list)

        self.data = defaultdict(lambda: defaultdict(lambda: defaultdict(int)))
        self.current_timestep = 0
        self.total_timesteps = num_timesteps

        self.parse_config(config)
        self.parse_logs(num_timesteps, flows_dir, ports_dir)
        self.info()

        pretty(self.data) 
Example 18
Project: utilities   Author: czbiohub   File: log_util.py    MIT License
def get_trfh_logger(name, *args):
    # function to create a rotating-file logger
    # with potentially multiple file handlers

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)

    # create a logging format
    formatter = logging.Formatter(
        "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
    )

    for file_name, log_level, when, backup_count in args:
        log_handler = TimedRotatingFileHandler(
            file_name, when=when, backupCount=backup_count
        )
        log_handler.setLevel(log_level)
        log_handler.setFormatter(formatter)
        logger.addHandler(log_handler)

    return logger 
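
Each tuple in *args describes one TimedRotatingFileHandler, so a single call can fan the same logger out to several files with different levels and rotation schedules. A hypothetical call (the file names and rotation settings are made up):

logger = get_trfh_logger(
    'pipeline',
    ('pipeline.info.log', logging.INFO, 'midnight', 7),
    ('pipeline.debug.log', logging.DEBUG, 'midnight', 3),
)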
Example 19
Project: rnm   Author: alexjaw   File: interface.py    MIT License
def test_me():
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)
    logger.info('------------- Starting test... -------------')

    eth = Interface(iface='eth0')
    wlan = Interface(iface='wlan0')

    resp = eth.get_ip()
    logger.info(repr(resp))

    resp = wlan.get_ip()
    logger.info(repr(resp))

    resp = wlan.disconnect()
    # logger.info(repr(resp))

    resp = wlan.connect()
    # logger.info(repr(resp))

    logger.info('-------------    Finished      -------------') 
Example 20
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: __init__.py    MIT License
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s', __name__)
    return handler


# ... Clean up. 
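
Because the helper returns the handler it attached, the handler can be removed again once debugging is finished. A hypothetical usage sketch, assuming the standalone urllib3 package rather than the vendored copy excerpted here:

import logging
import urllib3

handler = urllib3.add_stderr_logger(logging.DEBUG)
# ... perform some requests and inspect the debug output ...
logging.getLogger('urllib3').removeHandler(handler)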
Example 21
Project: Ansible-Example-AB2018   Author: umit-ozturk   File: distro.py    MIT License
def main():
    logger = logging.getLogger(__name__)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(logging.StreamHandler(sys.stdout))

    parser = argparse.ArgumentParser(description="Linux distro info tool")
    parser.add_argument(
        '--json',
        '-j',
        help="Output in machine readable format",
        action="store_true")
    args = parser.parse_args()

    if args.json:
        logger.info(json.dumps(info(), indent=4, sort_keys=True))
    else:
        logger.info('Name: %s', name(pretty=True))
        distribution_version = version(pretty=True)
        logger.info('Version: %s', distribution_version)
        distribution_codename = codename()
        logger.info('Codename: %s', distribution_codename) 
Example 22
Project: esp-sdk-python   Author: EvidentSecurity   File: configuration.py    MIT License
def debug(self, value):
        """
        Sets the debug status.

        :param value: The debug status, True or False.
        :type: bool
        """
        self.__debug = value
        if self.__debug:
            # if debug status is True, turn on debug logging
            for _, logger in iteritems(self.logger):
                logger.setLevel(logging.DEBUG)
            # turn on httplib debug
            httplib.HTTPConnection.debuglevel = 1
        else:
            # if debug status is False, turn off debug logging,
            # setting log level to default `logging.WARNING`
            for _, logger in iteritems(self.logger):
                logger.setLevel(logging.WARNING)
            # turn off httplib debug
            httplib.HTTPConnection.debuglevel = 0 
Example 23
Project: pnp   Author: HazardDede   File: pnp.py    MIT License
def _setup_logging(*candidates, default_level=logging.INFO, env_key='PNP_LOG_CONF', verbose=False):
    """Setup logging configuration"""
    log_file_path = get_first_existing_file(*candidates)
    env_path = os.getenv(env_key, None)
    if env_path:
        log_file_path = env_path
    if log_file_path and os.path.exists(log_file_path):
        with open(log_file_path, 'rt') as fhandle:
            config = yaml.safe_load(fhandle.read())
        logging.config.dictConfig(config)
        logging.info("Logging loaded from: %s", log_file_path)
        if verbose:
            logging.getLogger().setLevel(logging.DEBUG)
    else:
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                            level=logging.DEBUG if verbose else default_level)
        logging.info("Logging loaded with basic configuration") 
Example 24
Project: flasky   Author: RoseOu   File: __init__.py    MIT License
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s' % __name__)
    return handler

# ... Clean up. 
Example 25
Project: flasky   Author: RoseOu   File: __init__.py    MIT License
def add_stderr_logger(level=logging.DEBUG):
    """
    Helper for quickly adding a StreamHandler to the logger. Useful for
    debugging.

    Returns the handler after adding it.
    """
    # This method needs to be in this __init__.py to get the __name__ correct
    # even if urllib3 is vendored within another package.
    logger = logging.getLogger(__name__)
    handler = logging.StreamHandler()
    handler.setFormatter(logging.Formatter('%(asctime)s %(levelname)s %(message)s'))
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.debug('Added a stderr logging handler to logger: %s' % __name__)
    return handler

# ... Clean up. 
Example 26
Project: core   Author: lifemapper   File: log.py    GNU General Public License v3.0
def __init__(self, name, level=logging.DEBUG, addConsole=False, 
                      addFile=False, logFilename=None):
      # In case level was set to None
      if level is None:
         level = logging.DEBUG
      LmLogger.__init__(self, name, level)
      if addConsole:
         self._addConsoleHandler()
      if addFile:
         if logFilename is not None:
            fn = logFilename
         else:
            fn = os.path.join(COMPUTE_LOG_PATH, '%s.log' % (name))
         self._addFileHandler(fn)

# ............................................................................. 
Example 27
Project: badge-o-matic   Author: markuslindenberg   File: webapp.py    BSD 2-Clause "Simplified" License
def _print(pdfdata):
    if app.config['DEBUG']:
        app.logger.info('printing to /tmp/out.pdf')
        open('/tmp/out.pdf', 'wb').write(pdfdata)
    else:
        lpr = subprocess.Popen(['lpr', '-P', PRINTER], stdin=subprocess.PIPE)
        lpr.communicate(pdfdata) 
Example 28
Project: fs_image   Author: facebookincubator   File: common.py    MIT License
def init_logging(*, debug: bool=False):
    logging.basicConfig(
        format='%(levelname)s %(name)s %(asctime)s %(message)s',
        level=logging.DEBUG if debug else logging.INFO,
    ) 
Example 29
Project: leapp-repository   Author: oamg   File: ntp2chrony.py    Apache License 2.0
def main():
    parser = argparse.ArgumentParser(description="Convert ntp configuration to chrony.")
    parser.add_argument("-r", "--root", dest="roots", default=["/"], nargs="+",
                        metavar="DIR", help="specify root directory (default /)")
    parser.add_argument("--ntp-conf", action="store", default="/etc/ntp.conf",
                        metavar="FILE", help="specify ntp config (default /etc/ntp.conf)")
    parser.add_argument("--step-tickers", action="store", default="",
                        metavar="FILE", help="specify ntpdate step-tickers config (no default)")
    parser.add_argument("--chrony-conf", action="store", default="/etc/chrony.conf",
                        metavar="FILE", help="specify chrony config (default /etc/chrony.conf)")
    parser.add_argument("--chrony-keys", action="store", default="/etc/chrony.keys",
                        metavar="FILE", help="specify chrony keyfile (default /etc/chrony.keys)")
    parser.add_argument("-b", "--backup", action="store_true", help="backup existing configs before writing")
    parser.add_argument("-L", "--ignored-lines", action="store_true", help="print ignored lines")
    parser.add_argument("-D", "--ignored-directives", action="store_true",
                        help="print names of ignored directives")
    parser.add_argument("-n", "--dry-run", action="store_true", help="don't make any changes")
    parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity")

    args = parser.parse_args()

    logging.basicConfig(format="%(message)s",
                        level=[logging.ERROR, logging.INFO, logging.DEBUG][min(args.verbose, 2)])

    for root in args.roots:
        conf = NtpConfiguration(root, args.ntp_conf, args.step_tickers)

        if args.ignored_lines:
            for line in conf.ignored_lines:
                print(line)

        if args.ignored_directives:
            for directive in conf.ignored_directives:
                print(directive)

        conf.write_chrony_configuration(args.chrony_conf, args.chrony_keys, args.dry_run, args.backup) 
Example 30
Project: tom-bot   Author: maartenberg   File: system_plugin.py    MIT License
def logdebug_cb(bot, message=None, *args, **kwargs):
    ''' Temporarily set the loglevel to debug. '''
    if message:
        if not isadmin(bot, message):
            return 'Not authorized.'
    logging.getLogger().setLevel(logging.DEBUG)
    return 'Ok.' 
Example 31
Project: heroku-log-lights   Author: codingjoe   File: __main__.py    MIT License
def get_args():
    """Setup argument parser and return parsed arguments."""
    parser = argparse.ArgumentParser(
        description=__doc__.strip()
    )
    parser.add_argument('-a', '--app', dest='app', metavar='HEROKU_APP', type=str,
                        help='Name of the target Heroku app.')
    parser.add_argument('-t', '--token', dest='token', metavar='AUTH_TOKEN', type=str,
                        default=None, help='Heroku AUTH token.')
    parser.add_argument('-v', dest='verbose', action='store_const',
                        const=logging.DEBUG, default=logging.WARNING,
                        help='verbose mode (default: off)')
    return parser.parse_args() 
Example 32
Project: pyblish-win   Author: pyblish   File: refactor.py    GNU Lesser General Public License v3.0
def refactor_doctest(self, block, lineno, indent, filename):
        """Refactors one doctest.

        A doctest is given as a block of lines, the first of which starts
        with ">>>" (possibly indented), while the remaining lines start
        with "..." (identically indented).

        """
        try:
            tree = self.parse_block(block, lineno, indent)
        except Exception as err:
            if self.logger.isEnabledFor(logging.DEBUG):
                for line in block:
                    self.log_debug("Source: %s", line.rstrip(u"\n"))
            self.log_error("Can't parse docstring in %s line %s: %s: %s",
                           filename, lineno, err.__class__.__name__, err)
            return block
        if self.refactor_tree(tree, filename):
            new = unicode(tree).splitlines(True)
            # Undo the adjustment of the line numbers in wrap_toks() below.
            clipped, new = new[:lineno-1], new[lineno-1:]
            assert clipped == [u"\n"] * (lineno-1), clipped
            if not new[-1].endswith(u"\n"):
                new[-1] += u"\n"
            block = [indent + self.PS1 + new.pop(0)]
            if new:
                block += [indent + self.PS2 + line for line in new]
        return block 
Example 33
Project: pyblish-win   Author: pyblish   File: handlers.py    GNU Lesser General Public License v3.0
def getEventType(self, record):
        """
        Return the event type for the record.

        Override this if you want to specify your own types. This version does
        a mapping using the handler's typemap attribute, which is set up in
        __init__() to a dictionary which contains mappings for DEBUG, INFO,
        WARNING, ERROR and CRITICAL. If you are using your own levels you will
        either need to override this method or place a suitable dictionary in
        the handler's typemap attribute.
        """
        return self.typemap.get(record.levelno, self.deftype) 
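
The docstring spells out the two extension points: override getEventType() or put a custom mapping in the handler's typemap attribute. A hypothetical sketch of the second option, mapping a made-up AUDIT level to a Windows audit event type (requires the pywin32 extensions):

import logging
import logging.handlers
import win32evtlog

AUDIT = 25
logging.addLevelName(AUDIT, 'AUDIT')

handler = logging.handlers.NTEventLogHandler('MyApp')
handler.typemap[AUDIT] = win32evtlog.EVENTLOG_AUDIT_SUCCESS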
Example 34
Project: pyblish-win   Author: pyblish   File: test_logging.py    GNU Lesser General Public License v3.0
def setUp(self):
        """Setup the default logging stream to an internal StringIO instance,
        so that we can examine log output as we want."""
        logger_dict = logging.getLogger().manager.loggerDict
        logging._acquireLock()
        try:
            self.saved_handlers = logging._handlers.copy()
            self.saved_handler_list = logging._handlerList[:]
            self.saved_loggers = logger_dict.copy()
            self.saved_level_names = logging._levelNames.copy()
        finally:
            logging._releaseLock()

        # Set two unused loggers: one non-ASCII and one Unicode.
        # This is to test correct operation when sorting existing
        # loggers in the configuration code. See issue 8201.
        logging.getLogger("\xab\xd7\xbb")
        logging.getLogger(u"\u013f\u00d6\u0047")

        self.root_logger = logging.getLogger("")
        self.original_logging_level = self.root_logger.getEffectiveLevel()

        self.stream = cStringIO.StringIO()
        self.root_logger.setLevel(logging.DEBUG)
        self.root_hdlr = logging.StreamHandler(self.stream)
        self.root_formatter = logging.Formatter(self.log_format)
        self.root_hdlr.setFormatter(self.root_formatter)
        self.root_logger.addHandler(self.root_hdlr) 
Example 35
Project: drydock   Author: airshipit   File: base.py    Apache License 2.0
def debug(self, ctx, msg):
        self.log_error(ctx, logging.DEBUG, msg) 
Example 36
Project: logging-test-case   Author: chadrosenquist   File: capturelogs_test.py    MIT License
def test_log_level_restored(self):
        """Verifies the log level is correct restored."""
        foo_logger = logging.getLogger('foo')
        foo_logger.setLevel(logging.DEBUG)
        self._logging_test_function()
        self.assertEqual(foo_logger.level, logging.DEBUG) 
Example 37
Project: logging-test-case   Author: chadrosenquist   File: capturelogs_test.py    MIT License
def test_log_level_restored_after_exception(self):
        """Verifies the log level is correct restored, even after an exception."""
        foo_logger = logging.getLogger('foo')
        foo_logger.setLevel(logging.DEBUG)
        with self.assertRaises(ValueError):
            self._logging_test_function_exception()
        self.assertEqual(foo_logger.level, logging.DEBUG) 
Example 38
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: logging.py    Apache License 2.0
def create_logger(app):
    """Creates a logger for the given application.  This logger works
    similar to a regular Python logger but changes the effective logging
    level based on the application's debug flag.  Furthermore this
    function also removes all attached handlers in case there was a
    logger with the log name before.
    """
    Logger = getLoggerClass()

    class DebugLogger(Logger):
        def getEffectiveLevel(x):
            if x.level == 0 and app.debug:
                return DEBUG
            return Logger.getEffectiveLevel(x)

    class DebugHandler(StreamHandler):
        def emit(x, record):
            StreamHandler.emit(x, record) if app.debug else None

    handler = DebugHandler()
    handler.setLevel(DEBUG)
    handler.setFormatter(Formatter(app.debug_log_format))
    logger = getLogger(app.logger_name)
    # just in case that was not a new logger, get rid of all the handlers
    # already attached to it.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(handler)
    return logger 
Example 39
Project: Flask-Python-GAE-Login-Registration   Author: orymeyer   File: logging.py    Apache License 2.0
def create_logger(app):
    """Creates a logger for the given application.  This logger works
    similar to a regular Python logger but changes the effective logging
    level based on the application's debug flag.  Furthermore this
    function also removes all attached handlers in case there was a
    logger with the log name before.
    """
    Logger = getLoggerClass()

    class DebugLogger(Logger):
        def getEffectiveLevel(x):
            if x.level == 0 and app.debug:
                return DEBUG
            return Logger.getEffectiveLevel(x)

    class DebugHandler(StreamHandler):
        def emit(x, record):
            StreamHandler.emit(x, record) if app.debug else None

    handler = DebugHandler()
    handler.setLevel(DEBUG)
    handler.setFormatter(Formatter(app.debug_log_format))
    logger = getLogger(app.logger_name)
    # just in case that was not a new logger, get rid of all the handlers
    # already attached to it.
    del logger.handlers[:]
    logger.__class__ = DebugLogger
    logger.addHandler(handler)
    return logger 
Example 40
Project: dnsbin   Author: thomas-maurice   File: logger.py    Do What The F*ck You Want To Public License
def setup_logger(name, to_stdout=True, file_name=None):
    """Creates the logging object used by the script

    By default it prints information to stdout, but
    you can tell it to print information to a file too
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)s: %(message)s'
    )

    # reset handlers
    for handler in logger.handlers:
        # Don't close stdout or stderr !
        if handler.__class__ != logging.StreamHandler:
            handler.stream.close()
        logger.removeHandler(handler)

    if file_name:
        fhandle = logging.FileHandler(file_name)
        fhandle.setLevel(logging.DEBUG)
        fhandle.setFormatter(formatter)
        logger.addHandler(fhandle)

    if to_stdout:
        chandle = logging.StreamHandler()
        chandle.setLevel(logging.DEBUG)
        chandle.setFormatter(formatter)
        logger.addHandler(chandle)

    return logger 
Example 41
Project: wikilinks   Author: trovdimi   File: pickle_data.py    MIT License
def pickle_sim():
    # setup logging
    LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
    LOGGING_PATH = 'tmp/semsim-pickle.log'
    logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')
    i = 0
    voc_zip_links = pickle.load( open( "/ssd/ddimitrov/pickle/voc_zip_links", "rb" ) )
    print "loaded voc_zip_links"
    uniqeu_nonzero_map = pickle.load( open( "/ssd/ddimitrov/pickle/uniqeu_nonzero_map", "rb" ) )
    vocab = pickle.load( open( "/ssd/ddimitrov/pickle/vocab", "rb" ) )
    print "loaded vocab"
    sem_sim = pickle.load( open( "/ssd/ddimitrov/pickle/sem_sim", "rb" ) )


    values_rel_faeture = list()
    i_indices = list()
    j_indices = list()
    i = 0
    for link in voc_zip_links:
        i += 1
        if i % 1000000 == 0:
            print i
        i_indices.append(uniqeu_nonzero_map[vocab[link[0]]])
        j_indices.append(vocab[link[1]])
        from_id = int(link[0])
        to_id = int(link[1])
        if from_id<=to_id:
            try:
                values_rel_faeture.append(sem_sim[(from_id,to_id)])
            except KeyError as e:
                logging.error(e)
        else:
            try:
                values_rel_faeture.append(sem_sim[(to_id,from_id)])
            except KeyError as e:
                logging.error(e)
    rel_feature_hyp_data = [i_indices, j_indices, values_rel_faeture]
    pickle.dump(rel_feature_hyp_data, open("/ssd/ddimitrov/pickle/sem_sim_hyp", "wb"), protocol=pickle.HIGHEST_PROTOCOL) 
Example 42
Project: wikilinks   Author: trovdimi   File: pickle_data.py    MIT License
def pickle_sem_sim_data():
    # setup logging
    LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
    LOGGING_PATH = 'tmp/semsim-pickle.log'
    logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')
    sem_sim = pickle.load( open( "/ssd/ddimitrov/pickle/sem_sim", "rb" ) )

    values_sem_sim=list()
    with open(os.path.join(os.path.dirname(__file__), "/home/ddimitrov/tmp/wikipedia_network.csv")) as f:
        next(f)
        for line in f:
            line = line.strip().split('\t')
            from_id = int(line[0])
            to_id = int(line[1])
            if from_id<=to_id:
                try:
                    value = sem_sim[(from_id,to_id)]
                    values_sem_sim.append(value)
                except KeyError as e:
                    logging.error(e)
            else:
                try:
                    value = sem_sim[(to_id,from_id)]
                    values_sem_sim.append(value)
                except KeyError as e:
                    logging.error(e)


    pickle.dump(values_sem_sim, open("/ssd/ddimitrov/pickle/values_sem_sim", "wb"), protocol=pickle.HIGHEST_PROTOCOL) 
Example 43
Project: wikilinks   Author: trovdimi   File: insertarticlefeatures.py    MIT License
def update_article_features():

    connection = db._create_connection()
    cursor = connection.cursor()

    network = load_graph("output/wikipedianetwork.xml.gz")
    print 'graph loaded'
    articles = db_work_view.retrieve_all_articles()
    print 'articles loaded'

    # setup logging
    LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
    LOGGING_PATH = 'tmp/articlefeatures-dbinsert.log'
    logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')

    for article in articles:
        try:
            article_features = {}
            vertex = network.vertex(article['id'])
            article_features['id'] = article['id']
            article_features['hits_authority'] = network.vertex_properties["authority"][vertex]
            article_features['hits_hub'] = network.vertex_properties["hub"][vertex]
            #article_features['katz'] = network.vertex_properties["katz"][vertex]

            sql  = "UPDATE article_features " \
                   "SET hits_authority = %(hits_authority)s, hits_hub = %(hits_hub)s " \
                   "WHERE id = %(id)s;"

            cursor.execute(sql, article_features)

        except MySQLdb.Error as e:
            #logging.error('DB Insert Error  article id: "%s" ' % article['id'])
            print e
        except ValueError as v:
            logging.error('ValueError for article id: "%s"' % article['id'])
            print v
        connection.commit()
    connection.close() 
Example 44
Project: wikilinks   Author: trovdimi   File: tableclassinserter.py    MIT License
def table_parser(self, file_name, root):

        db = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
        db_build_view = db.get_build_view()

        cursor = db_build_view._cursor

        # setup logging
        LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
        LOGGING_PATH = 'tmp/tableclasses-dbinsert.log'
        logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')

        html_parser = WikipediaHTMLTableParser()
        zip_file_path = os.path.join(root, file_name)
        html = self.zip2html(zip_file_path)
        html_parser.feed(html.decode('utf-8'))
        source_article_id = file_name.split('_')[1]
        try:
            fed_parser = WikipediaFedTextParser(html_parser.get_data())
            table_classes = fed_parser.table_classes(None)
            table_classes = list(set(table_classes))
            for table_class in table_classes:
                self.insert_table_class(source_article_id, table_class, cursor)
        except KeyError:
            db_build_view._db_connection.rollback()
            logging.error('KeyError FedTextParser source article id: %s ' % source_article_id)
        db_build_view.commit()
        db_build_view.reset_cache() 
Example 45
Project: rubbish.py   Author: alphapapa   File: rubbish.py    GNU General Public License v3.0
def cli(verbose):

    # Setup logging
    if verbose >= 2:
        LOG_LEVEL = log.DEBUG
    elif verbose == 1:
        LOG_LEVEL = log.INFO
    else:
        LOG_LEVEL = log.WARNING

    log.basicConfig(level=LOG_LEVEL, format="%(levelname)s: %(message)s")

# * Commands

# ** empty 
Example 46
Project: AutoDL   Author: tanguofu   File: worker.py    BSD 3-Clause "New" or "Revised" License
def run(self):
        is_debug = self.log.loglevel == logging.DEBUG
        protocol = (
            self.websocket_protocol if self.app.callable.websocket_enabled
            else self.http_protocol)
        self._server_settings = self.app.callable._helper(
            loop=self.loop,
            debug=is_debug,
            protocol=protocol,
            ssl=self.ssl_context,
            run_async=True)
        self._server_settings['signal'] = self.signal
        self._server_settings.pop('sock')
        trigger_events(self._server_settings.get('before_start', []),
                       self.loop)
        self._server_settings['before_start'] = ()

        self._runner = asyncio.ensure_future(self._run(), loop=self.loop)
        try:
            self.loop.run_until_complete(self._runner)
            self.app.callable.is_running = True
            trigger_events(self._server_settings.get('after_start', []),
                           self.loop)
            self.loop.run_until_complete(self._check_alive())
            trigger_events(self._server_settings.get('before_stop', []),
                           self.loop)
            self.loop.run_until_complete(self.close())
        except BaseException:
            traceback.print_exc()
        finally:
            try:
                trigger_events(self._server_settings.get('after_stop', []),
                               self.loop)
            except BaseException:
                traceback.print_exc()
            finally:
                self.loop.close()

        sys.exit(self.exit_code) 
Example 47
Project: AutoDL   Author: tanguofu   File: logger.py    BSD 3-Clause "New" or "Revised" License
def getLogger(name, path):
    log_filename = path + "/" + name + "_" + datetime.datetime.now().strftime('%Y%m%d_%H%M%S') + '.log'
    handler = FileTraceHandker(filename=log_filename)
    handler.setLevel(logging.DEBUG)
    formatter = logging.Formatter('%(asctime)-15s %(name)s %(process)d %(levelname)s %(message)s')
    handler.setFormatter(formatter)  

    logger = logging.getLogger(name)
    logger.setLevel(logging.DEBUG)
    logger.addHandler(handler)

    return logger 
Example 48
Project: shaptools   Author: SUSE   File: shapcli.py    Apache License 2.0
def parse_arguments():
    """
    Parse command line arguments
    """
    parser = argparse.ArgumentParser(PROG)

    parser.add_argument(
        '-v', '--verbosity',
        help='Python logging level. Options: DEBUG, INFO, WARN, ERROR (INFO by default)')
    parser.add_argument(
        '-r', '--remote',
        help='Run the command in other machine using ssh')
    parser.add_argument(
        '-c', '--config',
        help='JSON configuration file with SAP HANA instance data (sid, instance and password)')
    parser.add_argument(
        '-s', '--sid', help='SAP HANA sid')
    parser.add_argument(
        '-i', '--instance', help='SAP HANA instance')
    parser.add_argument(
        '-p', '--password', help='SAP HANA password')

    subcommands = parser.add_subparsers(
        title='subcommands', description='valid subcommands', help='additional help')
    hana_subparser = subcommands.add_parser(
        'hana', help='Commands to interact with SAP HANA database')
    sr_subparser = subcommands.add_parser(
        'sr', help='Commands to interact with SAP HANA system replication')

    parse_hana_arguments(hana_subparser)
    parse_sr_arguments(sr_subparser)

    args = parser.parse_args()
    return parser, args 
Example 49
Project: shaptools   Author: SUSE   File: shapcli.py    Apache License 2.0
def run():
    """
    Main execution
    """
    parser, args = parse_arguments()
    logger = setup_logger(args.verbosity or logging.DEBUG)

    # If -c or --config flag is received data is loaded from the configuration file
    if args.config:
        data = load_config_file(args.config, logger)
        config_data = ConfigData(data, logger)
    elif args.sid and args.instance and args.password:
        config_data = ConfigData(vars(args), logger)
    else:
        logger.info(
            'Configuration file or sid, instance and passwords parameters must be provided\n')
        parser.print_help()
        exit(1)

    if args.remote:
        config_data.remote = args.remote

    try:
        hana_instance = hana.HanaInstance(
            config_data.sid, config_data.instance,
            config_data.password, remote_host=config_data.remote)
        if vars(args).get('hana'):
            run_hana_subcommands(hana_instance, args, logger)
        elif vars(args).get('sr'):
            run_sr_subcommands(hana_instance, args, logger)
        else:
            parser.print_help()
    except Exception as err:
        logger.error(err)
        exit(1) 
Example 50
Project: iSDX   Author: sdn-ixp   File: replay.py    Apache License 2.0
def __init__(self, log_history, publisher, time_step=1, debug=False):
        self.logger = logging.getLogger("LogReplay")
        if debug:
            self.logger.setLevel(logging.DEBUG)

        self.log_history = log_history
        self.time_step = time_step
        self.publisher = publisher

        self.run = False