Python logging.getLogger() Examples

The following are 30 code examples of logging.getLogger(), drawn from open-source projects. Each example is listed with its source file, originating project, and license. You may also want to explore the other functions and classes available in the logging module.
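Before the project examples, a minimal sketch of the typical pattern: configure logging once in the application entry point, then request named loggers with logging.getLogger() wherever records need to be emitted (the names below are illustrative):

import logging

# one-time configuration, usually done at application start-up
logging.basicConfig(level=logging.INFO,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# named, hierarchical logger; repeated calls with the same name return the same object
logger = logging.getLogger(__name__)
logger.info('application started')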
Example #1
Source File: utilities.py    From incubator-spot with Apache License 2.0
def get_logger(cls, logger_name, create_file=False):

        # create logger for prd_ci
        log = logging.getLogger(logger_name)
        log.setLevel(level=logging.INFO)

        # create formatter and add it to the handlers
        formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

        if create_file:
            # create file handler for logger.
            fh = logging.FileHandler('SPOT.log')
            fh.setLevel(level=logging.DEBUG)
            fh.setFormatter(formatter)

        # create console handler for logger.
        ch = logging.StreamHandler()
        ch.setLevel(level=logging.DEBUG)
        ch.setFormatter(formatter)

        # add handlers to logger.
        if create_file:
            log.addHandler(fh)

        log.addHandler(ch)
        return log
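A hypothetical call sketch for the helper above, consistent with how Example #19 invokes it as Util.get_logger. Because the handlers are added unconditionally, repeated calls with the same logger_name attach duplicate handlers, and debug() records are filtered since the logger level is INFO even though the handlers accept DEBUG:

log = Util.get_logger('SPOT.TEST', create_file=True)    # console handler plus SPOT.log
log.info('pipeline started')                             # emitted by both handlers
log.debug('dropped: the logger level is INFO')           # filtered at the logger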
Example #2
Source File: util.py    From mlbv with GNU General Public License v3.0
def init_logging(log_file=None, append=False, console_loglevel=logging.INFO):
    """Set up logging to file and console."""
    if log_file is not None:
        if append:
            filemode_val = 'a'
        else:
            filemode_val = 'w'
        logging.basicConfig(level=logging.DEBUG,
                            format="%(asctime)s %(levelname)s %(threadName)s %(name)s %(message)s",
                            # datefmt='%m-%d %H:%M',
                            filename=log_file,
                            filemode=filemode_val)
    # define a Handler which writes messages at console_loglevel or higher to sys.stderr
    console = logging.StreamHandler()
    console.setLevel(console_loglevel)
    # set a format which is simpler for console use
    formatter = logging.Formatter("%(message)s")
    console.setFormatter(formatter)
    # add the handler to the root logger
    logging.getLogger('').addHandler(console)
    global LOG
    LOG = logging.getLogger(__name__) 
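A hypothetical call sketch for init_logging above (the file name is illustrative): the root logger captures everything down to DEBUG in the file, while the console handler only passes records at console_loglevel or above.

init_logging(log_file='mlbv.log', append=True, console_loglevel=logging.WARNING)
LOG.info('written to mlbv.log but filtered from the console')
LOG.warning('written to both the log file and the console')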
Example #3
Source File: client.py    From BASS with GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Find common ngrams in binary files")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--output", type = str, default = None, help = "Output to file instead of stdout")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Cluster samples")

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO}[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
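The try/except KeyError above maps -v counts of 0, 1 and 2 to ERROR, WARN and INFO, and anything higher to DEBUG; an equivalent, more compact sketch (not the project's own code) uses dict.get with a default:

loglevel = {0: logging.ERROR, 1: logging.WARNING, 2: logging.INFO}.get(args.verbose, logging.DEBUG)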
Example #4
Source File: conftest.py    From drydock with Apache License 2.0
def setup_logging():
    # Set up the base 'drydock' logger
    logger = logging.getLogger('drydock')
    logger.setLevel('DEBUG')
    ch = logging.StreamHandler()
    formatter = logging.Formatter(
        '%(asctime)s - %(levelname)s - %(filename)s:%(funcName)s - %(message)s'
    )
    ch.setFormatter(formatter)
    logger.addHandler(ch)

    # Specialized format for API logging
    logger = logging.getLogger('drydock.control')
    logger.propagate = False
    formatter = logging.Formatter(
        "%(asctime)s - %(levelname)s - %(user)s - %(req_id)s"
        " - %(external_ctx)s - %(end_user)s - %(message)s"
    )

    ch = logging.StreamHandler()
    ch.setFormatter(formatter)
    logger.addHandler(ch) 
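The 'drydock.control' formatter above references non-standard LogRecord attributes (user, req_id, external_ctx, end_user), so records sent to that logger must carry those fields or formatting will fail. One way to supply them, shown here as a hedged sketch rather than drydock's actual mechanism, is the extra keyword argument:

api_log = logging.getLogger('drydock.control')
api_log.info('request received',
             extra={'user': 'admin', 'req_id': 'req-001',
                    'external_ctx': 'none', 'end_user': 'admin'})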
Example #5
Source File: cmdline.py    From BASS with GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Bass")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Sample path") 

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO
        }[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG

    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
Example #6
Source File: whitelist.py    From BASS with GNU General Public License v2.0
def parse_args():
    parser = argparse.ArgumentParser(description = "Add samples to BASS whitelist")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("sample", help = "Whitelist sample")

    args = parser.parse_args()

    try:
        loglevel = {
            0: logging.ERROR,
            1: logging.WARN,
            2: logging.INFO}[args.verbose]
    except KeyError:
        loglevel = logging.DEBUG
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args 
Example #7
Source File: collector.py    From incubator-spot with Apache License 2.0
def ingest_file(file,message_size,topic,kafka_servers):
    
    logger = logging.getLogger('SPOT.INGEST.PROXY.{0}'.format(os.getpid()))
    try:        
        message = ""
        logger.info("Ingesting file: {0} process:{1}".format(file,os.getpid())) 
        with open(file,"rb") as f:
            for line in f:
                message += line
                if len(message) > message_size:
                    KafkaProducer.SendMessage(message, kafka_servers, topic, 0)
                    message = ""
            # send the remaining partial message.
            KafkaProducer.SendMessage(message, kafka_servers, topic, 0)
        rm_file = "rm {0}".format(file)
        Util.execute_cmd(rm_file,logger)
        logger.info("File {0} has been successfully sent to Kafka Topic: {1}".format(file,topic))

    except Exception as err:        
        logger.error("There was a problem, please check the following error message:{0}".format(err.message))
        logger.error("Exception: {0}".format(err)) 
Example #8
Source File: serializer.py    From incubator-spot with Apache License 2.0
def serialize(value):
    '''
        Convert a ``list`` object to an avro-encoded format.

    :param value: List of ``str`` objects.
    :returns    : A buffered I/O implementation using an in-memory bytes buffer.
    :rtype      : ``io.BytesIO``
    '''
    writer   = avro.io.DatumWriter(avro.schema.parse(AVSC))
    rawbytes = io.BytesIO()

    try:
        writer.write({ list.__name__: value }, avro.io.BinaryEncoder(rawbytes))
        return rawbytes
    except avro.io.AvroTypeException:
        logging.getLogger('SPOT.INGEST.COMMON.SERIALIZER')\
            .error('The type of ``{0}`` is not supported by the Avro schema.'
            .format(type(value).__name__))

    return None 
Example #9
Source File: sina.py    From backtrader-cn with GNU General Public License v3.0
def enable_debug_requests():
    # Enable debugging at the http.client level (requests -> urllib3 -> http.client):
    # you will see the REQUEST, including HEADERS and DATA, and the RESPONSE with HEADERS but without DATA.
    # The only thing missing will be the response body, which is not logged.
    from http.client import HTTPConnection
    import logging

    HTTPConnection.debuglevel = 1
    logger.setLevel(logging.DEBUG)
    requests_log = logging.getLogger("requests.packages.urllib3")
    requests_log.setLevel(logging.DEBUG)
    requests_log.propagate = True


# Uncomment the line below to enable debug mode
# enable_debug_requests() 
Example #10
Source File: _cplogging.py    From cherrypy with BSD 3-Clause "New" or "Revised" License
def __init__(self, appid=None, logger_root='cherrypy'):
        self.logger_root = logger_root
        self.appid = appid
        if appid is None:
            self.error_log = logging.getLogger('%s.error' % logger_root)
            self.access_log = logging.getLogger('%s.access' % logger_root)
        else:
            self.error_log = logging.getLogger(
                '%s.error.%s' % (logger_root, appid))
            self.access_log = logging.getLogger(
                '%s.access.%s' % (logger_root, appid))
        self.error_log.setLevel(logging.INFO)
        self.access_log.setLevel(logging.INFO)

        # Silence the no-handlers "warning" (stderr write!) in stdlib logging
        self.error_log.addHandler(NullHandler())
        self.access_log.addHandler(NullHandler())

        cherrypy.engine.subscribe('graceful', self.reopen_files) 
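The NullHandler() calls above follow the standard pattern for library code: attach a do-nothing handler so that log records emitted before the hosting application configures logging do not trigger the stdlib's "no handlers could be found" warning on stderr. A minimal sketch of the same pattern using the stdlib's own logging.NullHandler (module name is illustrative):

import logging

logger = logging.getLogger('mylib')
logger.addHandler(logging.NullHandler())  # silence the warning; the application chooses real handlers

def do_work():
    logger.debug('doing work')  # effectively a no-op until the application configures logging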
Example #11
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(pcap, tmpdir, opts='', prefix=None):
    '''
        Convert `pcap` file to a comma-separated output format.

    :param pcap    : Path of binary file.
    :param tmpdir  : Path of local staging area.
    :param opts    : A set of options for `tshark` command.
    :param prefix  : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns       : Path of CSV-converted file.
    :rtype         : ``str``
    :raises OSError: If an error occurs while executing the `tshark` command.
    '''
    logger = logging.getLogger('SPOT.INGEST.DNS.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(pcap, opts, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #12
Source File: __init__.py    From everyclass-server with Mozilla Public License 2.0
def init_plugins():
        """Initialize the logging, error-tracking, and metrics plugins."""
        from everyclass.rpc import init as init_rpc
        from everyclass.common.flask import print_config

        # Sentry
        if plugin_available("sentry"):
            sentry.init_app(app=__app)
            sentry_handler = SentryHandler(sentry.client)
            sentry_handler.setLevel(logging.WARNING)
            logging.getLogger().addHandler(sentry_handler)

            init_rpc(sentry=sentry)
            logger.info('Sentry is inited because you are in {} mode.'.format(__app.config['CONFIG_NAME']))

        # metrics
        global statsd
        statsd = DogStatsd(namespace=f"{__app.config['SERVICE_NAME']}.{os.environ.get('MODE').lower()}",
                           use_default_route=True)

        init_rpc(logger=logger)

        print_config(__app, logger) 
Example #13
Source File: utils.py    From incubator-spot with Apache License 2.0
def call(cls, cmd, shell=False):
        '''
            Run command with arguments, wait to complete and return ``True`` on success.

        :param cls: The class as implicit first argument.
        :param cmd: Command string to be executed.
        :returns  : ``True`` on success, otherwise ``None``.
        :rtype    : ``bool``
        '''
        logger = logging.getLogger('SPOT.INGEST.COMMON.UTIL')
        logger.debug('Execute command: {0}'.format(cmd))

        try:
            subprocess.call(cmd, shell=shell)
            return True

        except Exception as exc:
            logger.error('[{0}] {1}'.format(exc.__class__.__name__, exc.message)) 
Example #14
Source File: kafka_client.py    From incubator-spot with Apache License 2.0
def _initialize_members(self, topic, server, port, zk_server, zk_port, partitions):

        # get logger instance
        self._logger = logging.getLogger("SPOT.INGEST.KafkaProducer")

        # kafka requirements
        self._server = server
        self._port = port
        self._zk_server = zk_server
        self._zk_port = zk_port
        self._topic = topic
        self._num_of_partitions = partitions
        self._partitions = []
        self._partitioner = None
        self._kafka_brokers = '{0}:{1}'.format(self._server, self._port)

        # create topic with partitions
        self._create_topic()

        self._kafka_conf = self._producer_config(self._kafka_brokers)

        self._p = Producer(**self._kafka_conf) 
Example #15
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(logfile, tmpdir, opts='', prefix=None):
    '''
        Copy log file to the local staging area.

    :param logfile: Path of log file.
    :param tmpdir : Path of local staging area.
    :param opts   : A set of options for the `cp` command.
    :param prefix : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns      : Path of log file in local staging area.
    :rtype        : ``str``
    '''
    logger = logging.getLogger('SPOT.INGEST.PROXY.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(opts, logfile, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #16
Source File: serializer.py    From incubator-spot with Apache License 2.0
def deserialize(rawbytes):
    '''
        Deserialize given bytes according to the supported Avro schema.

    :param rawbytes: A buffered I/O implementation using an in-memory bytes buffer.
    :returns       : List of ``str`` objects, extracted from the binary stream.
    :rtype         : ``list``
    '''
    decoder = avro.io.BinaryDecoder(io.BytesIO(rawbytes))
    reader  = avro.io.DatumReader(avro.schema.parse(AVSC))

    try: return reader.read(decoder)[list.__name__]
    except Exception as exc:
        logging.getLogger('SPOT.INGEST.COMMON.SERIALIZER')\
            .error('[{0}] {1}'.format(exc.__class__.__name__, exc.message))

    return [] 
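A hypothetical round-trip sketch combining the serialize() and deserialize() helpers from Examples #8 and #16, assuming both are importable and share the same AVSC schema:

buf = serialize(['10.0.0.1,53,dns.example.com'])  # io.BytesIO on success, None on failure
if buf is not None:
    records = deserialize(buf.getvalue())
    print(records)  # ['10.0.0.1,53,dns.example.com']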
Example #17
Source File: hdfs_client.py    From incubator-spot with Apache License 2.0
def __init__(self, url, mutual_auth, cert=None, verify='true', **kwargs):

        self._logger = logging.getLogger("SPOT.INGEST.HDFS_client")
        session = Session()

        if verify == 'true':
            self._logger.info('SSL verification enabled')
            session.verify = True
            if cert is not None:
                self._logger.info('SSL Cert: ' + cert)
                if ',' in cert:
                    session.cert = [path.strip() for path in cert.split(',')]
                else:
                    session.cert = cert
        elif verify == 'false':
            session.verify = False

        super(SecureKerberosClient, self).__init__(url, mutual_auth, session=session, **kwargs) 
Example #18
Source File: hdfs_client.py    From incubator-spot with Apache License 2.0
def get_client(user=None):
    # type: (object) -> Client

    logger = logging.getLogger('SPOT.INGEST.HDFS.get_client')
    hdfs_nm, hdfs_port, hdfs_user = Config.hdfs()
    conf = {'url': '{0}:{1}'.format(hdfs_nm, hdfs_port)}

    if Config.ssl_enabled():
        ssl_verify, ca_location, cert, key = Config.ssl()
        conf.update({'verify': ssl_verify.lower()})
        if cert:
            conf.update({'cert': cert})

    if Config.kerberos_enabled():
        krb_conf = {'mutual_auth': 'OPTIONAL'}
        conf.update(krb_conf)

    # TODO: possible user parameter
    logger.info('Client conf:')
    for k,v in conf.iteritems():
        logger.info(k + ': ' + v)

    client = SecureKerberosClient(**conf)

    return client 
Example #19
Source File: iana_transform.py    From incubator-spot with Apache License 2.0
def __init__(self,config,logger=None):

        self._logger = logging.getLogger('OA.IANA')  if logger else Util.get_logger('OA.IANA',create_file=False)        
        if COL_CLASS in config:
            self._qclass_file_path = config[COL_CLASS]
        if COL_QTYPE in config:
            self._qtype_file_path = config[COL_QTYPE]
        if COL_RCODE in config:
            self._rcode_file_path = config[COL_RCODE]
        if COL_PRESP in config:
            self._http_rcode_file_path = config[COL_PRESP]

        self._qclass_dict = {}
        self._qtype_dict = {}
        self._rcode_dict = {} 
        self._http_rcode_dict = {}
        self._init_dicts() 
Example #20
Source File: evillib.py    From wafw00f with BSD 3-Clause "New" or "Revised" License
def urlParser(target):
    log = logging.getLogger('urlparser')

    ssl = False
    o = urlparse(target)
    if o[0] not in ['http', 'https', '']:
        log.error('scheme %s not supported' % o[0])
        return
    if o[0] == 'https':
        ssl = True
    if len(o[2]) > 0:
        path = o[2]
    else:
        path = '/'
    tmp = o[1].split(':')
    if len(tmp) > 1:
        port = tmp[1]
    else:
        port = None
    hostname = tmp[0]
    query = o[4]
    return (hostname, port, path, query, ssl) 
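A hypothetical call sketch for urlParser above, illustrating the returned tuple (port stays None when the URL does not specify one):

hostname, port, path, query, ssl = urlParser('https://example.com:8443/scan?mode=fast')
# -> ('example.com', '8443', '/scan', 'mode=fast', True)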
Example #21
Source File: promenade_client.py    From drydock with Apache License 2.0
def __init__(self, scheme='http', marker=None, timeout=None):
        self.logger = logging.getLogger(__name__)
        self.__session = requests.Session()

        self.set_auth()

        self.marker = marker
        self.__session.headers.update({'X-Context-Marker': marker})

        self.prom_url = self._get_prom_url()
        self.port = self.prom_url.port
        self.host = self.prom_url.hostname
        self.scheme = scheme

        if self.port:
            self.base_url = "%s://%s:%s/api/" % (self.scheme, self.host,
                                                 self.port)
        else:
            # assume default port for scheme
            self.base_url = "%s://%s/api/" % (self.scheme, self.host)

        self.default_timeout = self._calc_timeout_tuple((20, 30), timeout) 
Example #22
Source File: processing.py    From incubator-spot with Apache License 2.0
def convert(netflow, tmpdir, opts='', prefix=None):
    '''
        Convert `nfcapd` file to a comma-separated output format.

    :param netflow : Path of binary file.
    :param tmpdir  : Path of local staging area.
    :param opts    : A set of options for `nfdump` command.
    :param prefix  : If `prefix` is specified, the file name will begin with that;
                     otherwise, a default `prefix` is used.
    :returns       : Path of CSV-converted file.
    :rtype         : ``str``
    :raises OSError: If an error occurs while executing the `nfdump` command.
    '''
    logger = logging.getLogger('SPOT.INGEST.FLOW.PROCESS')

    with tempfile.NamedTemporaryFile(prefix=prefix, dir=tmpdir, delete=False) as fp:
        command = COMMAND.format(netflow, opts, fp.name)

        logger.debug('Execute command: {0}'.format(command))
        Util.popen(command, raises=True)

        return fp.name 
Example #23
Source File: driver.py    From drydock with Apache License 2.0
def __init__(self, **kwargs):
        super().__init__(**kwargs)

        cfg.CONF.register_opts(
            RedfishDriver.redfish_driver_options, group=RedfishDriver.driver_key)

        self.logger = logging.getLogger(
            config.config_mgr.conf.logging.oobdriver_logger_name) 
Example #24
Source File: api_client.py    From drydock with Apache License 2.0
def __init__(self, base_url, apikey):
        # The URL in the config should end in /MAAS/, but the api is behind /MAAS/api/2.0/
        self.base_url = base_url + "/api/2.0/"
        self.apikey = apikey

        self.signer = MaasOauth(apikey)
        self.http_session = requests.Session()

        # TODO(sh8121att) Get logger name from config
        self.logger = logging.getLogger('drydock') 
Example #25
Source File: k8s_node.py    From drydock with Apache License 2.0
def __init__(self, *args, prom_client=None):
        super().__init__(*args)

        self.promenade_client = prom_client

        self.logger = logging.getLogger(
            config.config_mgr.conf.logging.kubernetesdriver_logger_name) 
Example #26
Source File: promenade_client.py    From drydock with Apache License 2.0
def __init__(self):
        self.session = PromenadeSession()
        self.logger = logging.getLogger(__name__) 
Example #27
Source File: driver.py    From drydock with Apache License 2.0
def __init__(self, **kwargs):
        super().__init__(**kwargs)

        cfg.CONF.register_opts(
            LibvirtDriver.libvirt_driver_options,
            group=LibvirtDriver.driver_key)

        self.logger = logging.getLogger(
            config.config_mgr.conf.logging.oobdriver_logger_name) 
Example #28
Source File: driver.py    From drydock with Apache License 2.0
def __init__(self, **kwargs):
        super().__init__(**kwargs)

        self.logger = logging.getLogger(
            config.config_mgr.conf.logging.oobdriver_logger_name) 
Example #29
Source File: driver.py    From drydock with Apache License 2.0
def __init__(self, **kwargs):
        super().__init__(**kwargs)

        cfg.CONF.register_opts(
            PyghmiDriver.pyghmi_driver_options, group=PyghmiDriver.driver_key)

        self.logger = logging.getLogger(
            config.config_mgr.conf.logging.oobdriver_logger_name) 
Example #30
Source File: driver.py    From drydock with Apache License 2.0
def __init__(self, **kwargs):
        super(ManualDriver, self).__init__(**kwargs)

        self.driver_name = "manual_driver"
        self.driver_key = "manual_driver"
        self.driver_desc = "Manual (Noop) OOB Driver"

        self.logger = logging.getLogger(cfg.CONF.logging.oobdriver_logger_name)