Python logging.basicConfig() Examples

The following code examples show how to use logging.basicConfig(). They are drawn from open-source Python projects; you can vote up the examples you find helpful or vote down the ones you don't.

Example 1
Project: mlbv   Author: kmac   File: util.py    GNU General Public License v3.0 7 votes vote down vote up
def init_logging(log_file=None, append=False, console_loglevel=logging.INFO):
    """Set up logging to file and console."""
    if log_file is not None:
        # 'a' appends to an existing log file, 'w' truncates it on every run.
        mode = 'a' if append else 'w'
        logging.basicConfig(level=logging.DEBUG,
                            format="%(asctime)s %(levelname)s %(threadName)s %(name)s %(message)s",
                            filename=log_file,
                            filemode=mode)
    # Console handler: plain messages only, at the requested threshold.
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(console_loglevel)
    stream_handler.setFormatter(logging.Formatter("%(message)s"))
    logging.getLogger('').addHandler(stream_handler)
    # Expose a module-level logger for the rest of the module to use.
    global LOG
    LOG = logging.getLogger(__name__)
Example 2
Project: BASS   Author: Cisco-Talos   File: cmdline.py    GNU General Public License v2.0 6 votes vote down vote up
def parse_args():
    """Parse command-line options and configure root logging from -v count."""
    parser = argparse.ArgumentParser(description = "Bass")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Sample path")

    args = parser.parse_args()

    # 0 -> ERROR, 1 -> WARN, 2 -> INFO, 3+ -> DEBUG
    verbosity_levels = {0: logging.ERROR, 1: logging.WARN, 2: logging.INFO}
    loglevel = verbosity_levels.get(args.verbose, logging.DEBUG)

    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args
Example 3
Project: BASS   Author: Cisco-Talos   File: whitelist.py    GNU General Public License v2.0 6 votes vote down vote up
def parse_args():
    """Parse command-line options for whitelisting and set up logging."""
    parser = argparse.ArgumentParser(description = "Add samples to BASS whitelist")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("sample", help = "Whitelist sample")

    args = parser.parse_args()

    # Map the number of -v flags to a log level; three or more means DEBUG.
    verbosity_map = {0: logging.ERROR, 1: logging.WARN, 2: logging.INFO}
    loglevel = verbosity_map.get(args.verbose, logging.DEBUG)
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args
Example 4
Project: BASS   Author: Cisco-Talos   File: client.py    GNU General Public License v2.0 6 votes vote down vote up
def parse_args():
    """Parse clustering options and configure logging verbosity."""
    parser = argparse.ArgumentParser(description = "Find common ngrams in binary files")
    parser.add_argument("-v", "--verbose", action = "count", default = 0, help = "Increase verbosity")
    parser.add_argument("--output", type = str, default = None, help = "Output to file instead of stdout")
    parser.add_argument("--url", type = str, default = "http://localhost:5000", help = "URL of BASS server")
    parser.add_argument("samples", metavar = "sample", nargs = "+", help = "Cluster samples")

    args = parser.parse_args()

    # Map the number of -v flags to a log level; anything beyond -vv is DEBUG.
    verbosity_map = {0: logging.ERROR, 1: logging.WARN, 2: logging.INFO}
    loglevel = verbosity_map.get(args.verbose, logging.DEBUG)
    logging.basicConfig(level = loglevel)
    logging.getLogger().setLevel(loglevel)

    return args
Example 5
Project: wikilinks   Author: trovdimi   File: builder.py    MIT License 6 votes vote down vote up
def run(self):
    """Interactive build entry point: ask for a task, set up logging, run it."""
    self.print_title('This is the interactive building program')
    self.create_tmp_if_not_exists()

    tasks = [
        'create the database structure',
        'extract articles and redirects from the wikipedia dump file',
    ]
    choice = self.read_choice('Would you like to', tasks)

    # Log to a per-task file; the default name uses the 1-based task number.
    log_format = '%(levelname)s:\t%(asctime)-15s %(message)s'
    log_path = self.read_path('Please enter the path of the logging file [.log]', default='./tmp/build-%d.log' % (choice[0]+1), must_exist=False)
    logging.basicConfig(filename=log_path, level=logging.DEBUG, format=log_format, filemode='w')

    task_index = choice[0]
    if task_index == 0:
        self._create_structure()
    if task_index == 1:
        self._extract_articles()
Example 6
Project: iSDX   Author: sdn-ixp   File: replay.py    Apache License 2.0 6 votes vote down vote up
def main(argv):
    """Replay recorded SDX statistics and publish them to a Redis channel."""
    logging.basicConfig(level=logging.INFO)

    history = LogHistory(argv.config, argv.flow_dir, argv.port_dir, int(argv.num_steps), debug=True)

    # Publisher connection parameters.
    channel = "sdx_stats"
    address = "192.168.99.100"
    port = 6379
    db = 0

    publisher = Publisher(channel, address, port)
    replay = LogReplay(history, publisher, int(argv.timestep), debug=True)

    # Run the replay on a daemon thread so Ctrl-C can stop it cleanly.
    worker = Thread(target=replay.start)
    worker.daemon = True
    worker.start()

    # Poll-join so KeyboardInterrupt can be caught and forwarded as stop().
    while worker.is_alive():
        try:
            worker.join(1)
        except KeyboardInterrupt:
            replay.stop()
Example 7
Project: rnm   Author: alexjaw   File: interface.py    MIT License 6 votes vote down vote up
def test_me():
    """Smoke-test Interface: query IPs on eth0/wlan0, then cycle the wlan link."""
    logging.basicConfig(level=logging.DEBUG)
    logger = logging.getLogger(__name__)
    logger.info('------------- Starting test... -------------')

    eth = Interface(iface='eth0')
    wlan = Interface(iface='wlan0')

    logger.info(repr(eth.get_ip()))
    logger.info(repr(wlan.get_ip()))

    # Disconnect/reconnect; their results are intentionally not logged.
    wlan.disconnect()
    wlan.connect()

    logger.info('-------------    Finished      -------------')
Example 8
Project: mmdetection   Author: open-mmlab   File: train.py    Apache License 2.0 6 votes vote down vote up
def get_root_logger(log_file=None, log_level=logging.INFO):
    """Return the 'mmdet' logger, configuring root logging on first use.

    Non-zero ranks are silenced to ERROR; on rank 0 a file handler is
    attached when log_file is given.
    """
    logger = logging.getLogger('mmdet')
    if logger.hasHandlers():
        # Already initialized: hand back the existing logger untouched.
        return logger

    fmt = '%(asctime)s - %(levelname)s - %(message)s'
    logging.basicConfig(format=fmt, level=log_level)
    rank, _ = get_dist_info()
    if rank != 0:
        logger.setLevel('ERROR')
    elif log_file is not None:
        file_handler = logging.FileHandler(log_file, 'w')
        file_handler.setFormatter(logging.Formatter(fmt))
        file_handler.setLevel(log_level)
        logger.addHandler(file_handler)

    return logger
Example 9
Project: aws-auto-remediate   Author: servian   File: lambda_handler.py    GNU General Public License v3.0 6 votes vote down vote up
def lambda_handler(event, context):
    """AWS Lambda entry point: reset logging, then retry security events.

    Lambda reuses execution environments, so handlers from previous
    invocations must be removed for basicConfig below to take effect.
    """
    logger = logging.getLogger()

    # BUG FIX: iterate over a copy — removing handlers while iterating the
    # live logger.handlers list skips every other handler.
    for handler in logger.handlers[:]:
        logger.removeHandler(handler)

    # change logging levels for boto and others (quieten chatty libraries)
    logging.getLogger("boto3").setLevel(logging.ERROR)
    logging.getLogger("botocore").setLevel(logging.ERROR)
    logging.getLogger("urllib3").setLevel(logging.ERROR)

    # set logging format; level comes from the LOGLEVEL env var
    logging.basicConfig(
        format="[%(levelname)s] %(message)s (%(filename)s, %(funcName)s(), line %(lineno)d)",
        level=os.environ.get("LOGLEVEL", "WARNING").upper(),
    )

    # instantiate class and run the retry pass
    retry = Retry(logging)
    retry.retry_security_events()
Example 10
Project: Graphlib   Author: HamletWantToCode   File: logger.py    MIT License 6 votes vote down vote up
def setup_logging(save_dir, log_config='logger/logger_config.json', default_level=logging.INFO):
    """
    Setup logging configuration
    """
    config_path = Path(log_config)
    if not config_path.is_file():
        # No config file: warn and fall back to a plain basicConfig.
        print("Warning: logging configuration file is not found in {}.".format(config_path))
        logging.basicConfig(level=default_level)
        return

    config = read_json(config_path)
    # Redirect every file handler's output into save_dir.
    for handler in config['handlers'].values():
        if 'filename' in handler:
            handler['filename'] = str(save_dir / handler['filename'])
    logging.config.dictConfig(config)
Example 11
Project: pnp   Author: HazardDede   File: pnp.py    MIT License 6 votes vote down vote up
def _setup_logging(*candidates, default_level=logging.INFO, env_key='PNP_LOG_CONF', verbose=False):
    """Setup logging configuration"""
    # The environment variable beats any candidate path.
    config_path = get_first_existing_file(*candidates)
    override = os.getenv(env_key, None)
    if override:
        config_path = override

    if config_path and os.path.exists(config_path):
        # Load a dictConfig from the YAML file.
        with open(config_path, 'rt') as fhandle:
            config = yaml.safe_load(fhandle.read())
        logging.config.dictConfig(config)
        logging.info("Logging loaded from: %s", config_path)
        if verbose:
            logging.getLogger().setLevel(logging.DEBUG)
    else:
        # No config file found anywhere: plain basicConfig fallback.
        logging.basicConfig(format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
                            level=logging.DEBUG if verbose else default_level)
        logging.info("Logging loaded with basic configuration")
Example 12
Project: brb   Author: Prograsaur   File: logutils.py    GNU General Public License v3.0 6 votes vote down vote up
def init_logger(suffix, logpath='log', loglevel=logging.INFO):
    """Configure root logging to a timestamped file plus an ERROR-only console."""
    loglevel = loglevel_to_int(loglevel)

    # Make sure the log directory exists before basicConfig opens the file.
    if not os.path.exists(logpath):
        os.makedirs(logpath)

    record_format = '(%(threadName)s) %(asctime)s.%(msecs)03d %(levelname)s %(filename)s:%(lineno)d %(message)s'
    date_format = '%Y-%m-%d %H:%M:%S'
    log_name = time.strftime(f'{logpath}/FS-{suffix}.%Y%m%d_%H%M%S.log')

    logging.basicConfig(filename=log_name,
                        filemode="w",
                        level=loglevel,
                        format=record_format, datefmt=date_format)

    root = logging.getLogger()
    # Mirror only errors to the console.
    console = logging.StreamHandler()
    console.setLevel(logging.ERROR)
    root.addHandler(console)

    return root
#endregion Utils

#region main
#------------------------------------------------------------------------------- 
Example 13
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: a3c.py    Apache License 2.0 6 votes vote down vote up
def log_config(log_dir=None, log_file=None, prefix=None, rank=0):
    """(Re)initialize logging for this node; optionally mirror to a file.

    Relies on the module-level ``args`` for the startup log line and on
    ``reload`` being in scope (builtin on py2, importlib on py3).
    """
    # Start from a clean logging module so repeated calls reconfigure it.
    reload(logging)
    head = '%(asctime)-15s Node[' + str(rank) + '] %(message)s'
    # basicConfig was previously duplicated in both branches; hoisted here.
    logging.basicConfig(level=logging.DEBUG, format=head)
    if log_dir:
        if not os.path.exists(log_dir):
            os.makedirs(log_dir)
        if not log_file:
            # Derive a timestamped name; '/' in the name would break the path.
            # (The dead `else: log_file = log_file` branch was removed.)
            log_file = (prefix if prefix else '') + datetime.now().strftime('_%Y_%m_%d-%H_%M.log')
            log_file = log_file.replace('/', '-')
        handler = logging.FileHandler(os.path.join(log_dir, log_file), mode='w')
        handler.setFormatter(logging.Formatter(head))
        logging.getLogger().addHandler(handler)
    logging.info('start with arguments %s', args)
Example 14
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: kvstore_server.py    Apache License 2.0 6 votes vote down vote up
def _controller(self):
        """Return the server controller."""
        def server_controller(cmd_id, cmd_body, _):
            """Server controler."""
            if not self.init_logginig:
                # the reason put the codes here is because we cannot get
                # kvstore.rank earlier
                head = '%(asctime)-15s Server[' + str(
                    self.kvstore.rank) + '] %(message)s'
                logging.basicConfig(level=logging.DEBUG, format=head)
                self.init_logginig = True

            if cmd_id == 0:
                try:
                    optimizer = pickle.loads(cmd_body)
                except:
                    raise
                self.kvstore.set_optimizer(optimizer)
            else:
                print("server %d, unknown command (%d, %s)" % (
                    self.kvstore.rank, cmd_id, cmd_body))
        return server_controller 
Example 15
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: multi_lenet.py    Apache License 2.0 6 votes vote down vote up
def test_lenet(devs, kv_type):
    """Train LeNet on MNIST for two epochs and return validation accuracy."""
    logging.basicConfig(level=logging.DEBUG)
    train, val = mnist(batch_size=batch_size, input_shape=(1, 28, 28))
    # Fixed seed so every kvstore/device configuration starts from the
    # same initial weights.
    mx.random.seed(0)
    model = mx.model.FeedForward(ctx=devs,
                                 symbol=net,
                                 num_epoch=2,
                                 learning_rate=0.1,
                                 momentum=0.9,
                                 wd=0.00001)
    model.fit(kvstore=kv_type, X=train)
    return accuracy(model, val)
Example 16
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: test_profiler.py    Apache License 2.0 6 votes vote down vote up
def test_profile_task():
    """Profile a Task scope around NDArray work mixed with heavy logging."""
    def build_message():
        # 50 identical placeholders, formatted 100k times below.
        values = tuple('foo' for _ in range(50))
        fmt = ''.join('{%d}' % i for i in range(len(values)))
        return fmt, values

    def log_many():
        fmt, values = build_message()
        for _ in range(100000):
            logging.info(fmt.format(*values))

    logging.basicConfig()
    enable_profiler('test_profile_task.json')
    python_domain = profiler.Domain('PythonDomain::test_profile_task')
    task = profiler.Task(python_domain, "test_profile_task")
    task.start()
    start = time.time()
    var = mx.nd.ones((1000, 500))
    log_many()
    # Force the async NDArray computation to complete inside the task scope.
    var.asnumpy()
    stop = time.time()
    task.stop()
    print('run took: %.3f' % (stop - start))
    profiler.set_state('stop')
Example 17
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: test_profiler.py    Apache License 2.0 6 votes vote down vote up
def test_profile_frame():
    """Profile a Frame scope around NDArray work mixed with heavy logging."""
    def build_message():
        # 50 identical placeholders, formatted 100k times below.
        values = tuple('foo' for _ in range(50))
        fmt = ''.join('{%d}' % i for i in range(len(values)))
        return fmt, values

    def log_many():
        fmt, values = build_message()
        for _ in range(100000):
            logging.info(fmt.format(*values))

    logging.basicConfig()
    enable_profiler('test_profile_frame.json')
    python_domain = profiler.Domain('PythonDomain::test_profile_frame')
    frame = profiler.Frame(python_domain, "test_profile_frame")
    frame.start()
    start = time.time()
    var = mx.nd.ones((1000, 500))
    log_many()
    # Force the async NDArray computation to complete inside the frame scope.
    var.asnumpy()
    stop = time.time()
    frame.stop()
    print('run took: %.3f' % (stop - start))
    profiler.set_state('stop')
Example 18
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: test_conv.py    Apache License 2.0 6 votes vote down vote up
def exec_mnist(model, train_dataiter, val_dataiter):
    """Fit the model on MNIST, then assert >94% validation accuracy."""
    # Echo training progress to the console by default.
    logging.basicConfig(level=logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    logging.getLogger('').addHandler(console)

    model.fit(X=train_dataiter, eval_data=val_dataiter)
    logging.info('Finish fit...')
    prob = model.predict(val_dataiter)
    logging.info('Finish predict...')
    # Re-iterate validation data to collect the ground-truth labels.
    val_dataiter.reset()
    labels = np.concatenate([batch.label[0].asnumpy() for batch in val_dataiter]).astype('int')
    predictions = np.argmax(prob, axis=1)
    acc1 = float(np.sum(predictions == labels)) / len(labels)
    logging.info('final accuracy = %f', acc1)
    assert acc1 > 0.94

# run as a script 
Example 19
Project: dynamic-training-with-apache-mxnet-on-aws   Author: awslabs   File: test_dtype.py    Apache License 2.0 6 votes vote down vote up
def test_cifar10():
    """Run CIFAR-10 training with float32, legacy-tuple and uint8 inputs."""
    # Echo training progress to the console by default.
    logging.basicConfig(level=logging.DEBUG)
    console = logging.StreamHandler()
    console.setLevel(logging.DEBUG)
    logging.getLogger('').addHandler(console)

    kv = mx.kvstore.create("local")

    # float32 input, with and without the module API.
    train, val = get_iterator_float32(kv)
    for use_module in (False, True):
        run_cifar10(train, val, use_module=use_module)

    # Legacy tuple form of provide_data / provide_label.
    for use_module in (False, True):
        run_cifar10(CustomDataIter(train), CustomDataIter(val), use_module=use_module)

    # uint8 input.
    train, val = get_iterator_uint8(kv)
    for use_module in (False, True):
        run_cifar10(train, val, use_module=use_module)
Example 20
Project: myhoard   Author: aiven   File: myhoard.py    Apache License 2.0 6 votes vote down vote up
def main(args=None):
    """CLI entry point for the myhoard daemon. Returns a process exit code."""
    if args is None:
        args = sys.argv[1:]

    parser = argparse.ArgumentParser(prog="myhoard", description="MySQL backup and restore daemon")
    parser.add_argument("--version", action="version", help="show program version", version=version.__version__)
    # BUG FIX: choices used to contain the single fused string "ERROR, WARNING",
    # which made argparse reject both --log-level ERROR and --log-level WARNING.
    parser.add_argument("--log-level", help="Log level", default="INFO", choices=("ERROR", "WARNING", "INFO", "DEBUG"))
    parser.add_argument("--config", help="Configuration file path", default=os.environ.get("MYHOARD_CONFIG"))
    arg = parser.parse_args(args)

    if not arg.config:
        print("config file path must be given with --config or via env MYHOARD_CONFIG", file=sys.stderr)
        return 1

    logging.basicConfig(level=arg.log_level, format="%(asctime)s\t%(threadName)s\t%(name)s\t%(levelname)s\t%(message)s")

    hoard = MyHoard(arg.config)
    return hoard.run()
Example 21
Project: sawtooth-cookiejar   Author: danintel   File: cookiejar_tp.py    Apache License 2.0 6 votes vote down vote up
def main():
    '''Entry-point function for the cookiejar Transaction Processor.'''
    try:
        # Log everything while the TP runs.
        logging.basicConfig()
        logging.getLogger().setLevel(logging.DEBUG)

        # Register the handler under this family's namespace prefix and serve.
        processor = TransactionProcessor(url=DEFAULT_URL)
        namespace_prefix = _hash(FAMILY_NAME.encode('utf-8'))[0:6]
        processor.add_handler(CookieJarTransactionHandler(namespace_prefix))
        processor.start()
    except KeyboardInterrupt:
        # Normal operator shutdown.
        pass
    except SystemExit as err:
        raise err
    except BaseException:
        traceback.print_exc(file=sys.stderr)
        sys.exit(1)
Example 22
Project: aurora   Author: carnby   File: process_stream_files.py    MIT License 6 votes vote down vote up
def handle(self, *args, **options):
    """Import tweets from gzipped streaming files into the store."""
    logging.basicConfig(level=options['log_level'])

    # An explicit --path wins; otherwise scan the configured data folder.
    if options['path']:
        files = glob.glob(options['path'])
    else:
        files = glob.glob(settings.DATA_FOLDER + '/' + settings.STREAMING_FILE_PREFIX + '*.gz')

    settings.CHARACTERIZATION_STEP = options['step']

    print('# files:', len(files))

    importer = Importer()

    for filename in files:
        print(filename)
        try:
            importer(filename)
            print('{0}: accepted {1} tweets'.format(filename, importer.total_tweet_count))
        except IOError as e:
            # Unreadable file: report and continue with the rest.
            print('[ERROR] {0}: {1}'.format(filename, e))
Example 23
Project: deep-nn-car   Author: scope-lab-vu   File: controller.py    MIT License 6 votes vote down vote up
def liveStreamThread(stop_event, j):
    """Receive live-stream frames until stopped or 20 null events arrive."""
    global setSpeed
    logging.basicConfig(filename='scheduler.log', level=logging.DEBUG, filemode='w')

    # TCP connection to the live-stream port on the server.
    stream_sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM, 0)
    stream_sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    stream_sock.connect((server_address[0], liveStreamPort))
    stream_sock.settimeout(10)

    null_events = 0
    while not stop_event.is_set() and null_events < 20:
        dc_tuple = j.get_dcTuple()
        null_events = Client.liveStreamReceiver(stream_sock, dc_tuple, null_events)
    stream_sock.close()


# plotVelocity
# Creates a thread event that displays a dynamic plot of the speed. 
Example 24
Project: tpu_pretrain   Author: allenai   File: utils.py    Apache License 2.0 6 votes vote down vote up
def init(args):
    """Set up logging, validate/create the output dir, and seed all RNGs."""
    log_format = '%(asctime)-10s: %(message)s'
    if args.log_file is not None and args.log_file != "":
        # Log to a file (creating parent dirs as needed) instead of stderr.
        Path(args.log_file).parent.mkdir(parents=True, exist_ok=True)
        logging.basicConfig(level=logging.INFO, filename=args.log_file, filemode='w', format=log_format)
        logging.warning(f'This will get logged to file: {args.log_file}')
    else:
        logging.basicConfig(level=logging.INFO, format=log_format)

    # Warn (but proceed) when the output directory already holds files.
    if args.output_dir.is_dir() and list(args.output_dir.iterdir()):
        logging.warning(f"Output directory ({args.output_dir}) already exists and is not empty!")
    assert 'bert' in args.output_dir.name, \
        '''Output dir name has to contain `bert` or `roberta` for AutoModel.from_pretrained to correctly infer the model type'''
    args.output_dir.mkdir(parents=True, exist_ok=True)

    # Deterministic runs across python, numpy and torch.
    random.seed(args.seed)
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
Example 25
Project: InsightAgent   Author: insightfinder   File: insightfinder.py    Apache License 2.0 6 votes vote down vote up
def main():
    """Stream metric lines from stdin into InsightFinder, flushing in chunks."""
    logging.basicConfig()

    # All remaining CLI arguments configure the store.
    store = InsightfinderStore(*sys.argv[1:])

    chunk_number = 0

    for line in sys.stdin:
        # Skip lines that do not match the configured filter string.
        if store.filter_string not in line:
            continue
        # Flush once the buffered metrics map exceeds the size threshold.
        map_size = len(bytearray(json.dumps(store.metrics_map)))
        if map_size >= store.flush_kb * 1000:
            store.logger.debug("Flushing chunk number: " + str(chunk_number))
            store.send_metrics()
            chunk_number += 1
        store.append(line.strip())

    # Final flush for whatever remains buffered.
    store.logger.debug("Flushing chunk number: " + str(chunk_number))
    store.send_metrics()
    store.save_grouping()
    store.logger.debug("Finished sending all chunks to InsightFinder")
Example 26
Project: polish-sentence-evaluation   Author: sdadas   File: evaluate.py    GNU General Public License v3.0 6 votes vote down vote up
def evaluate(self, method: EmbeddingBase, method_name: str, **kwargs):
    """Run the transfer tasks for one embedding method and append the
    results to results.txt."""
    logging.basicConfig(format='%(asctime)s : %(message)s', level=logging.DEBUG)
    logging.root.setLevel(logging.DEBUG)

    params = {
        "task_path": os.path.join(root_dir, "resources"),
        "usepytorch": True,
        "kfold": 5,
        "lemmatize": True,
        "batch_size": 512,
        "classifier": {"nhid": 50, "optim": "rmsprop", "batch_size": 128, "tenacity": 3, "epoch_size": 10},
        "analyzer": PolishAnalyzer()
    }
    # Caller-supplied kwargs override the defaults above.
    params.update(kwargs)

    # Batcher outputs are cached per method to avoid recomputing embeddings.
    cache_dir = Path(root_dir, f".cache/{method_name}")
    cache_dir.mkdir(parents=True, exist_ok=True)
    se = SE(params, cached(method.batcher, cache_dir), method.prepare)

    transfer_tasks = ["WCCRS_HOTELS", "WCCRS_MEDICINE", "SICKEntailment", "SICKRelatedness", "8TAGS"]
    results = se.eval(transfer_tasks)

    # Drop bulky per-sample predictions before persisting.
    del results["SICKRelatedness"]["yhat"]
    record = {"method": method_name, "results": results}
    logging.info(record)
    with open(os.path.join(root_dir, "results.txt"), "a+") as output_file:
        output_file.write(json.dumps(record))
        output_file.write("\n")
Example 27
Project: fs_image   Author: facebookincubator   File: common.py    MIT License 5 votes vote down vote up
def init_logging(*, debug: bool=False):
    """Configure root logging: DEBUG when debug is set, INFO otherwise."""
    chosen_level = logging.DEBUG if debug else logging.INFO
    logging.basicConfig(
        format='%(levelname)s %(name)s %(asctime)s %(message)s',
        level=chosen_level,
    )
Example 28
Project: leapp-repository   Author: oamg   File: ntp2chrony.py    Apache License 2.0 5 votes vote down vote up
def main():
    """CLI driver: parse options, then convert each root's ntp config to chrony."""
    parser = argparse.ArgumentParser(description="Convert ntp configuration to chrony.")
    parser.add_argument("-r", "--root", dest="roots", default=["/"], nargs="+",
                        metavar="DIR", help="specify root directory (default /)")
    parser.add_argument("--ntp-conf", action="store", default="/etc/ntp.conf",
                        metavar="FILE", help="specify ntp config (default /etc/ntp.conf)")
    parser.add_argument("--step-tickers", action="store", default="",
                        metavar="FILE", help="specify ntpdate step-tickers config (no default)")
    parser.add_argument("--chrony-conf", action="store", default="/etc/chrony.conf",
                        metavar="FILE", help="specify chrony config (default /etc/chrony.conf)")
    parser.add_argument("--chrony-keys", action="store", default="/etc/chrony.keys",
                        metavar="FILE", help="specify chrony keyfile (default /etc/chrony.keys)")
    parser.add_argument("-b", "--backup", action="store_true", help="backup existing configs before writing")
    parser.add_argument("-L", "--ignored-lines", action="store_true", help="print ignored lines")
    parser.add_argument("-D", "--ignored-directives", action="store_true",
                        help="print names of ignored directives")
    parser.add_argument("-n", "--dry-run", action="store_true", help="don't make any changes")
    parser.add_argument("-v", "--verbose", action="count", default=0, help="increase verbosity")

    args = parser.parse_args()

    # -v selects ERROR/INFO/DEBUG; anything beyond -vv stays at DEBUG.
    verbosity = min(args.verbose, 2)
    logging.basicConfig(format="%(message)s",
                        level=[logging.ERROR, logging.INFO, logging.DEBUG][verbosity])

    for root in args.roots:
        conf = NtpConfiguration(root, args.ntp_conf, args.step_tickers)

        if args.ignored_lines:
            for line in conf.ignored_lines:
                print(line)

        if args.ignored_directives:
            for directive in conf.ignored_directives:
                print(directive)

        conf.write_chrony_configuration(args.chrony_conf, args.chrony_keys, args.dry_run, args.backup)
Example 29
Project: pyblish-win   Author: pyblish   File: driver.py    GNU Lesser General Public License v3.0 5 votes vote down vote up
def main(*args):
    """Main program, when run as a script: produce grammar pickle files.

    Calls load_grammar for each argument, a path to a grammar text file.
    """
    grammar_paths = args if args else sys.argv[1:]
    logging.basicConfig(level=logging.INFO, stream=sys.stdout,
                        format='%(message)s')
    for grammar_path in grammar_paths:
        load_grammar(grammar_path, save=True, force=True)
    return True
Example 30
Project: wikilinks   Author: trovdimi   File: pickle_data.py    MIT License 5 votes vote down vote up
def pickle_sim():
    # Python 2 script. Builds a sparse-matrix triple (row indices, column
    # indices, values) of semantic-similarity scores for every link in the
    # vocabulary and pickles it. All data paths are hard-coded to /ssd.
    # setup logging
    LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
    LOGGING_PATH = 'tmp/semsim-pickle.log'
    logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')
    i = 0
    # Load the pre-pickled inputs (variable-name typos kept as on disk).
    voc_zip_links = pickle.load( open( "/ssd/ddimitrov/pickle/voc_zip_links", "rb" ) )
    print "loaded voc_zip_links"
    uniqeu_nonzero_map = pickle.load( open( "/ssd/ddimitrov/pickle/uniqeu_nonzero_map", "rb" ) )
    vocab = pickle.load( open( "/ssd/ddimitrov/pickle/vocab", "rb" ) )
    print "loaded vocab"
    sem_sim = pickle.load( open( "/ssd/ddimitrov/pickle/sem_sim", "rb" ) )


    values_rel_faeture = list()
    i_indices = list()
    j_indices = list()
    i = 0
    for link in voc_zip_links:
        # Progress marker every million links.
        i += 1
        if i % 1000000 == 0:
            print  i
        i_indices.append(uniqeu_nonzero_map[vocab[link[0]]])
        j_indices.append(vocab[link[1]])
        from_id = int(link[0])
        to_id = int(link[1])
        # sem_sim appears to be keyed on the ordered (smaller_id, larger_id)
        # pair, so order the pair before the lookup; a missing pair is only
        # logged and skipped.
        # NOTE(review): a skipped KeyError leaves values_rel_faeture shorter
        # than the index lists — confirm downstream tolerates the mismatch.
        if from_id<=to_id:
            try:
                values_rel_faeture.append(sem_sim[(from_id,to_id)])
            except KeyError as e:
                logging.error(e)
        else:
            try:
                values_rel_faeture.append(sem_sim[(to_id,from_id)])
            except KeyError as e:
                logging.error(e)
    # Persist as [rows, cols, values] for later sparse-matrix construction.
    rel_feature_hyp_data = [i_indices, j_indices, values_rel_faeture]
    pickle.dump(rel_feature_hyp_data, open("/ssd/ddimitrov/pickle/sem_sim_hyp", "wb"), protocol=pickle.HIGHEST_PROTOCOL)
Example 31
Project: wikilinks   Author: trovdimi   File: pickle_data.py    MIT License 5 votes vote down vote up
def pickle_sem_sim_data():
    """Collect the semantic-similarity value of every edge in the wikipedia
    network CSV and pickle the resulting list."""
    # setup logging
    log_format = '%(levelname)s:\t%(asctime)-15s %(message)s'
    log_path = 'tmp/semsim-pickle.log'
    logging.basicConfig(filename=log_path, level=logging.DEBUG, format=log_format, filemode='w')

    sem_sim = pickle.load(open("/ssd/ddimitrov/pickle/sem_sim", "rb"))

    values_sem_sim = list()
    with open(os.path.join(os.path.dirname(__file__), "/home/ddimitrov/tmp/wikipedia_network.csv")) as f:
        # Skip the header row.
        next(f)
        for line in f:
            fields = line.strip().split('\t')
            from_id = int(fields[0])
            to_id = int(fields[1])
            # sem_sim is keyed on the ordered (smaller, larger) id pair;
            # a missing pair is only logged and skipped.
            key = (from_id, to_id) if from_id <= to_id else (to_id, from_id)
            try:
                values_sem_sim.append(sem_sim[key])
            except KeyError as e:
                logging.error(e)


    pickle.dump(values_sem_sim, open("/ssd/ddimitrov/pickle/values_sem_sim", "wb"), protocol=pickle.HIGHEST_PROTOCOL)
Example 32
Project: wikilinks   Author: trovdimi   File: insertarticlefeatures.py    MIT License 5 votes vote down vote up
def update_article_features():
    # Python 2 script. Copies HITS authority/hub scores from the loaded
    # graph into the article_features table, one UPDATE per article.

    connection = db._create_connection()
    cursor = connection.cursor()

    network = load_graph("output/wikipedianetwork.xml.gz")
    print 'graph loaded'
    articles = db_work_view.retrieve_all_articles()
    print 'articles loaded'

    # setup logging
    LOGGING_FORMAT = '%(levelname)s:\t%(asctime)-15s %(message)s'
    LOGGING_PATH = 'tmp/articlefeatures-dbinsert.log'
    logging.basicConfig(filename=LOGGING_PATH, level=logging.DEBUG, format=LOGGING_FORMAT, filemode='w')

    for article in articles:
        try:
            # Pull the per-vertex HITS properties for this article's node.
            article_features = {}
            vertex = network.vertex(article['id'])
            article_features['id'] = article['id']
            article_features['hits_authority'] = network.vertex_properties["authority"][vertex]
            article_features['hits_hub'] = network.vertex_properties["hub"][vertex]
            #article_features['katz'] = network.vertex_properties["katz"][vertex]

            sql  = "UPDATE article_features " \
                   "SET hits_authority = %(hits_authority)s, hits_hub = %(hits_hub)s " \
                   "WHERE id = %(id)s;"

            cursor.execute(sql, article_features)

        except MySQLdb.Error as e:
            # DB errors are printed and the loop continues with the next row.
            #logging.error('DB Insert Error  article id: "%s" ' % article['id'])
            print e
        except ValueError as v:
            logging.error('ValueError for article id: "%s"' % article['id'])
            print v
        # NOTE(review): commit runs once per article, even after an error —
        # presumably intentional row-at-a-time durability; confirm.
        connection.commit()
    connection.close()
Example 33
Project: wikilinks   Author: trovdimi   File: tableclassinserter.py    MIT License 5 votes vote down vote up
def table_parser(self, file_name, root):
        """Parse one zipped article HTML dump and record its table CSS classes.

        Extracts the HTML from the zip at ``root/file_name``, collects the
        distinct table classes found in it, and inserts one row per class for
        the article id encoded in the file name.  A KeyError from the fed-text
        parser rolls the transaction back; the view is committed either way.
        """
        database = MySQLDatabase(DATABASE_HOST, DATABASE_USER, DATABASE_PASSWORD, DATABASE_NAME)
        build_view = database.get_build_view()
        db_cursor = build_view._cursor

        # Log straight to file, overwriting the log from any previous run.
        logging.basicConfig(filename='tmp/tableclasses-dbinsert.log',
                            level=logging.DEBUG,
                            format='%(levelname)s:\t%(asctime)-15s %(message)s',
                            filemode='w')

        parser = WikipediaHTMLTableParser()
        page_html = self.zip2html(os.path.join(root, file_name))
        parser.feed(page_html.decode('utf-8'))
        # File names look like "<prefix>_<article id>_..." — take the id part.
        article_id = file_name.split('_')[1]
        try:
            fed_text = WikipediaFedTextParser(parser.get_data())
            # De-duplicate before inserting so each class is stored once.
            for css_class in set(fed_text.table_classes(None)):
                self.insert_table_class(article_id, css_class, db_cursor)
        except KeyError:
            build_view._db_connection.rollback()
            logging.error('KeyError FedTextParser source article id: %s ' % article_id)
        build_view.commit()
        build_view.reset_cache()
Example 34
Project: rubbish.py   Author: alphapapa   File: rubbish.py    GNU General Public License v3.0 5 votes vote down vote up
def cli(verbose):
    """Configure root logging from a repeated-``-v`` verbosity count.

    0 -> WARNING, 1 -> INFO, 2 or more -> DEBUG.
    """
    chosen_level = (log.DEBUG if verbose >= 2
                    else log.INFO if verbose == 1
                    else log.WARNING)
    log.basicConfig(level=chosen_level, format="%(levelname)s: %(message)s")

# * Commands

# ** empty

# * Commands

# ** empty 
Example 35
Project: shaptools   Author: SUSE   File: shell_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """Run once for the whole test class: enable INFO-level logging."""
        logging.basicConfig(level=logging.INFO)
Example 36
Project: shaptools   Author: SUSE   File: netweaver_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """
        Class-wide fixture: configure root logging at INFO verbosity.
        """
        log_level = logging.INFO
        logging.basicConfig(level=log_level)
Example 37
Project: shaptools   Author: SUSE   File: hana_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """One-time setup for the test class.

        Only needs basic logging so test runs show INFO output.
        """
        logging.basicConfig(level=logging.INFO)
Example 38
Project: shaptools   Author: SUSE   File: init_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """
        Global test-class setup: turn on INFO-level logging.
        """
        # basicConfig is a no-op if the root logger is already configured.
        logging.basicConfig(level=logging.INFO)
Example 39
Project: shaptools   Author: SUSE   File: pyhdb_connector_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """
        Global setUp.

        Registers a Mock in ``sys.modules`` under the name ``pyhdb`` so the
        connector module imports cleanly without the real driver installed,
        then caches the imported module on the class for the tests.  Order
        matters: the stub must be in place before the import runs.
        """

        logging.basicConfig(level=logging.INFO)
        sys.modules['pyhdb'] = mock.Mock()
        from shaptools.hdb_connector.connectors import pyhdb_connector
        cls._pyhdb_connector = pyhdb_connector
Example 40
Project: shaptools   Author: SUSE   File: base_connect_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """
        Global setUp.

        Stubs both HANA drivers (``hdbcli`` and ``pyhdb``) with Mocks in
        ``sys.modules`` before importing ``base_connector``, so the import
        succeeds without either package installed.  The stubs must be
        registered before the import statement runs.
        """

        logging.basicConfig(level=logging.INFO)
        sys.modules['hdbcli'] = mock.Mock()
        sys.modules['pyhdb'] = mock.Mock()

        from shaptools.hdb_connector.connectors import base_connector
        cls._base_connector = base_connector
Example 41
Project: shaptools   Author: SUSE   File: dbapi_connector_test.py    Apache License 2.0 5 votes vote down vote up
def setUpClass(cls):
        """
        Global setUp.

        Registers a Mock in ``sys.modules`` under the name ``hdbcli`` so the
        dbapi connector imports without the real SAP client library, then
        caches the imported module on the class for the tests.  The stub must
        precede the import.
        """

        logging.basicConfig(level=logging.INFO)
        sys.modules['hdbcli'] = mock.Mock()
        from shaptools.hdb_connector.connectors import dbapi_connector
        cls._dbapi_connector = dbapi_connector
Example 42
Project: rnm   Author: alexjaw   File: wireless.py    MIT License 5 votes vote down vote up
def test_me():
    """Smoke-test the WiFi wrapper: fetch IP and hotspot info, logging results."""
    logging.basicConfig(level=logging.DEBUG)
    log = logging.getLogger(__name__)
    log.info('------------- Starting test... -------------')

    wireless = WiFi()

    log.info(repr(wireless.get_ip()))

    # Result intentionally unused; the call itself exercises the code path.
    wireless.get_hotspots_info()

    log.info('-------------    Finished      -------------')
Example 43
Project: rnm   Author: alexjaw   File: service.py    MIT License 5 votes vote down vote up
def test_me():
    """Smoke-test the AccessPoint and DHCP wrappers by logging their status."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)
    log.info('------------- Starting test... -------------')

    # Construct both services up front, then report each status in turn.
    for service in (AccessPoint(), DHCP()):
        log.info(repr(service.status()))

    log.info('-------------    Finished      -------------')
Example 44
Project: rnm   Author: alexjaw   File: rnm.py    MIT License 5 votes vote down vote up
def test_me():
    """Instantiate the main rnm components to verify they construct cleanly."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)
    log.info('------------- Starting test... -------------')

    # Construction alone is the test; the instances are not exercised further.
    wifi_dev = WiFi()
    access_point = AccessPoint()
    dhcp_server = DHCP()

    log.info('-------------    Finished      -------------')
Example 45
Project: neural-fingerprinting   Author: StephanZheng   File: run_multigpu.py    BSD 3-Clause "New" or "Revised" License 5 votes vote down vote up
def run_trainer(hparams):
    """Train with the trainer matching ``hparams.attack_type_train``, then evaluate.

    Picks the multi-GPU trainer when the attack type mentions 'multigpu',
    otherwise the single-GPU one; trains, evaluates without bumping the
    epoch counter, and returns whatever finish() reports.
    """
    logging.basicConfig(format='%(asctime)s %(message)s', level=logging.INFO)

    if 'multigpu' not in hparams.attack_type_train:
        logging.info('Single GPU Trainer.')
        trainer = TrainerSingleGPU(hparams)
    else:
        logging.info('Multi GPU Trainer.')
        trainer = TrainerMultiGPU(hparams)

    trainer.model_train()
    trainer.eval(inc_epoch=False)
    return trainer.finish()
Example 46
Project: MusicDownloader   Author: wwwpf   File: utils.py    GNU General Public License v3.0 5 votes vote down vote up
def init_logging():
    """Route all INFO-and-above log records to log.txt (UTF-8 encoded)."""
    handler = logging.FileHandler("log.txt", encoding="utf-8")
    logging.basicConfig(
        handlers=[handler],
        level=logging.INFO,
        format="%(asctime)s %(filename)s[line:%(lineno)d]\t"
               "%(levelname)s\t%(message)s",
    )
Example 47
Project: aws-auto-remediate   Author: servian   File: lambda_handler.py    GNU General Public License v3.0 5 votes vote down vote up
def lambda_handler(event, context):
    """AWS Lambda entry point: reset logging, then run auto-remediation.

    Strips any handlers the Lambda runtime pre-installed on the root logger
    (otherwise basicConfig below would be a no-op), quiets the AWS SDK
    loggers, and delegates the actual work to Remediate.

    :param event: Lambda event payload forwarded to Remediate
    :param context: Lambda context object (unused)
    """
    logger = logging.getLogger()

    # Iterate over a copy: removing items from the very list being iterated
    # skips every other handler and leaves some attached.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

    # change logging levels for boto and others to prevent log spamming
    logging.getLogger("boto3").setLevel(logging.ERROR)
    logging.getLogger("botocore").setLevel(logging.ERROR)
    logging.getLogger("urllib3").setLevel(logging.ERROR)

    # set logging format; upper-case LOGLEVEL so e.g. "debug" is accepted
    # (logging only recognises upper-case level names)
    logging.basicConfig(
        format="[%(levelname)s] %(message)s (%(filename)s, %(funcName)s(), line %(lineno)d)",
        level=os.environ.get("LOGLEVEL", "WARNING").upper(),
    )

    # add SNS logger
    # sns_logger = SNSLoggingHandler(os.environ.get('LOGTOPIC'))
    # sns_logger.setLevel(logging.INFO)
    # loggger.addHandler(sns_logger)

    # instantiate class
    remediate = Remediate(logging, event)

    # run functions
    remediate.remediate()
Example 48
Project: aws-auto-remediate   Author: servian   File: lambda_handler.py    GNU General Public License v3.0 5 votes vote down vote up
def lambda_handler(event, context):
    """AWS Lambda entry point: reset logging, then bootstrap remediation setup.

    Strips any handlers the Lambda runtime pre-installed on the root logger
    (otherwise basicConfig below would be a no-op), quiets the AWS SDK
    loggers, and runs the Setup bootstrap: DynamoDB tables plus the
    config/custom rule CloudFormation stacks.

    :param event: Lambda event payload (unused)
    :param context: Lambda context object (unused)
    """
    logger = logging.getLogger()

    # Iterate over a copy: removing items from the very list being iterated
    # skips every other handler and leaves some attached.
    for handler in list(logger.handlers):
        logger.removeHandler(handler)

    # change logging levels for boto and others
    logging.getLogger("boto3").setLevel(logging.ERROR)
    logging.getLogger("botocore").setLevel(logging.ERROR)
    logging.getLogger("urllib3").setLevel(logging.ERROR)

    # set logging format; LOGLEVEL is upper-cased because logging only
    # recognises upper-case level names
    logging.basicConfig(
        format="[%(levelname)s] %(message)s (%(filename)s, %(funcName)s(), line %(lineno)d)",
        level=os.environ.get("LOGLEVEL", "WARNING").upper(),
    )

    # instantiate class
    setup = Setup(logging)

    # run functions
    setup.setup_dynamodb()

    settings = setup.get_settings()

    setup.create_stacks("config_rules", settings)
    setup.create_stacks("custom_rules", settings)
Example 49
Project: flasky   Author: RoseOu   File: plugin_base.py    MIT License 5 votes vote down vote up
def _log(opt_str, value, parser):
    """optparse callback: raise the log level for the logger named ``value``.

    ``logging`` is a module-level placeholder (falsy, e.g. ``None``) until
    first use; the ``global`` statement lets this lazy import rebind the
    module global, and basicConfig attaches a default handler on first call.
    Options ending in ``-info`` select INFO, ``-debug`` selects DEBUG.
    """
    global logging
    if not logging:
        import logging
        logging.basicConfig()

    if opt_str.endswith('-info'):
        logging.getLogger(value).setLevel(logging.INFO)
    elif opt_str.endswith('-debug'):
        logging.getLogger(value).setLevel(logging.DEBUG)
Example 50
Project: flasky   Author: RoseOu   File: test_easy_install.py    MIT License 5 votes vote down vote up
def test_multiproc_atexit(self):
        try:
            __import__('multiprocessing')
        except ImportError:
            # skip the test if multiprocessing is not available
            return

        log = logging.getLogger('test_easy_install')
        logging.basicConfig(level=logging.INFO, stream=sys.stderr)
        log.info('this should not break')