Python logging.info() Examples

The following are 30 code examples of logging.info(), drawn from open-source projects. Each example notes the project and source file it was taken from, so you can follow up with the original code. You may also want to check out the other available functions and classes of the logging module.
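Before the project examples, here is a minimal, self-contained sketch of how logging.info() is typically configured and called; the level, format string, and message values below are illustrative choices, not taken from any of the projects that follow.

import logging

# Configure the root logger once, near program start. The level and format
# shown here are illustrative defaults, not requirements.
logging.basicConfig(
    level=logging.INFO,
    format="%(asctime)s %(levelname)s %(name)s: %(message)s",
)

# logging.info() logs a message at INFO level on the root logger. Arguments
# after the format string are merged lazily, only if the record is emitted.
logging.info("Processing %d records from %s", 42, "input.csv")

Note that the root logger's default level is WARNING, so INFO messages are dropped unless the level is lowered, as with basicConfig(level=logging.INFO) above.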
Example #1
Source File: demo.py    From svviz with MIT License
def downloadDemo(which):
    try:
        downloadDir = tempfile.mkdtemp()
        archivePath = "{}/svviz-data.zip".format(downloadDir)

        # logging.info("Downloading...")
        downloadWithProgress("http://svviz.github.io/svviz/assets/examples/{}.zip".format(which), archivePath)
        
        logging.info("Decompressing...")
        archive = zipfile.ZipFile(archivePath)
        archive.extractall("{}".format(downloadDir))

        if not os.path.exists("svviz-examples"):
            os.makedirs("svviz-examples/")

        shutil.move("{temp}/{which}".format(temp=downloadDir, which=which), "svviz-examples/")
    except Exception as e:
        print("error downloading and decompressing example data: {}".format(e))
        return False

    if not os.path.exists("svviz-examples"):
        print("error finding example data after download and decompression")
        return False
    return True 
Example #2
Source File: pairfinder.py    From svviz with MIT License
def getToMatchWithSampling(self):
        readIDs = set()

        logging.info("  exceeded number of reads required to begin sampling; performing sampling")
        for region in self.regions:
            for read in self.loadRegion(region.chr(), region.start(), region.end()):
                readIDs.add(read.qname)

        readIDs = random.sample(readIDs, self.sampleReads)

        tomatch = set()
        readsByID = collections.defaultdict(ReadSet)

        for region in self.regions:
            for read in self.loadRegion(region.chr(), region.start(), region.end()):
                if read.qname in readIDs:
                    tomatch.add(read)
                    readsByID[read.qname].add(read)

        return tomatch, readsByID 
Example #3
Source File: two_pop_model.py    From indras_net with GNU General Public License v3.0
def postact(self):
        """
        After we are done acting, adopt our new stance.
        If the stance changes our direction, we adopt the extreme
        of the new direction.
        Then move to an empty cell.
        """
        new_direct = self.new_stance.direction()
        curr_direct = self.stance.direction()
        logging.info("For %s: stance = %s, new stance = %s"
                     % (self.name, str(self.stance), str(self.new_stance)))
        if not new_direct.equals(curr_direct):
            self.direction_changed(curr_direct, new_direct)
            # if adopting a new stance direction, we go to the extreme
            self.new_stance = new_direct
        else:
            self.new_stance = self.new_stance.normalize()
        self.stance = self.new_stance
        self.move_to_empty(grid_view=self.my_view) 
Example #4
Source File: validate_and_copy_submissions.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def copy_submission_to_destination(self, src_filename, dst_subdir,
                                     submission_id):
    """Copies submission to target directory.

    Args:
      src_filename: source filename of the submission
      dst_subdir: subdirectory of the target directory where submission should
        be copied to
      submission_id: ID of the submission, will be used as a new
        submission filename (before extension)
    """

    extension = [e for e in ALLOWED_EXTENSIONS if src_filename.endswith(e)]
    if len(extension) != 1:
      logging.error('Invalid submission extension: %s', src_filename)
      return
    dst_filename = os.path.join(self.target_dir, dst_subdir,
                                submission_id + extension[0])
    cmd = ['gsutil', 'cp', src_filename, dst_filename]
    if subprocess.call(cmd) != 0:
      logging.error('Can\'t copy submission to destination')
    else:
      logging.info('Submission copied to: %s', dst_filename) 
Example #5
Source File: worker.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def run_without_time_limit(self, cmd):
    """Runs docker command without time limit.

    Args:
      cmd: list with the command line arguments which are passed to docker
        binary

    Returns:
      how long it took to run submission in seconds

    Raises:
      WorkerError: if error occurred during execution of the submission
    """
    cmd = [DOCKER_BINARY, 'run', DOCKER_NVIDIA_RUNTIME] + cmd
    logging.info('Docker command: %s', ' '.join(cmd))
    start_time = time.time()
    retval = subprocess.call(cmd)
    elapsed_time_sec = long(time.time() - start_time)
    logging.info('Elapsed time of attack: %d', elapsed_time_sec)
    logging.info('Docker retval: %d', retval)
    if retval != 0:
      logging.warning('Docker returned non-zero retval: %d', retval)
      raise WorkerError('Docker returned non-zero retval ' + str(retval))
    return elapsed_time_sec 
Example #6
Source File: agent_pop.py    From indras_net with GNU General Public License v3.0
def append(self, agent, v=None):
        """
        Appends to agent list.
        """
        if v is None:
            var = agent.get_type()
        else:
            var = v
        logging.info("Adding %s of variety %s" % (agent.name, var))

        if var not in self.vars:
            self.add_variety(var)
            self.graph.add_edge(self, var)

        self.vars[var]["agents"].append(agent)

# we link each agent to the variety
# so we can show their relationship
        self.graph.add_edge(var, agent) 
Example #7
Source File: utils.py    From indras_net with GNU General Public License v3.0
def run_model(env, prog_file, results_file):
    # Logging is automatically set up for the modeler:
    logging.info("Starting program " + prog_file)

    periods = env.props.get(PERIODS)
    if periods is None:
        periods = -1
    else:
        periods = int(periods)
    # And now we set things running!
    try:
        results = env.run(periods=periods)
    except SystemExit:
        pass
    env.record_results(results_file)
    return results 
Example #8
Source File: validate_submission_lib.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def _verify_docker_image_size(self, image_name):
    """Verifies size of Docker image.

    Args:
      image_name: name of the Docker image.

    Returns:
      True if image size is withing the limits, False otherwise.
    """
    shell_call(['docker', 'pull', image_name])
    try:
      image_size = subprocess.check_output(
          ['docker', 'inspect', '--format={{.Size}}', image_name]).strip()
      image_size = int(image_size) if PY3 else long(image_size)
    except (ValueError, subprocess.CalledProcessError) as e:
      logging.error('Failed to determine docker image size: %s', e)
      return False
    logging.info('Size of docker image %s is %d', image_name, image_size)
    if image_size > MAX_DOCKER_IMAGE_SIZE:
      logging.error('Image size exceeds limit %d', MAX_DOCKER_IMAGE_SIZE)
    return image_size <= MAX_DOCKER_IMAGE_SIZE 
Example #9
Source File: validate_submission.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def main(args):
  print_in_box('Validating submission ' + args.submission_filename)
  random.seed()
  temp_dir = args.temp_dir
  delete_temp_dir = False
  if not temp_dir:
    temp_dir = tempfile.mkdtemp()
    logging.info('Created temporary directory: %s', temp_dir)
    delete_temp_dir = True
  validator = validate_submission_lib.SubmissionValidator(temp_dir,
                                                          args.use_gpu)
  if validator.validate_submission(args.submission_filename,
                                   args.submission_type):
    print_in_box('Submission is VALID!')
  else:
    print_in_box('Submission is INVALID, see log messages for details')
  if delete_temp_dir:
    logging.info('Deleting temporary directory: %s', temp_dir)
    subprocess.call(['rm', '-rf', temp_dir]) 
Example #10
Source File: edgebox_model.py    From indras_net with GNU General Public License v3.0
def act(self):
        """
        Act is called in an interactive loop by code
        in the base framework
        """
        super().survey_env()
        for trader in self.neighbor_iter(view=self.my_view):
            for g in self.goods:
                amt = 1
                while self.goods[g]["endow"] >= amt:
                    logging.info(self.name + " is offering "
                                 + str(amt) + " units of "
                                 + g + " to " + trader.name)
                    ans = trader.rec_offer(g, amt, self)
                    if ans == ACCEPT or ans == REJECT:
                        break
                    amt += 1 
Example #11
Source File: barter_model.py    From indras_net with GNU General Public License v3.0
def fetch_agents_from_file(self, filenm, agent_type):
        """
        Read in a list of bartering agents from a csv file
        """

        max_detect = self.props.get("max_detect",
                                    ebm.GLOBAL_KNOWLEDGE)
        with open(filenm) as f:
            reader = csv.reader(f)
            for row in reader:
                agent = agent_type(row[0], max_detect=max_detect)
                self.add_agent(agent)
                for i in range(1, len(row) - 2, STEP):
                    good = row[i]
                    self.market.add_good(good)
                    agent.endow(good,
                                int(row[i + 1]),
                                eval("lambda qty: "
                                     + row[i + 2]))
        logging.info("Goods = " + str(self.market)) 
Example #12
Source File: insertsizes.py    From svviz with MIT License
def chooseOrientation(orientations):
    logging.info("  counts +/-:{:<6} -/+:{:<6} +/+:{:<6} -/-:{:<6} unpaired:{:<6}".format(orientations[False, True], 
                                                    orientations[True, False], 
                                                    orientations[True, True],
                                                    orientations[False, False],
                                                    orientations["unpaired"]))
    ranked = sorted(orientations, key=lambda x: orientations[x])
    chosenOrientations = [ranked.pop()]
    while len(ranked) > 0:
        candidate = ranked.pop()
        if orientations[chosenOrientations[-1]] < 2* orientations[candidate]:
            chosenOrientations.append(candidate)
        else:
            break
    if chosenOrientations[0] == "unpaired":
        chosenOrientations = "any"
    else:
        d = {False: "+", True:"-"}
        chosenOrientations = ["".join(d[x] for x in o) for o in chosenOrientations]
    return chosenOrientations 
Example #13
Source File: automate.py    From aospy with Apache License 2.0
def _prune_invalid_time_reductions(spec):
    """Prune time reductions of spec with no time dimension."""
    valid_reductions = []
    if not spec['var'].def_time and spec['dtype_out_time'] is not None:
        for reduction in spec['dtype_out_time']:
            if reduction not in _TIME_DEFINED_REDUCTIONS:
                valid_reductions.append(reduction)
            else:
                msg = ("Var {0} has no time dimension "
                       "for the given time reduction "
                       "{1} so this calculation will "
                       "be skipped".format(spec['var'].name, reduction))
                logging.info(msg)
    else:
        valid_reductions = spec['dtype_out_time']
    return valid_reductions 
Example #14
Source File: calc.py    From aospy with Apache License 2.0
def load(self, dtype_out_time, dtype_out_vert=False, region=False,
             plot_units=False, mask_unphysical=False):
        """Load the data from the object if possible or from disk."""
        msg = ("Loading data from disk for object={0}, dtype_out_time={1}, "
               "dtype_out_vert={2}, and region="
               "{3}".format(self, dtype_out_time, dtype_out_vert, region))
        logging.info(msg + ' ({})'.format(ctime()))
        # Grab from the object if its there.
        try:
            data = self.data_out[dtype_out_time]
        except (AttributeError, KeyError):
            # Otherwise get from disk.  Try scratch first, then archive.
            try:
                data = self._load_from_disk(dtype_out_time, dtype_out_vert,
                                            region=region)
            except IOError:
                data = self._load_from_tar(dtype_out_time, dtype_out_vert)
        # Copy the array to self.data_out for ease of future access.
        self._update_data_out(data, dtype_out_time)
        # Apply desired plotting/cleanup methods.
        if mask_unphysical:
            data = self.var.mask_unphysical(data)
        if plot_units:
            data = self.var.to_plot_units(data, dtype_vert=dtype_out_vert)
        return data 
Example #15
Source File: old_edgebox.py    From indras_net with GNU General Public License v3.0
def act(self):
        """
        Act is called in an interactive loop by code
        in the base framework
        """
        super().survey_env()
        for trader in self.neighbor_iter(view=self.my_view):
            for g in self.goods:
                amt = 1
                while self.goods[g]["endow"] >= amt:
                    logging.info(self.name + " is offering "
                                 + str(amt) + " units of "
                                 + g + " to " + trader.name)
                    ans = trader.rec_offer(g, amt, self)
                    if ans == ACCEPT or ans == REJECT:
                        break
                    amt += 1 
Example #16
Source File: web.py    From svviz with MIT License
def run(port=None):
    import webbrowser, threading

    if port is None:
        port = getRandomPort()

    # load()
    url = "http://127.0.0.1:{}/".format(port)
    logging.info("Starting browser at {}".format(url))
    # webbrowser.open_new(url)

    threading.Timer(1.25, lambda: webbrowser.open(url) ).start()

    app.run(
        port=port#,
        # debug=True
    ) 
Example #17
Source File: views.py    From indras_net with GNU General Public License v3.0
def assign_key(request):
    """
        Assign a key to a user.
    """
    if 'session_id' not in request.session:
        with open("session_id.txt", "w+") as f:
            session_id = f.readline()
            if not session_id:
                session_id = 0
            else:
                session_id = int(session_id)
            session_id += 1
            new_id = session_id
            f.write(str(session_id))

        request.session['session_id'] = new_id
        request.session.modified = True
    else:
        logging.info("This user has a session id: ",
                     request.session['session_id']) 
Example #18
Source File: stream.py    From kite-connect-python-example with MIT License
def on_error():
	logging.info("WebSocket connection thrown error")

# Assign the callbacks. 
Example #19
Source File: calc.py    From aospy with Apache License 2.0
def save(self, data, dtype_out_time, dtype_out_vert=False,
             save_files=True, write_to_tar=False):
        """Save aospy data to data_out attr and to an external file."""
        self._update_data_out(data, dtype_out_time)
        if save_files:
            self._save_files(data, dtype_out_time)
        if write_to_tar and self.proj.tar_direc_out:
            self._write_to_tar(dtype_out_time)
        logging.info('\t{}'.format(self.path_out[dtype_out_time])) 
Example #20
Source File: calc.py    From aospy with Apache License 2.0
def _apply_all_time_reductions(self, data):
        """Apply all requested time reductions to the data."""
        logging.info(self._print_verbose("Applying desired time-"
                                         "reduction methods."))
        reduc_specs = [r.split('.') for r in self.dtype_out_time]
        reduced = {}
        for reduc, specs in zip(self.dtype_out_time, reduc_specs):
            func = specs[-1]
            if 'reg' in specs:
                reduced.update({reduc: self.region_calcs(data, func)})
            else:
                reduced.update({reduc: self._time_reduce(data, func)})
        return OrderedDict(sorted(reduced.items(), key=lambda t: t[0])) 
Example #21
Source File: calc.py    From aospy with Apache License 2.0
def _get_input_data(self, var, start_date, end_date):
        """Get the data for a single variable over the desired date range."""
        logging.info(self._print_verbose("Getting input data:", var))

        if isinstance(var, (float, int)):
            return var
        else:
            cond_pfull = ((not hasattr(self, internal_names.PFULL_STR))
                          and var.def_vert and
                          self.dtype_in_vert == internal_names.ETA_STR)
            data = self.data_loader.recursively_compute_variable(
                var, start_date, end_date, self.time_offset, self.model,
                **self.data_loader_attrs)
            name = data.name
            data = self._add_grid_attributes(data.to_dataset(name=data.name))
            data = data[name]
            if cond_pfull:
                try:
                    self.pfull_coord = data[internal_names.PFULL_STR]
                except KeyError:
                    pass
            # Force all data to be at full pressure levels, not half levels.
            bool_to_pfull = (self.dtype_in_vert == internal_names.ETA_STR and
                             var.def_vert == internal_names.PHALF_STR)
            if bool_to_pfull:
                data = utils.vertcoord.to_pfull_from_phalf(data,
                                                           self.pfull_coord)
        if var.def_time:
            # Restrict to the desired dates within each year.
            if self.dtype_in_time != 'av':
                return self._to_desired_dates(data)
        else:
            return data 
Example #22
Source File: calc.py    From aospy with Apache License 2.0
def _add_grid_attributes(self, ds):
        """Add model grid attributes to a dataset"""
        for name_int, names_ext in self._grid_attrs.items():
            ds_coord_name = set(names_ext).intersection(set(ds.coords) |
                                                        set(ds.data_vars))
            model_attr = getattr(self.model, name_int, None)
            if ds_coord_name and (model_attr is not None):
                # Force coords to have desired name.
                ds = ds.rename({list(ds_coord_name)[0]: name_int})
                ds = ds.set_coords(name_int)
                if not np.array_equal(ds[name_int], model_attr):
                    if np.allclose(ds[name_int], model_attr):
                        msg = ("Values for '{0}' are nearly (but not exactly) "
                               "the same in the Run {1} and the Model {2}.  "
                               "Therefore replacing Run's values with the "
                               "model's.".format(name_int, self.run,
                                                 self.model))
                        logging.info(msg)
                        ds[name_int].values = model_attr.values
                    else:
                        msg = ("Model coordinates for '{0}' do not match those"
                               " in Run: {1} vs. {2}"
                               "".format(name_int, ds[name_int], model_attr))
                        logging.info(msg)

            else:
                # Bring in coord from model object if it exists.
                ds = ds.load()
                if model_attr is not None:
                    ds[name_int] = model_attr
                    ds = ds.set_coords(name_int)
            if (self.dtype_in_vert == 'pressure' and
                    internal_names.PLEVEL_STR in ds.coords):
                self.pressure = ds.level
        return ds 
Example #23
Source File: validate_submission_lib.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def _verify_submission_size(self):
    submission_size = 0
    for dirname, _, filenames in os.walk(self._extracted_submission_dir):
      for f in filenames:
        submission_size += os.path.getsize(os.path.join(dirname, f))
    logging.info('Unpacked submission size: %d', submission_size)
    if submission_size > MAX_SUBMISSION_SIZE_UNPACKED:
      logging.error('Submission size exceeding limit %d',
                    MAX_SUBMISSION_SIZE_UNPACKED)
    return submission_size <= MAX_SUBMISSION_SIZE_UNPACKED 
Example #24
Source File: image_batches.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def _read_image_list(self, skip_image_ids=None):
    """Reads list of dataset images from the datastore."""
    if skip_image_ids is None:
      skip_image_ids = []
    images = self._storage_client.list_blobs(
        prefix=os.path.join('dataset', self._dataset_name) + '/')
    zip_files = [i for i in images if i.endswith('.zip')]
    if len(zip_files) == 1:
      # we have a zip archive with images
      zip_name = zip_files[0]
      logging.info('Reading list of images from zip file %s', zip_name)
      blob = self._storage_client.get_blob(zip_name)
      buf = BytesIO()
      logging.info('Downloading zip')
      blob.download_to_file(buf)
      buf.seek(0)
      logging.info('Reading content of the zip')
      with zipfile.ZipFile(buf) as f:
        images = [os.path.join(zip_name, os.path.basename(n))
                  for n in f.namelist() if n.endswith('.png')]
      buf.close()
      logging.info('Found %d images', len(images))
    else:
      # we have just a directory with images, filter non-PNG files
      logging.info('Reading list of images from png files in storage')
      images = [i for i in images if i.endswith('.png')]
      logging.info('Found %d images', len(images))
    # filter images which should be skipped
    images = [i for i in images
              if os.path.basename(i)[:-4] not in skip_image_ids]
    # assign IDs to images
    images = [(DATASET_IMAGE_ID_PATTERN.format(idx), i)
              for idx, i in enumerate(sorted(images))]
    return images 
Example #25
Source File: automate.py    From aospy with Apache License 2.0
def _submit_calcs_on_client(calcs, client, func):
    """Submit calculations via dask.bag and a distributed client"""
    logging.info('Connected to client: {}'.format(client))
    if LooseVersion(dask.__version__) < '0.18':
        dask_option_setter = dask.set_options
    else:
        dask_option_setter = dask.config.set
    with dask_option_setter(get=client.get):
        return db.from_sequence(calcs).map(func).compute() 
Example #26
Source File: code2pdf.py    From code2pdf with MIT License
def init_print(self, linenos=True, style="default"):
        app = QApplication([])  # noqa
        doc = QTextDocument()
        doc.setHtml(
            self.highlight_file(linenos=linenos, style=style)
        )
        printer = QPrinter()
        printer.setOutputFileName(self.pdf_file)
        printer.setOutputFormat(QPrinter.PdfFormat)
        page_size_dict = {"a2": QPrinter.A2, "a3": QPrinter.A3, "a4": QPrinter.A4, "letter": QPrinter.Letter}
        printer.setPageSize(page_size_dict.get(self.size.lower(), QPrinter.A4))
        printer.setPageMargins(15, 15, 15, 15, QPrinter.Millimeter)
        doc.print_(printer)
        logging.info("PDF created at %s" % (self.pdf_file)) 
Example #27
Source File: model.py    From osqf2015 with MIT License
def compute_scenarios(self, d, n_scenarios=750):
        # identify returns
        dates = pd.to_datetime(d, unit='ms')
        max_date = dates[0].date()
        min_date = max_date.replace(year=max_date.year-3)

        logging.info('Computing returns between ') #, str(max_date), ' and ', str(min_date))
        self.returns_df = self.df[min_date:max_date].ix[-n_scenarios-1:]
        neutral, vola = self.returns_df.ix[max_date][['Close', 'Vola']]
        scenarios = neutral * np.exp( vola * self.returns_df.ix[:-1].DevolLogReturns )
        return scenarios, neutral 
Example #28
Source File: pairfinder.py    From svviz with MIT License
def domatching(self):
        t0 = None

        for i, read in enumerate(self.tomatch):#[:150]):
            if i % 10000 == 0:
                if t0 is None:
                    t0 = time.time()
                    elapsed = "Finding mate pairs..."
                else:
                    t1 = time.time()
                    elapsed = "t={:.1f}s".format(t1-t0)
                    t0 = t1
                logging.info("   {:,} of {:,} {}".format(i, len(self.tomatch), elapsed))
            if len(self.readsByID[read.qname].reads) < 2:
                self.findmatch(read) 
Example #29
Source File: validate_submission_lib.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def _verify_submission_size(self):
    submission_size = 0
    for dirname, _, filenames in os.walk(self._extracted_submission_dir):
      for f in filenames:
        submission_size += os.path.getsize(os.path.join(dirname, f))
    logging.info('Unpacked submission size: %d', submission_size)
    if submission_size > MAX_SUBMISSION_SIZE_UNPACKED:
      logging.error('Submission size exceeding limit %d',
                    MAX_SUBMISSION_SIZE_UNPACKED)
    return submission_size <= MAX_SUBMISSION_SIZE_UNPACKED 
Example #30
Source File: validate_and_copy_submissions.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def main(args):
  random.seed()
  temp_dir = tempfile.mkdtemp()
  logging.info('Created temporary directory: %s', temp_dir)
  validator = SubmissionValidator(
      source_dir=args.source_dir,
      target_dir=args.target_dir,
      temp_dir=temp_dir,
      do_copy=args.copy,
      use_gpu=args.use_gpu,
      containers_file=args.containers_file)
  validator.run()
  logging.info('Deleting temporary directory: %s', temp_dir)
  subprocess.call(['rm', '-rf', temp_dir])