Python logging.error() Examples

The following are 29 code examples of logging.error(), drawn from open-source projects. The source file, project, and license for each example are listed above it. You may also want to check out the other available functions and classes of the logging module.
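Before the project-specific examples, here is a minimal, self-contained sketch of the call pattern most of them share; the read_bytes() helper and the file path below are hypothetical and exist only for illustration:

import logging

logging.basicConfig(level=logging.INFO,
                    format="%(asctime)s %(levelname)s %(message)s")

def read_bytes(path):
    """Hypothetical helper: read a file, logging any failure."""
    try:
        with open(path, "rb") as f:
            return f.read()
    except OSError as e:
        # Lazy %-style arguments defer string formatting until the record is
        # actually emitted; exc_info=True appends the full traceback.
        logging.error("Unable to read %s: %s", path, e, exc_info=True)
        return None

read_bytes("/no/such/file")

Note that logging.exception(msg), called from inside an except block, is equivalent to logging.error(msg, exc_info=True), and several of the handlers below could use it instead.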
Example #1
Source File: dropbox_dropper.py    From SecPi with GNU General Public License v3.0
def notify(self, info):
		if not self.corrupted:
			#info_str = "Received alarm on sensor %s from worker %s: %s"%(info['sensor'], info['worker'], info['message'])
			latest_subdir = self.get_latest_subdir()

			dropbox_dir = "/%s" % latest_subdir.split("/")[-1] # ugly
			#self.dbx.files_create_folder(dropbox_dir) # shouldn't be necessary, automatically created 
			for file in os.listdir(latest_subdir):
				if os.path.isfile("%s/%s" % (latest_subdir, file)):
					f = open("%s/%s" % (latest_subdir, file), "rb")
					data = f.read()
					try:
						logging.info("Dropbox: Trying to upload file %s to %s" % (file, dropbox_dir))
						res = self.dbx.files_upload(data, "%s/%s" % (dropbox_dir, file))
						logging.info("Dropbox: Upload of file %s succeeded" % file)
					except dropbox.exceptions.ApiError as d:
						logging.error("Dropbox: API error: %s" % d)
					except Exception as e: # currently this catches wrong authorization, we should change this
						logging.error("Dropbox: Wasn't able to upload file: %s" % e)
					f.close()
		else:
			logging.error("Dropbox: Wasn't able to notify because there was an initialization error") 
Example #2
Source File: dropbox_dropper.py    From SecPi with GNU General Public License v3.0
def __init__(self, id, params):
		super(Dropbox_Dropper, self).__init__(id, params)
		try:
			self.access_token = params["access_token"]
		except KeyError as k: # if config parameters are missing
			logging.error("Dropxbox: Error while trying to initialize notifier, it seems there is a config parameter missing: %s" % k)
			self.corrupted = True
			return

		try:
			self.dbx = dropbox.Dropbox(self.access_token)
		except Exception as e:
			logging.error("Dropbox: Error while connecting to Dropbox service: %s" % e)
			self.corrupted = True
			return

		self.data_dir = "/var/tmp/secpi/alarms/" #change this maybe?

		logging.info("Dropbox initialized") 
Example #3
Source File: web.py    From svviz with MIT License
def index():
    if not "last_format" in session:
        session["last_format"] = "svg"
        session.permanent = True

    try:
        variantDescription = str(dataHub.variant).replace("::", " ").replace("-", "–")
        return render_template('index.html',
            samples=list(dataHub.samples.keys()), 
            annotations=dataHub.annotationSets,
            results_table=dataHub.getCounts(),
            insertSizeDistributions=[sample.name for sample in dataHub if sample.insertSizePlot], 
            dotplots=dataHub.dotplots,
            variantDescription=variantDescription)
    except Exception as e:
        logging.error("ERROR:{}".format(e))
        raise 
Example #4
Source File: mailer.py    From SecPi with GNU General Public License v3.0
def __init__(self, id, params):
		super(Mailer, self).__init__(id, params)
		
		try:
			# SMTP Server config + data dir
			self.data_dir = params.get("data_dir", "/var/tmp/secpi/alarms")
			self.smtp_address = params["smtp_address"]
			self.smtp_port = int(params["smtp_port"])
			self.smtp_user = params["smtp_user"]
			self.smtp_pass = params["smtp_pass"]
			self.smtp_security = params["smtp_security"]
		except KeyError as ke: # if config parameters are missing
			logging.error("Mailer: Wasn't able to initialize the notifier, it seems there is a config parameter missing: %s" % ke)
			self.corrupted = True
			return
		except ValueError as ve: # if one configuration parameter can't be parsed as int
			logging.error("Mailer: Wasn't able to initialize the notifier, please check your configuration: %s" % ve)
			self.corrupted = True
			return

		logging.info("Mailer: Notifier initialized") 
Example #5
Source File: vcf.py    From svviz with MIT License
def parseVCFLine(line, dataHub):
    try:
        fields = line.strip().split()

        info = parseInfo(fields[7])

        record = VCFRecord(fields, info)

        if record.svtype == "INS":
            return parseInsertion(record, dataHub)
        elif record.svtype == "DEL":
            return parseDeletion(record, dataHub)
        elif record.svtype == "INV":
            return parseInversion(record, dataHub)
        elif record.svtype == "TRA":
            return parseTranslocation(record, dataHub)
        raise VCFParserError("Unsupported variant type:{}".format(record.svtype))
    except Exception as e:
        logging.error("\n== Failed to load variant: {} ==".format(e))
        logging.error(str(line.strip()))
        return None 
Example #6
Source File: fashion.py    From indras_net with GNU General Public License v3.0
def restore_agent(self, agent_json):
        """
        Restore agent.
        """
        new_agent = None
        if agent_json["ntype"] == Hipster.__name__:
            new_agent = Hipster(name=agent_json["name"],
                                goal=agent_json["goal"],
                                max_move=agent_json["max_move"],
                                variability=agent_json["variability"])

        elif agent_json["ntype"] == Follower.__name__:
            new_agent = Follower(name=agent_json["name"],
                                 goal=agent_json["goal"],
                                 max_move=agent_json["max_move"],
                                 variability=agent_json["variability"])

        else:
            logging.error("agent found whose NTYPE is neither "
                          "{} nor {}, but {}".format(Hipster.__name__,
                                                     Follower.__name__,
                                                     agent_json["ntype"]))

        if new_agent:
            self.add_agent_from_json(new_agent, agent_json) 
Example #7
Source File: local.py    From ALF with Apache License 2.0
def load_project(project_name):
    # load project and check that it looks okay
    try:
        importlib.import_module(project_name)
    except ImportError as e:
        try:
            #TODO: relative module imports in a projects/Project will fail for some reason
            importlib.import_module("projects.%s" % project_name)
        except ImportError as e:
            log.error("Failed to import project %s", project_name, exc_info=1)
            sys.exit(1)
    if len(_registered) != 1:
        log.error("Project must register itself using alf.register(). "
                  "%d projects registered, expecting 1.", len(_registered))
        sys.exit(1)
    project_cls = _registered.pop()
    if not issubclass(project_cls, Fuzzer):
        raise TypeError("Expecting a Fuzzer, not '%s'" % type(project_cls))
    return project_cls 
Example #8
Source File: app.py    From svviz with MIT License
def checkRequirements(args):
    if not remap.check_swalign():
        print("ERROR: check that svviz is correctly installed -- the 'ssw' Smith-Waterman alignment module does not appear to be functional")
        sys.exit(1)
    if args.export:
        exportFormat = export.getExportFormat(args)
        converter = export.getExportConverter(args, exportFormat)
        if converter is None and exportFormat != "svg":
            if args.converter is not None:
                logging.error("ERROR: unable to run SVG converter '{}'. Please check that it is "
                    "installed correctly".format(args.converter))
            else:
                logging.error("ERROR: unable to export to PDF/PNG because at least one of the following "
                    "programs must be correctly installed: webkitToPDF, librsvg or inkscape")

            sys.exit(1) 
Example #9
Source File: export.py    From svviz with MIT License
def getExportConverter(args, exportFormat):
    if args.converter == "webkittopdf" and exportFormat=="png":
        logging.error("webkitToPDF does not support export to PNG; use librsvg or inkscape instead, or "
            "export to PDF")
        sys.exit(1)

    if exportFormat == "png" and args.converter is None:
        return "librsvg"

    if args.converter == "rsvg-convert":
        return "librsvg"

    if args.converter in [None, "webkittopdf"]:
        if checkWebkitToPDF():
            return "webkittopdf"

    if args.converter in [None, "librsvg"]:
        if checkRSVGConvert():
            return "librsvg"

    if args.converter in [None, "inkscape"]:
        if checkInkscape():
            return "inkscape"

    return None 
Example #10
Source File: plugin_loader.py    From vt-ida-plugin with Apache License 2.0
def read_config(self):
    """Read the user's configuration file."""

    logging.debug('[VT Plugin] Reading user config file: %s', self.vt_cfgfile)
    config_file = configparser.RawConfigParser()
    config_file.read(self.vt_cfgfile)

    try:
      if config_file.get('General', 'auto_upload') == 'True':
        self.auto_upload = True
      else:
        self.auto_upload = False
      return True
    except:
      logging.error('[VT Plugin] Error reading the user config file.')
      return False 
Example #11
Source File: plugin_loader.py    From vt-ida-plugin with Apache License 2.0
def check_version(self):
    """Return True if there's an update available."""

    user_agent = 'IDA Pro VT Plugin checkversion - v' + VT_IDA_PLUGIN_VERSION
    headers = {
        'User-Agent': user_agent,
        'Accept': 'application/json'
    }
    url = 'https://raw.githubusercontent.com/VirusTotal/vt-ida-plugin/master/VERSION'

    try:
      response = requests.get(url, headers=headers)
    except:
      logging.error('[VT Plugin] Unable to check for updates.')
      return False

    if response.status_code == 200:
      version = response.text.rstrip('\n')
      if self.__compare_versions(VT_IDA_PLUGIN_VERSION, version):
        logging.debug('[VT Plugin] Version %s is available !', version)
        return True
    return False 
Example #12
Source File: validate_submission_lib.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def make_directory_writable(dirname):
  """Makes directory readable and writable by everybody.

  Args:
    dirname: name of the directory

  Returns:
    True if the operation was successful

  If you run something inside a Docker container and it writes files, these
  files will be written as the root user with restricted permissions. So to be
  able to read or modify these files outside of Docker, you have to change the
  permissions to be world readable and writable.
  """
  retval = shell_call(['docker', 'run', '-v',
                       '{0}:/output_dir'.format(dirname),
                       'busybox:1.27.2',
                       'chmod', '-R', 'a+rwx', '/output_dir'])
  if not retval:
    logging.error('Failed to change permissions on directory: %s', dirname)
  return retval 
Example #13
Source File: validate_submission_lib.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def _verify_docker_image_size(self, image_name):
    """Verifies size of Docker image.

    Args:
      image_name: name of the Docker image.

    Returns:
      True if the image size is within the limits, False otherwise.
    """
    shell_call(['docker', 'pull', image_name])
    try:
      image_size = subprocess.check_output(
          ['docker', 'inspect', '--format={{.Size}}', image_name]).strip()
      image_size = int(image_size) if PY3 else long(image_size)
    except (ValueError, subprocess.CalledProcessError) as e:
      logging.error('Failed to determine docker image size: %s', e)
      return False
    logging.info('Size of docker image %s is %d', image_name, image_size)
    if image_size > MAX_DOCKER_IMAGE_SIZE:
      logging.error('Image size exceeds limit %d', MAX_DOCKER_IMAGE_SIZE)
    return image_size <= MAX_DOCKER_IMAGE_SIZE 
Example #14
Source File: validate_and_copy_submissions.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def copy_submission_to_destination(self, src_filename, dst_subdir,
                                     submission_id):
    """Copies submission to target directory.

    Args:
      src_filename: source filename of the submission
      dst_subdir: subdirectory of the target directory where submission should
        be copied to
      submission_id: ID of the submission, will be used as a new
        submission filename (before extension)
    """

    extension = [e for e in ALLOWED_EXTENSIONS if src_filename.endswith(e)]
    if len(extension) != 1:
      logging.error('Invalid submission extension: %s', src_filename)
      return
    dst_filename = os.path.join(self.target_dir, dst_subdir,
                                submission_id + extension[0])
    cmd = ['gsutil', 'cp', src_filename, dst_filename]
    if subprocess.call(cmd) != 0:
      logging.error('Can\'t copy submission to destination')
    else:
      logging.info('Submission copied to: %s', dst_filename) 
Example #15
Source File: validate_and_copy_submissions.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def save_id_to_path_mapping(self):
    """Saves mapping from submission IDs to original filenames.

    This mapping is saved as CSV file into target directory.
    """
    if not self.id_to_path_mapping:
      return
    with open(self.local_id_to_path_mapping_file, 'w') as f:
      writer = csv.writer(f)
      writer.writerow(['id', 'path'])
      for k, v in sorted(iteritems(self.id_to_path_mapping)):
        writer.writerow([k, v])
    cmd = ['gsutil', 'cp', self.local_id_to_path_mapping_file,
           os.path.join(self.target_dir, 'id_to_path_mapping.csv')]
    if subprocess.call(cmd) != 0:
      logging.error('Can\'t copy id_to_path_mapping.csv to target directory') 
Example #16
Source File: cloud_client.py    From neural-fingerprinting with BSD 3-Clause "New" or "Revised" License
def rollback(self):
    """Rolls back pending mutations.

    Keep in mind that NoTransactionBatch splits all mutations into smaller
    batches and commit them as soon as mutation buffer reaches maximum length.
    That's why rollback method will only roll back pending mutations from the
    buffer, but won't be able to rollback already committed mutations.
    """
    try:
      if self._cur_batch:
        self._cur_batch.rollback()
    except ValueError:
      # ignore "Batch must be in progress to rollback" error
      pass
    self._cur_batch = None
    self._num_mutations = 0 
Example #17
Source File: osdriver.py    From multibootusb with GNU General Public License v2.0
def log(message, info=True, error=False, debug=False, _print=True):
    """
    Dirty function to log messages to file and also print on screen.
    :param message:
    :param info:
    :param error:
    :param debug:
    :return:
    """
    if _print is True:
        print(message)

    # remove ANSI color codes from logs
    # message_clean = re.compile(r'\x1b[^m]*m').sub('', message)

    if info is True:
        logging.info(message)
    elif error is not False:
        logging.error(message)
    elif debug is not False:
        logging.debug(message) 
Example #18
Source File: heroku.py    From friendly-telegram with GNU Affero General Public License v3.0
def get_app(clients, key, api_token=None, create_new=True, full_match=False):
    heroku = heroku3.from_key(key)
    app = None
    for poss_app in heroku.apps():
        config = poss_app.config()
        if "authorization_strings" not in config:
            continue
        if (api_token is None or (config["api_id"] == api_token.ID and config["api_hash"] == api_token.HASH)):
            if full_match and config["authorization_strings"] != os.environ["authorization_strings"]:
                continue
            app = poss_app
            break
    if app is None:
        if api_token is None or not create_new:
            logging.error("%r", {app: repr(app.config) for app in heroku.apps()})
            raise RuntimeError("Could not identify app!")
        app = heroku.create_app(stack_id_or_name="heroku-18", region_id_or_name="us")
        config = app.config()
    return app, config 
Example #19
Source File: utils.py    From friendly-telegram with GNU Affero General Public License v3.0
async def get_user(message):
    """Get user who sent message, searching if not found easily"""
    try:
        return await message.client.get_entity(message.from_id)
    except ValueError:  # Not in database. Lets go looking for them.
        logging.debug("user not in session cache. searching...")
    if isinstance(message.to_id, PeerUser):
        await message.client.get_dialogs()
        return await message.client.get_entity(message.from_id)
    if isinstance(message.to_id, (PeerChannel, PeerChat)):
        async for user in message.client.iter_participants(message.to_id, aggressive=True):
            if user.id == message.from_id:
                return user
        logging.error("WTF! user isn't in the group where they sent the message")
        return None
    logging.error("WTF! to_id is not a user, chat or channel")
    return None 
Example #20
Source File: tensorrt.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def get_optimized_symbol(executor):
    """
    Take an executor's underlying symbol graph and return its generated optimized version.

    Parameters
    ----------
    executor :
        An executor for which you want to see an optimized symbol. Getting an optimized symbol
        is useful to compare and verify the work TensorRT has done against a legacy behaviour.

    Returns
    -------
    symbol : nnvm::Symbol
        The nnvm symbol optimized.
    """
    handle = SymbolHandle()
    try:
        check_call(_LIB.MXExecutorGetOptimizedSymbol(executor.handle, ctypes.byref(handle)))
        result = sym.Symbol(handle=handle)
        return result
    except MXNetError:
        logging.error('Error while trying to fetch TRT optimized symbol for graph. Please ensure '
                      'build was compiled with MXNET_USE_TENSORRT enabled.')
        raise 
Example #21
Source File: straight_dope_test_utils.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def _test_notebook(notebook, override_epochs=True):
    """Run Jupyter notebook to catch any execution error.

    Args:
        notebook : string
            notebook name in folder/notebook format
        override_epochs : boolean
            whether or not to override the number of epochs to 1

    Returns:
        True if the notebook runs without warning or error.
    """
    # Some notebooks will fail to run without error if we do not override
    # relative paths to the data and image directories.
    _override_relative_paths(notebook)

    if override_epochs:
        _override_epochs(notebook)

    return run_notebook(notebook, NOTEBOOKS_DIR, kernel=KERNEL, temp_dir=NOTEBOOKS_DIR) 
Example #22
Source File: straight_dope_test_utils.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def _download_straight_dope_notebooks():
    """Downloads the Straight Dope Notebooks.

    Returns:
        True if it succeeds in downloading the notebooks without error.
    """
    logging.info('Cleaning and setting up notebooks directory "{}"'.format(NOTEBOOKS_DIR))
    shutil.rmtree(NOTEBOOKS_DIR, ignore_errors=True)

    cmd = [GIT_PATH,
           'clone',
           GIT_REPO,
           NOTEBOOKS_DIR]

    proc, msg = _run_command(cmd)

    if proc.returncode != 0:
        err_msg = 'Error downloading Straight Dope notebooks.\n'
        err_msg += msg
        logging.error(err_msg)
        return False
    return True 
Example #23
Source File: docker_cache.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def build_save_containers(platforms, registry, load_cache) -> int:
    """
    Entry point to build and upload all built dockerimages in parallel
    :param platforms: List of platforms
    :param registry: Docker registry name
    :param load_cache: Load cache before building
    :return: 1 if error occurred, 0 otherwise
    """
    from joblib import Parallel, delayed
    if len(platforms) == 0:
        return 0

    platform_results = Parallel(n_jobs=len(platforms), backend="multiprocessing")(
        delayed(_build_save_container)(platform, registry, load_cache)
        for platform in platforms)

    is_error = False
    for platform_result in platform_results:
        if platform_result is not None:
            logging.error('Failed to generate %s', platform_result)
            is_error = True

    return 1 if is_error else 0 
Example #24
Source File: manager.py    From SecPi with GNU General Public License v3.0
def got_config_request(self, ch, method, properties, body):
		ip_addresses = json.loads(body)
		logging.info("Got config request with following IP addresses: %s" % ip_addresses)

		pi_id = None
		worker = db.session.query(db.objects.Worker).filter(db.objects.Worker.address.in_(ip_addresses)).first()
		if worker:
			pi_id = worker.id
			logging.debug("Found worker id %s for IP address %s" % (pi_id, worker.address))
		else: # wasn't able to find worker with given ip address(es)
			logging.error("Wasn't able to find worker for given IP adress(es)")
			reply_properties = pika.BasicProperties(correlation_id=properties.correlation_id)
			self.channel.basic_publish(exchange=utils.EXCHANGE, properties=reply_properties, routing_key=properties.reply_to, body="")
			return
		
		config = self.prepare_config(pi_id)
		logging.info("Sending intial config to worker with id %s" % pi_id)
		reply_properties = pika.BasicProperties(correlation_id=properties.correlation_id, content_type='application/json')
		self.channel.basic_publish(exchange=utils.EXCHANGE, properties=reply_properties, routing_key=properties.reply_to, body=json.dumps(config))

	# callback method for when the manager receives data after a worker executed its actions 
Example #25
Source File: height.py    From indras_net with GNU General Public License v3.0
def get_agent_from_json(agent_json):
        new_agent = None
        if agent_json["ntype"] == HeightAgent.__name__:
            new_agent = HeightAgent(name=agent_json["name"],
                                    height=agent_json["height"],
                                    parent_height=agent_json["parent_height"])

        elif agent_json["ntype"] in HeightAgentEng.__name__:
            new_agent = HeightAgentEng(name=agent_json["name"],
                                       height=agent_json["height"],
                                       parent_height=agent_json["parent_height"])

        else:
            logging.error("agent found whose NTYPE is neither "
                          "{} nor {}, but rather {}".format(HeightAgent.__name__,
                                                            HeightAgentEng.__name__,
                                                            agent_json["ntype"]))
        return new_agent 
Example #26
Source File: tensorboard.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def __init__(self, logging_dir, prefix=None):
        self.prefix = prefix
        try:
            from mxboard import SummaryWriter
            self.summary_writer = SummaryWriter(logging_dir)
        except ImportError:
            logging.error('You can install mxboard via `pip install mxboard`.') 
Example #27
Source File: module.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def get_iterator(self, kv):
        if self._data_iterator is not None:
            return self._data_iterator.get_data_iterator(kv)
        else:
            logging.error("data_iterator for elastic training not defined")
            raise NotImplementedError() 
Example #28
Source File: module.py    From dynamic-training-with-apache-mxnet-on-aws with Apache License 2.0
def set_params(self, arg_params, aux_params, allow_missing=False, force_init=True,
                   allow_extra=False):
        """Assigns parameter and aux state values.

        Parameters
        ----------
        arg_params : dict
            Dictionary of name to `NDArray`.
        aux_params : dict
            Dictionary of name to `NDArray`.
        allow_missing : bool
            If ``True``, params could contain missing values, and the initializer will be
            called to fill those missing params.
        force_init : bool
            If ``True``, will force re-initialize even if already initialized.
        allow_extra : boolean, optional
            Whether allow extra parameters that are not needed by symbol.
            If this is True, no error will be thrown when arg_params or aux_params
            contain extra parameters that is not needed by the executor.
        Examples
        --------
        >>> # An example of setting module parameters.
        >>> sym, arg_params, aux_params = mx.model.load_checkpoint(model_prefix, n_epoch_load)
        >>> mod.set_params(arg_params=arg_params, aux_params=aux_params)
        """
        if not allow_missing:
            self.init_params(initializer=None, arg_params=arg_params, aux_params=aux_params,
                             allow_missing=allow_missing, force_init=force_init,
                             allow_extra=allow_extra)
            return

        if self.params_initialized and not force_init:
            warnings.warn("Parameters already initialized and force_init=False. "
                          "set_params call ignored.", stacklevel=2)
            return

        self._exec_group.set_params(arg_params, aux_params, allow_extra=allow_extra)

        # because we didn't update self._arg_params, they are dirty now.
        self._params_dirty = True
        self.params_initialized = True 
Example #29
Source File: app.py    From svviz with MIT License
def loadISDs(dataHub):
    """ Load the Insert Size Distributions """

    for sample in dataHub:
        logging.info(" > {} <".format(sample.name))
        sample.readStatistics = insertsizes.ReadStatistics(sample.bam, keepReads=dataHub.args.save_reads)

        if sample.readStatistics.orientations != "any":
            if len(sample.readStatistics.orientations) > 1:
                logging.warn("  ! multiple read pair orientations found within factor !\n"
                             "  ! of 2x of one another; if you aren't expecting your  !\n"
                             "  ! input data to contain multiple orientations, this   !\n"
                             "  ! could be a bug in the mapping software or svviz     !")
            if len(sample.readStatistics.orientations) < 1:
                logging.error("  No valid read orientations found for dataset:{}".format(sample.name))


        sample.orientations = sample.readStatistics.orientations
        if sample.orientations == "any":
            sample.singleEnded = True
        logging.info("  valid orientations: {}".format(",".join(sample.orientations) if sample.orientations!="any" else "any"))

        if sample.orientations == "any":
            searchDist = sample.readStatistics.readLengthUpperQuantile()
            alignDist = sample.readStatistics.readLengthUpperQuantile()*1.25 + dataHub.args.context
        else:
            searchDist = sample.readStatistics.meanInsertSize()+sample.readStatistics.stddevInsertSize()*2
            alignDist = sample.readStatistics.meanInsertSize()+sample.readStatistics.stddevInsertSize()*4 + dataHub.args.context
        if dataHub.args.flanks:
            searchDist += dataHub.args.context

        sample.searchDistance = int(searchDist)
        dataHub.alignDistance = max(dataHub.alignDistance, int(alignDist))

        logging.info("  Using search distance: {}".format(sample.searchDistance))

    logging.info(" Using align distance: {}".format(dataHub.alignDistance))