Python os.path.exists() Examples

The following code examples show how to use os.path.exists(). They are taken from open source Python projects; each example lists the project, author, source file, and license it comes from.
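Before the project examples, here is a minimal self-contained sketch of the basic behaviour (standard-library semantics, not taken from any of the projects below): os.path.exists() returns True when a file or directory is present at the given path and False otherwise, including for broken symbolic links (where os.path.lexists() would still return True).

import os
import tempfile

# Create a temporary file so the check has something to find.
with tempfile.NamedTemporaryFile(delete=False) as tmp:
    tmp_path = tmp.name

print(os.path.exists(tmp_path))                    # True: the file is on disk
print(os.path.exists(os.path.dirname(tmp_path)))   # True: directories work too
print(os.path.exists("/no/such/path"))             # False: nothing at this path

os.remove(tmp_path)
print(os.path.exists(tmp_path))                    # False again after deletion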

Example 1
Project: PEAKachu   Author: tbischler   File: window.py    ISC License
def __init__(self, w_size, step_size, replicon_dict, max_proc, stat_test,
                 norm_method, size_factors, het_p_val_threshold,
                 rep_pair_p_val_threshold, padj_threshold, mad_multiplier,
                 fc_cutoff, pairwise_replicates, output_folder):
        self._lib_dict = OrderedDict()
        self._replicon_dict = replicon_dict  # own copy of replicon_dict
        self._max_proc = max_proc
        self._w_size = w_size
        self._step_size = step_size
        self._stat_test = stat_test
        self._norm_method = norm_method
        self._size_factors = size_factors
        self._het_p_val_threshold = het_p_val_threshold
        self._rep_pair_p_val_threshold = rep_pair_p_val_threshold
        self._padj_threshold = padj_threshold
        self._mad_multiplier = mad_multiplier
        self._fc_cutoff = fc_cutoff
        self._pairwise_replicates = pairwise_replicates
        self._output_folder = output_folder
        if not exists(self._output_folder):
            makedirs(self._output_folder) 
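Example 1 (and many of the examples that follow) pairs exists() with makedirs() so an output folder is created only when it is missing. As a side note that is not part of the PEAKachu source: the two-step check can race with another process creating the same directory, and on Python 3.2+ a single call with exist_ok=True covers both steps.

import os

output_folder = "peak_output"  # hypothetical path, for illustration only

# Behaves like `if not exists(folder): makedirs(folder)`, but does not fail
# if the directory appears between the check and the creation.
os.makedirs(output_folder, exist_ok=True)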
Example 2
Project: PEAKachu   Author: tbischler   File: window.py    ISC License
def _write_gff_file(self, replicon, df):
        # create peak annotation folder if it does not exist
        peak_anno_folder = "{}/peak_annotations".format(self._output_folder)
        if not exists(peak_anno_folder):
            makedirs(peak_anno_folder)
        with open("{}/peaks_{}.gff".format(
                peak_anno_folder, replicon), 'w') as out_gff_fh:
            out_gff_fh.write("##gff-version 3\n"
                             "#!gff-spec-version 1.20\n"
                             "##sequence-region {} {} {}\n"
                             "{}{}"
                             "###\n".format(
                                 replicon,
                                 self._replicon_dict[replicon]
                                 ['seq_start_pos'] + 1,
                                 self._replicon_dict[replicon]['seq_end_pos'],
                                 '\n'.join(
                                     df.apply(self._write_gff_entry, axis=1)),
                                 '\n' if not df.empty else "")) 
Example 3
Project: mycode   Author: gmraabe   File: crypt_test.py    GNU General Public License v3.0
def main():
    # read or create the file
    if exists(FILENAME):
        print("reading...")
        data = read_encrypted(PASSWORD, FILENAME)
        print("read %s from %s" % (data, FILENAME))
        n_bottles = int(data.split(" ")[0]) - 1
    else:
        n_bottles = 10
    # write the file
    if n_bottles > 0:
        data = "%d green bottles" % n_bottles
        print("writing...")
        write_encrypted(PASSWORD, FILENAME, data)
        print("wrote %s to %s" % (data, FILENAME))
    else:
        unlink(FILENAME)
        print("deleted %s" % FILENAME) 
Example 4
Project: pyblish-win   Author: pyblish   File: file_util.py    GNU Lesser General Public License v3.0
def _copy_file_contents(src, dst, buffer_size=16*1024):
    """Copy the file 'src' to 'dst'.

    Both must be filenames. Any error opening either file, reading from
    'src', or writing to 'dst', raises DistutilsFileError.  Data is
    read/written in chunks of 'buffer_size' bytes (default 16k).  No attempt
    is made to handle anything apart from regular files.
    """
    # Stolen from shutil module in the standard library, but with
    # custom error-handling added.
    fsrc = None
    fdst = None
    try:
        try:
            fsrc = open(src, 'rb')
        except os.error, (errno, errstr):
            raise DistutilsFileError("could not open '%s': %s" % (src, errstr))

        if os.path.exists(dst):
            try:
                os.unlink(dst)
            except os.error, (errno, errstr):
                raise DistutilsFileError(
                      "could not delete '%s': %s" % (dst, errstr)) 
Example 5
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        roidb = pickle.load(fid)
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb 
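Examples 5, 6, and 13 all use the same exists-gated cache: load a pickle when the cache file is present, otherwise compute the annotations and write them. One caveat, added here and not claimed of the quoted projects: exists() only says the file is there, not that it unpickles cleanly, so a slightly more defensive variant of the pattern (with a hypothetical compute_fn) falls back to recomputing on load errors.

import os
import pickle

def load_or_compute(cache_file, compute_fn):
    """Return cached data when the pickle loads; otherwise recompute and cache it."""
    if os.path.exists(cache_file):
        try:
            with open(cache_file, 'rb') as fid:
                return pickle.load(fid)
        except (OSError, EOFError, pickle.UnpicklingError):
            pass  # fall through and rebuild a corrupt or unreadable cache
    data = compute_fn()
    with open(cache_file, 'wb') as fid:
        pickle.dump(data, fid, pickle.HIGHEST_PROTOCOL)
    return data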
Example 6
Project: FasterRCNN_TF_Py3   Author: upojzsb   File: coco.py    MIT License
def gt_roidb(self):
        """
        Return the database of ground-truth regions of interest.
        This function loads/saves from/to a cache file to speed up future calls.
        """
        cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
        if osp.exists(cache_file):
            with open(cache_file, 'rb') as fid:
                roidb = pickle.load(fid)
            print('{} gt roidb loaded from {}'.format(self.name, cache_file))
            return roidb

        gt_roidb = [self._load_coco_annotation(index)
                    for index in self._image_index]

        with open(cache_file, 'wb') as fid:
            pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
        print('wrote gt roidb to {}'.format(cache_file))
        return gt_roidb 
Example 7
Project: spleeter   Author: deezer   File: configuration.py    MIT License
def load_configuration(descriptor):
    """ Load configuration from the given descriptor. Could be
    either a `spleeter:` prefixed embedded configuration name
    or a file system path to read configuration from.

    :param descriptor: Configuration descriptor to use for lookup.
    :returns: Loaded description as dict.
    :raise ValueError: If required embedded configuration does not exists.
    :raise SpleeterError: If required configuration file does not exists.
    """
    # Embedded configuration reading.
    if descriptor.startswith(_EMBEDDED_CONFIGURATION_PREFIX):
        name = descriptor[len(_EMBEDDED_CONFIGURATION_PREFIX):]
        if not loader.is_resource(resources, f'{name}.json'):
            raise SpleeterError(f'No embedded configuration {name} found')
        with loader.open_text(resources, f'{name}.json') as stream:
            return json.load(stream)
    # Standard file reading.
    if not exists(descriptor):
        raise SpleeterError(f'Configuration file {descriptor} not found')
    with open(descriptor, 'r') as stream:
        return json.load(stream) 
Example 8
Project: spleeter   Author: deezer   File: __init__.py    MIT License
def get(self, model_directory):
        """ Ensures required model is available at given location.

        :param model_directory: Expected model_directory to be available.
        :raise IOError: If model can not be retrieved.
        """
        # Expend model directory if needed.
        if not isabs(model_directory):
            model_directory = join(self.DEFAULT_MODEL_PATH, model_directory)
        # Download it if not exists.
        model_probe = join(model_directory, self.MODEL_PROBE_PATH)
        if not exists(model_probe):
            if not exists(model_directory):
                makedirs(model_directory)
                self.download(
                    model_directory.split(sep)[-1],
                    model_directory)
                self.writeProbe(model_directory)
        return model_directory 
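Here exists() is checked against a probe file rather than the model directory itself: the probe is written only after the download finishes, so its presence marks a completed download rather than a half-populated folder. A stripped-down version of that sentinel-file idea, with a hypothetical download_fn and not matching spleeter's control flow exactly, might read:

import os

def ensure_downloaded(directory, download_fn, probe_name='.probe'):
    """Download into directory once, writing a probe file last as a completion marker."""
    probe = os.path.join(directory, probe_name)
    if not os.path.exists(probe):
        os.makedirs(directory, exist_ok=True)
        download_fn(directory)
        with open(probe, 'w') as fh:
            fh.write('ok')
    return directory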
Example 9
Project: spleeter   Author: deezer   File: dataset.py    MIT License
def cache(self, dataset, cache, wait):
        """ Cache the given dataset if cache is enabled. Eventually waits for
        cache to be available (useful if another process is already computing
        cache) if provided wait flag is True.

        :param dataset: Dataset to be cached if cache is required.
        :param cache: Path of cache directory to be used, None if no cache.
        :param wait: If caching is enabled, True is cache should be waited.
        :returns: Cached dataset if needed, original dataset otherwise.
        """
        if cache is not None:
            if wait:
                while not exists(f'{cache}.index'):
                    get_logger().info(
                        'Cache not available, wait %s',
                        self.WAIT_PERIOD)
                    time.sleep(self.WAIT_PERIOD)
            cache_path = os.path.split(cache)[0]
            os.makedirs(cache_path, exist_ok=True)
            return dataset.cache(cache)
        return dataset 
Example 10
Project: spacesense   Author: spacesense-ai   File: training_data.py    GNU Lesser General Public License v3.0
def download_all_bands(self, save_as_npy=False):
        """
        :return: folder with training data (2.9GB)
        """
        if exists(self.data_path_all_bands):
            print('dataset is already available at: ', os.path.abspath(self.data_path_all_bands))

        else:
            os.system("mkdir data")
            os.system("wget http://madm.dfki.de/files/sentinel/EuroSATallBands.zip -P data/")
            file_name = 'data/EuroSATallBands.zip'

            with zipfile.ZipFile(file_name, 'r') as zip_ref:
                zip_ref.extractall('data/')

            self.data_path_all_bands = 'data/ds/images/remote_sensing/otherDatasets/sentinel_2/tif'
            print('EuroSAT all bands data downloaded !') 
Example 11
Project: spacesense   Author: spacesense-ai   File: training_data.py    GNU Lesser General Public License v3.0
def download_rgb(self, save_as_npy=False):
        """
        :return: folder with RGB training data (93MB)
        """
        if exists(self.data_path_rgb):
            print('dataset is already available at: ', os.path.abspath(self.data_path_rgb))
        else:
            os.system("mkdir data")
            os.system("wget madm.dfki.de/files/sentinel/EuroSAT.zip -P data/")
            file_name = 'data/EuroSAT.zip'

            with zipfile.ZipFile(file_name, 'r') as zip_ref:
                zip_ref.extractall('data/')

            self.data_path_rgb = 'data/2750'
            print('EuroSAT RGB data downloaded !') 
Example 12
Project: mealpy   Author: edmundmok   File: venv_update.py    MIT License
def has_system_site_packages(interpreter):
    # TODO: unit-test
    system_site_packages = check_output((
        interpreter,
        '-c',
        # stolen directly from virtualenv's site.py
        """\
import site, os.path
print(
    0
    if os.path.exists(
        os.path.join(os.path.dirname(site.__file__), 'no-global-site-packages.txt')
    ) else
    1
)"""
    ))
    system_site_packages = int(system_site_packages)
    assert system_site_packages in (0, 1)
    return bool(system_site_packages) 
Example 13
Project: cascade-rcnn_Pytorch   Author: guoruoqian   File: coco.py    MIT License
def gt_roidb(self):
    """
    Return the database of ground-truth regions of interest.
    This function loads/saves from/to a cache file to speed up future calls.
    """
    cache_file = osp.join(self.cache_path, self.name + '_gt_roidb.pkl')
    if osp.exists(cache_file):
      with open(cache_file, 'rb') as fid:
        roidb = pickle.load(fid)
      print('{} gt roidb loaded from {}'.format(self.name, cache_file))
      return roidb

    gt_roidb = [self._load_coco_annotation(index)
                for index in self._image_index]

    with open(cache_file, 'wb') as fid:
      pickle.dump(gt_roidb, fid, pickle.HIGHEST_PROTOCOL)
    print('wrote gt roidb to {}'.format(cache_file))
    return gt_roidb 
Example 14
Project: MetrixReloaded   Author: Scrounger   File: MetrixReloadedSetup.py    GNU Lesser General Public License v3.0
def keyYellow(self):
        if (path.exists("/usr/lib/enigma2/python/Plugins/Extensions/AtileHD/plugin.py")):

            if not path.exists("mySkin_off") and path.exists("mySkin"):
                rename("mySkin", "mySkin_off")

            if not path.exists("mySkin") and path.exists("mySkin_off"):
                symlink("mySkin_off", "mySkin")

            # Atile_HD_Config Screen öffnen
            from Plugins.Extensions.AtileHD.plugin import *
            self.session.openWithCallback(
                self.AtileHDScreenResponse, AtileHDScreens)
        else:
            msg = _(
                "Sorry, but the plugin %s is not installed at your Vu+ STB! Please install it to use this function") % "AtileHD"
            self.session.open(MessageBox, msg, MessageBox.TYPE_ERROR) 
Example 15
Project: MetrixReloaded   Author: Scrounger   File: MetrixReloadedSetup.py    GNU Lesser General Public License v3.0
def getCurrentColor(self):
        myfile = self.skin_base_dir + self.color_file
        if not path.exists(myfile):
            if path.exists(self.skin_base_dir + self.default_color_file):
                if path.islink(myfile):
                    remove(myfile)
                chdir(self.skin_base_dir)
                symlink(self.default_color_file, self.color_file)
            else:
                return None
        filename = path.realpath(myfile)
        filename = path.basename(filename)

        search_str = 'colors_'
        friendly_name = filename.replace(search_str, "")
        friendly_name = friendly_name.replace(".xml", "")
        friendly_name = friendly_name.replace("_", " ")
        return (filename, friendly_name) 
Example 16
Project: MetrixReloaded   Author: Scrounger   File: MetrixReloadedSetup.py    GNU Lesser General Public License v3.0
def getCurrentFont(self):
        myfile = self.skin_base_dir + self.font_file
        if not path.exists(myfile):
            if path.exists(self.skin_base_dir + self.default_font_file):
                if path.islink(myfile):
                    remove(myfile)
                chdir(self.skin_base_dir)
                symlink(self.default_font_file, self.font_file)
            else:
                return None
        filename = path.realpath(myfile)
        filename = path.basename(filename)

        search_str = 'font_'
        friendly_name = filename.replace(search_str, "")
        friendly_name = friendly_name.replace(".xml", "")
        friendly_name = friendly_name.replace("_", " ")
        return (filename, friendly_name) 
Example 17
Project: securityheaders   Author: koenbuyens   File: command_line.py    Apache License 2.0
def create_urls(args):
    urlcolumn = args.urlcolumn
    result = list()
    data = []
    for f in args.url:
         
        if(exists(f)):
            data.extend(open(f))
        else:
            data.extend(f.split(','))

    i = 1
    for line in data:
        if i > args.startrow:
            line = line.strip()
            k = line.split(',')
            fid = k[0]
            if(len(k) == 1):
                fid = str(i)
                urlcolumn = 0
            result.append((fid,k[urlcolumn])) 
        i = i + 1   
    return result 
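In this example exists() disambiguates command-line input: if the argument names an existing file its lines are read, otherwise the argument itself is treated as a comma-separated list of URLs. The same idiom in isolation, sketched under that assumption:

from os.path import exists

def expand_url_arg(arg):
    """Return URLs read from a file when arg is a path, else split arg on commas."""
    if exists(arg):
        with open(arg) as fh:
            return [line.strip() for line in fh if line.strip()]
    return arg.split(',')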
Example 18
Project: calmjs   Author: calmjs   File: toolchain.py    GNU General Public License v2.0
def compile_bundle_entry(self, spec, entry):
        """
        Handler for each entry for the bundle method of the compile
        process.  This copies the source file or directory into the
        build directory.
        """

        modname, source, target, modpath = entry
        bundled_modpath = {modname: modpath}
        bundled_target = {modname: target}
        export_module_name = []
        if isfile(source):
            export_module_name.append(modname)
            copy_target = join(spec[BUILD_DIR], target)
            if not exists(dirname(copy_target)):
                makedirs(dirname(copy_target))
            shutil.copy(source, copy_target)
        elif isdir(source):
            copy_target = join(spec[BUILD_DIR], modname)
            shutil.copytree(source, copy_target)

        return bundled_modpath, bundled_target, export_module_name 
Example 19
Project: calmjs   Author: calmjs   File: test_artifact.py    GNU General Public License v2.0
def test_existing_removed(self):
        # force an existing file
        target = self.registry.records[('app', 'nothing.js')]
        os.mkdir(dirname(target))
        with open(target, 'w'):
            pass

        with pretty_logging(stream=mocks.StringIO()) as stream:
            self.registry.process_package('app')

        log = stream.getvalue()
        self.assertIn(
            "package 'app' has declared 3 entry points for the "
            "'calmjs.artifacts' registry for artifact construction", log
        )
        log = stream.getvalue()
        self.assertIn("removing existing export target at ", log)
        self.assertFalse(exists(target)) 
Example 20
Project: PEAKachu   Author: tbischler   File: window.py    ISC License
def _plot_initial_windows(self, unsig_base_means, unsig_fcs,
                              sig_base_means, sig_fcs):
        # create plot folder if it does not exist
        plot_folder = "{}/plots".format(self._output_folder)
        if not exists(plot_folder):
            makedirs(plot_folder)
        # MA plot
        plt.plot(np.log10(unsig_base_means),
                 np.log2(unsig_fcs), ".",
                 markersize=2.0, alpha=0.3)
        plt.plot(np.log10(sig_base_means),
                 np.log2(sig_fcs), ".",
                 markersize=2.0, color="red", alpha=0.3)
        plt.axhline(y=np.median(np.log2(unsig_fcs.append(sig_fcs))))
        plt.axvline(x=np.median(np.log10(unsig_base_means.append(
                                         sig_base_means))))
        plt.title("Initial_windows_MA_plot")
        plt.xlabel("log10 base mean")
        plt.ylabel("log2 fold-change")
        plt.savefig("{}/Initial_windows_MA_plot.png".format(plot_folder),
                    dpi=600)
        plt.close()
        # HexBin plot
        df = pd.DataFrame({'log10 base mean': np.log10(unsig_base_means.append(
            sig_base_means)), 'log2 fold-change': np.log2(unsig_fcs.append(
                sig_fcs))})
        df.plot(kind='hexbin', x='log10 base mean',
                y='log2 fold-change', gridsize=50, bins='log')
        plt.axhline(y=np.median(np.log2(unsig_fcs.append(sig_fcs))))
        plt.axvline(x=np.median(np.log10(unsig_base_means.append(
                                         sig_base_means))))
        plt.title("Initial_windows_HexBin_plot")
        plt.savefig("{}/Initial_windows_HexBin_plot.pdf".format(plot_folder))
        plt.close() 
Example 21
Project: PEAKachu   Author: tbischler   File: adaptive.py    ISC License
def __init__(self, replicon_dict, max_proc, padj_threshold, mad_multiplier,
                 fc_cutoff, output_folder):
        self._lib_dict = OrderedDict()
        self._replicon_dict = replicon_dict  # own copy of replicon_dict
        self._max_proc = max_proc
        self._padj_threshold = padj_threshold
        self._mad_multiplier = mad_multiplier
        self._fc_cutoff = fc_cutoff
        self._output_folder = output_folder
        if not exists(self._output_folder):
            makedirs(self._output_folder) 
Example 22
Project: PEAKachu   Author: tbischler   File: adaptive.py    ISC License
def _plot_initial_peaks(self, unsig_base_means, unsig_fcs,
                            sig_base_means, sig_fcs):
        # create plot folder if it does not exist
        plot_folder = "{}/plots".format(self._output_folder)
        if not exists(plot_folder):
            makedirs(plot_folder)
        # MA plot
        plt.plot(np.log10(unsig_base_means),
                 np.log2(unsig_fcs), ".",
                 markersize=2.0, alpha=0.3)
        plt.plot(np.log10(sig_base_means),
                 np.log2(sig_fcs), ".",
                 markersize=2.0, color="red", alpha=0.3)
        plt.axhline(y=np.median(np.log2(unsig_fcs.append(sig_fcs))))
        plt.axvline(x=np.median(np.log10(unsig_base_means.append(
                                         sig_base_means))))
        plt.title("Initial_peaks_MA_plot")
        plt.xlabel("log10 base mean")
        plt.ylabel("log2 fold-change")
        plt.savefig("{}/Initial_peaks_MA_plot.png".format(plot_folder),
                    dpi=600)
        plt.close()
        # HexBin plot
        df = pd.DataFrame({'log10 base mean': np.log10(unsig_base_means.append(
            sig_base_means)), 'log2 fold-change': np.log2(unsig_fcs.append(
                sig_fcs))})
        df.plot(kind='hexbin', x='log10 base mean',
                y='log2 fold-change', gridsize=50, bins='log')
        plt.axhline(y=np.median(np.log2(unsig_fcs.append(sig_fcs))))
        plt.axvline(x=np.median(np.log10(unsig_base_means.append(
                                         sig_base_means))))
        plt.title("Initial_peaks_HexBin_plot")
        plt.savefig("{}/Initial_peaks_HexBin_plot.pdf".format(plot_folder))
        plt.close() 
Example 23
Project: PEAKachu   Author: tbischler   File: coverage.py    ISC License
def generate_normalized_wiggle_files(project_folder, max_proc):
    parameter_dict = _read_parameters(project_folder)
    # create normalized coverage folder if it does not exist
    wiggle_folder = "{}/normalized_coverage".format(project_folder)
    if not exists(wiggle_folder):
        makedirs(wiggle_folder)
    # Generate coverage files in parallel
    print("** Generating normalized coverage files for {} libraries...".format(
          len(parameter_dict["libraries"])), flush=True)
    t_start = time()
    with futures.ProcessPoolExecutor(
            max_workers=max_proc) as executor:
        future_to_lib_name = {
            executor.submit(
                _generate_normalized_wiggle_file_for_lib, lib_name,
                lib["bam_file"], parameter_dict["paired_end"],
                parameter_dict["max_insert_size"], lib["size_factor"],
                wiggle_folder): lib_name for lib_name, lib
            in parameter_dict["libraries"].items()}
    for future in futures.as_completed(future_to_lib_name):
        lib_name = future_to_lib_name[future]
        print("* Coverage files for library {} generated.".format(lib_name),
              flush=True)
    t_end = time()
    print("Coverage file generation finished in {} seconds.".format(
        t_end-t_start), flush=True) 
Example 24
Project: PEAKachu   Author: tbischler   File: consensus_peak.py    ISC License
def plot_consensus_peak(self):
        # create plot folder if it does not exist
        plot_folder = "{}/plots".format(self._project_folder)
        if not exists(plot_folder):
            makedirs(plot_folder)
        self._store_peaks()
        comb_cons_value_dict = self._get_peak_coverage()
        df = pd.DataFrame(comb_cons_value_dict, columns=sorted(
            comb_cons_value_dict))
        ax = df.plot(title="Consensus peak per library")
        ax.set_xlabel("Nucleotide position")
        ax.set_ylabel("Relative expression")
        plt.savefig("{}/plots/consensus_peaks.pdf".format(
            self._project_folder)) 
Example 25
Project: pyblish-win   Author: pyblish   File: fix_import.py    GNU Lesser General Public License v3.0
def probably_a_local_import(self, imp_name):
        if imp_name.startswith(u"."):
            # Relative imports are certainly not local imports.
            return False
        imp_name = imp_name.split(u".", 1)[0]
        base_path = dirname(self.filename)
        base_path = join(base_path, imp_name)
        # If there is no __init__.py next to the file its not in a package
        # so can't be a relative import.
        if not exists(join(dirname(base_path), "__init__.py")):
            return False
        for ext in [".py", sep, ".pyc", ".so", ".sl", ".pyd"]:
            if exists(base_path + ext):
                return True
        return False 
Example 26
Project: mycroft-skill-tunein   Author: johnbartkiw   File: __init__.py    MIT License
def apply_aliases(self, search_term):
        # Allow search terms to be expanded or aliased
        home = expanduser('~')
        alias_file = home + '/tunein_aliases.yaml'
        if path.exists(alias_file):
            with open(alias_file, 'r') as file:
                alias_list = yaml.load(file)
            if search_term in alias_list:
                search_term = alias_list[search_term]
        return search_term

    # Attempt to find the first active station matching the query string 
Example 27
Project: Collaborative-Learning-for-Weakly-Supervised-Object-Detection   Author: Sunarker   File: coco.py    MIT License
def image_path_from_index(self, index):
    """
    Construct an image path from the image's "index" identifier.
    """
    # Example image path for index=119993:
    #   images/train2014/COCO_train2014_000000119993.jpg
    file_name = ('COCO_' + self._data_name + '_' +
                 str(index).zfill(12) + '.jpg')
    image_path = osp.join(self._data_path, 'images',
                          self._data_name, file_name)
    assert osp.exists(image_path), \
      'Path does not exist: {}'.format(image_path)
    return image_path 
Example 28
Project: rpm2swidtag   Author: swidtags   File: __init__.py    Apache License 2.0
def save_to_directory(self, dirname):
		if not dirname.endswith("/."):
			dirname = path.join(dirname, escape_path(self.get_tagcreator_regid()))
		if not path.exists(dirname):
			makedirs(dirname)
		filename = escape_path(self.get_tagid() + '-rpm-' + self.checksum) + '.swidtag'
		if len(filename) > 255:
			filename = sha256(self.get_tagid().encode()).hexdigest() + '-rpm-' + self.checksum + '.swidtag'
		self.write_output(path.join(dirname, filename))
		return ( dirname, filename ) 
Example 29
Project: FasterRCNN_TF_Py3   Author: upojzsb   File: coco.py    MIT License
def image_path_from_index(self, index):
        """
        Construct an image path from the image's "index" identifier.
        """
        # Example image path for index=119993:
        #   images/train2014/COCO_train2014_000000119993.jpg
        file_name = ('COCO_' + self._data_name + '_' +
                     str(index).zfill(12) + '.jpg')
        image_path = osp.join(self._data_path, 'images',
                              self._data_name, file_name)
        assert osp.exists(image_path), \
            'Path does not exist: {}'.format(image_path)
        return image_path 
Example 30
Project: mmdetection   Author: open-mmlab   File: test_config.py    Apache License 2.0
def _get_config_directory():
    """ Find the predefined detector config directory """
    try:
        # Assume we are running in the source mmdetection repo
        repo_dpath = dirname(dirname(__file__))
    except NameError:
        # For IPython development when this __file__ is not defined
        import mmdet
        repo_dpath = dirname(dirname(mmdet.__file__))
    config_dpath = join(repo_dpath, 'configs')
    if not exists(config_dpath):
        raise Exception('Cannot find config path')
    return config_dpath 
Example 31
Project: mmdetection   Author: open-mmlab   File: test_forward.py    Apache License 2.0
def _get_config_directory():
    """ Find the predefined detector config directory """
    try:
        # Assume we are running in the source mmdetection repo
        repo_dpath = dirname(dirname(__file__))
    except NameError:
        # For IPython development when this __file__ is not defined
        import mmdet
        repo_dpath = dirname(dirname(mmdet.__file__))
    config_dpath = join(repo_dpath, 'configs')
    if not exists(config_dpath):
        raise Exception('Cannot find config path')
    return config_dpath 
Example 32
Project: programsynthesishunting   Author: flexgp   File: file_io.py    GNU General Public License v3.0
def save_first_front_to_file(stats, end=False, name="first"):
    """
    Saves all individuals in the first front to individual files in a folder.

    :param stats: The stats.stats.stats dictionary.
    :param end: A boolean flag indicating whether or not the evolutionary
                process has finished.
    :param name: The name of the front folder. Default set to "first_front".
    :return: Nothing.
    """

    # Save the file path (we will be over-writing it).
    orig_file_path = copy(params['FILE_PATH'])

    # Define the new file path.
    params['FILE_PATH'] = path.join(orig_file_path, str(name)+"_front")

    # Check if the front folder exists already
    if path.exists(params['FILE_PATH']):

        # Remove previous files.
        rmtree(params['FILE_PATH'])

    # Create front folder.
    makedirs(params['FILE_PATH'])

    for i, ind in enumerate(trackers.best_ever):
        # Save each individual in the first front to file.
        save_best_ind_to_file(stats, ind, end, name=str(i))

    # Re-set the file path.
    params['FILE_PATH'] = copy(orig_file_path) 
Example 33
Project: spleeter   Author: deezer   File: evaluate.py    MIT License
def entrypoint(arguments, params):
    """ Command entrypoint.

    :param arguments: Command line parsed argument as argparse.Namespace.
    :param params: Deserialized JSON configuration file provided in CLI args.
    """
    # Parse and check musdb directory.
    musdb_root_directory = arguments.mus_dir
    if not exists(musdb_root_directory):
        raise IOError(f'musdb directory {musdb_root_directory} not found')
    # Separate musdb sources.
    audio_output_directory = _separate_evaluation_dataset(
        arguments,
        musdb_root_directory,
        params)
    # Compute metrics with musdb.
    metrics_output_directory = _compute_musdb_metrics(
        arguments,
        musdb_root_directory,
        audio_output_directory)
    # Compute and pretty print median metrics.
    metrics = _compile_metrics(metrics_output_directory)
    for instrument, metric in metrics.items():
        get_logger().info('%s:', instrument)
        for metric, value in metric.items():
            get_logger().info('%s: %s', metric, f'{np.median(value):.3f}') 
Example 34
Project: spleeter   Author: deezer   File: test_separator.py    MIT License
def test_separate_to_file(configuration, instruments):
    """ Test file based separation. """
    separator = Separator(configuration)
    with TemporaryDirectory() as directory:
        separator.separate_to_file(
            TEST_AUDIO_DESCRIPTOR,
            directory)
        for instrument in instruments:
            assert exists(join(
                directory,
                '{}/{}.wav'.format(TEST_AUDIO_BASENAME, instrument))) 
Example 35
Project: spacesense   Author: spacesense-ai   File: training_data.py    GNU Lesser General Public License v3.0
def fetch_data(self, datatype='all_bands', labels='all', row_type='2D'):
        """

        :param type:
        :param labels:
        :return: numpy array
        """
        if datatype == 'all_bands':
            paths = sorted(glob(self.data_path_all_bands + '/*'))
            if exists(self.data_path_all_bands):
                X, y = self.__load_dataset__(datatype='all_bands',labels=labels,row_type=row_type)

            else:
                print('dataset is not available')
                print('to download the dataset, choose one of the two options:')
                print('EuroSAT.download_all_bands()')
                print('or')
                print('EuroSAT.download_rgb()')

        elif datatype == 'rgb':
            paths = sorted(glob(self.data_path_rgb + '/*'))
            if exists(self.data_path_rgb):
                X, y = self.__load_dataset__(datatype='rgb',labels=labels,row_type=row_type)

            else:
                print('dataset is not available')
                print('to download the dataset, choose one of the two options:')
                print('EuroSAT.download_all_bands()')
                print('or')
                print('EuroSAT.download_rgb()')
        return X, y 
Example 36
Project: soccer-matlab   Author: utra-robosoccer   File: logz.py    BSD 2-Clause "Simplified" License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.first_row = True
    G.log_headers = []
    G.log_current_row = {}
    
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    if not osp.exists(G.output_dir):
        os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
Example 37
Project: motion-tracking   Author: dansbecker   File: download_images.py    MIT License
def __enter__(self):
        try:
            if not exists(self.target_path):
                os.makedirs(self.target_path)
            if not exists('work'):
                os.makedirs('work')
            with open(self._failed_to_capture_path, 'r') as f:
                self.failed_to_capture = json.load(f)
        except:
            self.failed_to_capture = []
        return(self) 
Example 38
Project: motion-tracking   Author: dansbecker   File: download_images.py    MIT License
def get_one_img(self, fname, url):
        local_img_path = join(self.target_path, fname)
        if exists(local_img_path):
            self.imgs_previously_captured += 1
            return
        try:
            self.imgs_requested += 1
            url, _ = urllib.request.urlretrieve(url, local_img_path)
        except:
            self.failed_img_requests += 1
            self.failed_to_capture.append((url, local_img_path)) 
Example 39
Project: ANN   Author: waynezv   File: ANN_large_v23.py    MIT License
def import_data(self, file_path, file_name):
        # TODO
        # try:
        assert path.exists(file_path+file_name), 'File not found.'
        with h5.File(file_path + file_name, 'r') as hf:
            print('This %s dataset contains: ' % file_name)
            hf.visit(self.__print_name)
            print

        # except IOError, e:
                # print(IOError, ':', e) 
Example 40
Project: ANN   Author: waynezv   File: ANN_large_v22.py    MIT License
def import_data(self, file_path, file_name):
        # TODO
        # try:
        assert path.exists(file_path+file_name), 'File not found.'
        with h5.File(file_path + file_name, 'r') as hf:
            print('This %s dataset contains: ' % file_name)
            hf.visit(self.__print_name)
            print

        # except IOError, e:
                # print(IOError, ':', e) 
Example 41
Project: ANN   Author: waynezv   File: ANN_large_v3.py    MIT License
def import_data(self, file_path, file_name):
        # TODO
        # try:
        assert path.exists(file_path+file_name), 'File not found.'
        with h5.File(file_path + file_name, 'r') as hf:
            print('This %s dataset contains: ' % file_name)
            hf.visit(self.__print_name)
            print

        # except IOError, e:
                # print(IOError, ':', e) 
Example 42
Project: ANN   Author: waynezv   File: ANN_large_v24.py    MIT License
def import_data(self, file_path, file_name):
        # TODO
        # try:
        assert path.exists(file_path+file_name), 'File not found.'
        with h5.File(file_path + file_name, 'r') as hf:
            print('This %s dataset contains: ' % file_name)
            hf.visit(self.__print_name)
            print

        # except IOError, e:
                # print(IOError, ':', e) 
Example 43
Project: ANN   Author: waynezv   File: ANN_large.py    MIT License
def import_data(self, file_path, file_name):
        # TODO
        # try:
        assert path.exists(file_path+file_name), 'File not found.'
        with h5.File(file_path + file_name, 'r') as hf:
            print('This %s dataset contains: ' % file_name)
            hf.visit(self.__print_name)
            print

        # except IOError, e:
                # print(IOError, ':', e) 
Example 44
Project: cs294-112_hws   Author: xuwd11   File: logz.py    MIT License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir"%G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
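Examples 44 through 48 invert the usual check: instead of creating a missing directory, they assert that the output directory does not exist, so an earlier run's logs are never silently overwritten. The same guard without an assert (asserts are stripped under `python -O`), written as a sketch rather than taken from the repository:

import os

def make_fresh_dir(d):
    """Create d, refusing to reuse an existing directory."""
    if os.path.exists(d):
        raise FileExistsError(
            "Log dir %s already exists! Delete it first or use a different dir" % d)
    os.makedirs(d)
    return d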
Example 45
Project: cs294-112_hws   Author: xuwd11   File: logz.py    MIT License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir"%G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
Example 46
Project: cs294-112_hws   Author: xuwd11   File: logz.py    MIT License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir"%G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
Example 47
Project: cs294-112_hws   Author: xuwd11   File: logz.py    MIT License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir"%G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
Example 48
Project: cs294-112_hws   Author: xuwd11   File: logz.py    MIT License
def configure_output_dir(d=None):
    """
    Set output directory to d, or to /tmp/somerandomnumber if d is None
    """
    G.output_dir = d or "/tmp/experiments/%i"%int(time.time())
    assert not osp.exists(G.output_dir), "Log dir %s already exists! Delete it first or use a different dir"%G.output_dir
    os.makedirs(G.output_dir)
    G.output_file = open(osp.join(G.output_dir, "log.txt"), 'w')
    atexit.register(G.output_file.close)
    print(colorize("Logging data to %s"%G.output_file.name, 'green', bold=True)) 
Example 49
Project: nlimb   Author: cbschaff   File: monitor.py    MIT License
def __init__(self, env, filename, allow_early_resets=False, reset_keywords=(), info_keywords=()):
        Wrapper.__init__(self, env=env)
        self.tstart = time.time()
        if filename is None:
            self.f = None
            self.logger = None
        else:
            if not filename.endswith(Monitor.EXT):
                if osp.isdir(filename):
                    filename = osp.join(filename, Monitor.EXT)
                else:
                    filename = filename + "." + Monitor.EXT
            if osp.exists(filename):
                with open(filename, 'rt') as f:
                    info = json.loads(f.readline()[1:])
                    print(info)
                    self.tstart = info['t_start']
                self.f = open(filename, 'at')
                self.logger = csv.DictWriter(self.f, fieldnames=('r', 'l', 't')+reset_keywords+info_keywords)
            else:
                self.f = open(filename, "wt")
                self.f.write('#%s\n'%json.dumps({"t_start": self.tstart, 'env_id' : env.spec and env.spec.id}))
                self.logger = csv.DictWriter(self.f, fieldnames=('r', 'l', 't')+reset_keywords+info_keywords)
                self.logger.writeheader()
            self.f.flush()

        self.reset_keywords = reset_keywords
        self.info_keywords = info_keywords
        self.allow_early_resets = allow_early_resets
        self.rewards = None
        self.needs_reset = True
        self.total_steps = 0
        self.current_reset_info = {} # extra info about the current episode, that was passed in during reset() 
Example 50
Project: mealpy   Author: edmundmok   File: venv_update.py    MIT License
def samefile(file1, file2):
    if not exists(file1) or not exists(file2):
        return False
    else:
        from os.path import samefile
        return samefile(file1, file2)
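The exists() guard in this last example matters because os.path.samefile() stats both paths and raises OSError when either one is missing; the wrapper turns that case into a plain False. An equivalent formulation, shown only as a sketch, catches the error instead of pre-checking:

import os

def samefile_quiet(file1, file2):
    """Like os.path.samefile, but returns False when either path does not exist."""
    try:
        return os.path.samefile(file1, file2)
    except OSError:
        return False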